hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72de8120394b41d6ec62ea30dbf26d531028f1c | 6,117 | py | Python | src/oci/database/models/operations_insights_config.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database/models/operations_insights_config.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/database/models/operations_insights_config.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class OperationsInsightsConfig(object):
    """
    Configuration of Operations Insights for an external database.
    """

    #: Allowed value of the operations_insights_status property: "ENABLING".
    OPERATIONS_INSIGHTS_STATUS_ENABLING = "ENABLING"

    #: Allowed value of the operations_insights_status property: "ENABLED".
    OPERATIONS_INSIGHTS_STATUS_ENABLED = "ENABLED"

    #: Allowed value of the operations_insights_status property: "DISABLING".
    OPERATIONS_INSIGHTS_STATUS_DISABLING = "DISABLING"

    #: Allowed value of the operations_insights_status property: "NOT_ENABLED".
    OPERATIONS_INSIGHTS_STATUS_NOT_ENABLED = "NOT_ENABLED"

    #: Allowed value of the operations_insights_status property: "FAILED_ENABLING".
    OPERATIONS_INSIGHTS_STATUS_FAILED_ENABLING = "FAILED_ENABLING"

    #: Allowed value of the operations_insights_status property: "FAILED_DISABLING".
    OPERATIONS_INSIGHTS_STATUS_FAILED_DISABLING = "FAILED_DISABLING"

    def __init__(self, **kwargs):
        """
        Initialize a new OperationsInsightsConfig object from keyword
        arguments named after this class's properties.

        :param operations_insights_status:
            One of "ENABLING", "ENABLED", "DISABLING", "NOT_ENABLED",
            "FAILED_ENABLING", "FAILED_DISABLING".  Unrecognized values
            returned by the service are mapped to 'UNKNOWN_ENUM_VALUE'.
        :type operations_insights_status: str

        :param operations_insights_connector_id:
            The OCID of the external database connector.
        :type operations_insights_connector_id: str
        """
        self.swagger_types = {
            'operations_insights_status': 'str',
            'operations_insights_connector_id': 'str'
        }
        self.attribute_map = {
            'operations_insights_status': 'operationsInsightsStatus',
            'operations_insights_connector_id': 'operationsInsightsConnectorId'
        }
        self._operations_insights_status = None
        self._operations_insights_connector_id = None

    @property
    def operations_insights_status(self):
        """
        **[Required]** The status of Operations Insights.

        Allowed values are "ENABLING", "ENABLED", "DISABLING",
        "NOT_ENABLED", "FAILED_ENABLING", "FAILED_DISABLING" and
        'UNKNOWN_ENUM_VALUE' (for values not recognized by this SDK).

        :rtype: str
        """
        return self._operations_insights_status

    @operations_insights_status.setter
    def operations_insights_status(self, operations_insights_status):
        """
        Set the status of Operations Insights, coercing any value outside
        the allowed set to 'UNKNOWN_ENUM_VALUE'.

        :type: str
        """
        valid_states = ["ENABLING", "ENABLED", "DISABLING", "NOT_ENABLED", "FAILED_ENABLING", "FAILED_DISABLING"]
        if not value_allowed_none_or_none_sentinel(operations_insights_status, valid_states):
            operations_insights_status = 'UNKNOWN_ENUM_VALUE'
        self._operations_insights_status = operations_insights_status

    @property
    def operations_insights_connector_id(self):
        """
        The `OCID`__ of the
        :func:`create_external_database_connector_details`.

        __ https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm

        :rtype: str
        """
        return self._operations_insights_connector_id

    @operations_insights_connector_id.setter
    def operations_insights_connector_id(self, operations_insights_connector_id):
        """
        Set the external database connector OCID.

        :type: str
        """
        self._operations_insights_connector_id = operations_insights_connector_id

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        return other is not None and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| 43.692857 | 245 | 0.735818 |
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class OperationsInsightsConfig(object):
OPERATIONS_INSIGHTS_STATUS_ENABLING = "ENABLING"
OPERATIONS_INSIGHTS_STATUS_ENABLED = "ENABLED"
OPERATIONS_INSIGHTS_STATUS_DISABLING = "DISABLING"
OPERATIONS_INSIGHTS_STATUS_NOT_ENABLED = "NOT_ENABLED"
OPERATIONS_INSIGHTS_STATUS_FAILED_ENABLING = "FAILED_ENABLING"
OPERATIONS_INSIGHTS_STATUS_FAILED_DISABLING = "FAILED_DISABLING"
def __init__(self, **kwargs):
self.swagger_types = {
'operations_insights_status': 'str',
'operations_insights_connector_id': 'str'
}
self.attribute_map = {
'operations_insights_status': 'operationsInsightsStatus',
'operations_insights_connector_id': 'operationsInsightsConnectorId'
}
self._operations_insights_status = None
self._operations_insights_connector_id = None
@property
def operations_insights_status(self):
return self._operations_insights_status
@operations_insights_status.setter
def operations_insights_status(self, operations_insights_status):
allowed_values = ["ENABLING", "ENABLED", "DISABLING", "NOT_ENABLED", "FAILED_ENABLING", "FAILED_DISABLING"]
if not value_allowed_none_or_none_sentinel(operations_insights_status, allowed_values):
operations_insights_status = 'UNKNOWN_ENUM_VALUE'
self._operations_insights_status = operations_insights_status
@property
def operations_insights_connector_id(self):
return self._operations_insights_connector_id
@operations_insights_connector_id.setter
def operations_insights_connector_id(self, operations_insights_connector_id):
self._operations_insights_connector_id = operations_insights_connector_id
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f72de8d040132b36146027669f5a9472ddd8c3f9 | 14,494 | py | Python | recipes-app/measy-iot/files/usr/share/measy_iot/app/pyconnman/agent.py | fabiozehnder/meta-myir-st | 0d1e437baa499fba7e0e71935ae549fff2658893 | [
"MIT"
] | 1 | 2021-08-19T16:23:55.000Z | 2021-08-19T16:23:55.000Z | recipes-app/measy-iot/files/usr/share/measy_iot/app/pyconnman/agent.py | fabiozehnder/meta-myir-st | 0d1e437baa499fba7e0e71935ae549fff2658893 | [
"MIT"
] | null | null | null | recipes-app/measy-iot/files/usr/share/measy_iot/app/pyconnman/agent.py | fabiozehnder/meta-myir-st | 0d1e437baa499fba7e0e71935ae549fff2658893 | [
"MIT"
] | 2 | 2021-12-22T14:15:55.000Z | 2022-02-10T18:07:52.000Z | from __future__ import unicode_literals
from exceptions import ConnCanceledException
import dbus.service
class GenericAgent(dbus.service.Object):
    """
    Generic agent service object class.

    .. note:: GenericAgent can't be directly instantiated.
        It should be sub-classed and provides a template for
        implementing an agent service object.

    :param str obj_path:
        Freely definable object path for the agent service
        e.g., '/agent/netman'.
    """
    # NOTE: the per-method documentation used to live in bare string
    # literals placed before each decorated method; those strings are
    # evaluated at class-body level and discarded, so they never became
    # the methods' __doc__.  They are now real docstrings.

    def __init__(self, obj_path):
        bus = dbus.SystemBus()
        super(GenericAgent, self).__init__(bus, obj_path)

    @dbus.service.method("net.connman.Agent",
                         in_signature='', out_signature='')
    def Release(self):
        """
        Called when the service daemon unregisters the agent.

        An agent can use it to do cleanup tasks.  There is no need to
        unregister the agent, because when this method gets called it
        has already been unregistered.
        """

    @dbus.service.method("net.connman.Agent",
                         in_signature='oa{sv}',
                         out_signature='a{sv}')
    def RequestInput(self, path, fields):
        """
        Called when trying to connect to a service and some extra input
        is required, for example a passphrase or the name of a hidden
        network.

        In case of WISPr credentials requests, if the user prefers to
        login through the browser by himself, the agent should raise a
        LaunchBrowser error instead of answering.

        :param str path:
            Object path of the service object making the request.
        :param dict fields:
            Maps each requested field name to a descriptor dictionary
            whose keys are:

            * Type (string): the field type, e.g. "psk", "wep",
              "passphrase", "response", "ssid", "wpspin" or plain
              "string".
            * Requirement (string): "mandatory", "optional",
              "alternate" or "informational".  All "mandatory" fields
              must be returned; "optional" fields may be returned if
              available; an "alternate" field can be returned instead
              of another one (e.g. SSID instead of Name); nothing
              needs to be returned for "informational" fields.
            * Alternates (array{string}): alternate field names this
              field can be represented by.
            * Value (string): data attached to an "informational"
              argument, e.g. the "PreviousPassphrase" supplied when a
              previously saved passphrase stopped working.
        :return:
            A dictionary with the filled-in fields.  Recognised names
            are "Name", "SSID", "Identity", "Passphrase", "WPS" (empty
            string for push-button, or a PIN code), "Username" and
            "Password".
        :rtype: dict
        :raises dbus.Exception: net.connman.Agent.Error.Canceled
        :raises dbus.Exception: net.connman.Agent.Error.LaunchBrowser
        """

    @dbus.service.method("net.connman.Agent",
                         in_signature='os',
                         out_signature='')
    def RequestBrowser(self, path, url):
        """
        Called when it is required to ask the user to open a website to
        proceed with login handling.

        This can happen if connected to a hotspot portal page without
        WISPr support.

        :raises dbus.Exception: net.connman.Agent.Error.Canceled
        """

    @dbus.service.method("net.connman.Agent",
                         in_signature='os',
                         out_signature='')
    def ReportError(self, path, error):
        """
        Called when an error has to be reported to the user.

        A special return value (net.connman.Agent.Error.Retry) can be
        used to trigger a retry of the failed transaction.

        :raises dbus.Exception: net.connman.Agent.Error.Retry
        """

    @dbus.service.method("net.connman.Agent",
                         in_signature='', out_signature='')
    def Cancel(self):
        """
        Called to indicate that the agent request failed before a reply
        was returned.
        """
class SimpleWifiAgent(GenericAgent):
    """
    SimpleWifiAgent is a service agent that allows the user
    to join WiFi networks through a variety of different
    WiFi access security schemes.

    The agent is invoked whenever a 'connect' request is made on a
    service using the 'wifi' technology, depending on the security
    policy in place.

    See :class:`.GenericAgent` which describes in more detail
    the different security schemes supported and use-cases.
    """
    def __init__(self, obj_path):
        super(SimpleWifiAgent, self).__init__(obj_path)
        # Per-service credential store; '*' is the wildcard fallback.
        self.service_params = {'*': {}}

    def set_service_params(self, service, name=None, ssid=None,
                           identity=None, username=None,
                           password=None, passphrase=None,
                           wpspin=None):
        """
        Set the service parameters used by the WiFi agent on a
        connection request.

        :param str service: '*' to apply to all services, or the name
            of the service which the settings apply to.
        :param str name: Network name to join when trying to connect to
            a hidden network.
        :param str ssid: Alternative to `name` for the exact binary
            representation of a network name.
        :param str identity: Identity (username) for EAP authentication
            methods.
        :param str username: User name (for WISPr-enabled hotspots only).
        :param str password: User password (for WISPr-enabled hotspots
            only).
        :param str passphrase: WPA/WPA2 authentication passphrase.
        :param str wpspin: WPS PIN code, or '' if the push-button
            method is used.
        """
        params = self.service_params.setdefault(service, {})
        params['Name'] = name
        params['SSID'] = ssid
        params['Identity'] = identity
        params['Username'] = username
        params['Password'] = password
        params['Passphrase'] = passphrase
        params['WPS'] = wpspin

    @dbus.service.method("net.connman.Agent",
                         in_signature='oa{sv}',
                         out_signature='a{sv}')
    def RequestInput(self, path, fields):
        """
        Answer a connman input request from the configured parameters.

        :param str path: Object path of the service making the request.
        :param dict fields: Requested input fields (see
            :class:`.GenericAgent`).
        :return: The requested fields that the user configured.
        :rtype: dict
        :raises ConnCanceledException: if the daemon reported an error,
            or a requested mandatory field was not configured.
        """
        if 'Error' in fields:
            raise ConnCanceledException('Canceled')
        # Fall back to the wildcard entry when no per-service
        # parameters were configured for this object path.
        params = self.service_params.get(path, self.service_params['*'])
        response = {}
        if 'Name' in fields:
            if params.get('SSID'):
                response['SSID'] = params.get('SSID')
            if params.get('Name'):
                response['Name'] = params.get('Name')
        if 'WPS' in fields:
            # NOTE(review): a truthy check means an empty-string WPS pin
            # (push-button method) is never answered -- confirm intended.
            if params.get('WPS'):
                response['WPS'] = params.get('WPS')
        if 'Passphrase' in fields:
            if params.get('Passphrase'):
                response['Passphrase'] = params.get('Passphrase')
        if 'Identity' in fields:
            if params.get('Identity'):
                response['Identity'] = params.get('Identity')
            else:
                raise ConnCanceledException('Identity not configured by user')
        if 'Username' in fields:
            if params.get('Username'):
                response['Username'] = params.get('Username')
            else:
                raise ConnCanceledException('Username not configured by user')
        if 'Password' in fields:
            if params.get('Password'):
                response['Password'] = params.get('Password')
            else:
                raise ConnCanceledException('Password not configured by user')
        if not response:
            raise ConnCanceledException('Field(s) not configured by user')
        return response
| 37.069054 | 78 | 0.548848 | from __future__ import unicode_literals
from exceptions import ConnCanceledException
import dbus.service
class GenericAgent(dbus.service.Object):
def __init__(self, obj_path):
bus = dbus.SystemBus()
super(GenericAgent, self).__init__(bus, obj_path)
@dbus.service.method("net.connman.Agent",
in_signature='', out_signature='')
def Release(self):
pass
@dbus.service.method("net.connman.Agent",
in_signature='oa{sv}',
out_signature='a{sv}')
def RequestInput(self, path, fields):
pass
@dbus.service.method("net.connman.Agent",
in_signature='os',
out_signature='')
def RequestBrowser(self, path, url):
pass
@dbus.service.method("net.connman.Agent",
in_signature='os',
out_signature='')
def ReportError(self, path, error):
pass
@dbus.service.method("net.connman.Agent",
in_signature='', out_signature='')
def Cancel(self):
pass
class SimpleWifiAgent(GenericAgent):
def __init__(self, obj_path):
super(SimpleWifiAgent, self).__init__(obj_path)
self.service_params = {'*': {}}
def set_service_params(self, service, name=None, ssid=None,
identity=None, username=None,
password=None, passphrase=None,
wpspin=None):
if (self.service_params.get(service) is None):
self.service_params[service] = {}
self.service_params[service]['Name'] = name
self.service_params[service]['SSID'] = ssid
self.service_params[service]['Identity'] = identity
self.service_params[service]['Username'] = username
self.service_params[service]['Password'] = password
self.service_params[service]['Passphrase'] = passphrase
self.service_params[service]['WPS'] = wpspin
@dbus.service.method("net.connman.Agent",
in_signature='oa{sv}',
out_signature='a{sv}')
def RequestInput(self, path, fields):
response = {}
services = self.service_params.keys()
if (path in services):
params = self.service_params[path]
else:
params = self.service_params['*']
if ('Error' in fields):
raise ConnCanceledException('Canceled')
if ('Name' in fields):
if (params.get('SSID')):
response['SSID'] = params.get('SSID')
if (params.get('Name')):
response['Name'] = params.get('Name')
if ('WPS' in fields):
if (params.get('WPS')):
response['WPS'] = params.get('WPS')
if ('Passphrase' in fields):
if (params.get('Passphrase')):
response['Passphrase'] = params.get('Passphrase')
if ('Identity' in fields):
if (params.get('Identity')):
response['Identity'] = params.get('Identity')
else:
raise ConnCanceledException('Identity not configured by user')
if ('Username' in fields):
if (params.get('Username')):
response['Username'] = params.get('Username')
else:
raise ConnCanceledException('Username not configured by user')
if ('Password' in fields):
if (params.get('Password')):
response['Password'] = params.get('Password')
else:
raise ConnCanceledException('Password not configured by user')
if (not response.keys()):
raise ConnCanceledException('Field(s) not configured by user')
return response
| true | true |
f72de92c921abc43ae9c7715bb40d6d1d47db08f | 1,503 | py | Python | Keras/image-augmentation.py | MaraniMatias/machine-learning | 5346a60d0a16942a889d67b4c313b9332eb8f50a | [
"MIT"
] | null | null | null | Keras/image-augmentation.py | MaraniMatias/machine-learning | 5346a60d0a16942a889d67b4c313b9332eb8f50a | [
"MIT"
] | null | null | null | Keras/image-augmentation.py | MaraniMatias/machine-learning | 5346a60d0a16942a889d67b4c313b9332eb8f50a | [
"MIT"
] | null | null | null |
# coding: utf-8

# Image augmentation demo (exported from a Jupyter notebook).
#
# Feeds the images under images/sample-train through several Keras
# ImageDataGenerator configurations and writes the augmented copies to
# images/sample-confirm so they can be inspected by eye.
#
# Before running:
# - Check images/sample-train contains sample images
# - Check images/sample-confirm is empty

import numpy as np
from keras.preprocessing.image import ImageDataGenerator,array_to_img,img_to_array,load_img
from keras.applications.inception_v3 import preprocess_input

# Training-data generator: InceptionV3 preprocessing plus random shifts,
# shears, zooms and horizontal flips.
train_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True
)

# Test-data generator configured identically to train_datagen.
test_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    width_shift_range = 0.2,
    height_shift_range = 0.2,
    shear_range = 0.2,
    zoom_range = 0.2,
    horizontal_flip = True
)

# Minimal generator: preprocessing and horizontal flips only.
jf_datagen = ImageDataGenerator(
    preprocessing_function=preprocess_input,
    horizontal_flip=True
)

# Check on a sample to see the image generators work in the way we expect:
# each iteration saves one augmented batch into images/sample-confirm.
train_generator = train_datagen.flow_from_directory('images/sample-train/',target_size=(150,150), save_to_dir='images/sample-confirm/')

# Emit ~11 augmented batches from the full augmentation pipeline.
i=0
for batch in train_datagen.flow_from_directory('images/sample-train/', target_size=(150,150), save_to_dir='images/sample-confirm/'):
    i+=1
    if (i>10):
        break

# Emit ~11 batches from the flip-only pipeline for comparison.
j=0
for batch in jf_datagen.flow_from_directory('images/sample-train/', target_size=(150,150), save_to_dir='images/sample-confirm/'):
    j+=1
    if ( j > 10):
        break
| 19.269231 | 135 | 0.717898 |
as np
from keras.preprocessing.image import ImageDataGenerator,array_to_img,img_to_array,load_img
from keras.applications.inception_v3 import preprocess_input
train_datagen = ImageDataGenerator(
preprocessing_function=preprocess_input,
width_shift_range=0.2,
height_shift_range=0.2,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True
)
test_datagen = ImageDataGenerator(
preprocessing_function=preprocess_input,
width_shift_range = 0.2,
height_shift_range = 0.2,
shear_range = 0.2,
zoom_range = 0.2,
horizontal_flip = True
)
jf_datagen = ImageDataGenerator(
preprocessing_function=preprocess_input,
horizontal_flip=True
)
=0
for batch in train_datagen.flow_from_directory('images/sample-train/', target_size=(150,150), save_to_dir='images/sample-confirm/'):
i+=1
if (i>10):
break
j=0
for batch in jf_datagen.flow_from_directory('images/sample-train/', target_size=(150,150), save_to_dir='images/sample-confirm/'):
j+=1
if ( j > 10):
break
| true | true |
f72de9d65785c053afa06e862ddebf0702883b01 | 173,390 | py | Python | Lib/test/test_io.py | nittaya1990/RustPython | 9fa5c5ac66b4e29c5a00d77d38dc25b824109f8f | [
"CC-BY-4.0",
"MIT"
] | 1 | 2021-09-03T15:58:49.000Z | 2021-09-03T15:58:49.000Z | Lib/test/test_io.py | nittaya1990/RustPython | 9fa5c5ac66b4e29c5a00d77d38dc25b824109f8f | [
"CC-BY-4.0",
"MIT"
] | 6 | 2021-10-01T13:51:43.000Z | 2021-11-17T13:27:14.000Z | Lib/test/test_io.py | nittaya1990/RustPython | 9fa5c5ac66b4e29c5a00d77d38dc25b824109f8f | [
"CC-BY-4.0",
"MIT"
] | null | null | null | """Unit tests for the io module."""
# Tests of io are scattered over the test suite:
# * test_bufio - tests file buffering
# * test_memoryio - tests BytesIO and StringIO
# * test_fileio - tests FileIO
# * test_file - tests the file interface
# * test_io - tests everything else in the io module
# * test_univnewlines - tests universal newline support
# * test_largefile - tests operations on a file greater than 2**32 bytes
# (only enabled with -ulargefile)
################################################################################
# ATTENTION TEST WRITERS!!!
################################################################################
# When writing tests for io, it's important to test both the C and Python
# implementations. This is usually done by writing a base test that refers to
# the type it is testing as an attribute. Then it provides custom subclasses to
# test both implementations. This file has lots of examples.
################################################################################
import abc
import array
import errno
import locale
import os
import pickle
import random
import signal
import sys
import sysconfig
import threading
import time
import unittest
import warnings
import weakref
from collections import deque, UserList
from itertools import cycle, count
from test import support
from test.support import os_helper
from test.support.script_helper import assert_python_ok, run_python_until_end
from test.support.os_helper import FakePath
import codecs
import io # C implementation of io
import _pyio as pyio # Python implementation of io
try:
    import ctypes
except ImportError:
    def byteslike(*pos, **kw):
        # Fallback when ctypes is unavailable: an array.array is
        # bytes-like but also lacks str/bytes-style methods.
        return array.array("b", bytes(*pos, **kw))
else:
    def byteslike(*pos, **kw):
        """Create a bytes-like object having no string or sequence methods"""
        data = bytes(*pos, **kw)
        obj = EmptyStruct()
        ctypes.resize(obj, len(data))  # grow the zero-field struct to fit
        memoryview(obj).cast("B")[:] = data
        return obj
    class EmptyStruct(ctypes.Structure):
        # Zero-field ctypes structure: exposes only the buffer protocol.
        pass
_cflags = sysconfig.get_config_var('CFLAGS') or ''
_config_args = sysconfig.get_config_var('CONFIG_ARGS') or ''
# True when the interpreter was built with MemorySanitizer instrumentation
# (detected from the compile flags or the configure arguments).
MEMORY_SANITIZER = (
    '-fsanitize=memory' in _cflags or
    '--with-memory-sanitizer' in _config_args
)

# Does io.IOBase finalizer log the exception if the close() method fails?
# The exception is ignored silently by default in release build.
# (gettotalrefcount only exists on debug builds; dev mode also enables it.)
IOBASE_EMITS_UNRAISABLE = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode)
def _default_chunk_size():
"""Get the default TextIOWrapper chunk size"""
with open(__file__, "r", encoding="latin-1") as f:
return f._CHUNK_SIZE
class MockRawIOWithoutRead:
    """Scriptable raw stream that deliberately lacks read().

    Reading must therefore go through readinto(), which exercises the
    default RawIO.read() implementation built on top of it.
    """

    def __init__(self, read_stack=()):
        self._read_stack = list(read_stack)   # scripted readinto() results
        self._write_stack = []                # every payload passed to write()
        self._reads = 0                       # total readinto()/read() calls
        self._extraneous_reads = 0            # reads past the scripted data

    def write(self, b):
        self._write_stack.append(bytes(b))
        return len(b)

    def writable(self):
        return True

    def fileno(self):
        return 42

    def readable(self):
        return True

    def seekable(self):
        return True

    def seek(self, pos, whence):
        # Wrong, but callers only need *some* integer back.
        return 0

    def tell(self):
        # Same remark as for seek().
        return 0

    def readinto(self, buf):
        self._reads += 1
        if not self._read_stack:
            self._extraneous_reads += 1
            return 0
        data = self._read_stack[0]
        if data is None:
            # A scripted None simulates a raw stream that would block.
            del self._read_stack[0]
            return None
        capacity = len(buf)
        if len(data) <= capacity:
            # The whole chunk fits: consume it entirely.
            del self._read_stack[0]
            buf[:len(data)] = data
            return len(data)
        # Chunk too big: hand out a prefix and keep the rest queued.
        buf[:] = data[:capacity]
        self._read_stack[0] = data[capacity:]
        return capacity

    def truncate(self, pos=None):
        return pos
class CMockRawIOWithoutRead(MockRawIOWithoutRead, io.RawIOBase):
    # Mock paired with the C implementation (exercises io.RawIOBase.read()).
    pass
class PyMockRawIOWithoutRead(MockRawIOWithoutRead, pyio.RawIOBase):
    # Mock paired with the pure-Python _pyio implementation.
    pass
class MockRawIO(MockRawIOWithoutRead):
    """MockRawIOWithoutRead plus an explicit read() popping the stack."""

    def read(self, n=None):
        """Return (and consume) the next scripted read result.

        Once the scripted results are exhausted, the access is counted
        as an extraneous read and b"" (EOF) is returned.  The requested
        size *n* is deliberately ignored: each call yields one whole
        scripted chunk.
        """
        self._reads += 1
        try:
            return self._read_stack.pop(0)
        except IndexError:
            # Was a bare "except:", which would also have swallowed
            # KeyboardInterrupt/SystemExit; only IndexError can occur here.
            self._extraneous_reads += 1
            return b""
class CMockRawIO(MockRawIO, io.RawIOBase):
    # MockRawIO flavour based on the C io implementation.
    pass
class PyMockRawIO(MockRawIO, pyio.RawIOBase):
    # MockRawIO flavour based on the pure-Python _pyio implementation.
    pass
class MisbehavedRawIO(MockRawIO):
    # Raw stream whose methods report deliberately wrong values, used to
    # check that the buffered layers validate what the raw layer returns.
    def write(self, b):
        # Claims to have written twice as many bytes as it was given.
        return super().write(b) * 2
    def read(self, n=None):
        # Returns the scripted chunk repeated, i.e. twice as much data.
        return super().read(n) * 2
    def seek(self, pos, whence):
        # Reports a negative (invalid) stream position.
        return -123
    def tell(self):
        # Likewise, an invalid position.
        return -456
    def readinto(self, buf):
        # Claims to have filled five times the buffer's capacity.
        super().readinto(buf)
        return len(buf) * 5
class CMisbehavedRawIO(MisbehavedRawIO, io.RawIOBase):
    # Misbehaving mock paired with the C io implementation.
    pass
class PyMisbehavedRawIO(MisbehavedRawIO, pyio.RawIOBase):
    # Misbehaving mock paired with the pure-Python _pyio implementation.
    pass
class SlowFlushRawIO(MockRawIO):
    # Raw stream whose flush() blocks for a while, for testing the
    # interaction between buffered flushes and other threads.
    def __init__(self):
        super().__init__()
        self.in_flush = threading.Event()  # set once flush() has started
    def flush(self):
        self.in_flush.set()
        time.sleep(0.25)  # simulate a slow device
class CSlowFlushRawIO(SlowFlushRawIO, io.RawIOBase):
    # Slow-flush mock paired with the C io implementation.
    pass
class PySlowFlushRawIO(SlowFlushRawIO, pyio.RawIOBase):
    # Slow-flush mock paired with the pure-Python _pyio implementation.
    pass
class CloseFailureIO(MockRawIO):
    # Raw stream whose first close() raises OSError; subsequent calls
    # succeed, so cleanup code can still complete.
    closed = 0
    def close(self):
        if not self.closed:
            self.closed = 1  # fail only once
            raise OSError
class CCloseFailureIO(CloseFailureIO, io.RawIOBase):
    # Failing-close mock paired with the C io implementation.
    pass
class PyCloseFailureIO(CloseFailureIO, pyio.RawIOBase):
    # Failing-close mock paired with the pure-Python _pyio implementation.
    pass
class MockFileIO:
    # Cooperative mixin recording the outcome of every read; it is
    # combined with io.BytesIO / pyio.BytesIO in the subclasses below.
    def __init__(self, data):
        self.read_history = []  # sizes returned by read()/readinto()
        super().__init__(data)
    def read(self, n=None):
        res = super().read(n)
        # Record None for a would-block result, else the byte count.
        self.read_history.append(None if res is None else len(res))
        return res
    def readinto(self, b):
        res = super().readinto(b)
        self.read_history.append(res)
        return res
class CMockFileIO(MockFileIO, io.BytesIO):
    # MockFileIO bound to the C implementation's BytesIO.
    pass
class PyMockFileIO(MockFileIO, pyio.BytesIO):
    # MockFileIO bound to the pure-Python BytesIO.
    pass
class MockUnseekableIO:
    # Mixin that makes a stream report itself as unseekable and raise
    # UnsupportedOperation from every positioning API.  The concrete
    # UnsupportedOperation class is supplied by the C/Py subclasses below.
    def seekable(self):
        return False
    def seek(self, *args):
        raise self.UnsupportedOperation("not seekable")
    def tell(self, *args):
        raise self.UnsupportedOperation("not seekable")
    def truncate(self, *args):
        raise self.UnsupportedOperation("not seekable")
class CMockUnseekableIO(MockUnseekableIO, io.BytesIO):
    # Unseekable BytesIO using the C implementation's exception type.
    UnsupportedOperation = io.UnsupportedOperation
class PyMockUnseekableIO(MockUnseekableIO, pyio.BytesIO):
    # Unseekable BytesIO using the pure-Python exception type.
    UnsupportedOperation = pyio.UnsupportedOperation
class MockNonBlockWriterIO:
    # Simulates a non-blocking writable raw stream: writes accumulate in
    # self._write_stack until a designated "blocker" character is seen,
    # at which point write() performs a partial write (or returns None to
    # signal that it would block).
    def __init__(self):
        self._write_stack = []
        self._blocker_char = None
    def pop_written(self):
        # Drain and return everything written so far as one bytes object.
        s = b"".join(self._write_stack)
        self._write_stack[:] = []
        return s
    def block_on(self, char):
        """Block when a given char is encountered."""
        self._blocker_char = char
    def readable(self):
        return True
    def seekable(self):
        return True
    def seek(self, pos, whence=0):
        # naive implementation, enough for tests
        return 0
    def writable(self):
        return True
    def write(self, b):
        b = bytes(b)
        n = -1
        if self._blocker_char:
            try:
                n = b.index(self._blocker_char)
            except ValueError:
                pass
            else:
                if n > 0:
                    # write data up to the first blocker
                    self._write_stack.append(b[:n])
                    return n
                else:
                    # cancel blocker and indicate would block
                    self._blocker_char = None
                    return None
        self._write_stack.append(b)
        return len(b)
class CMockNonBlockWriterIO(MockNonBlockWriterIO, io.RawIOBase):
    # Non-blocking writer using the C implementation's BlockingIOError.
    BlockingIOError = io.BlockingIOError
class PyMockNonBlockWriterIO(MockNonBlockWriterIO, pyio.RawIOBase):
    # Non-blocking writer using the pure-Python BlockingIOError.
    BlockingIOError = pyio.BlockingIOError
class IOTest(unittest.TestCase):
    """Core behavioural tests for open() and the raw/buffered/text stack.

    The attributes used throughout (self.open, self.FileIO, self.BytesIO,
    self.IOBase, ...) are presumably bound to either the C or the pure
    Python implementation by the test harness -- see CIOTest / PyIOTest;
    confirm against the module-level setup (not visible in this chunk).
    """
    def setUp(self):
        os_helper.unlink(os_helper.TESTFN)
    def tearDown(self):
        os_helper.unlink(os_helper.TESTFN)
    def write_ops(self, f):
        """Common write/seek/tell/truncate assertions for writable *f*."""
        self.assertEqual(f.write(b"blah."), 5)
        f.truncate(0)
        # truncate() must not move the file position: still 5 after the cut.
        self.assertEqual(f.tell(), 5)
        f.seek(0)
        self.assertEqual(f.write(b"blah."), 5)
        self.assertEqual(f.seek(0), 0)
        self.assertEqual(f.write(b"Hello."), 6)
        self.assertEqual(f.tell(), 6)
        self.assertEqual(f.seek(-1, 1), 5)
        self.assertEqual(f.tell(), 5)
        buffer = bytearray(b" world\n\n\n")
        self.assertEqual(f.write(buffer), 9)
        buffer[:] = b"*" * 9 # Overwrite our copy of the data
        self.assertEqual(f.seek(0), 0)
        self.assertEqual(f.write(b"h"), 1)
        self.assertEqual(f.seek(-1, 2), 13)
        self.assertEqual(f.tell(), 13)
        self.assertEqual(f.truncate(12), 12)
        self.assertEqual(f.tell(), 13)
        self.assertRaises(TypeError, f.seek, 0.0)
    def read_ops(self, f, buffered=False):
        """Common read/readinto/seek assertions; *f* must yield the bytes
        written by write_ops().  With buffered=True, also exercise
        read-to-EOF and readinto1()."""
        data = f.read(5)
        self.assertEqual(data, b"hello")
        # byteslike() is presumably a module-level helper defined earlier in
        # this file; its result supports readinto() and bytes() conversion.
        data = byteslike(data)
        self.assertEqual(f.readinto(data), 5)
        self.assertEqual(bytes(data), b" worl")
        data = bytearray(5)
        self.assertEqual(f.readinto(data), 2)
        self.assertEqual(len(data), 5)
        self.assertEqual(data[:2], b"d\n")
        self.assertEqual(f.seek(0), 0)
        self.assertEqual(f.read(20), b"hello world\n")
        self.assertEqual(f.read(1), b"")
        self.assertEqual(f.readinto(byteslike(b"x")), 0)
        self.assertEqual(f.seek(-6, 2), 6)
        self.assertEqual(f.read(5), b"world")
        self.assertEqual(f.read(0), b"")
        self.assertEqual(f.readinto(byteslike()), 0)
        self.assertEqual(f.seek(-6, 1), 5)
        self.assertEqual(f.read(5), b" worl")
        self.assertEqual(f.tell(), 10)
        self.assertRaises(TypeError, f.seek, 0.0)
        if buffered:
            f.seek(0)
            self.assertEqual(f.read(), b"hello world\n")
            f.seek(6)
            self.assertEqual(f.read(), b"world\n")
            self.assertEqual(f.read(), b"")
            f.seek(0)
            data = byteslike(5)
            self.assertEqual(f.readinto1(data), 5)
            self.assertEqual(bytes(data), b"hello")
    # 2 GiB threshold used by large_file_ops() below.
    LARGE = 2**31
    def large_file_ops(self, f):
        """Seek/write/truncate around the 2 GiB mark (skips the test when
        the platform lacks large-file support)."""
        assert f.readable()
        assert f.writable()
        try:
            self.assertEqual(f.seek(self.LARGE), self.LARGE)
        except (OverflowError, ValueError):
            self.skipTest("no largefile support")
        self.assertEqual(f.tell(), self.LARGE)
        self.assertEqual(f.write(b"xxx"), 3)
        self.assertEqual(f.tell(), self.LARGE + 3)
        self.assertEqual(f.seek(-1, 1), self.LARGE + 2)
        self.assertEqual(f.truncate(), self.LARGE + 2)
        self.assertEqual(f.tell(), self.LARGE + 2)
        self.assertEqual(f.seek(0, 2), self.LARGE + 2)
        self.assertEqual(f.truncate(self.LARGE + 1), self.LARGE + 1)
        self.assertEqual(f.tell(), self.LARGE + 2)
        self.assertEqual(f.seek(0, 2), self.LARGE + 1)
        self.assertEqual(f.seek(-1, 2), self.LARGE)
        self.assertEqual(f.read(2), b"x")
    def test_invalid_operations(self):
        # Try writing on a file opened in read mode and vice-versa.
        exc = self.UnsupportedOperation
        for mode in ("w", "wb"):
            with self.open(os_helper.TESTFN, mode) as fp:
                self.assertRaises(exc, fp.read)
                self.assertRaises(exc, fp.readline)
        with self.open(os_helper.TESTFN, "wb", buffering=0) as fp:
            self.assertRaises(exc, fp.read)
            self.assertRaises(exc, fp.readline)
        with self.open(os_helper.TESTFN, "rb", buffering=0) as fp:
            self.assertRaises(exc, fp.write, b"blah")
            self.assertRaises(exc, fp.writelines, [b"blah\n"])
        with self.open(os_helper.TESTFN, "rb") as fp:
            self.assertRaises(exc, fp.write, b"blah")
            self.assertRaises(exc, fp.writelines, [b"blah\n"])
        with self.open(os_helper.TESTFN, "r") as fp:
            self.assertRaises(exc, fp.write, "blah")
            self.assertRaises(exc, fp.writelines, ["blah\n"])
            # Non-zero seeking from current or end pos
            self.assertRaises(exc, fp.seek, 1, self.SEEK_CUR)
            self.assertRaises(exc, fp.seek, -1, self.SEEK_END)
    def test_optional_abilities(self):
        # Test for OSError when optional APIs are not supported
        # The purpose of this test is to try fileno(), reading, writing and
        # seeking operations with various objects that indicate they do not
        # support these operations.
        def pipe_reader():
            [r, w] = os.pipe()
            os.close(w)  # So that read() is harmless
            return self.FileIO(r, "r")
        def pipe_writer():
            [r, w] = os.pipe()
            self.addCleanup(os.close, r)
            # Guarantee that we can write into the pipe without blocking
            thread = threading.Thread(target=os.read, args=(r, 100))
            thread.start()
            self.addCleanup(thread.join)
            return self.FileIO(w, "w")
        def buffered_reader():
            return self.BufferedReader(self.MockUnseekableIO())
        def buffered_writer():
            return self.BufferedWriter(self.MockUnseekableIO())
        def buffered_random():
            return self.BufferedRandom(self.BytesIO())
        def buffered_rw_pair():
            return self.BufferedRWPair(self.MockUnseekableIO(),
                self.MockUnseekableIO())
        def text_reader():
            class UnseekableReader(self.MockUnseekableIO):
                writable = self.BufferedIOBase.writable
                write = self.BufferedIOBase.write
            return self.TextIOWrapper(UnseekableReader(), "ascii")
        def text_writer():
            class UnseekableWriter(self.MockUnseekableIO):
                readable = self.BufferedIOBase.readable
                read = self.BufferedIOBase.read
            return self.TextIOWrapper(UnseekableWriter(), "ascii")
        # Each entry pairs a factory with its ability flags:
        # "f" = has fileno, "r" = readable, "w" = writable, "s" = seekable.
        tests = (
            (pipe_reader, "fr"), (pipe_writer, "fw"),
            (buffered_reader, "r"), (buffered_writer, "w"),
            (buffered_random, "rws"), (buffered_rw_pair, "rw"),
            (text_reader, "r"), (text_writer, "w"),
            (self.BytesIO, "rws"), (self.StringIO, "rws"),
        )
        for [test, abilities] in tests:
            with self.subTest(test), test() as obj:
                readable = "r" in abilities
                self.assertEqual(obj.readable(), readable)
                writable = "w" in abilities
                self.assertEqual(obj.writable(), writable)
                if isinstance(obj, self.TextIOBase):
                    data = "3"
                elif isinstance(obj, (self.BufferedIOBase, self.RawIOBase)):
                    data = b"3"
                else:
                    self.fail("Unknown base class")
                if "f" in abilities:
                    obj.fileno()
                else:
                    self.assertRaises(OSError, obj.fileno)
                if readable:
                    obj.read(1)
                    obj.read()
                else:
                    self.assertRaises(OSError, obj.read, 1)
                    self.assertRaises(OSError, obj.read)
                if writable:
                    obj.write(data)
                else:
                    self.assertRaises(OSError, obj.write, data)
                if sys.platform.startswith("win") and test in (
                        pipe_reader, pipe_writer):
                    # Pipes seem to appear as seekable on Windows
                    continue
                seekable = "s" in abilities
                self.assertEqual(obj.seekable(), seekable)
                if seekable:
                    obj.tell()
                    obj.seek(0)
                else:
                    self.assertRaises(OSError, obj.tell)
                    self.assertRaises(OSError, obj.seek, 0)
                if writable and seekable:
                    obj.truncate()
                    obj.truncate(0)
                else:
                    self.assertRaises(OSError, obj.truncate)
                    self.assertRaises(OSError, obj.truncate, 0)
    def test_open_handles_NUL_chars(self):
        # Embedded NUL bytes must be rejected in both str and bytes paths.
        fn_with_NUL = 'foo\0bar'
        self.assertRaises(ValueError, self.open, fn_with_NUL, 'w')
        bytes_fn = bytes(fn_with_NUL, 'ascii')
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            self.assertRaises(ValueError, self.open, bytes_fn, 'w')
    def test_raw_file_io(self):
        with self.open(os_helper.TESTFN, "wb", buffering=0) as f:
            self.assertEqual(f.readable(), False)
            self.assertEqual(f.writable(), True)
            self.assertEqual(f.seekable(), True)
            self.write_ops(f)
        with self.open(os_helper.TESTFN, "rb", buffering=0) as f:
            self.assertEqual(f.readable(), True)
            self.assertEqual(f.writable(), False)
            self.assertEqual(f.seekable(), True)
            self.read_ops(f)
    def test_buffered_file_io(self):
        with self.open(os_helper.TESTFN, "wb") as f:
            self.assertEqual(f.readable(), False)
            self.assertEqual(f.writable(), True)
            self.assertEqual(f.seekable(), True)
            self.write_ops(f)
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.readable(), True)
            self.assertEqual(f.writable(), False)
            self.assertEqual(f.seekable(), True)
            self.read_ops(f, True)
    def test_readline(self):
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(b"abc\ndef\nxyzzy\nfoo\x00bar\nanother line")
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.readline(), b"abc\n")
            self.assertEqual(f.readline(10), b"def\n")
            self.assertEqual(f.readline(2), b"xy")
            self.assertEqual(f.readline(4), b"zzy\n")
            self.assertEqual(f.readline(), b"foo\x00bar\n")
            self.assertEqual(f.readline(None), b"another line")
            self.assertRaises(TypeError, f.readline, 5.3)
        with self.open(os_helper.TESTFN, "r") as f:
            self.assertRaises(TypeError, f.readline, 5.3)
    def test_readline_nonsizeable(self):
        # Issue #30061
        # Crash when readline() returns an object without __len__
        class R(self.IOBase):
            def readline(self):
                return None
        self.assertRaises((TypeError, StopIteration), next, R())
    def test_next_nonsizeable(self):
        # Issue #30061
        # Crash when __next__() returns an object without __len__
        class R(self.IOBase):
            def __next__(self):
                return None
        self.assertRaises(TypeError, R().readlines, 1)
    def test_raw_bytes_io(self):
        f = self.BytesIO()
        self.write_ops(f)
        data = f.getvalue()
        self.assertEqual(data, b"hello world\n")
        f = self.BytesIO(data)
        self.read_ops(f, True)
    def test_large_file_ops(self):
        # On Windows and Mac OSX this test consumes large resources; It takes
        # a long time to build the >2 GiB file and takes >2 GiB of disk space
        # therefore the resource must be enabled to run this test.
        if sys.platform[:3] == 'win' or sys.platform == 'darwin':
            support.requires(
                'largefile',
                'test requires %s bytes and a long time to run' % self.LARGE)
        with self.open(os_helper.TESTFN, "w+b", 0) as f:
            self.large_file_ops(f)
        with self.open(os_helper.TESTFN, "w+b") as f:
            self.large_file_ops(f)
    def test_with_open(self):
        for bufsize in (0, 100):
            f = None
            with self.open(os_helper.TESTFN, "wb", bufsize) as f:
                f.write(b"xxx")
            self.assertEqual(f.closed, True)
            f = None
            try:
                with self.open(os_helper.TESTFN, "wb", bufsize) as f:
                    1/0
            except ZeroDivisionError:
                self.assertEqual(f.closed, True)
            else:
                self.fail("1/0 didn't raise an exception")
    # issue 5008
    def test_append_mode_tell(self):
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(b"xxx")
        with self.open(os_helper.TESTFN, "ab", buffering=0) as f:
            self.assertEqual(f.tell(), 3)
        with self.open(os_helper.TESTFN, "ab") as f:
            self.assertEqual(f.tell(), 3)
        with self.open(os_helper.TESTFN, "a") as f:
            self.assertGreater(f.tell(), 0)
    def test_destructor(self):
        # Destruction of a FileIO subclass must call close() then flush(),
        # after running the subclass's own __del__.
        record = []
        class MyFileIO(self.FileIO):
            def __del__(self):
                record.append(1)
                try:
                    f = super().__del__
                except AttributeError:
                    pass
                else:
                    f()
            def close(self):
                record.append(2)
                super().close()
            def flush(self):
                record.append(3)
                super().flush()
        with support.check_warnings(('', ResourceWarning)):
            f = MyFileIO(os_helper.TESTFN, "wb")
            f.write(b"xxx")
            del f
            support.gc_collect()
            self.assertEqual(record, [1, 2, 3])
            with self.open(os_helper.TESTFN, "rb") as f:
                self.assertEqual(f.read(), b"xxx")
    def _check_base_destructor(self, base):
        record = []
        class MyIO(base):
            def __init__(self):
                # This exercises the availability of attributes on object
                # destruction.
                # (in the C version, close() is called by the tp_dealloc
                # function, not by __del__)
                self.on_del = 1
                self.on_close = 2
                self.on_flush = 3
            def __del__(self):
                record.append(self.on_del)
                try:
                    f = super().__del__
                except AttributeError:
                    pass
                else:
                    f()
            def close(self):
                record.append(self.on_close)
                super().close()
            def flush(self):
                record.append(self.on_flush)
                super().flush()
        f = MyIO()
        del f
        support.gc_collect()
        self.assertEqual(record, [1, 2, 3])
    def test_IOBase_destructor(self):
        self._check_base_destructor(self.IOBase)
    def test_RawIOBase_destructor(self):
        self._check_base_destructor(self.RawIOBase)
    def test_BufferedIOBase_destructor(self):
        self._check_base_destructor(self.BufferedIOBase)
    def test_TextIOBase_destructor(self):
        self._check_base_destructor(self.TextIOBase)
    def test_close_flushes(self):
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(b"xxx")
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.read(), b"xxx")
    def test_array_writes(self):
        a = array.array('i', range(10))
        n = len(a.tobytes())
        def check(f):
            with f:
                self.assertEqual(f.write(a), n)
                f.writelines((a,))
        check(self.BytesIO())
        check(self.FileIO(os_helper.TESTFN, "w"))
        check(self.BufferedWriter(self.MockRawIO()))
        check(self.BufferedRandom(self.MockRawIO()))
        check(self.BufferedRWPair(self.MockRawIO(), self.MockRawIO()))
    def test_closefd(self):
        self.assertRaises(ValueError, self.open, os_helper.TESTFN, 'w',
                          closefd=False)
    def test_read_closed(self):
        with self.open(os_helper.TESTFN, "w") as f:
            f.write("egg\n")
        with self.open(os_helper.TESTFN, "r") as f:
            file = self.open(f.fileno(), "r", closefd=False)
            self.assertEqual(file.read(), "egg\n")
            file.seek(0)
            file.close()
            self.assertRaises(ValueError, file.read)
        with self.open(os_helper.TESTFN, "rb") as f:
            file = self.open(f.fileno(), "rb", closefd=False)
            self.assertEqual(file.read()[:3], b"egg")
            file.close()
            self.assertRaises(ValueError, file.readinto, bytearray(1))
    def test_no_closefd_with_filename(self):
        # can't use closefd in combination with a file name
        self.assertRaises(ValueError, self.open, os_helper.TESTFN, "r", closefd=False)
    def test_closefd_attr(self):
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(b"egg\n")
        with self.open(os_helper.TESTFN, "r") as f:
            self.assertEqual(f.buffer.raw.closefd, True)
            file = self.open(f.fileno(), "r", closefd=False)
            self.assertEqual(file.buffer.raw.closefd, False)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_garbage_collection(self):
        # FileIO objects are collected, and collecting them flushes
        # all data to disk.
        with support.check_warnings(('', ResourceWarning)):
            f = self.FileIO(os_helper.TESTFN, "wb")
            f.write(b"abcxxx")
            f.f = f
            wr = weakref.ref(f)
            del f
            support.gc_collect()
        self.assertIsNone(wr(), wr)
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.read(), b"abcxxx")
    def test_unbounded_file(self):
        # Issue #1174606: reading from an unbounded stream such as /dev/zero.
        zero = "/dev/zero"
        if not os.path.exists(zero):
            self.skipTest("{0} does not exist".format(zero))
        if sys.maxsize > 0x7FFFFFFF:
            self.skipTest("test can only run in a 32-bit address space")
        if support.real_max_memuse < support._2G:
            self.skipTest("test requires at least 2 GiB of memory")
        with self.open(zero, "rb", buffering=0) as f:
            self.assertRaises(OverflowError, f.read)
        with self.open(zero, "rb") as f:
            self.assertRaises(OverflowError, f.read)
        with self.open(zero, "r") as f:
            self.assertRaises(OverflowError, f.read)
    def check_flush_error_on_close(self, *args, **kwargs):
        # Test that the file is closed despite failed flush
        # and that flush() is called before file closed.
        f = self.open(*args, **kwargs)
        closed = []
        def bad_flush():
            closed[:] = [f.closed]
            raise OSError()
        f.flush = bad_flush
        self.assertRaises(OSError, f.close) # exception not swallowed
        self.assertTrue(f.closed)
        self.assertTrue(closed)      # flush() called
        self.assertFalse(closed[0])  # flush() called before file closed
        f.flush = lambda: None  # break reference loop
    def test_flush_error_on_close(self):
        # raw file
        # Issue #5700: io.FileIO calls flush() after file closed
        self.check_flush_error_on_close(os_helper.TESTFN, 'wb', buffering=0)
        fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
        self.check_flush_error_on_close(fd, 'wb', buffering=0)
        fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
        self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False)
        os.close(fd)
        # buffered io
        self.check_flush_error_on_close(os_helper.TESTFN, 'wb')
        fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
        self.check_flush_error_on_close(fd, 'wb')
        fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
        self.check_flush_error_on_close(fd, 'wb', closefd=False)
        os.close(fd)
        # text io
        self.check_flush_error_on_close(os_helper.TESTFN, 'w')
        fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
        self.check_flush_error_on_close(fd, 'w')
        fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
        self.check_flush_error_on_close(fd, 'w', closefd=False)
        os.close(fd)
    def test_multi_close(self):
        # close() must be idempotent; only use-after-close raises.
        f = self.open(os_helper.TESTFN, "wb", buffering=0)
        f.close()
        f.close()
        f.close()
        self.assertRaises(ValueError, f.flush)
    def test_RawIOBase_read(self):
        # Exercise the default limited RawIOBase.read(n) implementation (which
        # calls readinto() internally).
        rawio = self.MockRawIOWithoutRead((b"abc", b"d", None, b"efg", None))
        self.assertEqual(rawio.read(2), b"ab")
        self.assertEqual(rawio.read(2), b"c")
        self.assertEqual(rawio.read(2), b"d")
        self.assertEqual(rawio.read(2), None)
        self.assertEqual(rawio.read(2), b"ef")
        self.assertEqual(rawio.read(2), b"g")
        self.assertEqual(rawio.read(2), None)
        self.assertEqual(rawio.read(2), b"")
    def test_types_have_dict(self):
        test = (
            self.IOBase(),
            self.RawIOBase(),
            self.TextIOBase(),
            self.StringIO(),
            self.BytesIO()
        )
        for obj in test:
            self.assertTrue(hasattr(obj, "__dict__"))
    def test_opener(self):
        with self.open(os_helper.TESTFN, "w") as f:
            f.write("egg\n")
        fd = os.open(os_helper.TESTFN, os.O_RDONLY)
        def opener(path, flags):
            return fd
        with self.open("non-existent", "r", opener=opener) as f:
            self.assertEqual(f.read(), "egg\n")
    def test_bad_opener_negative_1(self):
        # Issue #27066.
        def badopener(fname, flags):
            return -1
        with self.assertRaises(ValueError) as cm:
            open('non-existent', 'r', opener=badopener)
        self.assertEqual(str(cm.exception), 'opener returned -1')
    def test_bad_opener_other_negative(self):
        # Issue #27066.
        def badopener(fname, flags):
            return -2
        with self.assertRaises(ValueError) as cm:
            open('non-existent', 'r', opener=badopener)
        self.assertEqual(str(cm.exception), 'opener returned -2')
    def test_fileio_closefd(self):
        # Issue #4841
        with self.open(__file__, 'rb') as f1, \
             self.open(__file__, 'rb') as f2:
            fileio = self.FileIO(f1.fileno(), closefd=False)
            # .__init__() must not close f1
            fileio.__init__(f2.fileno(), closefd=False)
            f1.readline()
            # .close() must not close f2
            fileio.close()
            f2.readline()
    def test_nonbuffered_textio(self):
        with support.check_no_resource_warning(self):
            with self.assertRaises(ValueError):
                self.open(os_helper.TESTFN, 'w', buffering=0)
    def test_invalid_newline(self):
        with support.check_no_resource_warning(self):
            with self.assertRaises(ValueError):
                self.open(os_helper.TESTFN, 'w', newline='invalid')
    def test_buffered_readinto_mixin(self):
        # Test the implementation provided by BufferedIOBase
        class Stream(self.BufferedIOBase):
            def read(self, size):
                return b"12345"
            read1 = read
        stream = Stream()
        for method in ("readinto", "readinto1"):
            with self.subTest(method):
                buffer = byteslike(5)
                self.assertEqual(getattr(stream, method)(buffer), 5)
                self.assertEqual(bytes(buffer), b"12345")
    def test_fspath_support(self):
        def check_path_succeeds(path):
            with self.open(path, "w") as f:
                f.write("egg\n")
            with self.open(path, "r") as f:
                self.assertEqual(f.read(), "egg\n")
        check_path_succeeds(FakePath(os_helper.TESTFN))
        check_path_succeeds(FakePath(os_helper.TESTFN.encode('utf-8')))
        with self.open(os_helper.TESTFN, "w") as f:
            bad_path = FakePath(f.fileno())
            with self.assertRaises(TypeError):
                self.open(bad_path, 'w')
        bad_path = FakePath(None)
        with self.assertRaises(TypeError):
            self.open(bad_path, 'w')
        bad_path = FakePath(FloatingPointError)
        with self.assertRaises(FloatingPointError):
            self.open(bad_path, 'w')
        # ensure that refcounting is correct with some error conditions
        with self.assertRaisesRegex(ValueError, 'read/write/append mode'):
            self.open(FakePath(os_helper.TESTFN), 'rwxa')
    def test_RawIOBase_readall(self):
        # Exercise the default unlimited RawIOBase.read() and readall()
        # implementations.
        rawio = self.MockRawIOWithoutRead((b"abc", b"d", b"efg"))
        self.assertEqual(rawio.read(), b"abcdefg")
        rawio = self.MockRawIOWithoutRead((b"abc", b"d", b"efg"))
        self.assertEqual(rawio.readall(), b"abcdefg")
    def test_BufferedIOBase_readinto(self):
        # Exercise the default BufferedIOBase.readinto() and readinto1()
        # implementations (which call read() or read1() internally).
        class Reader(self.BufferedIOBase):
            def __init__(self, avail):
                self.avail = avail
            def read(self, size):
                result = self.avail[:size]
                self.avail = self.avail[size:]
                return result
            def read1(self, size):
                """Returns no more than 5 bytes at once"""
                return self.read(min(size, 5))
        tests = (
            # (test method, total data available, read buffer size, expected
            #     read size)
            ("readinto", 10, 5, 5),
            ("readinto", 10, 6, 6),  # More than read1() can return
            ("readinto", 5, 6, 5),  # Buffer larger than total available
            ("readinto", 6, 7, 6),
            ("readinto", 10, 0, 0),  # Empty buffer
            ("readinto1", 10, 5, 5),  # Result limited to single read1() call
            ("readinto1", 10, 6, 5),  # Buffer larger than read1() can return
            ("readinto1", 5, 6, 5),  # Buffer larger than total available
            ("readinto1", 6, 7, 5),
            ("readinto1", 10, 0, 0),  # Empty buffer
        )
        UNUSED_BYTE = 0x81
        for test in tests:
            with self.subTest(test):
                method, avail, request, result = test
                reader = Reader(bytes(range(avail)))
                buffer = bytearray((UNUSED_BYTE,) * request)
                method = getattr(reader, method)
                self.assertEqual(method(buffer), result)
                self.assertEqual(len(buffer), request)
                self.assertSequenceEqual(buffer[:result], range(result))
                unused = (UNUSED_BYTE,) * (request - result)
                self.assertSequenceEqual(buffer[result:], unused)
                self.assertEqual(len(reader.avail), avail - result)
    def test_close_assert(self):
        class R(self.IOBase):
            def __setattr__(self, name, value):
                pass
            def flush(self):
                raise OSError()
        f = R()
        # This would cause an assertion failure.
        self.assertRaises(OSError, f.close)
        # Silence destructor error
        R.flush = lambda self: None
class CIOTest(IOTest):
    """Runs the IOTest suite against the C implementation of the io module
    (the harness presumably binds self.open/self.IOBase/... accordingly)."""
    # TODO: RUSTPYTHON, cyclic gc
    @unittest.expectedFailure
    def test_IOBase_finalize(self):
        # Issue #12149: segmentation fault on _PyIOBase_finalize when both a
        # class which inherits IOBase and an object of this class are caught
        # in a reference cycle and close() is already in the method cache.
        class MyIO(self.IOBase):
            def close(self):
                pass
        # create an instance to populate the method cache
        MyIO()
        obj = MyIO()
        obj.obj = obj
        wr = weakref.ref(obj)
        del MyIO
        del obj
        support.gc_collect()
        self.assertIsNone(wr(), wr)
    # TODO: RUSTPYTHON, AssertionError: filter ('', ResourceWarning) did not catch any warning
    @unittest.expectedFailure
    def test_destructor(self):
        # BUG FIX: this previously called `super().test_destructor(self)`,
        # passing `self` twice (once via the bound method, once explicitly),
        # which raised TypeError instead of running the inherited test --
        # a failure that @unittest.expectedFailure silently absorbed.
        super().test_destructor()
class PyIOTest(IOTest):
    # Runs the IOTest suite against the pure-Python implementation
    # (presumably bound via pyio by the test harness -- cf. CIOTest).
    pass
@support.cpython_only
class APIMismatchTest(unittest.TestCase):
    # Verifies that the C (io) and pure-Python (pyio) RawIOBase classes
    # expose the same API surface, in both directions.
    def test_RawIOBase_io_in_pyio_match(self):
        """Test that pyio RawIOBase class has all c RawIOBase methods"""
        mismatch = support.detect_api_mismatch(pyio.RawIOBase, io.RawIOBase,
                                               ignore=('__weakref__',))
        self.assertEqual(mismatch, set(), msg='Python RawIOBase does not have all C RawIOBase methods')
    def test_RawIOBase_pyio_in_io_match(self):
        """Test that c RawIOBase class has all pyio RawIOBase methods"""
        mismatch = support.detect_api_mismatch(io.RawIOBase, pyio.RawIOBase)
        self.assertEqual(mismatch, set(), msg='C RawIOBase does not have all Python RawIOBase methods')
class CommonBufferedTests:
    # Tests common to BufferedReader, BufferedWriter and BufferedRandom
    # Mixed into concrete test classes that set self.tp to the buffered
    # class under test (see BufferedReaderTest below).
    def test_detach(self):
        raw = self.MockRawIO()
        buf = self.tp(raw)
        self.assertIs(buf.detach(), raw)
        self.assertRaises(ValueError, buf.detach)
        repr(buf)  # Should still work
    def test_fileno(self):
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        # 42 is the fake descriptor MockRawIOWithoutRead.fileno() returns.
        self.assertEqual(42, bufio.fileno())
    def test_invalid_args(self):
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        # Invalid whence
        self.assertRaises(ValueError, bufio.seek, 0, -1)
        self.assertRaises(ValueError, bufio.seek, 0, 9)
    def test_override_destructor(self):
        # Destruction must invoke __del__, then close(), then flush().
        tp = self.tp
        record = []
        class MyBufferedIO(tp):
            def __del__(self):
                record.append(1)
                try:
                    f = super().__del__
                except AttributeError:
                    pass
                else:
                    f()
            def close(self):
                record.append(2)
                super().close()
            def flush(self):
                record.append(3)
                super().flush()
        rawio = self.MockRawIO()
        bufio = MyBufferedIO(rawio)
        del bufio
        support.gc_collect()
        self.assertEqual(record, [1, 2, 3])
    def test_context_manager(self):
        # Test usability as a context manager
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        def _with():
            with bufio:
                pass
        _with()
        # bufio should now be closed, and using it a second time should raise
        # a ValueError.
        self.assertRaises(ValueError, _with)
    # TODO: RUSTPYTHON, sys.unraisablehook
    @unittest.expectedFailure
    def test_error_through_destructor(self):
        # Test that the exception state is not modified by a destructor,
        # even if close() fails.
        rawio = self.CloseFailureIO()
        with support.catch_unraisable_exception() as cm:
            with self.assertRaises(AttributeError):
                self.tp(rawio).xyzzy
            if not IOBASE_EMITS_UNRAISABLE:
                self.assertIsNone(cm.unraisable)
            elif cm.unraisable is not None:
                self.assertEqual(cm.unraisable.exc_type, OSError)
    def test_repr(self):
        raw = self.MockRawIO()
        b = self.tp(raw)
        clsname = r"(%s\.)?%s" % (self.tp.__module__, self.tp.__qualname__)
        self.assertRegex(repr(b), "<%s>" % clsname)
        raw.name = "dummy"
        self.assertRegex(repr(b), "<%s name='dummy'>" % clsname)
        raw.name = b"dummy"
        self.assertRegex(repr(b), "<%s name=b'dummy'>" % clsname)
    def test_recursive_repr(self):
        # Issue #25455
        raw = self.MockRawIO()
        b = self.tp(raw)
        with support.swap_attr(raw, 'name', b):
            try:
                repr(b)  # Should not crash
            except RuntimeError:
                pass
    def test_flush_error_on_close(self):
        # Test that buffered file is closed despite failed flush
        # and that flush() is called before file closed.
        raw = self.MockRawIO()
        closed = []
        def bad_flush():
            closed[:] = [b.closed, raw.closed]
            raise OSError()
        raw.flush = bad_flush
        b = self.tp(raw)
        self.assertRaises(OSError, b.close) # exception not swallowed
        self.assertTrue(b.closed)
        self.assertTrue(raw.closed)
        self.assertTrue(closed)      # flush() called
        self.assertFalse(closed[0])  # flush() called before file closed
        self.assertFalse(closed[1])
        raw.flush = lambda: None  # break reference loop
    def test_close_error_on_close(self):
        # When both flush() and close() fail, close() must raise the
        # close error with the flush error chained as __context__.
        raw = self.MockRawIO()
        def bad_flush():
            raise OSError('flush')
        def bad_close():
            raise OSError('close')
        raw.close = bad_close
        b = self.tp(raw)
        b.flush = bad_flush
        with self.assertRaises(OSError) as err: # exception not swallowed
            b.close()
        self.assertEqual(err.exception.args, ('close',))
        self.assertIsInstance(err.exception.__context__, OSError)
        self.assertEqual(err.exception.__context__.args, ('flush',))
        self.assertFalse(b.closed)
        # Silence destructor error
        raw.close = lambda: None
        b.flush = lambda: None
    def test_nonnormalized_close_error_on_close(self):
        # Issue #21677
        raw = self.MockRawIO()
        def bad_flush():
            raise non_existing_flush
        def bad_close():
            raise non_existing_close
        raw.close = bad_close
        b = self.tp(raw)
        b.flush = bad_flush
        with self.assertRaises(NameError) as err: # exception not swallowed
            b.close()
        self.assertIn('non_existing_close', str(err.exception))
        self.assertIsInstance(err.exception.__context__, NameError)
        self.assertIn('non_existing_flush', str(err.exception.__context__))
        self.assertFalse(b.closed)
        # Silence destructor error
        b.flush = lambda: None
        raw.close = lambda: None
    def test_multi_close(self):
        # close() must be idempotent; only use-after-close raises.
        raw = self.MockRawIO()
        b = self.tp(raw)
        b.close()
        b.close()
        b.close()
        self.assertRaises(ValueError, b.flush)
    def test_unseekable(self):
        bufio = self.tp(self.MockUnseekableIO(b"A" * 10))
        self.assertRaises(self.UnsupportedOperation, bufio.tell)
        self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)
    def test_readonly_attributes(self):
        raw = self.MockRawIO()
        buf = self.tp(raw)
        x = self.MockRawIO()
        with self.assertRaises(AttributeError):
            buf.raw = x
class SizeofTest:
    """Mixin verifying that sys.getsizeof() of a buffered object tracks its
    internal buffer size (self.tp is supplied by the concrete test class)."""
    @support.cpython_only
    def test_sizeof(self):
        # The reported size must grow by exactly the buffer-size delta.
        small, large = 4096, 8192
        overhead = sys.getsizeof(
            self.tp(self.MockRawIO(), buffer_size=small)) - small
        self.assertEqual(
            sys.getsizeof(self.tp(self.MockRawIO(), buffer_size=large)),
            overhead + large)
    @support.cpython_only
    def test_buffer_freeing(self):
        # Closing the object must release its internal buffer.
        bufsize = 4096
        bufio = self.tp(self.MockRawIO(), buffer_size=bufsize)
        overhead = sys.getsizeof(bufio) - bufsize
        bufio.close()
        self.assertEqual(sys.getsizeof(bufio), overhead)
class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
read_mode = "rb"
def test_constructor(self):
rawio = self.MockRawIO([b"abc"])
bufio = self.tp(rawio)
bufio.__init__(rawio)
bufio.__init__(rawio, buffer_size=1024)
bufio.__init__(rawio, buffer_size=16)
self.assertEqual(b"abc", bufio.read())
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
rawio = self.MockRawIO([b"abc"])
bufio.__init__(rawio)
self.assertEqual(b"abc", bufio.read())
def test_uninitialized(self):
bufio = self.tp.__new__(self.tp)
del bufio
bufio = self.tp.__new__(self.tp)
self.assertRaisesRegex((ValueError, AttributeError),
'uninitialized|has no attribute',
bufio.read, 0)
bufio.__init__(self.MockRawIO())
self.assertEqual(bufio.read(0), b'')
def test_read(self):
for arg in (None, 7):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read(arg))
# Invalid args
self.assertRaises(ValueError, bufio.read, -2)
def test_read1(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"a", bufio.read(1))
self.assertEqual(b"b", bufio.read1(1))
self.assertEqual(rawio._reads, 1)
self.assertEqual(b"", bufio.read1(0))
self.assertEqual(b"c", bufio.read1(100))
self.assertEqual(rawio._reads, 1)
self.assertEqual(b"d", bufio.read1(100))
self.assertEqual(rawio._reads, 2)
self.assertEqual(b"efg", bufio.read1(100))
self.assertEqual(rawio._reads, 3)
self.assertEqual(b"", bufio.read1(100))
self.assertEqual(rawio._reads, 4)
def test_read1_arbitrary(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"a", bufio.read(1))
self.assertEqual(b"bc", bufio.read1())
self.assertEqual(b"d", bufio.read1())
self.assertEqual(b"efg", bufio.read1(-1))
self.assertEqual(rawio._reads, 3)
self.assertEqual(b"", bufio.read1())
self.assertEqual(rawio._reads, 4)
def test_readinto(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
b = bytearray(2)
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ab")
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"cd")
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ef")
self.assertEqual(bufio.readinto(b), 1)
self.assertEqual(b, b"gf")
self.assertEqual(bufio.readinto(b), 0)
self.assertEqual(b, b"gf")
rawio = self.MockRawIO((b"abc", None))
bufio = self.tp(rawio)
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ab")
self.assertEqual(bufio.readinto(b), 1)
self.assertEqual(b, b"cb")
def test_readinto1(self):
    """readinto1() does at most one raw read; verify via rawio._reads."""
    buffer_size = 10
    rawio = self.MockRawIO((b"abc", b"de", b"fgh", b"jkl"))
    bufio = self.tp(rawio, buffer_size=buffer_size)
    b = bytearray(2)
    self.assertEqual(bufio.peek(3), b'abc')
    self.assertEqual(rawio._reads, 1)
    # Served entirely from the buffered b"abc": no new raw reads.
    self.assertEqual(bufio.readinto1(b), 2)
    self.assertEqual(b, b"ab")
    self.assertEqual(rawio._reads, 1)
    self.assertEqual(bufio.readinto1(b), 1)
    self.assertEqual(b[:1], b"c")
    self.assertEqual(rawio._reads, 1)
    # Buffer empty: exactly one raw read per call.
    self.assertEqual(bufio.readinto1(b), 2)
    self.assertEqual(b, b"de")
    self.assertEqual(rawio._reads, 2)
    # Destination larger than the internal buffer: drain the buffer plus
    # one direct raw read.
    b = bytearray(2*buffer_size)
    self.assertEqual(bufio.peek(3), b'fgh')
    self.assertEqual(rawio._reads, 3)
    self.assertEqual(bufio.readinto1(b), 6)
    self.assertEqual(b[:6], b"fghjkl")
    self.assertEqual(rawio._reads, 4)
def test_readinto_array(self):
    """readinto() counts bytes, not elements, for multi-byte-item buffers."""
    buffer_size = 60
    data = b"a" * 26
    rawio = self.MockRawIO((data,))
    bufio = self.tp(rawio, buffer_size=buffer_size)
    # Create an array with element size > 1 byte
    b = array.array('i', b'x' * 32)
    assert len(b) != 16
    # Read into it. We should get as many *bytes* as we can fit into b
    # (which is more than the number of elements)
    n = bufio.readinto(b)
    self.assertGreater(n, len(b))
    # Check that old contents of b are preserved
    bm = memoryview(b).cast('B')
    self.assertLess(n, len(bm))
    self.assertEqual(bm[:n], data[:n])
    self.assertEqual(bm[n:], b'x' * (len(bm[n:])))
def test_readinto1_array(self):
    """Same as test_readinto_array, but for the readinto1() method."""
    buffer_size = 60
    data = b"a" * 26
    rawio = self.MockRawIO((data,))
    bufio = self.tp(rawio, buffer_size=buffer_size)
    # Create an array with element size > 1 byte
    b = array.array('i', b'x' * 32)
    assert len(b) != 16
    # Read into it. We should get as many *bytes* as we can fit into b
    # (which is more than the number of elements)
    n = bufio.readinto1(b)
    self.assertGreater(n, len(b))
    # Check that old contents of b are preserved
    bm = memoryview(b).cast('B')
    self.assertLess(n, len(bm))
    self.assertEqual(bm[:n], data[:n])
    self.assertEqual(bm[n:], b'x' * (len(bm[n:])))
def test_readlines(self):
    """readlines() honours the optional size hint (None means no limit)."""
    def make_reader():
        return self.tp(self.MockRawIO((b"abc\n", b"d\n", b"ef")))
    all_lines = [b"abc\n", b"d\n", b"ef"]
    self.assertEqual(make_reader().readlines(), all_lines)
    # A hint of 5 stops after the line that pushes the total past 5 bytes.
    self.assertEqual(make_reader().readlines(5), all_lines[:2])
    self.assertEqual(make_reader().readlines(None), all_lines)
def test_buffering(self):
    """Check how buffered reads of various sizes map onto raw reads.

    Each table entry is [buffer_size, buffered read sizes, expected raw
    read sizes recorded by MockFileIO].
    """
    data = b"abcdefghi"
    dlen = len(data)
    tests = [
        [ 100, [ 3, 1, 4, 8 ], [ dlen, 0 ] ],
        [ 100, [ 3, 3, 3], [ dlen ] ],
        [ 4, [ 1, 2, 4, 2 ], [ 4, 4, 1 ] ],
    ]
    for bufsize, buf_read_sizes, raw_read_sizes in tests:
        rawio = self.MockFileIO(data)
        bufio = self.tp(rawio, buffer_size=bufsize)
        pos = 0
        for nbytes in buf_read_sizes:
            self.assertEqual(bufio.read(nbytes), data[pos:pos+nbytes])
            pos += nbytes
        # this is mildly implementation-dependent
        self.assertEqual(rawio.read_history, raw_read_sizes)
def test_read_non_blocking(self):
    # Inject some None's in there to simulate EWOULDBLOCK
    rawio = self.MockRawIO((b"abc", b"d", None, b"efg", None, None, None))
    bufio = self.tp(rawio)
    self.assertEqual(b"abcd", bufio.read(6))
    self.assertEqual(b"e", bufio.read(1))
    self.assertEqual(b"fg", bufio.read())
    self.assertEqual(b"", bufio.peek(1))
    # With nothing buffered and the raw stream blocking, read() returns
    # None; once the raw stream reports EOF, read() returns b"".
    self.assertIsNone(bufio.read())
    self.assertEqual(b"", bufio.read())

    # readall() on the raw object itself: data first, then None for
    # "would block".
    rawio = self.MockRawIO((b"a", None, None))
    self.assertEqual(b"a", rawio.readall())
    self.assertIsNone(rawio.readall())
def test_read_past_eof(self):
    """Requesting more bytes than exist returns only what is available."""
    reader = self.tp(self.MockRawIO((b"abc", b"d", b"efg")))
    self.assertEqual(reader.read(9000), b"abcdefg")
def test_read_all(self):
    """read() with no argument concatenates every raw chunk to EOF."""
    reader = self.tp(self.MockRawIO((b"abc", b"d", b"efg")))
    self.assertEqual(reader.read(), b"abcdefg")
@support.requires_resource('cpu')
def test_threads(self):
    """Many threads reading one buffered stream must neither lose nor
    duplicate bytes."""
    try:
        # Write out many bytes with exactly the same number of 0's,
        # 1's... 255's. This will help us check that concurrent reading
        # doesn't duplicate or forget contents.
        N = 1000
        l = list(range(256)) * N
        random.shuffle(l)
        s = bytes(bytearray(l))
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(s)
        with self.open(os_helper.TESTFN, self.read_mode, buffering=0) as raw:
            # Tiny buffer (8 bytes) to maximize buffer-refill contention.
            bufio = self.tp(raw, 8)
            errors = []
            results = []
            def f():
                try:
                    # Intra-buffer read then buffer-flushing read
                    for n in cycle([1, 19]):
                        s = bufio.read(n)
                        if not s:
                            break
                        # list.append() is atomic
                        results.append(s)
                except Exception as e:
                    errors.append(e)
                    raise
            threads = [threading.Thread(target=f) for x in range(20)]
            with support.start_threads(threads):
                time.sleep(0.02) # yield
            self.assertFalse(errors,
                "the following exceptions were caught: %r" % errors)
            s = b''.join(results)
            # Every byte value must appear exactly N times overall.
            for i in range(256):
                c = bytes(bytearray([i]))
                self.assertEqual(s.count(c), N)
    finally:
        os_helper.unlink(os_helper.TESTFN)
def test_unseekable(self):
    """tell()/seek() on an unseekable raw stream raise UnsupportedOperation,
    both before and after some data has been buffered."""
    bufio = self.tp(self.MockUnseekableIO(b"A" * 10))
    self.assertRaises(self.UnsupportedOperation, bufio.tell)
    self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)
    bufio.read(1)
    self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)
    self.assertRaises(self.UnsupportedOperation, bufio.tell)
def test_misbehaved_io(self):
    """A raw object that lies about seek/tell results makes the buffered
    layer raise OSError instead of corrupting state."""
    rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
    bufio = self.tp(rawio)
    self.assertRaises(OSError, bufio.seek, 0)
    self.assertRaises(OSError, bufio.tell)
    # Silence destructor error
    bufio.close = lambda: None
def test_no_extraneous_read(self):
    # Issue #9550; when the raw IO object has satisfied the read request,
    # we should not issue any additional reads, otherwise it may block
    # (e.g. socket).
    bufsize = 16
    # Cover sizes around and beyond the internal buffer boundary.
    for n in (2, bufsize - 1, bufsize, bufsize + 1, bufsize * 2):
        rawio = self.MockRawIO([b"x" * n])
        bufio = self.tp(rawio, bufsize)
        self.assertEqual(bufio.read(n), b"x" * n)
        # Simple case: one raw read is enough to satisfy the request.
        self.assertEqual(rawio._extraneous_reads, 0,
                         "failed for {}: {} != 0".format(n, rawio._extraneous_reads))
        # A more complex case where two raw reads are needed to satisfy
        # the request.
        rawio = self.MockRawIO([b"x" * (n - 1), b"x"])
        bufio = self.tp(rawio, bufsize)
        self.assertEqual(bufio.read(n), b"x" * n)
        self.assertEqual(rawio._extraneous_reads, 0,
                         "failed for {}: {} != 0".format(n, rawio._extraneous_reads))
def test_read_on_closed(self):
    # Issue #23796: peek()/read1() must raise ValueError once closed.
    reader = io.BufferedReader(io.BytesIO(b"12"))
    reader.read(1)
    reader.close()
    self.assertRaises(ValueError, reader.peek)
    self.assertRaises(ValueError, reader.read1, 1)
class CBufferedReaderTest(BufferedReaderTest, SizeofTest):
    """Run the BufferedReaderTest suite against the C implementation,
    plus C-specific checks (allocation failure, GC, argument errors)."""
    tp = io.BufferedReader

    @unittest.skip("TODO: RUSTPYTHON, fallible allocation")
    @unittest.skipIf(MEMORY_SANITIZER, "MSan defaults to crashing "
                     "instead of returning NULL for malloc failure.")
    def test_constructor(self):
        BufferedReaderTest.test_constructor(self)
        # The allocation can succeed on 32-bit builds, e.g. with more
        # than 2 GiB RAM and a 64-bit kernel.
        if sys.maxsize > 0x7FFFFFFF:
            rawio = self.MockRawIO()
            bufio = self.tp(rawio)
            self.assertRaises((OverflowError, MemoryError, ValueError),
                              bufio.__init__, rawio, sys.maxsize)

    def test_initialization(self):
        # Re-initializing with a bad buffer_size leaves the object unusable.
        rawio = self.MockRawIO([b"abc"])
        bufio = self.tp(rawio)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
        self.assertRaises(ValueError, bufio.read)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
        self.assertRaises(ValueError, bufio.read)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
        self.assertRaises(ValueError, bufio.read)

    def test_misbehaved_io_read(self):
        rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
        bufio = self.tp(rawio)
        # _pyio.BufferedReader seems to implement reading different, so that
        # checking this is not so easy.
        self.assertRaises(OSError, bufio.read, 10)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_garbage_collection(self):
        # C BufferedReader objects are collected.
        # The Python version has __del__, so it ends into gc.garbage instead
        self.addCleanup(os_helper.unlink, os_helper.TESTFN)
        with support.check_warnings(('', ResourceWarning)):
            rawio = self.FileIO(os_helper.TESTFN, "w+b")
            f = self.tp(rawio)
            f.f = f
            wr = weakref.ref(f)
            del f
            support.gc_collect()
        self.assertIsNone(wr(), wr)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_args_error(self):
        # Issue #17275
        with self.assertRaisesRegex(TypeError, "BufferedReader"):
            self.tp(io.BytesIO(), 1024, 1024, 1024)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_flush_error_on_close(self):
        super().test_flush_error_on_close()
class PyBufferedReaderTest(BufferedReaderTest):
    """Run the BufferedReaderTest suite against the pure-Python implementation."""
    tp = pyio.BufferedReader
class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
    """Implementation-independent tests for buffered writers.

    Concrete subclasses set ``tp`` to the C or pure-Python BufferedWriter.
    """
    write_mode = "wb"

    def test_constructor(self):
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        # Re-initialization with valid buffer sizes is allowed...
        bufio.__init__(rawio)
        bufio.__init__(rawio, buffer_size=1024)
        bufio.__init__(rawio, buffer_size=16)
        self.assertEqual(3, bufio.write(b"abc"))
        bufio.flush()
        # ...while zero/negative sizes are rejected.
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
        bufio.__init__(rawio)
        self.assertEqual(3, bufio.write(b"ghi"))
        bufio.flush()
        self.assertEqual(b"".join(rawio._write_stack), b"abcghi")

    def test_uninitialized(self):
        # An object allocated with __new__ but never __init__'ed must fail
        # cleanly rather than crash.
        bufio = self.tp.__new__(self.tp)
        del bufio
        bufio = self.tp.__new__(self.tp)
        self.assertRaisesRegex((ValueError, AttributeError),
                               'uninitialized|has no attribute',
                               bufio.write, b'')
        bufio.__init__(self.MockRawIO())
        self.assertEqual(bufio.write(b''), 0)

    def test_detach_flush(self):
        # detach() must flush pending data to the raw stream.
        raw = self.MockRawIO()
        buf = self.tp(raw)
        buf.write(b"howdy!")
        self.assertFalse(raw._write_stack)
        buf.detach()
        self.assertEqual(raw._write_stack, [b"howdy!"])

    def test_write(self):
        # Write to the buffered IO but don't overflow the buffer.
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.write(b"abc")
        self.assertFalse(writer._write_stack)
        buffer = bytearray(b"def")
        bufio.write(buffer)
        buffer[:] = b"***"  # Overwrite our copy of the data
        bufio.flush()
        self.assertEqual(b"".join(writer._write_stack), b"abcdef")

    def test_write_overflow(self):
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        contents = b"abcdefghijklmnop"
        for n in range(0, len(contents), 3):
            bufio.write(contents[n:n+3])
        flushed = b"".join(writer._write_stack)
        # At least (total - 8) bytes were implicitly flushed, perhaps more
        # depending on the implementation.
        self.assertTrue(flushed.startswith(contents[:-8]), flushed)

    def check_writes(self, intermediate_func):
        # Lots of writes, test the flushed output is as expected.
        # intermediate_func(bufio) is called between writes to exercise
        # interleaved operations (flush, seek, truncate, ...).
        contents = bytes(range(256)) * 1000
        n = 0
        writer = self.MockRawIO()
        bufio = self.tp(writer, 13)
        # Generator of write sizes: repeat each N 15 times then proceed to N+1
        def gen_sizes():
            for size in count(1):
                for i in range(15):
                    yield size
        sizes = gen_sizes()
        while n < len(contents):
            size = min(next(sizes), len(contents) - n)
            self.assertEqual(bufio.write(contents[n:n+size]), size)
            intermediate_func(bufio)
            n += size
        bufio.flush()
        self.assertEqual(contents, b"".join(writer._write_stack))

    def test_writes(self):
        self.check_writes(lambda bufio: None)

    def test_writes_and_flushes(self):
        self.check_writes(lambda bufio: bufio.flush())

    def test_writes_and_seeks(self):
        # Seeking around the current position between writes must not
        # corrupt the flushed output.
        def _seekabs(bufio):
            pos = bufio.tell()
            bufio.seek(pos + 1, 0)
            bufio.seek(pos - 1, 0)
            bufio.seek(pos, 0)
        self.check_writes(_seekabs)
        def _seekrel(bufio):
            pos = bufio.seek(0, 1)
            bufio.seek(+1, 1)
            bufio.seek(-1, 1)
            bufio.seek(pos, 0)
        self.check_writes(_seekrel)

    def test_writes_and_truncates(self):
        self.check_writes(lambda bufio: bufio.truncate(bufio.tell()))

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_write_non_blocking(self):
        raw = self.MockNonBlockWriterIO()
        bufio = self.tp(raw, 8)

        self.assertEqual(bufio.write(b"abcd"), 4)
        self.assertEqual(bufio.write(b"efghi"), 5)
        # 1 byte will be written, the rest will be buffered
        raw.block_on(b"k")
        self.assertEqual(bufio.write(b"jklmn"), 5)

        # 8 bytes will be written, 8 will be buffered and the rest will be lost
        raw.block_on(b"0")
        try:
            bufio.write(b"opqrwxyz0123456789")
        except self.BlockingIOError as e:
            written = e.characters_written
        else:
            self.fail("BlockingIOError should have been raised")
        self.assertEqual(written, 16)
        self.assertEqual(raw.pop_written(),
                         b"abcdefghijklmnopqrwxyz")

        self.assertEqual(bufio.write(b"ABCDEFGHI"), 9)
        s = raw.pop_written()
        # Previously buffered bytes were flushed
        self.assertTrue(s.startswith(b"01234567A"), s)

    def test_write_and_rewind(self):
        raw = io.BytesIO()
        bufio = self.tp(raw, 4)
        self.assertEqual(bufio.write(b"abcdef"), 6)
        self.assertEqual(bufio.tell(), 6)
        bufio.seek(0, 0)
        self.assertEqual(bufio.write(b"XY"), 2)
        bufio.seek(6, 0)
        self.assertEqual(raw.getvalue(), b"XYcdef")
        self.assertEqual(bufio.write(b"123456"), 6)
        bufio.flush()
        self.assertEqual(raw.getvalue(), b"XYcdef123456")

    def test_flush(self):
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.write(b"abc")
        bufio.flush()
        self.assertEqual(b"abc", writer._write_stack[0])

    def test_writelines(self):
        l = [b'ab', b'cd', b'ef']
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.writelines(l)
        bufio.flush()
        self.assertEqual(b''.join(writer._write_stack), b'abcdef')

    def test_writelines_userlist(self):
        # writelines() must accept any iterable of bytes, not just list.
        l = UserList([b'ab', b'cd', b'ef'])
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.writelines(l)
        bufio.flush()
        self.assertEqual(b''.join(writer._write_stack), b'abcdef')

    def test_writelines_error(self):
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        self.assertRaises(TypeError, bufio.writelines, [1, 2, 3])
        self.assertRaises(TypeError, bufio.writelines, None)
        self.assertRaises(TypeError, bufio.writelines, 'abc')

    def test_destructor(self):
        # Garbage-collecting the writer must flush buffered data.
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.write(b"abc")
        del bufio
        support.gc_collect()
        self.assertEqual(b"abc", writer._write_stack[0])

    def test_truncate(self):
        # Truncate implicitly flushes the buffer.
        self.addCleanup(os_helper.unlink, os_helper.TESTFN)
        with self.open(os_helper.TESTFN, self.write_mode, buffering=0) as raw:
            bufio = self.tp(raw, 8)
            bufio.write(b"abcdef")
            self.assertEqual(bufio.truncate(3), 3)
            self.assertEqual(bufio.tell(), 6)
        with self.open(os_helper.TESTFN, "rb", buffering=0) as f:
            self.assertEqual(f.read(), b"abc")

    def test_truncate_after_write(self):
        # Ensure that truncate preserves the file position after
        # writes longer than the buffer size.
        # Issue: https://bugs.python.org/issue32228
        self.addCleanup(os_helper.unlink, os_helper.TESTFN)
        with self.open(os_helper.TESTFN, "wb") as f:
            # Fill with some buffer
            f.write(b'\x00' * 10000)
        buffer_sizes = [8192, 4096, 200]
        for buffer_size in buffer_sizes:
            with self.open(os_helper.TESTFN, "r+b", buffering=buffer_size) as f:
                f.write(b'\x00' * (buffer_size + 1))
                # After write write_pos and write_end are set to 0
                f.read(1)
                # read operation makes sure that pos != raw_pos
                f.truncate()
                self.assertEqual(f.tell(), buffer_size + 2)

    @support.requires_resource('cpu')
    def test_threads(self):
        """Concurrent writers through one buffered object must not lose data."""
        try:
            # Write out many bytes from many threads and test they were
            # all flushed.
            N = 1000
            contents = bytes(range(256)) * N
            sizes = cycle([1, 19])
            n = 0
            queue = deque()
            while n < len(contents):
                size = next(sizes)
                queue.append(contents[n:n+size])
                n += size
            del contents
            # We use a real file object because it allows us to
            # exercise situations where the GIL is released before
            # writing the buffer to the raw streams. This is in addition
            # to concurrency issues due to switching threads in the middle
            # of Python code.
            with self.open(os_helper.TESTFN, self.write_mode, buffering=0) as raw:
                bufio = self.tp(raw, 8)
                errors = []
                def f():
                    try:
                        while True:
                            try:
                                s = queue.popleft()
                            except IndexError:
                                return
                            bufio.write(s)
                    except Exception as e:
                        errors.append(e)
                        raise
                threads = [threading.Thread(target=f) for x in range(20)]
                with support.start_threads(threads):
                    time.sleep(0.02) # yield
                self.assertFalse(errors,
                    "the following exceptions were caught: %r" % errors)
                bufio.close()
            with self.open(os_helper.TESTFN, "rb") as f:
                s = f.read()
            # Every byte value must appear exactly N times overall.
            for i in range(256):
                self.assertEqual(s.count(bytes([i])), N)
        finally:
            os_helper.unlink(os_helper.TESTFN)

    def test_misbehaved_io(self):
        rawio = self.MisbehavedRawIO()
        bufio = self.tp(rawio, 5)
        self.assertRaises(OSError, bufio.seek, 0)
        self.assertRaises(OSError, bufio.tell)
        self.assertRaises(OSError, bufio.write, b"abcdef")
        # Silence destructor error
        bufio.close = lambda: None

    def test_max_buffer_size_removal(self):
        # The legacy third positional arg (max_buffer_size) was removed.
        with self.assertRaises(TypeError):
            self.tp(self.MockRawIO(), 8, 12)

    def test_write_error_on_close(self):
        raw = self.MockRawIO()
        def bad_write(b):
            raise OSError()
        raw.write = bad_write
        b = self.tp(raw)
        b.write(b'spam')
        self.assertRaises(OSError, b.close) # exception not swallowed
        self.assertTrue(b.closed)

    def test_slow_close_from_thread(self):
        # Issue #31976
        rawio = self.SlowFlushRawIO()
        bufio = self.tp(rawio, 8)
        t = threading.Thread(target=bufio.close)
        t.start()
        rawio.in_flush.wait()
        self.assertRaises(ValueError, bufio.write, b'spam')
        self.assertTrue(bufio.closed)
        t.join()
class CBufferedWriterTest(BufferedWriterTest, SizeofTest):
    """Run the BufferedWriterTest suite against the C implementation,
    plus C-specific checks (allocation failure, GC, argument errors)."""
    tp = io.BufferedWriter

    @unittest.skip("TODO: RUSTPYTHON, fallible allocation")
    @unittest.skipIf(MEMORY_SANITIZER, "MSan defaults to crashing "
                     "instead of returning NULL for malloc failure.")
    def test_constructor(self):
        BufferedWriterTest.test_constructor(self)
        # The allocation can succeed on 32-bit builds, e.g. with more
        # than 2 GiB RAM and a 64-bit kernel.
        if sys.maxsize > 0x7FFFFFFF:
            rawio = self.MockRawIO()
            bufio = self.tp(rawio)
            self.assertRaises((OverflowError, MemoryError, ValueError),
                              bufio.__init__, rawio, sys.maxsize)

    def test_initialization(self):
        # Re-initializing with a bad buffer_size leaves the object unusable.
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
        self.assertRaises(ValueError, bufio.write, b"def")
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
        self.assertRaises(ValueError, bufio.write, b"def")
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
        self.assertRaises(ValueError, bufio.write, b"def")

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_garbage_collection(self):
        # C BufferedWriter objects are collected, and collecting them flushes
        # all data to disk.
        # The Python version has __del__, so it ends into gc.garbage instead
        self.addCleanup(os_helper.unlink, os_helper.TESTFN)
        with support.check_warnings(('', ResourceWarning)):
            rawio = self.FileIO(os_helper.TESTFN, "w+b")
            f = self.tp(rawio)
            f.write(b"123xxx")
            f.x = f
            wr = weakref.ref(f)
            del f
            support.gc_collect()
        self.assertIsNone(wr(), wr)
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.read(), b"123xxx")

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_args_error(self):
        # Issue #17275
        with self.assertRaisesRegex(TypeError, "BufferedWriter"):
            self.tp(io.BytesIO(), 1024, 1024, 1024)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_flush_error_on_close(self):
        super().test_flush_error_on_close()
class PyBufferedWriterTest(BufferedWriterTest):
    """Run the BufferedWriterTest suite against the pure-Python implementation."""
    tp = pyio.BufferedWriter
class BufferedRWPairTest(unittest.TestCase):
    """Tests for BufferedRWPair, which wraps one reader and one writer.

    Concrete subclasses set ``tp`` to the C or pure-Python BufferedRWPair.
    """

    def test_constructor(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertFalse(pair.closed)

    def test_uninitialized(self):
        # An object allocated with __new__ but never __init__'ed must fail
        # cleanly rather than crash.
        pair = self.tp.__new__(self.tp)
        del pair
        pair = self.tp.__new__(self.tp)
        self.assertRaisesRegex((ValueError, AttributeError),
                               'uninitialized|has no attribute',
                               pair.read, 0)
        self.assertRaisesRegex((ValueError, AttributeError),
                               'uninitialized|has no attribute',
                               pair.write, b'')
        pair.__init__(self.MockRawIO(), self.MockRawIO())
        self.assertEqual(pair.read(0), b'')
        self.assertEqual(pair.write(b''), 0)

    def test_detach(self):
        # BufferedRWPair does not support detach().
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertRaises(self.UnsupportedOperation, pair.detach)

    def test_constructor_max_buffer_size_removal(self):
        # The legacy third positional arg (max_buffer_size) was removed.
        with self.assertRaises(TypeError):
            self.tp(self.MockRawIO(), self.MockRawIO(), 8, 12)

    def test_constructor_with_not_readable(self):
        class NotReadable(MockRawIO):
            def readable(self):
                return False

        self.assertRaises(OSError, self.tp, NotReadable(), self.MockRawIO())

    def test_constructor_with_not_writeable(self):
        class NotWriteable(MockRawIO):
            def writable(self):
                return False

        self.assertRaises(OSError, self.tp, self.MockRawIO(), NotWriteable())

    def test_read(self):
        pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

        self.assertEqual(pair.read(3), b"abc")
        self.assertEqual(pair.read(1), b"d")
        self.assertEqual(pair.read(), b"ef")
        pair = self.tp(self.BytesIO(b"abc"), self.MockRawIO())
        self.assertEqual(pair.read(None), b"abc")

    def test_readlines(self):
        pair = lambda: self.tp(self.BytesIO(b"abc\ndef\nh"), self.MockRawIO())
        self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
        self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
        self.assertEqual(pair().readlines(5), [b"abc\n", b"def\n"])

    def test_read1(self):
        # .read1() is delegated to the underlying reader object, so this test
        # can be shallow.
        pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

        self.assertEqual(pair.read1(3), b"abc")
        self.assertEqual(pair.read1(), b"def")

    def test_readinto(self):
        for method in ("readinto", "readinto1"):
            with self.subTest(method):
                pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

                data = byteslike(b'\0' * 5)
                self.assertEqual(getattr(pair, method)(data), 5)
                self.assertEqual(bytes(data), b"abcde")

    def test_write(self):
        w = self.MockRawIO()
        pair = self.tp(self.MockRawIO(), w)

        pair.write(b"abc")
        pair.flush()
        buffer = bytearray(b"def")
        pair.write(buffer)
        buffer[:] = b"***"  # Overwrite our copy of the data
        pair.flush()
        self.assertEqual(w._write_stack, [b"abc", b"def"])

    def test_peek(self):
        pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())

        self.assertTrue(pair.peek(3).startswith(b"abc"))
        self.assertEqual(pair.read(3), b"abc")

    def test_readable(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertTrue(pair.readable())

    def test_writeable(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertTrue(pair.writable())

    def test_seekable(self):
        # BufferedRWPairs are never seekable, even if their readers and writers
        # are.
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertFalse(pair.seekable())

    # .flush() is delegated to the underlying writer object and has been
    # tested in the test_write method.

    def test_close_and_closed(self):
        pair = self.tp(self.MockRawIO(), self.MockRawIO())
        self.assertFalse(pair.closed)
        pair.close()
        self.assertTrue(pair.closed)

    def test_reader_close_error_on_close(self):
        # If only the reader's close() raises, the writer is still closed.
        def reader_close():
            reader_non_existing
        reader = self.MockRawIO()
        reader.close = reader_close
        writer = self.MockRawIO()
        pair = self.tp(reader, writer)
        with self.assertRaises(NameError) as err:
            pair.close()
        self.assertIn('reader_non_existing', str(err.exception))
        self.assertTrue(pair.closed)
        self.assertFalse(reader.closed)
        self.assertTrue(writer.closed)

        # Silence destructor error
        reader.close = lambda: None

    # TODO: RUSTPYTHON, sys.unraisablehook
    @unittest.expectedFailure
    def test_writer_close_error_on_close(self):
        # If only the writer's close() raises, the reader is still closed
        # but the pair stays open.
        def writer_close():
            writer_non_existing
        reader = self.MockRawIO()
        writer = self.MockRawIO()
        writer.close = writer_close
        pair = self.tp(reader, writer)
        with self.assertRaises(NameError) as err:
            pair.close()
        self.assertIn('writer_non_existing', str(err.exception))
        self.assertFalse(pair.closed)
        self.assertTrue(reader.closed)
        self.assertFalse(writer.closed)

        # Silence destructor error
        writer.close = lambda: None
        writer = None

        # Ignore BufferedWriter (of the BufferedRWPair) unraisable exception
        with support.catch_unraisable_exception():
            # Ignore BufferedRWPair unraisable exception
            with support.catch_unraisable_exception():
                pair = None
                support.gc_collect()
            support.gc_collect()

    def test_reader_writer_close_error_on_close(self):
        # If both close() calls raise, the exceptions must be chained.
        def reader_close():
            reader_non_existing
        def writer_close():
            writer_non_existing
        reader = self.MockRawIO()
        reader.close = reader_close
        writer = self.MockRawIO()
        writer.close = writer_close
        pair = self.tp(reader, writer)
        with self.assertRaises(NameError) as err:
            pair.close()
        self.assertIn('reader_non_existing', str(err.exception))
        self.assertIsInstance(err.exception.__context__, NameError)
        self.assertIn('writer_non_existing', str(err.exception.__context__))
        self.assertFalse(pair.closed)
        self.assertFalse(reader.closed)
        self.assertFalse(writer.closed)

        # Silence destructor error
        reader.close = lambda: None
        writer.close = lambda: None

    def test_isatty(self):
        # The pair is a tty if either side is a tty.
        class SelectableIsAtty(MockRawIO):
            def __init__(self, isatty):
                MockRawIO.__init__(self)
                self._isatty = isatty

            def isatty(self):
                return self._isatty

        pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(False))
        self.assertFalse(pair.isatty())

        pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(False))
        self.assertTrue(pair.isatty())

        pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(True))
        self.assertTrue(pair.isatty())

        pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(True))
        self.assertTrue(pair.isatty())

    def test_weakref_clearing(self):
        brw = self.tp(self.MockRawIO(), self.MockRawIO())
        ref = weakref.ref(brw)
        brw = None
        ref = None # Shouldn't segfault.
class CBufferedRWPairTest(BufferedRWPairTest):
    """Run the BufferedRWPairTest suite against the C implementation."""
    tp = io.BufferedRWPair
class PyBufferedRWPairTest(BufferedRWPairTest):
    """Run the BufferedRWPairTest suite against the pure-Python implementation."""
    tp = pyio.BufferedRWPair
class BufferedRandomTest(BufferedReaderTest, BufferedWriterTest):
    """Tests for BufferedRandom: inherits the reader and writer suites and
    adds checks for interleaved reads, writes and seeks."""
    read_mode = "rb+"
    write_mode = "wb+"

    def test_constructor(self):
        BufferedReaderTest.test_constructor(self)
        BufferedWriterTest.test_constructor(self)

    def test_uninitialized(self):
        BufferedReaderTest.test_uninitialized(self)
        BufferedWriterTest.test_uninitialized(self)

    def test_read_and_write(self):
        raw = self.MockRawIO((b"asdf", b"ghjk"))
        rw = self.tp(raw, 8)

        self.assertEqual(b"as", rw.read(2))
        rw.write(b"ddd")
        rw.write(b"eee")
        self.assertFalse(raw._write_stack) # Buffer writes
        self.assertEqual(b"ghjk", rw.read())
        self.assertEqual(b"dddeee", raw._write_stack[0])

    def test_seek_and_tell(self):
        raw = self.BytesIO(b"asdfghjkl")
        rw = self.tp(raw)

        self.assertEqual(b"as", rw.read(2))
        self.assertEqual(2, rw.tell())
        rw.seek(0, 0)
        self.assertEqual(b"asdf", rw.read(4))

        rw.write(b"123f")
        rw.seek(0, 0)
        self.assertEqual(b"asdf123fl", rw.read())
        self.assertEqual(9, rw.tell())
        rw.seek(-4, 2)
        self.assertEqual(5, rw.tell())
        rw.seek(2, 1)
        self.assertEqual(7, rw.tell())
        self.assertEqual(b"fl", rw.read(11))
        rw.flush()
        self.assertEqual(b"asdf123fl", raw.getvalue())

        # seek() only accepts integer positions.
        self.assertRaises(TypeError, rw.seek, 0.0)

    def check_flush_and_read(self, read_func):
        # Interleave writes with reads performed through read_func and make
        # sure flush() keeps the stream position consistent.
        raw = self.BytesIO(b"abcdefghi")
        bufio = self.tp(raw)

        self.assertEqual(b"ab", read_func(bufio, 2))
        bufio.write(b"12")
        self.assertEqual(b"ef", read_func(bufio, 2))
        self.assertEqual(6, bufio.tell())
        bufio.flush()
        self.assertEqual(6, bufio.tell())
        self.assertEqual(b"ghi", read_func(bufio))
        raw.seek(0, 0)
        raw.write(b"XYZ")
        # flush() resets the read buffer
        bufio.flush()
        bufio.seek(0, 0)
        self.assertEqual(b"XYZ", read_func(bufio, 3))

    def test_flush_and_read(self):
        self.check_flush_and_read(lambda bufio, *args: bufio.read(*args))

    def test_flush_and_readinto(self):
        def _readinto(bufio, n=-1):
            b = bytearray(n if n >= 0 else 9999)
            n = bufio.readinto(b)
            return bytes(b[:n])
        self.check_flush_and_read(_readinto)

    def test_flush_and_peek(self):
        def _peek(bufio, n=-1):
            # This relies on the fact that the buffer can contain the whole
            # raw stream, otherwise peek() can return less.
            b = bufio.peek(n)
            if n != -1:
                b = b[:n]
            bufio.seek(len(b), 1)
            return b
        self.check_flush_and_read(_peek)

    def test_flush_and_write(self):
        raw = self.BytesIO(b"abcdefghi")
        bufio = self.tp(raw)

        bufio.write(b"123")
        bufio.flush()
        bufio.write(b"45")
        bufio.flush()
        bufio.seek(0, 0)
        self.assertEqual(b"12345fghi", raw.getvalue())
        self.assertEqual(b"12345fghi", bufio.read())

    def test_threads(self):
        BufferedReaderTest.test_threads(self)
        BufferedWriterTest.test_threads(self)

    def test_writes_and_peek(self):
        def _peek(bufio):
            bufio.peek(1)
        self.check_writes(_peek)
        def _peek(bufio):
            pos = bufio.tell()
            bufio.seek(-1, 1)
            bufio.peek(1)
            bufio.seek(pos, 0)
        self.check_writes(_peek)

    def test_writes_and_reads(self):
        def _read(bufio):
            bufio.seek(-1, 1)
            bufio.read(1)
        self.check_writes(_read)

    def test_writes_and_read1s(self):
        def _read1(bufio):
            bufio.seek(-1, 1)
            bufio.read1(1)
        self.check_writes(_read1)

    def test_writes_and_readintos(self):
        def _read(bufio):
            bufio.seek(-1, 1)
            bufio.readinto(bytearray(1))
        self.check_writes(_read)

    def test_write_after_readahead(self):
        # Issue #6629: writing after the buffer was filled by readahead should
        # first rewind the raw stream.
        for overwrite_size in [1, 5]:
            raw = self.BytesIO(b"A" * 10)
            bufio = self.tp(raw, 4)
            # Trigger readahead
            self.assertEqual(bufio.read(1), b"A")
            self.assertEqual(bufio.tell(), 1)
            # Overwriting should rewind the raw stream if it needs so
            bufio.write(b"B" * overwrite_size)
            self.assertEqual(bufio.tell(), overwrite_size + 1)
            # If the write size was smaller than the buffer size, flush() and
            # check that rewind happens.
            bufio.flush()
            self.assertEqual(bufio.tell(), overwrite_size + 1)
            s = raw.getvalue()
            self.assertEqual(s,
                b"A" + b"B" * overwrite_size + b"A" * (9 - overwrite_size))

    def test_write_rewind_write(self):
        # Various combinations of reading / writing / seeking backwards / writing again
        def mutate(bufio, pos1, pos2):
            assert pos2 >= pos1
            # Fill the buffer
            bufio.seek(pos1)
            bufio.read(pos2 - pos1)
            bufio.write(b'\x02')
            # This writes earlier than the previous write, but still inside
            # the buffer.
            bufio.seek(pos1)
            bufio.write(b'\x01')

        b = b"\x80\x81\x82\x83\x84"
        for i in range(0, len(b)):
            for j in range(i, len(b)):
                raw = self.BytesIO(b)
                bufio = self.tp(raw, 100)
                mutate(bufio, i, j)
                bufio.flush()
                expected = bytearray(b)
                expected[j] = 2
                expected[i] = 1
                self.assertEqual(raw.getvalue(), expected,
                                 "failed result for i=%d, j=%d" % (i, j))

    def test_truncate_after_read_or_write(self):
        raw = self.BytesIO(b"A" * 10)
        bufio = self.tp(raw, 100)
        self.assertEqual(bufio.read(2), b"AA") # the read buffer gets filled
        self.assertEqual(bufio.truncate(), 2)
        self.assertEqual(bufio.write(b"BB"), 2) # the write buffer increases
        self.assertEqual(bufio.truncate(), 4)

    def test_misbehaved_io(self):
        BufferedReaderTest.test_misbehaved_io(self)
        BufferedWriterTest.test_misbehaved_io(self)

    def test_interleaved_read_write(self):
        # Test for issue #12213
        with self.BytesIO(b'abcdefgh') as raw:
            with self.tp(raw, 100) as f:
                f.write(b"1")
                self.assertEqual(f.read(1), b'b')
                f.write(b'2')
                self.assertEqual(f.read1(1), b'd')
                f.write(b'3')
                buf = bytearray(1)
                f.readinto(buf)
                self.assertEqual(buf, b'f')
                f.write(b'4')
                self.assertEqual(f.peek(1), b'h')
                f.flush()
                self.assertEqual(raw.getvalue(), b'1b2d3f4h')

        with self.BytesIO(b'abc') as raw:
            with self.tp(raw, 100) as f:
                self.assertEqual(f.read(1), b'a')
                f.write(b"2")
                self.assertEqual(f.read(1), b'c')
                f.flush()
                self.assertEqual(raw.getvalue(), b'a2c')

    def test_interleaved_readline_write(self):
        with self.BytesIO(b'ab\ncdef\ng\n') as raw:
            with self.tp(raw) as f:
                f.write(b'1')
                self.assertEqual(f.readline(), b'b\n')
                f.write(b'2')
                self.assertEqual(f.readline(), b'def\n')
                f.write(b'3')
                self.assertEqual(f.readline(), b'\n')
                f.flush()
                self.assertEqual(raw.getvalue(), b'1b\n2def\n3\n')

    # You can't construct a BufferedRandom over a non-seekable stream.
    test_unseekable = None
class CBufferedRandomTest(BufferedRandomTest, SizeofTest):
    """Run the BufferedRandomTest suite against the C implementation,
    plus C-specific checks (allocation failure, GC, argument errors)."""
    tp = io.BufferedRandom

    @unittest.skip("TODO: RUSTPYTHON, fallible allocation")
    @unittest.skipIf(MEMORY_SANITIZER, "MSan defaults to crashing "
                     "instead of returning NULL for malloc failure.")
    def test_constructor(self):
        BufferedRandomTest.test_constructor(self)
        # The allocation can succeed on 32-bit builds, e.g. with more
        # than 2 GiB RAM and a 64-bit kernel.
        if sys.maxsize > 0x7FFFFFFF:
            rawio = self.MockRawIO()
            bufio = self.tp(rawio)
            self.assertRaises((OverflowError, MemoryError, ValueError),
                              bufio.__init__, rawio, sys.maxsize)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_garbage_collection(self):
        CBufferedReaderTest.test_garbage_collection(self)
        CBufferedWriterTest.test_garbage_collection(self)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_args_error(self):
        # Issue #17275
        with self.assertRaisesRegex(TypeError, "BufferedRandom"):
            self.tp(io.BytesIO(), 1024, 1024, 1024)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_flush_error_on_close(self):
        super().test_flush_error_on_close()
class PyBufferedRandomTest(BufferedRandomTest):
    # Run the BufferedRandom test suite against the pure-Python implementation.
    tp = pyio.BufferedRandom
# To fully exercise seek/tell, the StatefulIncrementalDecoder has these
# properties:
#   - A single output character can correspond to many bytes of input.
#   - The number of input bytes to complete the character can be
#     undetermined until the last input byte is received.
#   - The number of input bytes can vary depending on previous input.
#   - A single input byte can correspond to many characters of output.
#   - The number of output characters can be undetermined until the
#     last input byte is received.
#   - The number of output characters can vary depending on previous input.
class StatefulIncrementalDecoder(codecs.IncrementalDecoder):
    """
    For testing seek/tell behavior with a stateful, buffering decoder.
    Input is a sequence of words.  Words may be fixed-length (length set
    by input) or variable-length (period-terminated).  In variable-length
    mode, extra periods are ignored.  Possible words are:
      - 'i' followed by a number sets the input length, I (maximum 99).
        When I is set to 0, words are space-terminated.
      - 'o' followed by a number sets the output length, O (maximum 99).
      - Any other word is converted into a word followed by a period on
        the output.  The output word consists of the input word truncated
        or padded out with hyphens to make its length equal to O.  If O
        is 0, the word is output verbatim without truncating or padding.
    I and O are initially set to 1.  When I changes, any buffered input is
    re-scanned according to the new I.  EOF also terminates the last word.
    """
    def __init__(self, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors)
        self.reset()
    def __repr__(self):
        return '<SID %x>' % id(self)
    def reset(self):
        # I and O both start at 1, with no buffered input.
        self.i = 1
        self.o = 1
        self.buffer = bytearray()
    def getstate(self):
        # XOR with 1 so that a freshly-reset decoder reports flags == 0.
        flags = (self.i ^ 1) * 100 + (self.o ^ 1)
        return bytes(self.buffer), flags
    def setstate(self, state):
        pending, flags = state
        self.buffer = bytearray(pending)
        quotient, remainder = divmod(flags, 100)
        self.i = quotient ^ 1
        self.o = remainder ^ 1
    def decode(self, input, final=False):
        pieces = []
        for byte in input:
            if self.i == 0:
                # Variable-length mode: a period ends the current word;
                # a period with nothing buffered is ignored.
                if byte != ord('.'):
                    self.buffer.append(byte)
                elif self.buffer:
                    pieces.append(self.process_word())
            else:
                # Fixed-length mode: a word completes after self.i bytes.
                self.buffer.append(byte)
                if len(self.buffer) == self.i:
                    pieces.append(self.process_word())
        if final and self.buffer:
            # EOF terminates the last (possibly partial) word.
            pieces.append(self.process_word())
        return ''.join(pieces)
    def process_word(self):
        # Consume the buffered word and return the text it produces
        # ('' for the control words 'i<N>' and 'o<N>').
        word = self.buffer
        self.buffer = bytearray()
        if word[0] == ord('i'):
            self.i = min(99, int(word[1:] or 0))    # set input length
            return ''
        if word[0] == ord('o'):
            self.o = min(99, int(word[1:] or 0))    # set output length
            return ''
        output = word.decode('ascii')
        if len(output) < self.o:
            output += '-' * self.o                  # pad out with hyphens
        if self.o:
            output = output[:self.o]                # truncate to output length
        return output + '.'
    codecEnabled = False
    @classmethod
    def lookupTestDecoder(cls, name):
        # Codec-registry hook; returns a CodecInfo only while a test has
        # flipped codecEnabled on.
        if cls.codecEnabled and name == 'test_decoder':
            latin1 = codecs.lookup('latin-1')
            return codecs.CodecInfo(
                name='test_decoder', encode=latin1.encode, decode=None,
                incrementalencoder=None,
                streamreader=None, streamwriter=None,
                incrementaldecoder=cls)
# Register the previous decoder for testing.
# Disabled by default, tests will enable it.
# (The lookup hook returns None until codecEnabled is set to True.)
codecs.register(StatefulIncrementalDecoder.lookupTestDecoder)
class StatefulIncrementalDecoderTest(unittest.TestCase):
    """
    Make sure the StatefulIncrementalDecoder actually works.
    """
    # Each case is (input bytes, final flag, expected decoded output).
    test_cases = [
        # I=1, O=1 (fixed-length input == fixed-length output)
        (b'abcd', False, 'a.b.c.d.'),
        # I=0, O=0 (variable-length input, variable-length output)
        (b'oiabcd', True, 'abcd.'),
        # I=0, O=0 (should ignore extra periods)
        (b'oi...abcd...', True, 'abcd.'),
        # I=0, O=6 (variable-length input, fixed-length output)
        (b'i.o6.x.xyz.toolongtofit.', False, 'x-----.xyz---.toolon.'),
        # I=2, O=6 (fixed-length input < fixed-length output)
        (b'i.i2.o6xyz', True, 'xy----.z-----.'),
        # I=6, O=3 (fixed-length input > fixed-length output)
        (b'i.o3.i6.abcdefghijklmnop', True, 'abc.ghi.mno.'),
        # I=0, then 3; O=29, then 15 (with longer output)
        (b'i.o29.a.b.cde.o15.abcdefghijabcdefghij.i3.a.b.c.d.ei00k.l.m', True,
            'a----------------------------.' +
            'b----------------------------.' +
            'cde--------------------------.' +
            'abcdefghijabcde.' +
            'a.b------------.' +
            '.c.------------.' +
            'd.e------------.' +
            'k--------------.' +
            'l--------------.' +
            'm--------------.')
    ]
    def test_decoder(self):
        # Try a few one-shot test cases.
        for input, eof, output in self.test_cases:
            d = StatefulIncrementalDecoder()
            self.assertEqual(d.decode(input, eof), output)
        # Also test an unfinished decode, followed by forcing EOF.
        d = StatefulIncrementalDecoder()
        self.assertEqual(d.decode(b'oiabcd'), '')
        self.assertEqual(d.decode(b'', 1), 'abcd.')
class TextIOWrapperTest(unittest.TestCase):
    def setUp(self):
        # Sample data with mixed \r / \r\n / \n line endings, and its
        # universal-newlines-normalized text form.
        self.testdata = b"AAA\r\nBBB\rCCC\r\nDDD\nEEE\r\n"
        self.normalized = b"AAA\nBBB\nCCC\nDDD\nEEE\n".decode("ascii")
        os_helper.unlink(os_helper.TESTFN)
    def tearDown(self):
        # Remove the scratch file any test may have left behind.
        os_helper.unlink(os_helper.TESTFN)
    def test_constructor(self):
        # __init__ may be called again on a live wrapper to re-bind it
        # with new encoding/newline/line_buffering settings.
        r = self.BytesIO(b"\xc3\xa9\n\n")
        b = self.BufferedReader(r, 1000)
        t = self.TextIOWrapper(b)
        t.__init__(b, encoding="latin-1", newline="\r\n")
        self.assertEqual(t.encoding, "latin-1")
        self.assertEqual(t.line_buffering, False)
        t.__init__(b, encoding="utf-8", line_buffering=True)
        self.assertEqual(t.encoding, "utf-8")
        self.assertEqual(t.line_buffering, True)
        self.assertEqual("\xe9\n", t.readline())
        # Invalid newline arguments are rejected up front.
        self.assertRaises(TypeError, t.__init__, b, newline=42)
        self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
    def test_uninitialized(self):
        # A wrapper allocated via __new__ (no __init__) must be safe to
        # delete and must refuse I/O until it is initialized.
        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        del t
        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        self.assertRaises(Exception, repr, t)
        self.assertRaisesRegex((ValueError, AttributeError),
                               'uninitialized|has no attribute',
                               t.read, 0)
        t.__init__(self.MockRawIO())
        self.assertEqual(t.read(0), '')
def test_non_text_encoding_codecs_are_rejected(self):
# Ensure the constructor complains if passed a codec that isn't
# marked as a text encoding
# http://bugs.python.org/issue20404
r = self.BytesIO()
b = self.BufferedWriter(r)
with self.assertRaisesRegex(LookupError, "is not a text encoding"):
self.TextIOWrapper(b, encoding="hex")
    def test_detach(self):
        # detach() returns the underlying buffer, flushing pending text
        # first; a detached wrapper rejects further detach() calls but
        # still exposes its configuration attributes.
        r = self.BytesIO()
        b = self.BufferedWriter(r)
        t = self.TextIOWrapper(b)
        self.assertIs(t.detach(), b)
        t = self.TextIOWrapper(b, encoding="ascii")
        t.write("howdy")
        self.assertFalse(r.getvalue())
        t.detach()
        self.assertEqual(r.getvalue(), b"howdy")
        self.assertRaises(ValueError, t.detach)
        # Operations independent of the detached stream should still work
        repr(t)
        self.assertEqual(t.encoding, "ascii")
        self.assertEqual(t.errors, "strict")
        self.assertFalse(t.line_buffering)
        self.assertFalse(t.write_through)
    def test_repr(self):
        # repr() must include whichever of name/mode/encoding are
        # available, and must not blow up after the buffer is detached.
        raw = self.BytesIO("hello".encode("utf-8"))
        b = self.BufferedReader(raw)
        t = self.TextIOWrapper(b, encoding="utf-8")
        modname = self.TextIOWrapper.__module__
        self.assertRegex(repr(t),
                         r"<(%s\.)?TextIOWrapper encoding='utf-8'>" % modname)
        raw.name = "dummy"
        self.assertRegex(repr(t),
                         r"<(%s\.)?TextIOWrapper name='dummy' encoding='utf-8'>" % modname)
        t.mode = "r"
        self.assertRegex(repr(t),
                         r"<(%s\.)?TextIOWrapper name='dummy' mode='r' encoding='utf-8'>" % modname)
        raw.name = b"dummy"
        self.assertRegex(repr(t),
                         r"<(%s\.)?TextIOWrapper name=b'dummy' mode='r' encoding='utf-8'>" % modname)
        t.buffer.detach()
        repr(t)  # Should not raise an exception
    def test_recursive_repr(self):
        # Issue #25455: a raw stream whose .name refers back to the
        # wrapper must not send repr() into infinite recursion.
        raw = self.BytesIO()
        t = self.TextIOWrapper(raw)
        with support.swap_attr(raw, 'name', t):
            try:
                repr(t)  # Should not crash
            except RuntimeError:
                pass
def test_line_buffering(self):
r = self.BytesIO()
b = self.BufferedWriter(r, 1000)
t = self.TextIOWrapper(b, newline="\n", line_buffering=True)
t.write("X")
self.assertEqual(r.getvalue(), b"") # No flush happened
t.write("Y\nZ")
self.assertEqual(r.getvalue(), b"XY\nZ") # All got flushed
t.write("A\rB")
self.assertEqual(r.getvalue(), b"XY\nZA\rB")
    def test_reconfigure_line_buffering(self):
        # reconfigure(line_buffering=...) must take effect immediately,
        # implicitly flushing, and reconfigure()/None must keep the
        # current setting.
        r = self.BytesIO()
        b = self.BufferedWriter(r, 1000)
        t = self.TextIOWrapper(b, newline="\n", line_buffering=False)
        t.write("AB\nC")
        self.assertEqual(r.getvalue(), b"")
        t.reconfigure(line_buffering=True)   # implicit flush
        self.assertEqual(r.getvalue(), b"AB\nC")
        t.write("DEF\nG")
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nG")
        t.write("H")
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nG")
        t.reconfigure(line_buffering=False)   # implicit flush
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nGH")
        t.write("IJ")
        self.assertEqual(r.getvalue(), b"AB\nCDEF\nGH")
        # Keeping default value
        t.reconfigure()
        t.reconfigure(line_buffering=None)
        self.assertEqual(t.line_buffering, False)
        t.reconfigure(line_buffering=True)
        t.reconfigure()
        t.reconfigure(line_buffering=None)
        self.assertEqual(t.line_buffering, True)
    @unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
    def test_default_encoding(self):
        # With no encoding argument, TextIOWrapper must pick up the
        # current locale encoding (not the user-preferred one).
        old_environ = dict(os.environ)
        try:
            # try to get a user preferred encoding different than the current
            # locale encoding to check that TextIOWrapper() uses the current
            # locale encoding and not the user preferred encoding
            for key in ('LC_ALL', 'LANG', 'LC_CTYPE'):
                if key in os.environ:
                    del os.environ[key]
            current_locale_encoding = locale.getpreferredencoding(False)
            b = self.BytesIO()
            t = self.TextIOWrapper(b)
            self.assertEqual(t.encoding, current_locale_encoding)
        finally:
            # Restore the environment exactly as it was.
            os.environ.clear()
            os.environ.update(old_environ)
    @support.cpython_only
    @unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
    def test_device_encoding(self):
        # Issue 15989: a fileno() too large for a C int/uint must raise
        # OverflowError instead of wrapping around.
        import _testcapi
        b = self.BytesIO()
        b.fileno = lambda: _testcapi.INT_MAX + 1
        self.assertRaises(OverflowError, self.TextIOWrapper, b)
        b.fileno = lambda: _testcapi.UINT_MAX + 1
        self.assertRaises(OverflowError, self.TextIOWrapper, b)
def test_encoding(self):
# Check the encoding attribute is always set, and valid
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="utf-8")
self.assertEqual(t.encoding, "utf-8")
t = self.TextIOWrapper(b)
self.assertIsNotNone(t.encoding)
codecs.lookup(t.encoding)
def test_encoding_errors_reading(self):
# (1) default
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.read)
# (2) explicit strict
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.read)
# (3) ignore
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="ignore")
self.assertEqual(t.read(), "abc\n\n")
# (4) replace
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="replace")
self.assertEqual(t.read(), "abc\n\ufffd\n")
def test_encoding_errors_writing(self):
# (1) default
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.write, "\xff")
# (2) explicit strict
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.write, "\xff")
# (3) ignore
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="ignore",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEqual(b.getvalue(), b"abcdef\n")
# (4) replace
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="replace",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEqual(b.getvalue(), b"abc?def\n")
    def test_newlines(self):
        # Exhaustively check newline translation on input for every
        # newline mode, several encodings, and a range of buffer sizes.
        input_lines = [ "unix\n", "windows\r\n", "os9\r", "last\n", "nonl" ]
        tests = [
            [ None, [ 'unix\n', 'windows\n', 'os9\n', 'last\n', 'nonl' ] ],
            [ '', input_lines ],
            [ '\n', [ "unix\n", "windows\r\n", "os9\rlast\n", "nonl" ] ],
            [ '\r\n', [ "unix\nwindows\r\n", "os9\rlast\nnonl" ] ],
            [ '\r', [ "unix\nwindows\r", "\nos9\r", "last\nnonl" ] ],
        ]
        encodings = (
            'utf-8', 'latin-1',
            'utf-16', 'utf-16-le', 'utf-16-be',
            'utf-32', 'utf-32-le', 'utf-32-be',
        )
        # Try a range of buffer sizes to test the case where \r is the last
        # character in TextIOWrapper._pending_line.
        for encoding in encodings:
            # XXX: str.encode() should return bytes
            data = bytes(''.join(input_lines).encode(encoding))
            for do_reads in (False, True):
                for bufsize in range(1, 10):
                    for newline, exp_lines in tests:
                        bufio = self.BufferedReader(self.BytesIO(data), bufsize)
                        textio = self.TextIOWrapper(bufio, newline=newline,
                                                  encoding=encoding)
                        if do_reads:
                            # Read two chars at a time, then the rest of the line.
                            got_lines = []
                            while True:
                                c2 = textio.read(2)
                                if c2 == '':
                                    break
                                self.assertEqual(len(c2), 2)
                                got_lines.append(c2 + textio.readline())
                        else:
                            got_lines = list(textio)
                        for got_line, exp_line in zip(got_lines, exp_lines):
                            self.assertEqual(got_line, exp_line)
                        self.assertEqual(len(got_lines), len(exp_lines))
    def test_newlines_input(self):
        # readlines()/read() must split the same data differently for
        # each newline mode, including data containing NUL bytes.
        testdata = b"AAA\nBB\x00B\nCCC\rDDD\rEEE\r\nFFF\r\nGGG"
        normalized = testdata.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
        for newline, expected in [
            (None, normalized.decode("ascii").splitlines(keepends=True)),
            ("", testdata.decode("ascii").splitlines(keepends=True)),
            ("\n", ["AAA\n", "BB\x00B\n", "CCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
            ("\r\n", ["AAA\nBB\x00B\nCCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
            ("\r", ["AAA\nBB\x00B\nCCC\r", "DDD\r", "EEE\r", "\nFFF\r", "\nGGG"]),
            ]:
            buf = self.BytesIO(testdata)
            txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
            self.assertEqual(txt.readlines(), expected)
            txt.seek(0)
            self.assertEqual(txt.read(), "".join(expected))
    def test_newlines_output(self):
        # On output, '\n' is translated per the newline mode while a
        # literal '\r' written by the program is passed through.
        testdict = {
            "": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
            "\n": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
            "\r": b"AAA\rBBB\rCCC\rX\rY\r\rZ",
            "\r\n": b"AAA\r\nBBB\r\nCCC\r\nX\rY\r\r\nZ",
            }
        # newline=None must behave like the platform os.linesep.
        tests = [(None, testdict[os.linesep])] + sorted(testdict.items())
        for newline, expected in tests:
            buf = self.BytesIO()
            txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
            txt.write("AAA\nB")
            txt.write("BB\nCCC\n")
            txt.write("X\rY\r\nZ")
            txt.flush()
            self.assertEqual(buf.closed, False)
            self.assertEqual(buf.getvalue(), expected)
    def test_destructor(self):
        # Dropping the last reference must flush pending text and close
        # the underlying buffer (the overridden close() records the data).
        l = []
        base = self.BytesIO
        class MyBytesIO(base):
            def close(self):
                l.append(self.getvalue())
                base.close(self)
        b = MyBytesIO()
        t = self.TextIOWrapper(b, encoding="ascii")
        t.write("abc")
        del t
        support.gc_collect()
        self.assertEqual([b"abc"], l)
    def test_override_destructor(self):
        # A subclass's overridden __del__/close/flush must all run, in
        # that order, when the wrapper is destroyed.
        record = []
        class MyTextIO(self.TextIOWrapper):
            def __del__(self):
                record.append(1)
                try:
                    # The base class may not define __del__ at all.
                    f = super().__del__
                except AttributeError:
                    pass
                else:
                    f()
            def close(self):
                record.append(2)
                super().close()
            def flush(self):
                record.append(3)
                super().flush()
        b = self.BytesIO()
        t = MyTextIO(b, encoding="ascii")
        del t
        support.gc_collect()
        self.assertEqual(record, [1, 2, 3])
    # TODO: RUSTPYTHON, sys.unraisablehook
    @unittest.expectedFailure
    def test_error_through_destructor(self):
        # Test that the exception state is not modified by a destructor,
        # even if close() fails.
        rawio = self.CloseFailureIO()
        with support.catch_unraisable_exception() as cm:
            with self.assertRaises(AttributeError):
                self.TextIOWrapper(rawio).xyzzy
            if not IOBASE_EMITS_UNRAISABLE:
                self.assertIsNone(cm.unraisable)
            elif cm.unraisable is not None:
                self.assertEqual(cm.unraisable.exc_type, OSError)
    # Systematic tests of the text I/O API
    def test_basic_io(self):
        # Round-trip write/read/seek/tell across a range of chunk sizes
        # and encodings.
        for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
            for enc in "ascii", "latin-1", "utf-8" :# , "utf-16-be", "utf-16-le":
                f = self.open(os_helper.TESTFN, "w+", encoding=enc)
                f._CHUNK_SIZE = chunksize
                self.assertEqual(f.write("abc"), 3)
                f.close()
                f = self.open(os_helper.TESTFN, "r+", encoding=enc)
                f._CHUNK_SIZE = chunksize
                self.assertEqual(f.tell(), 0)
                self.assertEqual(f.read(), "abc")
                cookie = f.tell()
                self.assertEqual(f.seek(0), 0)
                self.assertEqual(f.read(None), "abc")
                f.seek(0)
                self.assertEqual(f.read(2), "ab")
                self.assertEqual(f.read(1), "c")
                self.assertEqual(f.read(1), "")
                self.assertEqual(f.read(), "")
                self.assertEqual(f.tell(), cookie)
                self.assertEqual(f.seek(0), 0)
                self.assertEqual(f.seek(0, 2), cookie)
                self.assertEqual(f.write("def"), 3)
                self.assertEqual(f.seek(cookie), cookie)
                self.assertEqual(f.read(), "def")
                if enc.startswith("utf"):
                    # Multi-byte characters get extra seek/tell coverage.
                    self.multi_line_test(f, enc)
                f.close()
    def multi_line_test(self, f, enc):
        # Helper for test_basic_io: write lines of varying lengths built
        # from multi-byte characters, then verify each recorded
        # (position, line) pair survives a readline() pass.
        f.seek(0)
        f.truncate()
        sample = "s\xff\u0fff\uffff"
        wlines = []
        for size in (0, 1, 2, 3, 4, 5, 30, 31, 32, 33, 62, 63, 64, 65, 1000):
            chars = []
            for i in range(size):
                chars.append(sample[i % len(sample)])
            line = "".join(chars) + "\n"
            wlines.append((f.tell(), line))
            f.write(line)
        f.seek(0)
        rlines = []
        while True:
            pos = f.tell()
            line = f.readline()
            if not line:
                break
            rlines.append((pos, line))
        self.assertEqual(rlines, wlines)
    def test_telling(self):
        # tell() must agree with the positions recorded while writing,
        # but must raise while iteration has read ahead.
        f = self.open(os_helper.TESTFN, "w+", encoding="utf-8")
        p0 = f.tell()
        f.write("\xff\n")
        p1 = f.tell()
        f.write("\xff\n")
        p2 = f.tell()
        f.seek(0)
        self.assertEqual(f.tell(), p0)
        self.assertEqual(f.readline(), "\xff\n")
        self.assertEqual(f.tell(), p1)
        self.assertEqual(f.readline(), "\xff\n")
        self.assertEqual(f.tell(), p2)
        f.seek(0)
        for line in f:
            self.assertEqual(line, "\xff\n")
            # tell() is disallowed mid-iteration (read-ahead in effect).
            self.assertRaises(OSError, f.tell)
        self.assertEqual(f.tell(), p2)
        f.close()
    def test_seeking(self):
        # Seek/read across a chunk boundary that splits a multi-byte
        # UTF-8 character.
        chunk_size = _default_chunk_size()
        prefix_size = chunk_size - 2
        u_prefix = "a" * prefix_size
        prefix = bytes(u_prefix.encode("utf-8"))
        self.assertEqual(len(u_prefix), len(prefix))
        u_suffix = "\u8888\n"
        suffix = bytes(u_suffix.encode("utf-8"))
        line = prefix + suffix
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(line*2)
        with self.open(os_helper.TESTFN, "r", encoding="utf-8") as f:
            s = f.read(prefix_size)
            self.assertEqual(s, str(prefix, "ascii"))
            self.assertEqual(f.tell(), prefix_size)
            self.assertEqual(f.readline(), u_suffix)
    def test_seeking_too(self):
        # Regression test for a specific bug
        data = b'\xe0\xbf\xbf\n'
        with self.open(os_helper.TESTFN, "wb") as f:
            f.write(data)
        with self.open(os_helper.TESTFN, "r", encoding="utf-8") as f:
            f._CHUNK_SIZE  # Just test that it exists
            # Force a tiny chunk so the multi-byte char straddles chunks.
            f._CHUNK_SIZE = 2
            f.readline()
            f.tell()
    def test_seek_and_tell(self):
        #Test seek/tell using the StatefulIncrementalDecoder.
        # Make test faster by doing smaller seeks
        CHUNK_SIZE = 128
        def test_seek_and_tell_with_data(data, min_pos=0):
            """Tell/seek to various points within a data stream and ensure
            that the decoded data returned by read() is consistent."""
            f = self.open(os_helper.TESTFN, 'wb')
            f.write(data)
            f.close()
            f = self.open(os_helper.TESTFN, encoding='test_decoder')
            f._CHUNK_SIZE = CHUNK_SIZE
            decoded = f.read()
            f.close()
            for i in range(min_pos, len(decoded) + 1): # seek positions
                for j in [1, 5, len(decoded) - i]: # read lengths
                    f = self.open(os_helper.TESTFN, encoding='test_decoder')
                    self.assertEqual(f.read(i), decoded[:i])
                    cookie = f.tell()
                    self.assertEqual(f.read(j), decoded[i:i + j])
                    f.seek(cookie)
                    self.assertEqual(f.read(), decoded[i:])
                    f.close()
        # Enable the test decoder.
        StatefulIncrementalDecoder.codecEnabled = 1
        # Run the tests.
        try:
            # Try each test case.
            for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
                test_seek_and_tell_with_data(input)
            # Position each test case so that it crosses a chunk boundary.
            for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
                offset = CHUNK_SIZE - len(input)//2
                prefix = b'.'*offset
                # Don't bother seeking into the prefix (takes too long).
                min_pos = offset*2
                test_seek_and_tell_with_data(prefix + input, min_pos)
        # Ensure our test decoder won't interfere with subsequent tests.
        finally:
            StatefulIncrementalDecoder.codecEnabled = 0
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_multibyte_seek_and_tell(self):
        # tell() cookies must round-trip through a multi-byte encoding.
        f = self.open(os_helper.TESTFN, "w", encoding="euc_jp")
        f.write("AB\n\u3046\u3048\n")
        f.close()
        f = self.open(os_helper.TESTFN, "r", encoding="euc_jp")
        self.assertEqual(f.readline(), "AB\n")
        p0 = f.tell()
        self.assertEqual(f.readline(), "\u3046\u3048\n")
        p1 = f.tell()
        f.seek(p0)
        self.assertEqual(f.readline(), "\u3046\u3048\n")
        self.assertEqual(f.tell(), p1)
        f.close()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_seek_with_encoder_state(self):
        # Seeking back mid-write must restore the encoder's internal
        # state so combining characters still encode correctly.
        f = self.open(os_helper.TESTFN, "w", encoding="euc_jis_2004")
        f.write("\u00e6\u0300")
        p0 = f.tell()
        f.write("\u00e6")
        f.seek(p0)
        f.write("\u0300")
        f.close()
        f = self.open(os_helper.TESTFN, "r", encoding="euc_jis_2004")
        self.assertEqual(f.readline(), "\u00e6\u0300\u0300")
        f.close()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_encoded_writes(self):
        # For BOM-prefixed encodings the BOM must be emitted exactly once
        # across multiple writes.
        data = "1234567890"
        tests = ("utf-16",
                 "utf-16-le",
                 "utf-16-be",
                 "utf-32",
                 "utf-32-le",
                 "utf-32-be")
        for encoding in tests:
            buf = self.BytesIO()
            f = self.TextIOWrapper(buf, encoding=encoding)
            # Check if the BOM is written only once (see issue1753).
            f.write(data)
            f.write(data)
            f.seek(0)
            self.assertEqual(f.read(), data * 2)
            f.seek(0)
            self.assertEqual(f.read(), data * 2)
            self.assertEqual(buf.getvalue(), (data * 2).encode(encoding))
def test_unreadable(self):
class UnReadable(self.BytesIO):
def readable(self):
return False
txt = self.TextIOWrapper(UnReadable())
self.assertRaises(OSError, txt.read)
def test_read_one_by_one(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\r\nBB"))
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEqual(reads, "AA\nBB")
def test_readlines(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\nBB\nCC"))
self.assertEqual(txt.readlines(), ["AA\n", "BB\n", "CC"])
txt.seek(0)
self.assertEqual(txt.readlines(None), ["AA\n", "BB\n", "CC"])
txt.seek(0)
self.assertEqual(txt.readlines(5), ["AA\n", "BB\n"])
# read in amounts equal to TextIOWrapper._CHUNK_SIZE which is 128.
def test_read_by_chunk(self):
# make sure "\r\n" straddles 128 char boundary.
txt = self.TextIOWrapper(self.BytesIO(b"A" * 127 + b"\r\nB"))
reads = ""
while True:
c = txt.read(128)
if not c:
break
reads += c
self.assertEqual(reads, "A"*127+"\nB")
def test_writelines(self):
l = ['ab', 'cd', 'ef']
buf = self.BytesIO()
txt = self.TextIOWrapper(buf)
txt.writelines(l)
txt.flush()
self.assertEqual(buf.getvalue(), b'abcdef')
def test_writelines_userlist(self):
l = UserList(['ab', 'cd', 'ef'])
buf = self.BytesIO()
txt = self.TextIOWrapper(buf)
txt.writelines(l)
txt.flush()
self.assertEqual(buf.getvalue(), b'abcdef')
def test_writelines_error(self):
txt = self.TextIOWrapper(self.BytesIO())
self.assertRaises(TypeError, txt.writelines, [1, 2, 3])
self.assertRaises(TypeError, txt.writelines, None)
self.assertRaises(TypeError, txt.writelines, b'abc')
def test_issue1395_1(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
# read one char at a time
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEqual(reads, self.normalized)
def test_issue1395_2(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = ""
while True:
c = txt.read(4)
if not c:
break
reads += c
self.assertEqual(reads, self.normalized)
def test_issue1395_3(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read(4)
reads += txt.readline()
reads += txt.readline()
reads += txt.readline()
self.assertEqual(reads, self.normalized)
def test_issue1395_4(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read()
self.assertEqual(reads, self.normalized)
def test_issue1395_5(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
pos = txt.tell()
txt.seek(0)
txt.seek(pos)
self.assertEqual(txt.read(4), "BBB\n")
def test_issue2282(self):
buffer = self.BytesIO(self.testdata)
txt = self.TextIOWrapper(buffer, encoding="ascii")
self.assertEqual(buffer.seekable(), txt.seekable())
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_append_bom(self):
        # The BOM is not written again when appending to a non-empty file
        filename = os_helper.TESTFN
        for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
            with self.open(filename, 'w', encoding=charset) as f:
                f.write('aaa')
                pos = f.tell()
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'aaa'.encode(charset))
            with self.open(filename, 'a', encoding=charset) as f:
                f.write('xxx')
            with self.open(filename, 'rb') as f:
                # One BOM only: the appended text must not add another.
                self.assertEqual(f.read(), 'aaaxxx'.encode(charset))
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_seek_bom(self):
        # Same test, but when seeking manually
        filename = os_helper.TESTFN
        for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
            with self.open(filename, 'w', encoding=charset) as f:
                f.write('aaa')
                pos = f.tell()
            with self.open(filename, 'r+', encoding=charset) as f:
                f.seek(pos)
                f.write('zzz')
                # Overwriting at position 0 must not insert a second BOM.
                f.seek(0)
                f.write('bbb')
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'bbbzzz'.encode(charset))
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_seek_append_bom(self):
        # Same test, but first seek to the start and then to the end
        filename = os_helper.TESTFN
        for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
            with self.open(filename, 'w', encoding=charset) as f:
                f.write('aaa')
            with self.open(filename, 'a', encoding=charset) as f:
                f.seek(0)
                f.seek(0, self.SEEK_END)
                f.write('xxx')
            with self.open(filename, 'rb') as f:
                self.assertEqual(f.read(), 'aaaxxx'.encode(charset))
def test_errors_property(self):
with self.open(os_helper.TESTFN, "w") as f:
self.assertEqual(f.errors, "strict")
with self.open(os_helper.TESTFN, "w", errors="replace") as f:
self.assertEqual(f.errors, "replace")
    @support.no_tracing
    def test_threads_write(self):
        # Issue6750: concurrent writes could duplicate data
        event = threading.Event()
        with self.open(os_helper.TESTFN, "w", buffering=1) as f:
            def run(n):
                text = "Thread%03d\n" % n
                # All threads wait on the event so the writes race.
                event.wait()
                f.write(text)
            threads = [threading.Thread(target=run, args=(x,))
                       for x in range(20)]
            with support.start_threads(threads, event.set):
                time.sleep(0.02)
        with self.open(os_helper.TESTFN) as f:
            content = f.read()
            for n in range(20):
                # Each thread's line must appear exactly once.
                self.assertEqual(content.count("Thread%03d\n" % n), 1)
    def test_flush_error_on_close(self):
        # Test that text file is closed despite failed flush
        # and that flush() is called before file closed.
        txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
        closed = []
        def bad_flush():
            # Record the closed-state seen at flush time, then fail.
            closed[:] = [txt.closed, txt.buffer.closed]
            raise OSError()
        txt.flush = bad_flush
        self.assertRaises(OSError, txt.close) # exception not swallowed
        self.assertTrue(txt.closed)
        self.assertTrue(txt.buffer.closed)
        self.assertTrue(closed)      # flush() called
        self.assertFalse(closed[0])  # flush() called before file closed
        self.assertFalse(closed[1])
        txt.flush = lambda: None  # break reference loop
    def test_close_error_on_close(self):
        # When both flush() and the buffer's close() fail, close() must
        # propagate the close error with the flush error as its context.
        buffer = self.BytesIO(self.testdata)
        def bad_flush():
            raise OSError('flush')
        def bad_close():
            raise OSError('close')
        buffer.close = bad_close
        txt = self.TextIOWrapper(buffer, encoding="ascii")
        txt.flush = bad_flush
        with self.assertRaises(OSError) as err: # exception not swallowed
            txt.close()
        self.assertEqual(err.exception.args, ('close',))
        self.assertIsInstance(err.exception.__context__, OSError)
        self.assertEqual(err.exception.__context__.args, ('flush',))
        self.assertFalse(txt.closed)
        # Silence destructor error
        buffer.close = lambda: None
        txt.flush = lambda: None
    def test_nonnormalized_close_error_on_close(self):
        # Issue #21677
        # Same as above, with exceptions raised while constructing the
        # exception itself (NameError from undefined names).
        buffer = self.BytesIO(self.testdata)
        def bad_flush():
            raise non_existing_flush
        def bad_close():
            raise non_existing_close
        buffer.close = bad_close
        txt = self.TextIOWrapper(buffer, encoding="ascii")
        txt.flush = bad_flush
        with self.assertRaises(NameError) as err: # exception not swallowed
            txt.close()
        self.assertIn('non_existing_close', str(err.exception))
        self.assertIsInstance(err.exception.__context__, NameError)
        self.assertIn('non_existing_flush', str(err.exception.__context__))
        self.assertFalse(txt.closed)
        # Silence destructor error
        buffer.close = lambda: None
        txt.flush = lambda: None
def test_multi_close(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt.close()
txt.close()
txt.close()
self.assertRaises(ValueError, txt.flush)
def test_unseekable(self):
txt = self.TextIOWrapper(self.MockUnseekableIO(self.testdata))
self.assertRaises(self.UnsupportedOperation, txt.tell)
self.assertRaises(self.UnsupportedOperation, txt.seek, 0)
def test_readonly_attributes(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
buf = self.BytesIO(self.testdata)
with self.assertRaises(AttributeError):
txt.buffer = buf
    def test_rawio(self):
        # Issue #12591: TextIOWrapper must work with raw I/O objects, so
        # that subprocess.Popen() can have the required unbuffered
        # semantics with universal_newlines=True.
        raw = self.MockRawIO([b'abc', b'def', b'ghi\njkl\nopq\n'])
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        # Reads
        self.assertEqual(txt.read(4), 'abcd')
        self.assertEqual(txt.readline(), 'efghi\n')
        self.assertEqual(list(txt), ['jkl\n', 'opq\n'])
    def test_rawio_write_through(self):
        # Issue #12591: with write_through=True, writes don't need a flush
        raw = self.MockRawIO([b'abc', b'def', b'ghi\njkl\nopq\n'])
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n',
                                 write_through=True)
        txt.write('1')
        txt.write('23\n4')
        txt.write('5')
        # Everything written so far must already be in the raw stream.
        self.assertEqual(b''.join(raw._write_stack), b'123\n45')
    def test_bufio_write_through(self):
        # Issue #21396: write_through=True doesn't force a flush()
        # on the underlying binary buffered object.
        flush_called, write_called = [], []
        class BufferedWriter(self.BufferedWriter):
            def flush(self, *args, **kwargs):
                flush_called.append(True)
                return super().flush(*args, **kwargs)
            def write(self, *args, **kwargs):
                write_called.append(True)
                return super().write(*args, **kwargs)
        rawio = self.BytesIO()
        data = b"a"
        bufio = BufferedWriter(rawio, len(data)*2)
        textio = self.TextIOWrapper(bufio, encoding='ascii',
                                    write_through=True)
        # write to the buffered io but don't overflow the buffer
        text = data.decode('ascii')
        textio.write(text)
        # buffer.flush is not called with write_through=True
        self.assertFalse(flush_called)
        # buffer.write *is* called with write_through=True
        self.assertTrue(write_called)
        self.assertEqual(rawio.getvalue(), b"") # no flush
        write_called = [] # reset
        textio.write(text * 10) # total content is larger than bufio buffer
        self.assertTrue(write_called)
        self.assertEqual(rawio.getvalue(), data * 11) # all flushed
    def test_reconfigure_write_through(self):
        # reconfigure(write_through=...) must flush implicitly and
        # reconfigure()/None must keep the current setting.
        raw = self.MockRawIO([])
        t = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        t.write('1')
        t.reconfigure(write_through=True)  # implied flush
        self.assertEqual(t.write_through, True)
        self.assertEqual(b''.join(raw._write_stack), b'1')
        t.write('23')
        self.assertEqual(b''.join(raw._write_stack), b'123')
        t.reconfigure(write_through=False)
        self.assertEqual(t.write_through, False)
        t.write('45')
        t.flush()
        self.assertEqual(b''.join(raw._write_stack), b'12345')
        # Keeping default value
        t.reconfigure()
        t.reconfigure(write_through=None)
        self.assertEqual(t.write_through, False)
        t.reconfigure(write_through=True)
        t.reconfigure()
        t.reconfigure(write_through=None)
        self.assertEqual(t.write_through, True)
def test_read_nonbytes(self):
# Issue #17106
# Crash when underlying read() returns non-bytes
t = self.TextIOWrapper(self.StringIO('a'))
self.assertRaises(TypeError, t.read, 1)
t = self.TextIOWrapper(self.StringIO('a'))
self.assertRaises(TypeError, t.readline)
t = self.TextIOWrapper(self.StringIO('a'))
self.assertRaises(TypeError, t.read)
    def test_illegal_encoder(self):
        # Issue 31271: Calling write() while the return value of encoder's
        # encode() is invalid shouldn't cause an assertion failure.
        rot13 = codecs.lookup("rot13")
        # Temporarily mark rot13 as a text encoding so the wrapper accepts it.
        with support.swap_attr(rot13, '_is_text_encoding', True):
            t = io.TextIOWrapper(io.BytesIO(b'foo'), encoding="rot13")
        self.assertRaises(TypeError, t.write, 'bar')
    def test_illegal_decoder(self):
        # Issue #17106
        # Bypass the early encoding check added in issue 20404
        def _make_illegal_wrapper():
            # Temporarily mark quopri (whose decoder returns bytes, not
            # str) as a text encoding so TextIOWrapper will accept it.
            quopri = codecs.lookup("quopri")
            quopri._is_text_encoding = True
            try:
                t = self.TextIOWrapper(self.BytesIO(b'aaaaaa'),
                                       newline='\n', encoding="quopri")
            finally:
                quopri._is_text_encoding = False
            return t
        # Crash when decoder returns non-string
        t = _make_illegal_wrapper()
        self.assertRaises(TypeError, t.read, 1)
        t = _make_illegal_wrapper()
        self.assertRaises(TypeError, t.readline)
        t = _make_illegal_wrapper()
        self.assertRaises(TypeError, t.read)
        # Issue 31243: calling read() while the return value of decoder's
        # getstate() is invalid should neither crash the interpreter nor
        # raise a SystemError.
        def _make_very_illegal_wrapper(getstate_ret_val):
            class BadDecoder:
                def getstate(self):
                    return getstate_ret_val
            def _get_bad_decoder(dummy):
                return BadDecoder()
            quopri = codecs.lookup("quopri")
            with support.swap_attr(quopri, 'incrementaldecoder',
                                   _get_bad_decoder):
                return _make_illegal_wrapper()
        t = _make_very_illegal_wrapper(42)
        self.assertRaises(TypeError, t.read, 42)
        t = _make_very_illegal_wrapper(())
        self.assertRaises(TypeError, t.read, 42)
        t = _make_very_illegal_wrapper((1, 2))
        self.assertRaises(TypeError, t.read, 42)
    def _check_create_at_shutdown(self, **kwargs):
        # Issue #20037: creating a TextIOWrapper at shutdown
        # shouldn't crash the interpreter.
        # Runs a subprocess whose C.__del__ constructs a TextIOWrapper while
        # the interpreter is finalizing; returns assert_python_ok's result.
        iomod = self.io.__name__
        code = """if 1:
            import codecs
            import {iomod} as io
            # Avoid looking up codecs at shutdown
            codecs.lookup('utf-8')
            class C:
                def __init__(self):
                    self.buf = io.BytesIO()
                def __del__(self):
                    io.TextIOWrapper(self.buf, **{kwargs})
                    print("ok")
            c = C()
            """.format(iomod=iomod, kwargs=kwargs)
        return assert_python_ok("-c", code)
    @support.requires_type_collecting
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_create_at_shutdown_without_encoding(self):
        # See _check_create_at_shutdown: the wrapper is created inside
        # __del__ during interpreter shutdown and must not crash.
        rc, out, err = self._check_create_at_shutdown()
        if err:
            # Can error out with a RuntimeError if the module state
            # isn't found.
            self.assertIn(self.shutdown_error, err.decode())
        else:
            self.assertEqual("ok", out.decode().strip())
    @support.requires_type_collecting
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_create_at_shutdown_with_encoding(self):
        # With an explicit encoding the shutdown-time construction must
        # succeed cleanly (no module-state lookup error is acceptable).
        rc, out, err = self._check_create_at_shutdown(encoding='utf-8',
                                                      errors='strict')
        self.assertFalse(err)
        self.assertEqual("ok", out.decode().strip())
def test_read_byteslike(self):
r = MemviewBytesIO(b'Just some random string\n')
t = self.TextIOWrapper(r, 'utf-8')
# TextIOwrapper will not read the full string, because
# we truncate it to a multiple of the native int size
# so that we can construct a more complex memoryview.
bytes_val = _to_memoryview(r.getvalue()).tobytes()
self.assertEqual(t.read(200), bytes_val.decode('utf-8'))
    def test_issue22849(self):
        # Issue #22849: repeatedly failing to construct a TextIOWrapper over
        # an object lacking tell() must not corrupt interpreter state.
        class F(object):
            def readable(self): return True
            def writable(self): return True
            def seekable(self): return True
        for i in range(10):
            try:
                self.TextIOWrapper(F(), encoding='utf-8')
            except Exception:
                pass
        # Once tell() exists, construction succeeds.
        F.tell = lambda x: 0
        t = self.TextIOWrapper(F(), encoding='utf-8')
    def test_reconfigure_encoding_read(self):
        # latin1 -> utf8
        # (latin1 can decode utf-8 encoded string)
        data = 'abc\xe9\n'.encode('latin1') + 'd\xe9f\n'.encode('utf8')
        raw = self.BytesIO(data)
        txt = self.TextIOWrapper(raw, encoding='latin1', newline='\n')
        self.assertEqual(txt.readline(), 'abc\xe9\n')
        # Changing encoding or newline after reading has started is not
        # supported by reconfigure().
        with self.assertRaises(self.UnsupportedOperation):
            txt.reconfigure(encoding='utf-8')
        with self.assertRaises(self.UnsupportedOperation):
            txt.reconfigure(newline=None)
    def test_reconfigure_write_fromascii(self):
        # ascii has a specific encodefunc in the C implementation,
        # but utf-8-sig has not. Make sure that we get rid of the
        # cached encodefunc when we switch encoders.
        raw = self.BytesIO()
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        txt.write('foo\n')
        txt.reconfigure(encoding='utf-8-sig')
        # '\xe9' is not representable in ascii; a stale encodefunc would
        # fail or mis-encode here.
        txt.write('\xe9\n')
        txt.flush()
        self.assertEqual(raw.getvalue(), b'foo\n\xc3\xa9\n')
    def test_reconfigure_write(self):
        # latin -> utf8
        raw = self.BytesIO()
        txt = self.TextIOWrapper(raw, encoding='latin1', newline='\n')
        txt.write('abc\xe9\n')
        txt.reconfigure(encoding='utf-8')
        # reconfigure() must flush pending output with the OLD encoding...
        self.assertEqual(raw.getvalue(), b'abc\xe9\n')
        # ...and subsequent writes use the new one.
        txt.write('d\xe9f\n')
        txt.flush()
        self.assertEqual(raw.getvalue(), b'abc\xe9\nd\xc3\xa9f\n')
        # ascii -> utf-8-sig: ensure that no BOM is written in the middle of
        # the file
        raw = self.BytesIO()
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        txt.write('abc\n')
        txt.reconfigure(encoding='utf-8-sig')
        txt.write('d\xe9f\n')
        txt.flush()
        self.assertEqual(raw.getvalue(), b'abc\nd\xc3\xa9f\n')
    def test_reconfigure_write_non_seekable(self):
        # Like the seekable case above, but the wrapper cannot tell whether
        # it is at the start of the stream, so utf-8-sig emits its BOM.
        raw = self.BytesIO()
        raw.seekable = lambda: False
        raw.seek = None
        txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
        txt.write('abc\n')
        txt.reconfigure(encoding='utf-8-sig')
        txt.write('d\xe9f\n')
        txt.flush()
        # If the raw stream is not seekable, there'll be a BOM
        self.assertEqual(raw.getvalue(),  b'abc\n\xef\xbb\xbfd\xc3\xa9f\n')
    def test_reconfigure_defaults(self):
        # Parameters omitted from (or passed as None to) reconfigure() keep
        # their current values; reconfiguring the encoding resets errors
        # back to 'strict'.
        txt = self.TextIOWrapper(self.BytesIO(), 'ascii', 'replace', '\n')
        txt.reconfigure(encoding=None)
        self.assertEqual(txt.encoding, 'ascii')
        self.assertEqual(txt.errors, 'replace')
        txt.write('LF\n')
        txt.reconfigure(newline='\r\n')
        self.assertEqual(txt.encoding, 'ascii')
        self.assertEqual(txt.errors, 'replace')
        txt.reconfigure(errors='ignore')
        self.assertEqual(txt.encoding, 'ascii')
        self.assertEqual(txt.errors, 'ignore')
        txt.write('CRLF\n')
        txt.reconfigure(encoding='utf-8', newline=None)
        # New encoding implies default error handler.
        self.assertEqual(txt.errors, 'strict')
        txt.seek(0)
        self.assertEqual(txt.read(), 'LF\nCRLF\n')
        self.assertEqual(txt.detach().getvalue(), b'LF\nCRLF\r\n')
    def test_reconfigure_newline(self):
        # Reading: reconfigure(newline=...) takes effect for subsequent
        # reads of the same stream.
        raw = self.BytesIO(b'CR\rEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
        txt.reconfigure(newline=None)
        self.assertEqual(txt.readline(), 'CR\n')
        raw = self.BytesIO(b'CR\rEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
        txt.reconfigure(newline='')
        self.assertEqual(txt.readline(), 'CR\r')
        raw = self.BytesIO(b'CR\rLF\nEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\r')
        txt.reconfigure(newline='\n')
        self.assertEqual(txt.readline(), 'CR\rLF\n')
        raw = self.BytesIO(b'LF\nCR\rEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
        txt.reconfigure(newline='\r')
        self.assertEqual(txt.readline(), 'LF\nCR\r')
        raw = self.BytesIO(b'CR\rCRLF\r\nEOF')
        txt = self.TextIOWrapper(raw, 'ascii', newline='\r')
        txt.reconfigure(newline='\r\n')
        self.assertEqual(txt.readline(), 'CR\rCRLF\r\n')
        # Writing: each reconfigure() governs the translation of '\n' in
        # subsequent writes (None -> os.linesep).
        txt = self.TextIOWrapper(self.BytesIO(), 'ascii', newline='\r')
        txt.reconfigure(newline=None)
        txt.write('linesep\n')
        txt.reconfigure(newline='')
        txt.write('LF\n')
        txt.reconfigure(newline='\n')
        txt.write('LF\n')
        txt.reconfigure(newline='\r')
        txt.write('CR\n')
        txt.reconfigure(newline='\r\n')
        txt.write('CRLF\n')
        expected = 'linesep' + os.linesep + 'LF\nLF\nCR\rCRLF\r\n'
        self.assertEqual(txt.detach().getvalue().decode('ascii'), expected)
    def test_issue25862(self):
        # Assertion failures occurred in tell() after read() and write().
        t = self.TextIOWrapper(self.BytesIO(b'test'), encoding='ascii')
        t.read(1)
        t.read()
        t.tell()
        # Same, but mixing a read with a write before tell().
        t = self.TextIOWrapper(self.BytesIO(b'test'), encoding='ascii')
        t.read(1)
        t.write('x')
        t.tell()
class MemviewBytesIO(io.BytesIO):
    """A BytesIO whose read methods return memoryviews rather than bytes.

    Used to verify that TextIOWrapper accepts arbitrary bytes-like objects
    from its underlying raw stream.
    """
    def read(self, len_):
        data = super().read(len_)
        return _to_memoryview(data)
    def read1(self, len_):
        data = super().read1(len_)
        return _to_memoryview(data)
def _to_memoryview(buf):
'''Convert bytes-object *buf* to a non-trivial memoryview'''
arr = array.array('i')
idx = len(buf) - len(buf) % arr.itemsize
arr.frombytes(buf[:idx])
return memoryview(arr)
class CTextIOWrapperTest(TextIOWrapperTest):
    """Run TextIOWrapperTest against the C-accelerated ``io`` module.

    Many inherited cases are re-declared as expectedFailure while the
    RustPython ``_io`` implementation catches up with CPython.
    """
    io = io
    shutdown_error = "RuntimeError: could not find io module state"
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_constructor(self):
        super().test_constructor()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_detach(self):
        super().test_detach()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_encoding_read(self):
        super().test_reconfigure_encoding_read()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_line_buffering(self):
        super().test_reconfigure_line_buffering()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_basic_io(self):
        super().test_basic_io()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_telling(self):
        super().test_telling()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_uninitialized(self):
        super().test_uninitialized()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_non_text_encoding_codecs_are_rejected(self):
        super().test_non_text_encoding_codecs_are_rejected()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_repr(self):
        super().test_repr()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newlines(self):
        super().test_newlines()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newlines_input(self):
        super().test_newlines_input()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_read_one_by_one(self):
        super().test_read_one_by_one()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_read_by_chunk(self):
        super().test_read_by_chunk()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_1(self):
        super().test_issue1395_1()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_2(self):
        super().test_issue1395_2()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_3(self):
        super().test_issue1395_3()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_4(self):
        super().test_issue1395_4()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_5(self):
        super().test_issue1395_5()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write_through(self):
        super().test_reconfigure_write_through()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write_fromascii(self):
        super().test_reconfigure_write_fromascii()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write(self):
        super().test_reconfigure_write()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write_non_seekable(self):
        super().test_reconfigure_write_non_seekable()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_defaults(self):
        super().test_reconfigure_defaults()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_newline(self):
        super().test_reconfigure_newline()
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_initialization(self):
        # An invalid newline must leave the wrapper unusable but not crash;
        # a raw __new__ instance must also not crash repr().
        r = self.BytesIO(b"\xc3\xa9\n\n")
        b = self.BufferedReader(r, 1000)
        t = self.TextIOWrapper(b)
        self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
        self.assertRaises(ValueError, t.read)
        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        self.assertRaises(Exception, repr, t)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_garbage_collection(self):
        # C TextIOWrapper objects are collected, and collecting them flushes
        # all data to disk.
        # The Python version has __del__, so it ends in gc.garbage instead.
        with support.check_warnings(('', ResourceWarning)):
            rawio = io.FileIO(os_helper.TESTFN, "wb")
            b = self.BufferedWriter(rawio)
            t = self.TextIOWrapper(b, encoding="ascii")
            t.write("456def")
            t.x = t
            wr = weakref.ref(t)
            del t
            support.gc_collect()
        self.assertIsNone(wr(), wr)
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.read(), b"456def")
    def test_rwpair_cleared_before_textio(self):
        # Issue 13070: TextIOWrapper's finalization would crash when called
        # after the reference to the underlying BufferedRWPair's writer got
        # cleared by the GC.
        for i in range(1000):
            b1 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())
            t1 = self.TextIOWrapper(b1, encoding="ascii")
            b2 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())
            t2 = self.TextIOWrapper(b2, encoding="ascii")
            # circular references
            t1.buddy = t2
            t2.buddy = t1
        support.gc_collect()
    def test_del__CHUNK_SIZE_SystemError(self):
        # Deleting the internal chunk-size attribute must raise
        # AttributeError, not SystemError.
        t = self.TextIOWrapper(self.BytesIO(), encoding='ascii')
        with self.assertRaises(AttributeError):
            del t._CHUNK_SIZE
class PyTextIOWrapperTest(TextIOWrapperTest):
    """Run TextIOWrapperTest against the pure-Python ``_pyio`` module."""
    io = pyio
    shutdown_error = "LookupError: unknown encoding: ascii"
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newlines(self):
        super().test_newlines()
class IncrementalNewlineDecoderTest(unittest.TestCase):
    """Tests for IncrementalNewlineDecoder's universal-newline translation
    and newline tracking (``self.IncrementalNewlineDecoder`` is injected by
    load_tests for the C and Python variants)."""
    def check_newline_decoding_utf8(self, decoder):
        # UTF-8 specific tests for a newline decoder
        def _check_decode(b, s, **kwargs):
            # We exercise getstate() / setstate() as well as decode()
            state = decoder.getstate()
            self.assertEqual(decoder.decode(b, **kwargs), s)
            decoder.setstate(state)
            self.assertEqual(decoder.decode(b, **kwargs), s)
        # Feed a multibyte character byte by byte; output appears only
        # once the sequence is complete.
        _check_decode(b'\xe8\xa2\x88', "\u8888")
        _check_decode(b'\xe8', "")
        _check_decode(b'\xa2', "")
        _check_decode(b'\x88', "\u8888")
        _check_decode(b'\xe8', "")
        _check_decode(b'\xa2', "")
        _check_decode(b'\x88', "\u8888")
        _check_decode(b'\xe8', "")
        self.assertRaises(UnicodeDecodeError, decoder.decode, b'', final=True)
        decoder.reset()
        _check_decode(b'\n', "\n")
        _check_decode(b'\r', "")
        _check_decode(b'', "\n", final=True)
        _check_decode(b'\r', "\n", final=True)
        _check_decode(b'\r', "")
        _check_decode(b'a', "\na")
        _check_decode(b'\r\r\n', "\n\n")
        _check_decode(b'\r', "")
        _check_decode(b'\r', "\n")
        _check_decode(b'\na', "\na")
        _check_decode(b'\xe8\xa2\x88\r\n', "\u8888\n")
        _check_decode(b'\xe8\xa2\x88', "\u8888")
        _check_decode(b'\n', "\n")
        _check_decode(b'\xe8\xa2\x88\r', "\u8888")
        _check_decode(b'\n', "\n")
    def check_newline_decoding(self, decoder, encoding):
        result = []
        if encoding is not None:
            encoder = codecs.getincrementalencoder(encoding)()
            def _decode_bytewise(s):
                # Decode one byte at a time
                for b in encoder.encode(s):
                    result.append(decoder.decode(bytes([b])))
        else:
            encoder = None
            def _decode_bytewise(s):
                # Decode one char at a time
                for c in s:
                    result.append(decoder.decode(c))
        # decoder.newlines accumulates the kinds of line endings seen.
        self.assertEqual(decoder.newlines, None)
        _decode_bytewise("abc\n\r")
        self.assertEqual(decoder.newlines, '\n')
        _decode_bytewise("\nabc")
        self.assertEqual(decoder.newlines, ('\n', '\r\n'))
        _decode_bytewise("abc\r")
        self.assertEqual(decoder.newlines, ('\n', '\r\n'))
        _decode_bytewise("abc")
        self.assertEqual(decoder.newlines, ('\r', '\n', '\r\n'))
        _decode_bytewise("abc\r")
        self.assertEqual("".join(result), "abc\n\nabcabc\nabcabc")
        decoder.reset()
        input = "abc"
        if encoder is not None:
            encoder.reset()
            input = encoder.encode(input)
        self.assertEqual(decoder.decode(input), "abc")
        self.assertEqual(decoder.newlines, None)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newline_decoder(self):
        encodings = (
            # None meaning the IncrementalNewlineDecoder takes unicode input
            # rather than bytes input
            None, 'utf-8', 'latin-1',
            'utf-16', 'utf-16-le', 'utf-16-be',
            'utf-32', 'utf-32-le', 'utf-32-be',
        )
        for enc in encodings:
            decoder = enc and codecs.getincrementaldecoder(enc)()
            decoder = self.IncrementalNewlineDecoder(decoder, translate=True)
            self.check_newline_decoding(decoder, enc)
        decoder = codecs.getincrementaldecoder("utf-8")()
        decoder = self.IncrementalNewlineDecoder(decoder, translate=True)
        self.check_newline_decoding_utf8(decoder)
        self.assertRaises(TypeError, decoder.setstate, 42)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newline_bytes(self):
        # Issue 5433: Excessive optimization in IncrementalNewlineDecoder
        def _check(dec):
            self.assertEqual(dec.newlines, None)
            self.assertEqual(dec.decode("\u0D00"), "\u0D00")
            self.assertEqual(dec.newlines, None)
            self.assertEqual(dec.decode("\u0A00"), "\u0A00")
            self.assertEqual(dec.newlines, None)
        dec = self.IncrementalNewlineDecoder(None, translate=False)
        _check(dec)
        dec = self.IncrementalNewlineDecoder(None, translate=True)
        _check(dec)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_translate(self):
        # issue 35062
        # Any truthy value for translate must behave like True; falsy like
        # False (no '\r' translation).
        for translate in (-2, -1, 1, 2):
            decoder = codecs.getincrementaldecoder("utf-8")()
            decoder = self.IncrementalNewlineDecoder(decoder, translate)
            self.check_newline_decoding_utf8(decoder)
        decoder = codecs.getincrementaldecoder("utf-8")()
        decoder = self.IncrementalNewlineDecoder(decoder, translate=0)
        self.assertEqual(decoder.decode(b"\r\r\n"), "\r\r\n")
class CIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest):
    # Runs the shared decoder tests against the C implementation
    # (namespace injected by load_tests).
    pass
class PyIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest):
    # Runs the shared decoder tests against the pure-Python implementation
    # (namespace injected by load_tests).
    pass
# XXX Tests for open()
class MiscIOTest(unittest.TestCase):
    """Assorted module-level tests of the io implementation under test
    (``self.io``, ``self.open`` etc. are injected by load_tests)."""
    def tearDown(self):
        os_helper.unlink(os_helper.TESTFN)
    def test___all__(self):
        # Every exported name must exist and be of the expected category.
        for name in self.io.__all__:
            obj = getattr(self.io, name, None)
            self.assertIsNotNone(obj, name)
            if name in ("open", "open_code"):
                continue
            elif "error" in name.lower() or name == "UnsupportedOperation":
                self.assertTrue(issubclass(obj, Exception), name)
            elif not name.startswith("SEEK_"):
                self.assertTrue(issubclass(obj, self.IOBase))
    def test_attributes(self):
        f = self.open(os_helper.TESTFN, "wb", buffering=0)
        self.assertEqual(f.mode, "wb")
        f.close()
        # XXX RUSTPYTHON: universal mode is deprecated anyway, so I
        # feel fine about skipping it
        # with support.check_warnings(('', DeprecationWarning)):
        #     f = self.open(os_helper.TESTFN, "U")
        #     self.assertEqual(f.name, os_helper.TESTFN)
        #     self.assertEqual(f.buffer.name, os_helper.TESTFN)
        #     self.assertEqual(f.buffer.raw.name, os_helper.TESTFN)
        #     self.assertEqual(f.mode, "U")
        #     self.assertEqual(f.buffer.mode, "rb")
        #     self.assertEqual(f.buffer.raw.mode, "rb")
        #     f.close()
        f = self.open(os_helper.TESTFN, "w+")
        self.assertEqual(f.mode, "w+")
        self.assertEqual(f.buffer.mode, "rb+") # Does it really matter?
        self.assertEqual(f.buffer.raw.mode, "rb+")
        g = self.open(f.fileno(), "wb", closefd=False)
        self.assertEqual(g.mode, "wb")
        self.assertEqual(g.raw.mode, "wb")
        self.assertEqual(g.name, f.fileno())
        self.assertEqual(g.raw.name, f.fileno())
        f.close()
        g.close()
    def test_open_pipe_with_append(self):
        # bpo-27805: Ignore ESPIPE from lseek() in open().
        r, w = os.pipe()
        self.addCleanup(os.close, r)
        f = self.open(w, 'a')
        self.addCleanup(f.close)
        # Check that the file is marked non-seekable. On Windows, however, lseek
        # somehow succeeds on pipes.
        if sys.platform != 'win32':
            self.assertFalse(f.seekable())
    def test_io_after_close(self):
        # Every I/O operation on a closed file must raise ValueError.
        for kwargs in [
                {"mode": "w"},
                {"mode": "wb"},
                {"mode": "w", "buffering": 1},
                {"mode": "w", "buffering": 2},
                {"mode": "wb", "buffering": 0},
                {"mode": "r"},
                {"mode": "rb"},
                {"mode": "r", "buffering": 1},
                {"mode": "r", "buffering": 2},
                {"mode": "rb", "buffering": 0},
                {"mode": "w+"},
                {"mode": "w+b"},
                {"mode": "w+", "buffering": 1},
                {"mode": "w+", "buffering": 2},
                {"mode": "w+b", "buffering": 0},
            ]:
            f = self.open(os_helper.TESTFN, **kwargs)
            f.close()
            self.assertRaises(ValueError, f.flush)
            self.assertRaises(ValueError, f.fileno)
            self.assertRaises(ValueError, f.isatty)
            self.assertRaises(ValueError, f.__iter__)
            if hasattr(f, "peek"):
                self.assertRaises(ValueError, f.peek, 1)
            self.assertRaises(ValueError, f.read)
            if hasattr(f, "read1"):
                self.assertRaises(ValueError, f.read1, 1024)
                self.assertRaises(ValueError, f.read1)
            if hasattr(f, "readall"):
                self.assertRaises(ValueError, f.readall)
            if hasattr(f, "readinto"):
                self.assertRaises(ValueError, f.readinto, bytearray(1024))
            if hasattr(f, "readinto1"):
                self.assertRaises(ValueError, f.readinto1, bytearray(1024))
            self.assertRaises(ValueError, f.readline)
            self.assertRaises(ValueError, f.readlines)
            self.assertRaises(ValueError, f.readlines, 1)
            self.assertRaises(ValueError, f.seek, 0)
            self.assertRaises(ValueError, f.tell)
            self.assertRaises(ValueError, f.truncate)
            self.assertRaises(ValueError, f.write,
                              b"" if "b" in kwargs['mode'] else "")
            self.assertRaises(ValueError, f.writelines, [])
            self.assertRaises(ValueError, next, f)
    # TODO: RUSTPYTHON, cyclic gc
    @unittest.expectedFailure
    def test_blockingioerror(self):
        # Various BlockingIOError issues
        class C(str):
            pass
        c = C("")
        b = self.BlockingIOError(1, c)
        c.b = b
        b.c = c
        wr = weakref.ref(c)
        del c, b
        support.gc_collect()
        self.assertIsNone(wr(), wr)
    def test_abcs(self):
        # Test the visible base classes are ABCs.
        self.assertIsInstance(self.IOBase, abc.ABCMeta)
        self.assertIsInstance(self.RawIOBase, abc.ABCMeta)
        self.assertIsInstance(self.BufferedIOBase, abc.ABCMeta)
        self.assertIsInstance(self.TextIOBase, abc.ABCMeta)
    def _check_abc_inheritance(self, abcmodule):
        # A file opened unbuffered/binary/text must be an instance of
        # exactly the matching ABC from *abcmodule*.
        with self.open(os_helper.TESTFN, "wb", buffering=0) as f:
            self.assertIsInstance(f, abcmodule.IOBase)
            self.assertIsInstance(f, abcmodule.RawIOBase)
            self.assertNotIsInstance(f, abcmodule.BufferedIOBase)
            self.assertNotIsInstance(f, abcmodule.TextIOBase)
        with self.open(os_helper.TESTFN, "wb") as f:
            self.assertIsInstance(f, abcmodule.IOBase)
            self.assertNotIsInstance(f, abcmodule.RawIOBase)
            self.assertIsInstance(f, abcmodule.BufferedIOBase)
            self.assertNotIsInstance(f, abcmodule.TextIOBase)
        with self.open(os_helper.TESTFN, "w") as f:
            self.assertIsInstance(f, abcmodule.IOBase)
            self.assertNotIsInstance(f, abcmodule.RawIOBase)
            self.assertNotIsInstance(f, abcmodule.BufferedIOBase)
            self.assertIsInstance(f, abcmodule.TextIOBase)
    def test_abc_inheritance(self):
        # Test implementations inherit from their respective ABCs
        self._check_abc_inheritance(self)
    def test_abc_inheritance_official(self):
        # Test implementations inherit from the official ABCs of the
        # baseline "io" module.
        self._check_abc_inheritance(io)
    def _check_warn_on_dealloc(self, *args, **kwargs):
        # Dropping the last reference to an unclosed file must emit a
        # ResourceWarning mentioning its repr.
        f = open(*args, **kwargs)
        r = repr(f)
        with self.assertWarns(ResourceWarning) as cm:
            f = None
            support.gc_collect()
        self.assertIn(r, str(cm.warning.args[0]))
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_warn_on_dealloc(self):
        self._check_warn_on_dealloc(os_helper.TESTFN, "wb", buffering=0)
        self._check_warn_on_dealloc(os_helper.TESTFN, "wb")
        self._check_warn_on_dealloc(os_helper.TESTFN, "w")
    def _check_warn_on_dealloc_fd(self, *args, **kwargs):
        fds = []
        def cleanup_fds():
            for fd in fds:
                try:
                    os.close(fd)
                except OSError as e:
                    if e.errno != errno.EBADF:
                        raise
        self.addCleanup(cleanup_fds)
        r, w = os.pipe()
        fds += r, w
        self._check_warn_on_dealloc(r, *args, **kwargs)
        # When using closefd=False, there's no warning
        r, w = os.pipe()
        fds += r, w
        with support.check_no_resource_warning(self):
            open(r, *args, closefd=False, **kwargs)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_warn_on_dealloc_fd(self):
        self._check_warn_on_dealloc_fd("rb", buffering=0)
        self._check_warn_on_dealloc_fd("rb")
        self._check_warn_on_dealloc_fd("r")
    def test_pickling(self):
        # Pickling file objects is forbidden
        for kwargs in [
                {"mode": "w"},
                {"mode": "wb"},
                {"mode": "wb", "buffering": 0},
                {"mode": "r"},
                {"mode": "rb"},
                {"mode": "rb", "buffering": 0},
                {"mode": "w+"},
                {"mode": "w+b"},
                {"mode": "w+b", "buffering": 0},
            ]:
            for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
                with self.open(os_helper.TESTFN, **kwargs) as f:
                    self.assertRaises(TypeError, pickle.dumps, f, protocol)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_nonblock_pipe_write_bigbuf(self):
        self._test_nonblock_pipe_write(16*1024)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_nonblock_pipe_write_smallbuf(self):
        self._test_nonblock_pipe_write(1024)
    @unittest.skipUnless(hasattr(os, 'set_blocking'),
                         'os.set_blocking() required for this test')
    def _test_nonblock_pipe_write(self, bufsize):
        sent = []
        received = []
        r, w = os.pipe()
        os.set_blocking(r, False)
        os.set_blocking(w, False)
        # To exercise all code paths in the C implementation we need
        # to play with buffer sizes.  For instance, if we choose a
        # buffer size less than or equal to _PIPE_BUF (4096 on Linux)
        # then we will never get a partial write of the buffer.
        rf = self.open(r, mode='rb', closefd=True, buffering=bufsize)
        wf = self.open(w, mode='wb', closefd=True, buffering=bufsize)
        with rf, wf:
            for N in 9999, 73, 7574:
                try:
                    i = 0
                    while True:
                        msg = bytes([i % 26 + 97]) * N
                        sent.append(msg)
                        wf.write(msg)
                        i += 1
                except self.BlockingIOError as e:
                    self.assertEqual(e.args[0], errno.EAGAIN)
                    self.assertEqual(e.args[2], e.characters_written)
                    sent[-1] = sent[-1][:e.characters_written]
                    received.append(rf.read())
                    msg = b'BLOCKED'
                    wf.write(msg)
                    sent.append(msg)
            while True:
                try:
                    wf.flush()
                    break
                except self.BlockingIOError as e:
                    self.assertEqual(e.args[0], errno.EAGAIN)
                    self.assertEqual(e.args[2], e.characters_written)
                    self.assertEqual(e.characters_written, 0)
                    received.append(rf.read())
            received += iter(rf.read, None)
        sent, received = b''.join(sent), b''.join(received)
        self.assertEqual(sent, received)
        self.assertTrue(wf.closed)
        self.assertTrue(rf.closed)
    def test_create_fail(self):
        # 'x' mode fails if file is existing
        with self.open(os_helper.TESTFN, 'w'):
            pass
        self.assertRaises(FileExistsError, self.open, os_helper.TESTFN, 'x')
    def test_create_writes(self):
        # 'x' mode opens for writing
        with self.open(os_helper.TESTFN, 'xb') as f:
            f.write(b"spam")
        with self.open(os_helper.TESTFN, 'rb') as f:
            self.assertEqual(b"spam", f.read())
    def test_open_allargs(self):
        # there used to be a buffer overflow in the parser for rawmode
        self.assertRaises(ValueError, self.open, os_helper.TESTFN, 'rwax+')
class CMiscIOTest(MiscIOTest):
    """MiscIOTest against the C ``io`` module, plus C-only regression
    tests for readinto overflow and shutdown deadlocks."""
    io = io
    def test_readinto_buffer_overflow(self):
        # Issue #18025
        # A read() that returns more than the destination can hold must
        # raise ValueError, not overflow the buffer.
        class BadReader(self.io.BufferedIOBase):
            def read(self, n=-1):
                return b'x' * 10**6
        bufio = BadReader()
        b = bytearray(2)
        self.assertRaises(ValueError, bufio.readinto, b)
    def check_daemon_threads_shutdown_deadlock(self, stream_name):
        # Issue #23309: deadlocks at shutdown should be avoided when a
        # daemon thread and the main thread both write to a file.
        code = """if 1:
            import sys
            import time
            import threading
            from test.support import SuppressCrashReport
            file = sys.{stream_name}
            def run():
                while True:
                    file.write('.')
                    file.flush()
            crash = SuppressCrashReport()
            crash.__enter__()
            # don't call __exit__(): the crash occurs at Python shutdown
            thread = threading.Thread(target=run)
            thread.daemon = True
            thread.start()
            time.sleep(0.5)
            file.write('!')
            file.flush()
            """.format_map(locals())
        res, _ = run_python_until_end("-c", code)
        err = res.err.decode()
        if res.rc != 0:
            # Failure: should be a fatal error
            pattern = (r"Fatal Python error: could not acquire lock "
                       r"for <(_io\.)?BufferedWriter name='<{stream_name}>'> "
                       r"at interpreter shutdown, possibly due to "
                       r"daemon threads".format_map(locals()))
            self.assertRegex(err, pattern)
        else:
            self.assertFalse(err.strip('.!'))
    def test_daemon_threads_shutdown_stdout_deadlock(self):
        self.check_daemon_threads_shutdown_deadlock('stdout')
    def test_daemon_threads_shutdown_stderr_deadlock(self):
        self.check_daemon_threads_shutdown_deadlock('stderr')
class PyMiscIOTest(MiscIOTest):
    # MiscIOTest against the pure-Python _pyio module.
    io = pyio
@unittest.skipIf(os.name == 'nt', 'POSIX signals required for this test.')
class SignalsTest(unittest.TestCase):
    """Tests of io behaviour around POSIX signal delivery: interrupted
    writes/reads, reentrant writes, and EINTR retry logic."""
    def setUp(self):
        self.oldalrm = signal.signal(signal.SIGALRM, self.alarm_interrupt)
    def tearDown(self):
        signal.signal(signal.SIGALRM, self.oldalrm)
    def alarm_interrupt(self, sig, frame):
        # Default SIGALRM handler: raise ZeroDivisionError from the signal.
        1/0
    def check_interrupted_write(self, item, bytes, **fdopen_kwargs):
        """Check that a partial write, when it gets interrupted, properly
        invokes the signal handler, and bubbles up the exception raised
        in the latter."""
        read_results = []
        def _read():
            s = os.read(r, 1)
            read_results.append(s)
        t = threading.Thread(target=_read)
        t.daemon = True
        r, w = os.pipe()
        fdopen_kwargs["closefd"] = False
        large_data = item * (support.PIPE_MAX_SIZE // len(item) + 1)
        try:
            wio = self.io.open(w, **fdopen_kwargs)
            if hasattr(signal, 'pthread_sigmask'):
                # create the thread with SIGALRM signal blocked
                signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGALRM])
                t.start()
                signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGALRM])
            else:
                t.start()
            # Fill the pipe enough that the write will be blocking.
            # It will be interrupted by the timer armed above. Since the
            # other thread has read one byte, the low-level write will
            # return with a successful (partial) result rather than an EINTR.
            # The buffered IO layer must check for pending signal
            # handlers, which in this case will invoke alarm_interrupt().
            signal.alarm(1)
            try:
                self.assertRaises(ZeroDivisionError, wio.write, large_data)
            finally:
                signal.alarm(0)
                t.join()
            # We got one byte, get another one and check that it isn't a
            # repeat of the first one.
            read_results.append(os.read(r, 1))
            self.assertEqual(read_results, [bytes[0:1], bytes[1:2]])
        finally:
            os.close(w)
            os.close(r)
            # This is deliberate. If we didn't close the file descriptor
            # before closing wio, wio would try to flush its internal
            # buffer, and block again.
            try:
                wio.close()
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise
    def test_interrupted_write_unbuffered(self):
        self.check_interrupted_write(b"xy", b"xy", mode="wb", buffering=0)
    def test_interrupted_write_buffered(self):
        self.check_interrupted_write(b"xy", b"xy", mode="wb")
    def test_interrupted_write_text(self):
        self.check_interrupted_write("xy", b"xy", mode="w", encoding="ascii")
    @support.no_tracing
    def check_reentrant_write(self, data, **fdopen_kwargs):
        def on_alarm(*args):
            # Will be called reentrantly from the same thread
            wio.write(data)
            1/0
        signal.signal(signal.SIGALRM, on_alarm)
        r, w = os.pipe()
        wio = self.io.open(w, **fdopen_kwargs)
        try:
            signal.alarm(1)
            # Either the reentrant call to wio.write() fails with RuntimeError,
            # or the signal handler raises ZeroDivisionError.
            with self.assertRaises((ZeroDivisionError, RuntimeError)) as cm:
                while 1:
                    for i in range(100):
                        wio.write(data)
                        wio.flush()
                    # Make sure the buffer doesn't fill up and block further writes
                    os.read(r, len(data) * 100)
            exc = cm.exception
            if isinstance(exc, RuntimeError):
                self.assertTrue(str(exc).startswith("reentrant call"), str(exc))
        finally:
            signal.alarm(0)
            wio.close()
            os.close(r)
    def test_reentrant_write_buffered(self):
        self.check_reentrant_write(b"xy", mode="wb")
    def test_reentrant_write_text(self):
        self.check_reentrant_write("xy", mode="w", encoding="ascii")
    def check_interrupted_read_retry(self, decode, **fdopen_kwargs):
        """Check that a buffered read, when it gets interrupted (either
        returning a partial result or EINTR), properly invokes the signal
        handler and retries if the latter returned successfully."""
        r, w = os.pipe()
        fdopen_kwargs["closefd"] = False
        def alarm_handler(sig, frame):
            os.write(w, b"bar")
        signal.signal(signal.SIGALRM, alarm_handler)
        try:
            rio = self.io.open(r, **fdopen_kwargs)
            os.write(w, b"foo")
            signal.alarm(1)
            # Expected behaviour:
            # - first raw read() returns partial b"foo"
            # - second raw read() returns EINTR
            # - third raw read() returns b"bar"
            self.assertEqual(decode(rio.read(6)), "foobar")
        finally:
            signal.alarm(0)
            rio.close()
            os.close(w)
            os.close(r)
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_interrupted_read_retry_buffered(self):
        self.check_interrupted_read_retry(lambda x: x.decode('latin1'),
                                          mode="rb")
    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_interrupted_read_retry_text(self):
        self.check_interrupted_read_retry(lambda x: x,
                                          mode="r")
    def check_interrupted_write_retry(self, item, **fdopen_kwargs):
        """Check that a buffered write, when it gets interrupted (either
        returning a partial result or EINTR), properly invokes the signal
        handler and retries if the latter returned successfully."""
        select = support.import_module("select")
        # A quantity that exceeds the buffer size of an anonymous pipe's
        # write end.
        N = support.PIPE_MAX_SIZE
        r, w = os.pipe()
        fdopen_kwargs["closefd"] = False
        # We need a separate thread to read from the pipe and allow the
        # write() to finish. This thread is started after the SIGALRM is
        # received (forcing a first EINTR in write()).
        read_results = []
        write_finished = False
        error = None
        def _read():
            try:
                while not write_finished:
                    while r in select.select([r], [], [], 1.0)[0]:
                        s = os.read(r, 1024)
                        read_results.append(s)
            except BaseException as exc:
                nonlocal error
                error = exc
        t = threading.Thread(target=_read)
        t.daemon = True
        def alarm1(sig, frame):
            signal.signal(signal.SIGALRM, alarm2)
            signal.alarm(1)
        def alarm2(sig, frame):
            t.start()
        large_data = item * N
        signal.signal(signal.SIGALRM, alarm1)
        try:
            wio = self.io.open(w, **fdopen_kwargs)
            signal.alarm(1)
            # Expected behaviour:
            # - first raw write() is partial (because of the limited pipe buffer
            #   and the first alarm)
            # - second raw write() returns EINTR (because of the second alarm)
            # - subsequent write()s are successful (either partial or complete)
            written = wio.write(large_data)
            self.assertEqual(N, written)
            wio.flush()
            write_finished = True
            t.join()
            self.assertIsNone(error)
            self.assertEqual(N, sum(len(x) for x in read_results))
        finally:
            signal.alarm(0)
            write_finished = True
            os.close(w)
            os.close(r)
            # This is deliberate. If we didn't close the file descriptor
            # before closing wio, wio would try to flush its internal
            # buffer, and could block (in case of failure).
            try:
                wio.close()
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise
    @unittest.skip("TODO: RUSTPYTHON, thread 'main' panicked at 'already borrowed: BorrowMutError'")
    def test_interrupted_write_retry_buffered(self):
        self.check_interrupted_write_retry(b"x", mode="wb")
    @unittest.skip("TODO: RUSTPYTHON, thread 'main' panicked at 'already borrowed: BorrowMutError'")
    def test_interrupted_write_retry_text(self):
        self.check_interrupted_write_retry("x", mode="w", encoding="latin1")
class CSignalsTest(SignalsTest):
io = io
class PySignalsTest(SignalsTest):
io = pyio
# Handling reentrancy issues would slow down _pyio even more, so the
# tests are disabled.
test_reentrant_write_buffered = None
test_reentrant_write_text = None
def load_tests(*args):
tests = (CIOTest, PyIOTest, APIMismatchTest,
CBufferedReaderTest, PyBufferedReaderTest,
CBufferedWriterTest, PyBufferedWriterTest,
CBufferedRWPairTest, PyBufferedRWPairTest,
CBufferedRandomTest, PyBufferedRandomTest,
StatefulIncrementalDecoderTest,
CIncrementalNewlineDecoderTest, PyIncrementalNewlineDecoderTest,
CTextIOWrapperTest, PyTextIOWrapperTest,
CMiscIOTest, PyMiscIOTest,
CSignalsTest, PySignalsTest,
)
# Put the namespaces of the IO module we are testing and some useful mock
# classes in the __dict__ of each test.
mocks = (MockRawIO, MisbehavedRawIO, MockFileIO, CloseFailureIO,
MockNonBlockWriterIO, MockUnseekableIO, MockRawIOWithoutRead,
SlowFlushRawIO)
all_members = io.__all__# + ["IncrementalNewlineDecoder"] XXX RUSTPYTHON
c_io_ns = {name : getattr(io, name) for name in all_members}
py_io_ns = {name : getattr(pyio, name) for name in all_members}
globs = globals()
c_io_ns.update((x.__name__, globs["C" + x.__name__]) for x in mocks)
py_io_ns.update((x.__name__, globs["Py" + x.__name__]) for x in mocks)
# Avoid turning open into a bound method.
py_io_ns["open"] = pyio.OpenWrapper
for test in tests:
if test.__name__.startswith("C"):
for name, obj in c_io_ns.items():
setattr(test, name, obj)
elif test.__name__.startswith("Py"):
for name, obj in py_io_ns.items():
setattr(test, name, obj)
suite = unittest.TestSuite([unittest.makeSuite(test) for test in tests])
return suite
if __name__ == "__main__":
unittest.main()
| 36.930777 | 103 | 0.581164 |
b = bytes(b)
n = -1
if self._blocker_char:
try:
n = b.index(self._blocker_char)
except ValueError:
pass
else:
if n > 0:
# write data up to the first blocker
self._write_stack.append(b[:n])
return n
else:
# cancel blocker and indicate would block
self._blocker_char = None
return None
self._write_stack.append(b)
return len(b)
class CMockNonBlockWriterIO(MockNonBlockWriterIO, io.RawIOBase):
BlockingIOError = io.BlockingIOError
class PyMockNonBlockWriterIO(MockNonBlockWriterIO, pyio.RawIOBase):
BlockingIOError = pyio.BlockingIOError
class IOTest(unittest.TestCase):
def setUp(self):
os_helper.unlink(os_helper.TESTFN)
def tearDown(self):
os_helper.unlink(os_helper.TESTFN)
def write_ops(self, f):
self.assertEqual(f.write(b"blah."), 5)
f.truncate(0)
self.assertEqual(f.tell(), 5)
f.seek(0)
self.assertEqual(f.write(b"blah."), 5)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.write(b"Hello."), 6)
self.assertEqual(f.tell(), 6)
self.assertEqual(f.seek(-1, 1), 5)
self.assertEqual(f.tell(), 5)
buffer = bytearray(b" world\n\n\n")
self.assertEqual(f.write(buffer), 9)
buffer[:] = b"*" * 9 # Overwrite our copy of the data
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.write(b"h"), 1)
self.assertEqual(f.seek(-1, 2), 13)
self.assertEqual(f.tell(), 13)
self.assertEqual(f.truncate(12), 12)
self.assertEqual(f.tell(), 13)
self.assertRaises(TypeError, f.seek, 0.0)
def read_ops(self, f, buffered=False):
data = f.read(5)
self.assertEqual(data, b"hello")
data = byteslike(data)
self.assertEqual(f.readinto(data), 5)
self.assertEqual(bytes(data), b" worl")
data = bytearray(5)
self.assertEqual(f.readinto(data), 2)
self.assertEqual(len(data), 5)
self.assertEqual(data[:2], b"d\n")
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.read(20), b"hello world\n")
self.assertEqual(f.read(1), b"")
self.assertEqual(f.readinto(byteslike(b"x")), 0)
self.assertEqual(f.seek(-6, 2), 6)
self.assertEqual(f.read(5), b"world")
self.assertEqual(f.read(0), b"")
self.assertEqual(f.readinto(byteslike()), 0)
self.assertEqual(f.seek(-6, 1), 5)
self.assertEqual(f.read(5), b" worl")
self.assertEqual(f.tell(), 10)
self.assertRaises(TypeError, f.seek, 0.0)
if buffered:
f.seek(0)
self.assertEqual(f.read(), b"hello world\n")
f.seek(6)
self.assertEqual(f.read(), b"world\n")
self.assertEqual(f.read(), b"")
f.seek(0)
data = byteslike(5)
self.assertEqual(f.readinto1(data), 5)
self.assertEqual(bytes(data), b"hello")
LARGE = 2**31
def large_file_ops(self, f):
assert f.readable()
assert f.writable()
try:
self.assertEqual(f.seek(self.LARGE), self.LARGE)
except (OverflowError, ValueError):
self.skipTest("no largefile support")
self.assertEqual(f.tell(), self.LARGE)
self.assertEqual(f.write(b"xxx"), 3)
self.assertEqual(f.tell(), self.LARGE + 3)
self.assertEqual(f.seek(-1, 1), self.LARGE + 2)
self.assertEqual(f.truncate(), self.LARGE + 2)
self.assertEqual(f.tell(), self.LARGE + 2)
self.assertEqual(f.seek(0, 2), self.LARGE + 2)
self.assertEqual(f.truncate(self.LARGE + 1), self.LARGE + 1)
self.assertEqual(f.tell(), self.LARGE + 2)
self.assertEqual(f.seek(0, 2), self.LARGE + 1)
self.assertEqual(f.seek(-1, 2), self.LARGE)
self.assertEqual(f.read(2), b"x")
def test_invalid_operations(self):
# Try writing on a file opened in read mode and vice-versa.
exc = self.UnsupportedOperation
for mode in ("w", "wb"):
with self.open(os_helper.TESTFN, mode) as fp:
self.assertRaises(exc, fp.read)
self.assertRaises(exc, fp.readline)
with self.open(os_helper.TESTFN, "wb", buffering=0) as fp:
self.assertRaises(exc, fp.read)
self.assertRaises(exc, fp.readline)
with self.open(os_helper.TESTFN, "rb", buffering=0) as fp:
self.assertRaises(exc, fp.write, b"blah")
self.assertRaises(exc, fp.writelines, [b"blah\n"])
with self.open(os_helper.TESTFN, "rb") as fp:
self.assertRaises(exc, fp.write, b"blah")
self.assertRaises(exc, fp.writelines, [b"blah\n"])
with self.open(os_helper.TESTFN, "r") as fp:
self.assertRaises(exc, fp.write, "blah")
self.assertRaises(exc, fp.writelines, ["blah\n"])
# Non-zero seeking from current or end pos
self.assertRaises(exc, fp.seek, 1, self.SEEK_CUR)
self.assertRaises(exc, fp.seek, -1, self.SEEK_END)
def test_optional_abilities(self):
# Test for OSError when optional APIs are not supported
# The purpose of this test is to try fileno(), reading, writing and
# seeking operations with various objects that indicate they do not
# support these operations.
def pipe_reader():
[r, w] = os.pipe()
os.close(w) # So that read() is harmless
return self.FileIO(r, "r")
def pipe_writer():
[r, w] = os.pipe()
self.addCleanup(os.close, r)
# Guarantee that we can write into the pipe without blocking
thread = threading.Thread(target=os.read, args=(r, 100))
thread.start()
self.addCleanup(thread.join)
return self.FileIO(w, "w")
def buffered_reader():
return self.BufferedReader(self.MockUnseekableIO())
def buffered_writer():
return self.BufferedWriter(self.MockUnseekableIO())
def buffered_random():
return self.BufferedRandom(self.BytesIO())
def buffered_rw_pair():
return self.BufferedRWPair(self.MockUnseekableIO(),
self.MockUnseekableIO())
def text_reader():
class UnseekableReader(self.MockUnseekableIO):
writable = self.BufferedIOBase.writable
write = self.BufferedIOBase.write
return self.TextIOWrapper(UnseekableReader(), "ascii")
def text_writer():
class UnseekableWriter(self.MockUnseekableIO):
readable = self.BufferedIOBase.readable
read = self.BufferedIOBase.read
return self.TextIOWrapper(UnseekableWriter(), "ascii")
tests = (
(pipe_reader, "fr"), (pipe_writer, "fw"),
(buffered_reader, "r"), (buffered_writer, "w"),
(buffered_random, "rws"), (buffered_rw_pair, "rw"),
(text_reader, "r"), (text_writer, "w"),
(self.BytesIO, "rws"), (self.StringIO, "rws"),
)
for [test, abilities] in tests:
with self.subTest(test), test() as obj:
readable = "r" in abilities
self.assertEqual(obj.readable(), readable)
writable = "w" in abilities
self.assertEqual(obj.writable(), writable)
if isinstance(obj, self.TextIOBase):
data = "3"
elif isinstance(obj, (self.BufferedIOBase, self.RawIOBase)):
data = b"3"
else:
self.fail("Unknown base class")
if "f" in abilities:
obj.fileno()
else:
self.assertRaises(OSError, obj.fileno)
if readable:
obj.read(1)
obj.read()
else:
self.assertRaises(OSError, obj.read, 1)
self.assertRaises(OSError, obj.read)
if writable:
obj.write(data)
else:
self.assertRaises(OSError, obj.write, data)
if sys.platform.startswith("win") and test in (
pipe_reader, pipe_writer):
# Pipes seem to appear as seekable on Windows
continue
seekable = "s" in abilities
self.assertEqual(obj.seekable(), seekable)
if seekable:
obj.tell()
obj.seek(0)
else:
self.assertRaises(OSError, obj.tell)
self.assertRaises(OSError, obj.seek, 0)
if writable and seekable:
obj.truncate()
obj.truncate(0)
else:
self.assertRaises(OSError, obj.truncate)
self.assertRaises(OSError, obj.truncate, 0)
def test_open_handles_NUL_chars(self):
fn_with_NUL = 'foo\0bar'
self.assertRaises(ValueError, self.open, fn_with_NUL, 'w')
bytes_fn = bytes(fn_with_NUL, 'ascii')
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.assertRaises(ValueError, self.open, bytes_fn, 'w')
def test_raw_file_io(self):
with self.open(os_helper.TESTFN, "wb", buffering=0) as f:
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.write_ops(f)
with self.open(os_helper.TESTFN, "rb", buffering=0) as f:
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
self.read_ops(f)
def test_buffered_file_io(self):
with self.open(os_helper.TESTFN, "wb") as f:
self.assertEqual(f.readable(), False)
self.assertEqual(f.writable(), True)
self.assertEqual(f.seekable(), True)
self.write_ops(f)
with self.open(os_helper.TESTFN, "rb") as f:
self.assertEqual(f.readable(), True)
self.assertEqual(f.writable(), False)
self.assertEqual(f.seekable(), True)
self.read_ops(f, True)
def test_readline(self):
with self.open(os_helper.TESTFN, "wb") as f:
f.write(b"abc\ndef\nxyzzy\nfoo\x00bar\nanother line")
with self.open(os_helper.TESTFN, "rb") as f:
self.assertEqual(f.readline(), b"abc\n")
self.assertEqual(f.readline(10), b"def\n")
self.assertEqual(f.readline(2), b"xy")
self.assertEqual(f.readline(4), b"zzy\n")
self.assertEqual(f.readline(), b"foo\x00bar\n")
self.assertEqual(f.readline(None), b"another line")
self.assertRaises(TypeError, f.readline, 5.3)
with self.open(os_helper.TESTFN, "r") as f:
self.assertRaises(TypeError, f.readline, 5.3)
def test_readline_nonsizeable(self):
# Issue #30061
# Crash when readline() returns an object without __len__
class R(self.IOBase):
def readline(self):
return None
self.assertRaises((TypeError, StopIteration), next, R())
def test_next_nonsizeable(self):
# Issue #30061
# Crash when __next__() returns an object without __len__
class R(self.IOBase):
def __next__(self):
return None
self.assertRaises(TypeError, R().readlines, 1)
def test_raw_bytes_io(self):
f = self.BytesIO()
self.write_ops(f)
data = f.getvalue()
self.assertEqual(data, b"hello world\n")
f = self.BytesIO(data)
self.read_ops(f, True)
def test_large_file_ops(self):
# On Windows and Mac OSX this test consumes large resources; It takes
# a long time to build the >2 GiB file and takes >2 GiB of disk space
# therefore the resource must be enabled to run this test.
if sys.platform[:3] == 'win' or sys.platform == 'darwin':
support.requires(
'largefile',
'test requires %s bytes and a long time to run' % self.LARGE)
with self.open(os_helper.TESTFN, "w+b", 0) as f:
self.large_file_ops(f)
with self.open(os_helper.TESTFN, "w+b") as f:
self.large_file_ops(f)
def test_with_open(self):
for bufsize in (0, 100):
f = None
with self.open(os_helper.TESTFN, "wb", bufsize) as f:
f.write(b"xxx")
self.assertEqual(f.closed, True)
f = None
try:
with self.open(os_helper.TESTFN, "wb", bufsize) as f:
1/0
except ZeroDivisionError:
self.assertEqual(f.closed, True)
else:
self.fail("1/0 didn't raise an exception")
def test_append_mode_tell(self):
with self.open(os_helper.TESTFN, "wb") as f:
f.write(b"xxx")
with self.open(os_helper.TESTFN, "ab", buffering=0) as f:
self.assertEqual(f.tell(), 3)
with self.open(os_helper.TESTFN, "ab") as f:
self.assertEqual(f.tell(), 3)
with self.open(os_helper.TESTFN, "a") as f:
self.assertGreater(f.tell(), 0)
def test_destructor(self):
record = []
class MyFileIO(self.FileIO):
def __del__(self):
record.append(1)
try:
f = super().__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(2)
super().close()
def flush(self):
record.append(3)
super().flush()
with support.check_warnings(('', ResourceWarning)):
f = MyFileIO(os_helper.TESTFN, "wb")
f.write(b"xxx")
del f
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
with self.open(os_helper.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"xxx")
def _check_base_destructor(self, base):
record = []
class MyIO(base):
def __init__(self):
self.on_del = 1
self.on_close = 2
self.on_flush = 3
def __del__(self):
record.append(self.on_del)
try:
f = super().__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(self.on_close)
super().close()
def flush(self):
record.append(self.on_flush)
super().flush()
f = MyIO()
del f
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
def test_IOBase_destructor(self):
self._check_base_destructor(self.IOBase)
def test_RawIOBase_destructor(self):
self._check_base_destructor(self.RawIOBase)
def test_BufferedIOBase_destructor(self):
self._check_base_destructor(self.BufferedIOBase)
def test_TextIOBase_destructor(self):
self._check_base_destructor(self.TextIOBase)
def test_close_flushes(self):
with self.open(os_helper.TESTFN, "wb") as f:
f.write(b"xxx")
with self.open(os_helper.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"xxx")
def test_array_writes(self):
a = array.array('i', range(10))
n = len(a.tobytes())
def check(f):
with f:
self.assertEqual(f.write(a), n)
f.writelines((a,))
check(self.BytesIO())
check(self.FileIO(os_helper.TESTFN, "w"))
check(self.BufferedWriter(self.MockRawIO()))
check(self.BufferedRandom(self.MockRawIO()))
check(self.BufferedRWPair(self.MockRawIO(), self.MockRawIO()))
def test_closefd(self):
self.assertRaises(ValueError, self.open, os_helper.TESTFN, 'w',
closefd=False)
def test_read_closed(self):
with self.open(os_helper.TESTFN, "w") as f:
f.write("egg\n")
with self.open(os_helper.TESTFN, "r") as f:
file = self.open(f.fileno(), "r", closefd=False)
self.assertEqual(file.read(), "egg\n")
file.seek(0)
file.close()
self.assertRaises(ValueError, file.read)
with self.open(os_helper.TESTFN, "rb") as f:
file = self.open(f.fileno(), "rb", closefd=False)
self.assertEqual(file.read()[:3], b"egg")
file.close()
self.assertRaises(ValueError, file.readinto, bytearray(1))
def test_no_closefd_with_filename(self):
self.assertRaises(ValueError, self.open, os_helper.TESTFN, "r", closefd=False)
def test_closefd_attr(self):
with self.open(os_helper.TESTFN, "wb") as f:
f.write(b"egg\n")
with self.open(os_helper.TESTFN, "r") as f:
self.assertEqual(f.buffer.raw.closefd, True)
file = self.open(f.fileno(), "r", closefd=False)
self.assertEqual(file.buffer.raw.closefd, False)
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_garbage_collection(self):
# FileIO objects are collected, and collecting them flushes
# all data to disk.
with support.check_warnings(('', ResourceWarning)):
f = self.FileIO(os_helper.TESTFN, "wb")
f.write(b"abcxxx")
f.f = f
wr = weakref.ref(f)
del f
support.gc_collect()
self.assertIsNone(wr(), wr)
with self.open(os_helper.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"abcxxx")
def test_unbounded_file(self):
# Issue #1174606: reading from an unbounded stream such as /dev/zero.
zero = "/dev/zero"
if not os.path.exists(zero):
self.skipTest("{0} does not exist".format(zero))
if sys.maxsize > 0x7FFFFFFF:
self.skipTest("test can only run in a 32-bit address space")
if support.real_max_memuse < support._2G:
self.skipTest("test requires at least 2 GiB of memory")
with self.open(zero, "rb", buffering=0) as f:
self.assertRaises(OverflowError, f.read)
with self.open(zero, "rb") as f:
self.assertRaises(OverflowError, f.read)
with self.open(zero, "r") as f:
self.assertRaises(OverflowError, f.read)
def check_flush_error_on_close(self, *args, **kwargs):
# Test that the file is closed despite failed flush
# and that flush() is called before file closed.
f = self.open(*args, **kwargs)
closed = []
def bad_flush():
closed[:] = [f.closed]
raise OSError()
f.flush = bad_flush
self.assertRaises(OSError, f.close) # exception not swallowed
self.assertTrue(f.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
f.flush = lambda: None # break reference loop
def test_flush_error_on_close(self):
# raw file
# Issue #5700: io.FileIO calls flush() after file closed
self.check_flush_error_on_close(os_helper.TESTFN, 'wb', buffering=0)
fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', buffering=0)
fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', buffering=0, closefd=False)
os.close(fd)
# buffered io
self.check_flush_error_on_close(os_helper.TESTFN, 'wb')
fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb')
fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'wb', closefd=False)
os.close(fd)
# text io
self.check_flush_error_on_close(os_helper.TESTFN, 'w')
fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'w')
fd = os.open(os_helper.TESTFN, os.O_WRONLY|os.O_CREAT)
self.check_flush_error_on_close(fd, 'w', closefd=False)
os.close(fd)
def test_multi_close(self):
f = self.open(os_helper.TESTFN, "wb", buffering=0)
f.close()
f.close()
f.close()
self.assertRaises(ValueError, f.flush)
def test_RawIOBase_read(self):
# Exercise the default limited RawIOBase.read(n) implementation (which
# calls readinto() internally).
rawio = self.MockRawIOWithoutRead((b"abc", b"d", None, b"efg", None))
self.assertEqual(rawio.read(2), b"ab")
self.assertEqual(rawio.read(2), b"c")
self.assertEqual(rawio.read(2), b"d")
self.assertEqual(rawio.read(2), None)
self.assertEqual(rawio.read(2), b"ef")
self.assertEqual(rawio.read(2), b"g")
self.assertEqual(rawio.read(2), None)
self.assertEqual(rawio.read(2), b"")
def test_types_have_dict(self):
test = (
self.IOBase(),
self.RawIOBase(),
self.TextIOBase(),
self.StringIO(),
self.BytesIO()
)
for obj in test:
self.assertTrue(hasattr(obj, "__dict__"))
def test_opener(self):
with self.open(os_helper.TESTFN, "w") as f:
f.write("egg\n")
fd = os.open(os_helper.TESTFN, os.O_RDONLY)
def opener(path, flags):
return fd
with self.open("non-existent", "r", opener=opener) as f:
self.assertEqual(f.read(), "egg\n")
def test_bad_opener_negative_1(self):
# Issue #27066.
def badopener(fname, flags):
return -1
with self.assertRaises(ValueError) as cm:
open('non-existent', 'r', opener=badopener)
self.assertEqual(str(cm.exception), 'opener returned -1')
def test_bad_opener_other_negative(self):
# Issue #27066.
def badopener(fname, flags):
return -2
with self.assertRaises(ValueError) as cm:
open('non-existent', 'r', opener=badopener)
self.assertEqual(str(cm.exception), 'opener returned -2')
def test_fileio_closefd(self):
# Issue #4841
with self.open(__file__, 'rb') as f1, \
self.open(__file__, 'rb') as f2:
fileio = self.FileIO(f1.fileno(), closefd=False)
# .__init__() must not close f1
fileio.__init__(f2.fileno(), closefd=False)
f1.readline()
# .close() must not close f2
fileio.close()
f2.readline()
def test_nonbuffered_textio(self):
with support.check_no_resource_warning(self):
with self.assertRaises(ValueError):
self.open(os_helper.TESTFN, 'w', buffering=0)
def test_invalid_newline(self):
with support.check_no_resource_warning(self):
with self.assertRaises(ValueError):
self.open(os_helper.TESTFN, 'w', newline='invalid')
def test_buffered_readinto_mixin(self):
# Test the implementation provided by BufferedIOBase
class Stream(self.BufferedIOBase):
def read(self, size):
return b"12345"
read1 = read
stream = Stream()
for method in ("readinto", "readinto1"):
with self.subTest(method):
buffer = byteslike(5)
self.assertEqual(getattr(stream, method)(buffer), 5)
self.assertEqual(bytes(buffer), b"12345")
def test_fspath_support(self):
def check_path_succeeds(path):
with self.open(path, "w") as f:
f.write("egg\n")
with self.open(path, "r") as f:
self.assertEqual(f.read(), "egg\n")
check_path_succeeds(FakePath(os_helper.TESTFN))
check_path_succeeds(FakePath(os_helper.TESTFN.encode('utf-8')))
with self.open(os_helper.TESTFN, "w") as f:
bad_path = FakePath(f.fileno())
with self.assertRaises(TypeError):
self.open(bad_path, 'w')
bad_path = FakePath(None)
with self.assertRaises(TypeError):
self.open(bad_path, 'w')
bad_path = FakePath(FloatingPointError)
with self.assertRaises(FloatingPointError):
self.open(bad_path, 'w')
# ensure that refcounting is correct with some error conditions
with self.assertRaisesRegex(ValueError, 'read/write/append mode'):
self.open(FakePath(os_helper.TESTFN), 'rwxa')
def test_RawIOBase_readall(self):
# Exercise the default unlimited RawIOBase.read() and readall()
# implementations.
rawio = self.MockRawIOWithoutRead((b"abc", b"d", b"efg"))
self.assertEqual(rawio.read(), b"abcdefg")
rawio = self.MockRawIOWithoutRead((b"abc", b"d", b"efg"))
self.assertEqual(rawio.readall(), b"abcdefg")
def test_BufferedIOBase_readinto(self):
# Exercise the default BufferedIOBase.readinto() and readinto1()
# implementations (which call read() or read1() internally).
class Reader(self.BufferedIOBase):
def __init__(self, avail):
self.avail = avail
def read(self, size):
result = self.avail[:size]
self.avail = self.avail[size:]
return result
def read1(self, size):
return self.read(min(size, 5))
tests = (
# (test method, total data available, read buffer size, expected
# read size)
("readinto", 10, 5, 5),
("readinto", 10, 6, 6), # More than read1() can return
("readinto", 5, 6, 5), # Buffer larger than total available
("readinto", 6, 7, 6),
("readinto", 10, 0, 0), # Empty buffer
("readinto1", 10, 5, 5), # Result limited to single read1() call
("readinto1", 10, 6, 5), # Buffer larger than read1() can return
("readinto1", 5, 6, 5), # Buffer larger than total available
("readinto1", 6, 7, 5),
("readinto1", 10, 0, 0), # Empty buffer
)
UNUSED_BYTE = 0x81
for test in tests:
with self.subTest(test):
method, avail, request, result = test
reader = Reader(bytes(range(avail)))
buffer = bytearray((UNUSED_BYTE,) * request)
method = getattr(reader, method)
self.assertEqual(method(buffer), result)
self.assertEqual(len(buffer), request)
self.assertSequenceEqual(buffer[:result], range(result))
unused = (UNUSED_BYTE,) * (request - result)
self.assertSequenceEqual(buffer[result:], unused)
self.assertEqual(len(reader.avail), avail - result)
def test_close_assert(self):
class R(self.IOBase):
def __setattr__(self, name, value):
pass
def flush(self):
raise OSError()
f = R()
# This would cause an assertion failure.
self.assertRaises(OSError, f.close)
# Silence destructor error
R.flush = lambda self: None
class CIOTest(IOTest):
# TODO: RUSTPYTHON, cyclic gc
@unittest.expectedFailure
def test_IOBase_finalize(self):
# Issue #12149: segmentation fault on _PyIOBase_finalize when both a
# class which inherits IOBase and an object of this class are caught
# in a reference cycle and close() is already in the method cache.
class MyIO(self.IOBase):
def close(self):
pass
# create an instance to populate the method cache
MyIO()
obj = MyIO()
obj.obj = obj
wr = weakref.ref(obj)
del MyIO
del obj
support.gc_collect()
self.assertIsNone(wr(), wr)
# TODO: RUSTPYTHON, AssertionError: filter ('', ResourceWarning) did not catch any warning
@unittest.expectedFailure
def test_destructor(self):
super().test_destructor(self)
class PyIOTest(IOTest):
pass
@support.cpython_only
class APIMismatchTest(unittest.TestCase):
def test_RawIOBase_io_in_pyio_match(self):
mismatch = support.detect_api_mismatch(pyio.RawIOBase, io.RawIOBase,
ignore=('__weakref__',))
self.assertEqual(mismatch, set(), msg='Python RawIOBase does not have all C RawIOBase methods')
def test_RawIOBase_pyio_in_io_match(self):
mismatch = support.detect_api_mismatch(io.RawIOBase, pyio.RawIOBase)
self.assertEqual(mismatch, set(), msg='C RawIOBase does not have all Python RawIOBase methods')
class CommonBufferedTests:
# Tests common to BufferedReader, BufferedWriter and BufferedRandom
def test_detach(self):
raw = self.MockRawIO()
buf = self.tp(raw)
self.assertIs(buf.detach(), raw)
self.assertRaises(ValueError, buf.detach)
repr(buf) # Should still work
def test_fileno(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertEqual(42, bufio.fileno())
def test_invalid_args(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
# Invalid whence
self.assertRaises(ValueError, bufio.seek, 0, -1)
self.assertRaises(ValueError, bufio.seek, 0, 9)
def test_override_destructor(self):
tp = self.tp
record = []
class MyBufferedIO(tp):
def __del__(self):
record.append(1)
try:
f = super().__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(2)
super().close()
def flush(self):
record.append(3)
super().flush()
rawio = self.MockRawIO()
bufio = MyBufferedIO(rawio)
del bufio
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
def test_context_manager(self):
# Test usability as a context manager
rawio = self.MockRawIO()
bufio = self.tp(rawio)
def _with():
with bufio:
pass
_with()
# bufio should now be closed, and using it a second time should raise
# a ValueError.
self.assertRaises(ValueError, _with)
# TODO: RUSTPYTHON, sys.unraisablehook
@unittest.expectedFailure
def test_error_through_destructor(self):
# Test that the exception state is not modified by a destructor,
# even if close() fails.
rawio = self.CloseFailureIO()
with support.catch_unraisable_exception() as cm:
with self.assertRaises(AttributeError):
self.tp(rawio).xyzzy
if not IOBASE_EMITS_UNRAISABLE:
self.assertIsNone(cm.unraisable)
elif cm.unraisable is not None:
self.assertEqual(cm.unraisable.exc_type, OSError)
def test_repr(self):
raw = self.MockRawIO()
b = self.tp(raw)
clsname = r"(%s\.)?%s" % (self.tp.__module__, self.tp.__qualname__)
self.assertRegex(repr(b), "<%s>" % clsname)
raw.name = "dummy"
self.assertRegex(repr(b), "<%s name='dummy'>" % clsname)
raw.name = b"dummy"
self.assertRegex(repr(b), "<%s name=b'dummy'>" % clsname)
def test_recursive_repr(self):
# Issue #25455
raw = self.MockRawIO()
b = self.tp(raw)
with support.swap_attr(raw, 'name', b):
try:
repr(b) # Should not crash
except RuntimeError:
pass
def test_flush_error_on_close(self):
# Test that buffered file is closed despite failed flush
# and that flush() is called before file closed.
raw = self.MockRawIO()
closed = []
def bad_flush():
closed[:] = [b.closed, raw.closed]
raise OSError()
raw.flush = bad_flush
b = self.tp(raw)
self.assertRaises(OSError, b.close) # exception not swallowed
self.assertTrue(b.closed)
self.assertTrue(raw.closed)
self.assertTrue(closed) # flush() called
self.assertFalse(closed[0]) # flush() called before file closed
self.assertFalse(closed[1])
raw.flush = lambda: None # break reference loop
def test_close_error_on_close(self):
raw = self.MockRawIO()
def bad_flush():
raise OSError('flush')
def bad_close():
raise OSError('close')
raw.close = bad_close
b = self.tp(raw)
b.flush = bad_flush
with self.assertRaises(OSError) as err: # exception not swallowed
b.close()
self.assertEqual(err.exception.args, ('close',))
self.assertIsInstance(err.exception.__context__, OSError)
self.assertEqual(err.exception.__context__.args, ('flush',))
self.assertFalse(b.closed)
# Silence destructor error
raw.close = lambda: None
b.flush = lambda: None
def test_nonnormalized_close_error_on_close(self):
# Issue #21677
raw = self.MockRawIO()
def bad_flush():
raise non_existing_flush
def bad_close():
raise non_existing_close
raw.close = bad_close
b = self.tp(raw)
b.flush = bad_flush
with self.assertRaises(NameError) as err: # exception not swallowed
b.close()
self.assertIn('non_existing_close', str(err.exception))
self.assertIsInstance(err.exception.__context__, NameError)
self.assertIn('non_existing_flush', str(err.exception.__context__))
self.assertFalse(b.closed)
# Silence destructor error
b.flush = lambda: None
raw.close = lambda: None
def test_multi_close(self):
raw = self.MockRawIO()
b = self.tp(raw)
b.close()
b.close()
b.close()
self.assertRaises(ValueError, b.flush)
def test_unseekable(self):
bufio = self.tp(self.MockUnseekableIO(b"A" * 10))
self.assertRaises(self.UnsupportedOperation, bufio.tell)
self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)
def test_readonly_attributes(self):
raw = self.MockRawIO()
buf = self.tp(raw)
x = self.MockRawIO()
with self.assertRaises(AttributeError):
buf.raw = x
class SizeofTest:
@support.cpython_only
def test_sizeof(self):
bufsize1 = 4096
bufsize2 = 8192
rawio = self.MockRawIO()
bufio = self.tp(rawio, buffer_size=bufsize1)
size = sys.getsizeof(bufio) - bufsize1
rawio = self.MockRawIO()
bufio = self.tp(rawio, buffer_size=bufsize2)
self.assertEqual(sys.getsizeof(bufio), size + bufsize2)
@support.cpython_only
def test_buffer_freeing(self) :
bufsize = 4096
rawio = self.MockRawIO()
bufio = self.tp(rawio, buffer_size=bufsize)
size = sys.getsizeof(bufio) - bufsize
bufio.close()
self.assertEqual(sys.getsizeof(bufio), size)
class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
read_mode = "rb"
def test_constructor(self):
rawio = self.MockRawIO([b"abc"])
bufio = self.tp(rawio)
bufio.__init__(rawio)
bufio.__init__(rawio, buffer_size=1024)
bufio.__init__(rawio, buffer_size=16)
self.assertEqual(b"abc", bufio.read())
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
rawio = self.MockRawIO([b"abc"])
bufio.__init__(rawio)
self.assertEqual(b"abc", bufio.read())
def test_uninitialized(self):
bufio = self.tp.__new__(self.tp)
del bufio
bufio = self.tp.__new__(self.tp)
self.assertRaisesRegex((ValueError, AttributeError),
'uninitialized|has no attribute',
bufio.read, 0)
bufio.__init__(self.MockRawIO())
self.assertEqual(bufio.read(0), b'')
def test_read(self):
for arg in (None, 7):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read(arg))
# Invalid args
self.assertRaises(ValueError, bufio.read, -2)
def test_read1(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"a", bufio.read(1))
self.assertEqual(b"b", bufio.read1(1))
self.assertEqual(rawio._reads, 1)
self.assertEqual(b"", bufio.read1(0))
self.assertEqual(b"c", bufio.read1(100))
self.assertEqual(rawio._reads, 1)
self.assertEqual(b"d", bufio.read1(100))
self.assertEqual(rawio._reads, 2)
self.assertEqual(b"efg", bufio.read1(100))
self.assertEqual(rawio._reads, 3)
self.assertEqual(b"", bufio.read1(100))
self.assertEqual(rawio._reads, 4)
def test_read1_arbitrary(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"a", bufio.read(1))
self.assertEqual(b"bc", bufio.read1())
self.assertEqual(b"d", bufio.read1())
self.assertEqual(b"efg", bufio.read1(-1))
self.assertEqual(rawio._reads, 3)
self.assertEqual(b"", bufio.read1())
self.assertEqual(rawio._reads, 4)
def test_readinto(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
b = bytearray(2)
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ab")
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"cd")
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ef")
self.assertEqual(bufio.readinto(b), 1)
self.assertEqual(b, b"gf")
self.assertEqual(bufio.readinto(b), 0)
self.assertEqual(b, b"gf")
rawio = self.MockRawIO((b"abc", None))
bufio = self.tp(rawio)
self.assertEqual(bufio.readinto(b), 2)
self.assertEqual(b, b"ab")
self.assertEqual(bufio.readinto(b), 1)
self.assertEqual(b, b"cb")
    def test_readinto1(self):
        """readinto1() does at most one raw read, like read1()."""
        buffer_size = 10
        rawio = self.MockRawIO((b"abc", b"de", b"fgh", b"jkl"))
        bufio = self.tp(rawio, buffer_size=buffer_size)
        b = bytearray(2)
        # peek() fills the internal buffer with the first raw chunk.
        self.assertEqual(bufio.peek(3), b'abc')
        self.assertEqual(rawio._reads, 1)
        # Served entirely from the buffer: no new raw read.
        self.assertEqual(bufio.readinto1(b), 2)
        self.assertEqual(b, b"ab")
        self.assertEqual(rawio._reads, 1)
        # Short result: only the last buffered byte is copied.
        self.assertEqual(bufio.readinto1(b), 1)
        self.assertEqual(b[:1], b"c")
        self.assertEqual(rawio._reads, 1)
        # Buffer empty: exactly one raw read satisfies the request.
        self.assertEqual(bufio.readinto1(b), 2)
        self.assertEqual(b, b"de")
        self.assertEqual(rawio._reads, 2)
        # Destination larger than the internal buffer: readinto1() may read
        # directly into it, consuming several raw chunks' worth at once.
        b = bytearray(2*buffer_size)
        self.assertEqual(bufio.peek(3), b'fgh')
        self.assertEqual(rawio._reads, 3)
        self.assertEqual(bufio.readinto1(b), 6)
        self.assertEqual(b[:6], b"fghjkl")
        self.assertEqual(rawio._reads, 4)
    def test_readinto_array(self):
        """readinto() counts bytes, not elements, for multi-byte arrays."""
        buffer_size = 60
        data = b"a" * 26
        rawio = self.MockRawIO((data,))
        bufio = self.tp(rawio, buffer_size=buffer_size)
        # Create an array with element size > 1 byte
        b = array.array('i', b'x' * 32)
        assert len(b) != 16
        # Read into it. We should get as many *bytes* as we can fit into b
        # (which is more than the number of elements)
        n = bufio.readinto(b)
        self.assertGreater(n, len(b))
        # Check that old contents of b are preserved
        bm = memoryview(b).cast('B')
        self.assertLess(n, len(bm))
        self.assertEqual(bm[:n], data[:n])
        self.assertEqual(bm[n:], b'x' * (len(bm[n:])))
    def test_readinto1_array(self):
        """readinto1() counts bytes, not elements, for multi-byte arrays."""
        buffer_size = 60
        data = b"a" * 26
        rawio = self.MockRawIO((data,))
        bufio = self.tp(rawio, buffer_size=buffer_size)
        # Create an array with element size > 1 byte
        b = array.array('i', b'x' * 32)
        assert len(b) != 16
        # Read into it. We should get as many *bytes* as we can fit into b
        # (which is more than the number of elements)
        n = bufio.readinto1(b)
        self.assertGreater(n, len(b))
        # Check that old contents of b are preserved
        bm = memoryview(b).cast('B')
        self.assertLess(n, len(bm))
        self.assertEqual(bm[:n], data[:n])
        self.assertEqual(bm[n:], b'x' * (len(bm[n:])))
def test_readlines(self):
def bufio():
rawio = self.MockRawIO((b"abc\n", b"d\n", b"ef"))
return self.tp(rawio)
self.assertEqual(bufio().readlines(), [b"abc\n", b"d\n", b"ef"])
self.assertEqual(bufio().readlines(5), [b"abc\n", b"d\n"])
self.assertEqual(bufio().readlines(None), [b"abc\n", b"d\n", b"ef"])
    def test_buffering(self):
        """Map buffered read sizes onto the raw reads they should cause."""
        data = b"abcdefghi"
        dlen = len(data)
        # Rows: [buffer size, buffered read sizes, expected raw read sizes].
        tests = [
            [ 100, [ 3, 1, 4, 8 ], [ dlen, 0 ] ],
            [ 100, [ 3, 3, 3], [ dlen ] ],
            [ 4, [ 1, 2, 4, 2 ], [ 4, 4, 1 ] ],
        ]
        for bufsize, buf_read_sizes, raw_read_sizes in tests:
            rawio = self.MockFileIO(data)
            bufio = self.tp(rawio, buffer_size=bufsize)
            pos = 0
            for nbytes in buf_read_sizes:
                self.assertEqual(bufio.read(nbytes), data[pos:pos+nbytes])
                pos += nbytes
            # this is mildly implementation-dependent
            self.assertEqual(rawio.read_history, raw_read_sizes)
    def test_read_non_blocking(self):
        """A raw stream with no data available yields partial/None results."""
        # Inject some None's in there to simulate EWOULDBLOCK
        rawio = self.MockRawIO((b"abc", b"d", None, b"efg", None, None, None))
        bufio = self.tp(rawio)
        self.assertEqual(b"abcd", bufio.read(6))
        self.assertEqual(b"e", bufio.read(1))
        self.assertEqual(b"fg", bufio.read())
        self.assertEqual(b"", bufio.peek(1))
        # Nothing buffered and the raw stream would block: read() returns None.
        self.assertIsNone(bufio.read())
        # The final b"" chunk means real EOF: read() now returns empty.
        self.assertEqual(b"", bufio.read())
        # readall() on the raw stream itself behaves the same way.
        rawio = self.MockRawIO((b"a", None, None))
        self.assertEqual(b"a", rawio.readall())
        self.assertIsNone(rawio.readall())
def test_read_past_eof(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read(9000))
def test_read_all(self):
rawio = self.MockRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertEqual(b"abcdefg", bufio.read())
@support.requires_resource('cpu')
def test_threads(self):
try:
# 1's... 255's. This will help us check that concurrent reading
# doesn't duplicate or forget contents.
N = 1000
l = list(range(256)) * N
random.shuffle(l)
s = bytes(bytearray(l))
with self.open(os_helper.TESTFN, "wb") as f:
f.write(s)
with self.open(os_helper.TESTFN, self.read_mode, buffering=0) as raw:
bufio = self.tp(raw, 8)
errors = []
results = []
def f():
try:
for n in cycle([1, 19]):
s = bufio.read(n)
if not s:
break
results.append(s)
except Exception as e:
errors.append(e)
raise
threads = [threading.Thread(target=f) for x in range(20)]
with support.start_threads(threads):
time.sleep(0.02)
self.assertFalse(errors,
"the following exceptions were caught: %r" % errors)
s = b''.join(results)
for i in range(256):
c = bytes(bytearray([i]))
self.assertEqual(s.count(c), N)
finally:
os_helper.unlink(os_helper.TESTFN)
    def test_unseekable(self):
        """seek()/tell() raise UnsupportedOperation on an unseekable stream."""
        bufio = self.tp(self.MockUnseekableIO(b"A" * 10))
        self.assertRaises(self.UnsupportedOperation, bufio.tell)
        self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)
        bufio.read(1)
        # Still unsupported after the buffer has been partially consumed.
        self.assertRaises(self.UnsupportedOperation, bufio.seek, 0)
        self.assertRaises(self.UnsupportedOperation, bufio.tell)
    def test_misbehaved_io(self):
        """A raw stream that lies about seek/tell makes the reader raise OSError."""
        rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
        bufio = self.tp(rawio)
        self.assertRaises(OSError, bufio.seek, 0)
        self.assertRaises(OSError, bufio.tell)
        # Silence destructor error
        bufio.close = lambda: None
def test_no_extraneous_read(self):
bufsize - 1, bufsize, bufsize + 1, bufsize * 2):
rawio = self.MockRawIO([b"x" * n])
bufio = self.tp(rawio, bufsize)
self.assertEqual(bufio.read(n), b"x" * n)
self.assertEqual(rawio._extraneous_reads, 0,
"failed for {}: {} != 0".format(n, rawio._extraneous_reads))
rawio = self.MockRawIO([b"x" * (n - 1), b"x"])
bufio = self.tp(rawio, bufsize)
self.assertEqual(bufio.read(n), b"x" * n)
self.assertEqual(rawio._extraneous_reads, 0,
"failed for {}: {} != 0".format(n, rawio._extraneous_reads))
def test_read_on_closed(self):
b = io.BufferedReader(io.BytesIO(b"12"))
b.read(1)
b.close()
self.assertRaises(ValueError, b.peek)
self.assertRaises(ValueError, b.read1, 1)
class CBufferedReaderTest(BufferedReaderTest, SizeofTest):
tp = io.BufferedReader
@unittest.skip("TODO: RUSTPYTHON, fallible allocation")
@unittest.skipIf(MEMORY_SANITIZER, "MSan defaults to crashing "
"instead of returning NULL for malloc failure.")
def test_constructor(self):
BufferedReaderTest.test_constructor(self)
if sys.maxsize > 0x7FFFFFFF:
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises((OverflowError, MemoryError, ValueError),
bufio.__init__, rawio, sys.maxsize)
def test_initialization(self):
rawio = self.MockRawIO([b"abc"])
bufio = self.tp(rawio)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.read)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.read)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
self.assertRaises(ValueError, bufio.read)
def test_misbehaved_io_read(self):
rawio = self.MisbehavedRawIO((b"abc", b"d", b"efg"))
bufio = self.tp(rawio)
self.assertRaises(OSError, bufio.read, 10)
@unittest.expectedFailure
def test_garbage_collection(self):
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with support.check_warnings(('', ResourceWarning)):
rawio = self.FileIO(os_helper.TESTFN, "w+b")
f = self.tp(rawio)
f.f = f
wr = weakref.ref(f)
del f
support.gc_collect()
self.assertIsNone(wr(), wr)
@unittest.expectedFailure
def test_args_error(self):
with self.assertRaisesRegex(TypeError, "BufferedReader"):
self.tp(io.BytesIO(), 1024, 1024, 1024)
@unittest.expectedFailure
def test_flush_error_on_close(self):
super().test_flush_error_on_close()
class PyBufferedReaderTest(BufferedReaderTest):
    # Run the full BufferedReaderTest suite against the pure-Python
    # implementation from _pyio.
    tp = pyio.BufferedReader
class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
write_mode = "wb"
    def test_constructor(self):
        """__init__ can be called repeatedly; bad buffer sizes are rejected."""
        rawio = self.MockRawIO()
        bufio = self.tp(rawio)
        # Re-initialization with different buffer sizes is allowed.
        bufio.__init__(rawio)
        bufio.__init__(rawio, buffer_size=1024)
        bufio.__init__(rawio, buffer_size=16)
        self.assertEqual(3, bufio.write(b"abc"))
        bufio.flush()
        # Non-positive buffer sizes are invalid.
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
        self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
        # A failed re-init must not break the object: re-init and keep writing.
        bufio.__init__(rawio)
        self.assertEqual(3, bufio.write(b"ghi"))
        bufio.flush()
        self.assertEqual(b"".join(rawio._write_stack), b"abcghi")
def test_uninitialized(self):
bufio = self.tp.__new__(self.tp)
del bufio
bufio = self.tp.__new__(self.tp)
self.assertRaisesRegex((ValueError, AttributeError),
'uninitialized|has no attribute',
bufio.write, b'')
bufio.__init__(self.MockRawIO())
self.assertEqual(bufio.write(b''), 0)
def test_detach_flush(self):
raw = self.MockRawIO()
buf = self.tp(raw)
buf.write(b"howdy!")
self.assertFalse(raw._write_stack)
buf.detach()
self.assertEqual(raw._write_stack, [b"howdy!"])
    def test_write(self):
        """write() buffers data and copies it, so callers may reuse buffers."""
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.write(b"abc")
        # Data stays buffered until an explicit flush.
        self.assertFalse(writer._write_stack)
        buffer = bytearray(b"def")
        bufio.write(buffer)
        buffer[:] = b"***" # Overwrite our copy of the data
        bufio.flush()
        # The mutation above must not be visible: write() copied the bytes.
        self.assertEqual(b"".join(writer._write_stack), b"abcdef")
    def test_write_overflow(self):
        """Writes exceeding the buffer capacity are flushed to the raw stream."""
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        contents = b"abcdefghijklmnop"
        for n in range(0, len(contents), 3):
            bufio.write(contents[n:n+3])
        flushed = b"".join(writer._write_stack)
        # At least (total - 8) bytes were implicitly flushed, perhaps more
        # depending on the implementation.
        self.assertTrue(flushed.startswith(contents[:-8]), flushed)
def check_writes(self, intermediate_func):
# Lots of writes, test the flushed output is as expected.
contents = bytes(range(256)) * 1000
n = 0
writer = self.MockRawIO()
bufio = self.tp(writer, 13)
# Generator of write sizes: repeat each N 15 times then proceed to N+1
def gen_sizes():
for size in count(1):
for i in range(15):
yield size
sizes = gen_sizes()
while n < len(contents):
size = min(next(sizes), len(contents) - n)
self.assertEqual(bufio.write(contents[n:n+size]), size)
intermediate_func(bufio)
n += size
bufio.flush()
self.assertEqual(contents, b"".join(writer._write_stack))
def test_writes(self):
self.check_writes(lambda bufio: None)
def test_writes_and_flushes(self):
self.check_writes(lambda bufio: bufio.flush())
def test_writes_and_seeks(self):
def _seekabs(bufio):
pos = bufio.tell()
bufio.seek(pos + 1, 0)
bufio.seek(pos - 1, 0)
bufio.seek(pos, 0)
self.check_writes(_seekabs)
def _seekrel(bufio):
pos = bufio.seek(0, 1)
bufio.seek(+1, 1)
bufio.seek(-1, 1)
bufio.seek(pos, 0)
self.check_writes(_seekrel)
def test_writes_and_truncates(self):
self.check_writes(lambda bufio: bufio.truncate(bufio.tell()))
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_write_non_blocking(self):
raw = self.MockNonBlockWriterIO()
bufio = self.tp(raw, 8)
self.assertEqual(bufio.write(b"abcd"), 4)
self.assertEqual(bufio.write(b"efghi"), 5)
# 1 byte will be written, the rest will be buffered
raw.block_on(b"k")
self.assertEqual(bufio.write(b"jklmn"), 5)
# 8 bytes will be written, 8 will be buffered and the rest will be lost
raw.block_on(b"0")
try:
bufio.write(b"opqrwxyz0123456789")
except self.BlockingIOError as e:
written = e.characters_written
else:
self.fail("BlockingIOError should have been raised")
self.assertEqual(written, 16)
self.assertEqual(raw.pop_written(),
b"abcdefghijklmnopqrwxyz")
self.assertEqual(bufio.write(b"ABCDEFGHI"), 9)
s = raw.pop_written()
# Previously buffered bytes were flushed
self.assertTrue(s.startswith(b"01234567A"), s)
def test_write_and_rewind(self):
raw = io.BytesIO()
bufio = self.tp(raw, 4)
self.assertEqual(bufio.write(b"abcdef"), 6)
self.assertEqual(bufio.tell(), 6)
bufio.seek(0, 0)
self.assertEqual(bufio.write(b"XY"), 2)
bufio.seek(6, 0)
self.assertEqual(raw.getvalue(), b"XYcdef")
self.assertEqual(bufio.write(b"123456"), 6)
bufio.flush()
self.assertEqual(raw.getvalue(), b"XYcdef123456")
def test_flush(self):
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.write(b"abc")
bufio.flush()
self.assertEqual(b"abc", writer._write_stack[0])
def test_writelines(self):
l = [b'ab', b'cd', b'ef']
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.writelines(l)
bufio.flush()
self.assertEqual(b''.join(writer._write_stack), b'abcdef')
def test_writelines_userlist(self):
l = UserList([b'ab', b'cd', b'ef'])
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
bufio.writelines(l)
bufio.flush()
self.assertEqual(b''.join(writer._write_stack), b'abcdef')
def test_writelines_error(self):
writer = self.MockRawIO()
bufio = self.tp(writer, 8)
self.assertRaises(TypeError, bufio.writelines, [1, 2, 3])
self.assertRaises(TypeError, bufio.writelines, None)
self.assertRaises(TypeError, bufio.writelines, 'abc')
    def test_destructor(self):
        """Dropping the last reference flushes any buffered data."""
        writer = self.MockRawIO()
        bufio = self.tp(writer, 8)
        bufio.write(b"abc")
        del bufio
        # Force collection so the destructor's implicit flush runs now.
        support.gc_collect()
        self.assertEqual(b"abc", writer._write_stack[0])
def test_truncate(self):
# Truncate implicitly flushes the buffer.
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with self.open(os_helper.TESTFN, self.write_mode, buffering=0) as raw:
bufio = self.tp(raw, 8)
bufio.write(b"abcdef")
self.assertEqual(bufio.truncate(3), 3)
self.assertEqual(bufio.tell(), 6)
with self.open(os_helper.TESTFN, "rb", buffering=0) as f:
self.assertEqual(f.read(), b"abc")
def test_truncate_after_write(self):
# Ensure that truncate preserves the file position after
# writes longer than the buffer size.
# Issue: https://bugs.python.org/issue32228
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with self.open(os_helper.TESTFN, "wb") as f:
# Fill with some buffer
f.write(b'\x00' * 10000)
buffer_sizes = [8192, 4096, 200]
for buffer_size in buffer_sizes:
with self.open(os_helper.TESTFN, "r+b", buffering=buffer_size) as f:
f.write(b'\x00' * (buffer_size + 1))
# After write write_pos and write_end are set to 0
f.read(1)
# read operation makes sure that pos != raw_pos
f.truncate()
self.assertEqual(f.tell(), buffer_size + 2)
@support.requires_resource('cpu')
def test_threads(self):
try:
# Write out many bytes from many threads and test they were
# all flushed.
N = 1000
contents = bytes(range(256)) * N
sizes = cycle([1, 19])
n = 0
queue = deque()
while n < len(contents):
size = next(sizes)
queue.append(contents[n:n+size])
n += size
del contents
# We use a real file object because it allows us to
# exercise situations where the GIL is released before
# writing the buffer to the raw streams. This is in addition
# to concurrency issues due to switching threads in the middle
# of Python code.
with self.open(os_helper.TESTFN, self.write_mode, buffering=0) as raw:
bufio = self.tp(raw, 8)
errors = []
def f():
try:
while True:
try:
s = queue.popleft()
except IndexError:
return
bufio.write(s)
except Exception as e:
errors.append(e)
raise
threads = [threading.Thread(target=f) for x in range(20)]
with support.start_threads(threads):
time.sleep(0.02) # yield
self.assertFalse(errors,
"the following exceptions were caught: %r" % errors)
bufio.close()
with self.open(os_helper.TESTFN, "rb") as f:
s = f.read()
for i in range(256):
self.assertEqual(s.count(bytes([i])), N)
finally:
os_helper.unlink(os_helper.TESTFN)
def test_misbehaved_io(self):
rawio = self.MisbehavedRawIO()
bufio = self.tp(rawio, 5)
self.assertRaises(OSError, bufio.seek, 0)
self.assertRaises(OSError, bufio.tell)
self.assertRaises(OSError, bufio.write, b"abcdef")
# Silence destructor error
bufio.close = lambda: None
def test_max_buffer_size_removal(self):
with self.assertRaises(TypeError):
self.tp(self.MockRawIO(), 8, 12)
    def test_write_error_on_close(self):
        """A raw write failure during close() propagates but still closes."""
        raw = self.MockRawIO()
        def bad_write(b):
            raise OSError()
        raw.write = bad_write
        b = self.tp(raw)
        b.write(b'spam')
        self.assertRaises(OSError, b.close) # exception not swallowed
        # Despite the error, the stream must end up closed.
        self.assertTrue(b.closed)
def test_slow_close_from_thread(self):
# Issue #31976
rawio = self.SlowFlushRawIO()
bufio = self.tp(rawio, 8)
t = threading.Thread(target=bufio.close)
t.start()
rawio.in_flush.wait()
self.assertRaises(ValueError, bufio.write, b'spam')
self.assertTrue(bufio.closed)
t.join()
class CBufferedWriterTest(BufferedWriterTest, SizeofTest):
tp = io.BufferedWriter
@unittest.skip("TODO: RUSTPYTHON, fallible allocation")
@unittest.skipIf(MEMORY_SANITIZER, "MSan defaults to crashing "
"instead of returning NULL for malloc failure.")
def test_constructor(self):
BufferedWriterTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
# than 2 GiB RAM and a 64-bit kernel.
if sys.maxsize > 0x7FFFFFFF:
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises((OverflowError, MemoryError, ValueError),
bufio.__init__, rawio, sys.maxsize)
def test_initialization(self):
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=0)
self.assertRaises(ValueError, bufio.write, b"def")
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-16)
self.assertRaises(ValueError, bufio.write, b"def")
self.assertRaises(ValueError, bufio.__init__, rawio, buffer_size=-1)
self.assertRaises(ValueError, bufio.write, b"def")
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_garbage_collection(self):
# C BufferedWriter objects are collected, and collecting them flushes
# all data to disk.
# The Python version has __del__, so it ends into gc.garbage instead
self.addCleanup(os_helper.unlink, os_helper.TESTFN)
with support.check_warnings(('', ResourceWarning)):
rawio = self.FileIO(os_helper.TESTFN, "w+b")
f = self.tp(rawio)
f.write(b"123xxx")
f.x = f
wr = weakref.ref(f)
del f
support.gc_collect()
self.assertIsNone(wr(), wr)
with self.open(os_helper.TESTFN, "rb") as f:
self.assertEqual(f.read(), b"123xxx")
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_args_error(self):
# Issue #17275
with self.assertRaisesRegex(TypeError, "BufferedWriter"):
self.tp(io.BytesIO(), 1024, 1024, 1024)
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_flush_error_on_close(self):
super().test_flush_error_on_close()
class PyBufferedWriterTest(BufferedWriterTest):
    # Run the full BufferedWriterTest suite against the pure-Python
    # implementation from _pyio.
    tp = pyio.BufferedWriter
class BufferedRWPairTest(unittest.TestCase):
def test_constructor(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertFalse(pair.closed)
def test_uninitialized(self):
pair = self.tp.__new__(self.tp)
del pair
pair = self.tp.__new__(self.tp)
self.assertRaisesRegex((ValueError, AttributeError),
'uninitialized|has no attribute',
pair.read, 0)
self.assertRaisesRegex((ValueError, AttributeError),
'uninitialized|has no attribute',
pair.write, b'')
pair.__init__(self.MockRawIO(), self.MockRawIO())
self.assertEqual(pair.read(0), b'')
self.assertEqual(pair.write(b''), 0)
def test_detach(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertRaises(self.UnsupportedOperation, pair.detach)
def test_constructor_max_buffer_size_removal(self):
with self.assertRaises(TypeError):
self.tp(self.MockRawIO(), self.MockRawIO(), 8, 12)
def test_constructor_with_not_readable(self):
class NotReadable(MockRawIO):
def readable(self):
return False
self.assertRaises(OSError, self.tp, NotReadable(), self.MockRawIO())
def test_constructor_with_not_writeable(self):
class NotWriteable(MockRawIO):
def writable(self):
return False
self.assertRaises(OSError, self.tp, self.MockRawIO(), NotWriteable())
def test_read(self):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
self.assertEqual(pair.read(3), b"abc")
self.assertEqual(pair.read(1), b"d")
self.assertEqual(pair.read(), b"ef")
pair = self.tp(self.BytesIO(b"abc"), self.MockRawIO())
self.assertEqual(pair.read(None), b"abc")
def test_readlines(self):
pair = lambda: self.tp(self.BytesIO(b"abc\ndef\nh"), self.MockRawIO())
self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
self.assertEqual(pair().readlines(), [b"abc\n", b"def\n", b"h"])
self.assertEqual(pair().readlines(5), [b"abc\n", b"def\n"])
def test_read1(self):
# .read1() is delegated to the underlying reader object, so this test
# can be shallow.
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
self.assertEqual(pair.read1(3), b"abc")
self.assertEqual(pair.read1(), b"def")
def test_readinto(self):
for method in ("readinto", "readinto1"):
with self.subTest(method):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
data = byteslike(b'\0' * 5)
self.assertEqual(getattr(pair, method)(data), 5)
self.assertEqual(bytes(data), b"abcde")
def test_write(self):
w = self.MockRawIO()
pair = self.tp(self.MockRawIO(), w)
pair.write(b"abc")
pair.flush()
buffer = bytearray(b"def")
pair.write(buffer)
buffer[:] = b"***" # Overwrite our copy of the data
pair.flush()
self.assertEqual(w._write_stack, [b"abc", b"def"])
def test_peek(self):
pair = self.tp(self.BytesIO(b"abcdef"), self.MockRawIO())
self.assertTrue(pair.peek(3).startswith(b"abc"))
self.assertEqual(pair.read(3), b"abc")
def test_readable(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertTrue(pair.readable())
def test_writeable(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertTrue(pair.writable())
def test_seekable(self):
# BufferedRWPairs are never seekable, even if their readers and writers
# are.
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertFalse(pair.seekable())
# .flush() is delegated to the underlying writer object and has been
# tested in the test_write method.
def test_close_and_closed(self):
pair = self.tp(self.MockRawIO(), self.MockRawIO())
self.assertFalse(pair.closed)
pair.close()
self.assertTrue(pair.closed)
def test_reader_close_error_on_close(self):
def reader_close():
reader_non_existing
reader = self.MockRawIO()
reader.close = reader_close
writer = self.MockRawIO()
pair = self.tp(reader, writer)
with self.assertRaises(NameError) as err:
pair.close()
self.assertIn('reader_non_existing', str(err.exception))
self.assertTrue(pair.closed)
self.assertFalse(reader.closed)
self.assertTrue(writer.closed)
# Silence destructor error
reader.close = lambda: None
# TODO: RUSTPYTHON, sys.unraisablehook
@unittest.expectedFailure
def test_writer_close_error_on_close(self):
def writer_close():
writer_non_existing
reader = self.MockRawIO()
writer = self.MockRawIO()
writer.close = writer_close
pair = self.tp(reader, writer)
with self.assertRaises(NameError) as err:
pair.close()
self.assertIn('writer_non_existing', str(err.exception))
self.assertFalse(pair.closed)
self.assertTrue(reader.closed)
self.assertFalse(writer.closed)
# Silence destructor error
writer.close = lambda: None
writer = None
# Ignore BufferedWriter (of the BufferedRWPair) unraisable exception
with support.catch_unraisable_exception():
# Ignore BufferedRWPair unraisable exception
with support.catch_unraisable_exception():
pair = None
support.gc_collect()
support.gc_collect()
def test_reader_writer_close_error_on_close(self):
def reader_close():
reader_non_existing
def writer_close():
writer_non_existing
reader = self.MockRawIO()
reader.close = reader_close
writer = self.MockRawIO()
writer.close = writer_close
pair = self.tp(reader, writer)
with self.assertRaises(NameError) as err:
pair.close()
self.assertIn('reader_non_existing', str(err.exception))
self.assertIsInstance(err.exception.__context__, NameError)
self.assertIn('writer_non_existing', str(err.exception.__context__))
self.assertFalse(pair.closed)
self.assertFalse(reader.closed)
self.assertFalse(writer.closed)
# Silence destructor error
reader.close = lambda: None
writer.close = lambda: None
def test_isatty(self):
class SelectableIsAtty(MockRawIO):
def __init__(self, isatty):
MockRawIO.__init__(self)
self._isatty = isatty
def isatty(self):
return self._isatty
pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(False))
self.assertFalse(pair.isatty())
pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(False))
self.assertTrue(pair.isatty())
pair = self.tp(SelectableIsAtty(False), SelectableIsAtty(True))
self.assertTrue(pair.isatty())
pair = self.tp(SelectableIsAtty(True), SelectableIsAtty(True))
self.assertTrue(pair.isatty())
def test_weakref_clearing(self):
brw = self.tp(self.MockRawIO(), self.MockRawIO())
ref = weakref.ref(brw)
brw = None
ref = None # Shouldn't segfault.
class CBufferedRWPairTest(BufferedRWPairTest):
    # Run the BufferedRWPairTest suite against the C implementation.
    tp = io.BufferedRWPair
class PyBufferedRWPairTest(BufferedRWPairTest):
    # Run the BufferedRWPairTest suite against the pure-Python implementation.
    tp = pyio.BufferedRWPair
class BufferedRandomTest(BufferedReaderTest, BufferedWriterTest):
read_mode = "rb+"
write_mode = "wb+"
def test_constructor(self):
BufferedReaderTest.test_constructor(self)
BufferedWriterTest.test_constructor(self)
def test_uninitialized(self):
BufferedReaderTest.test_uninitialized(self)
BufferedWriterTest.test_uninitialized(self)
def test_read_and_write(self):
raw = self.MockRawIO((b"asdf", b"ghjk"))
rw = self.tp(raw, 8)
self.assertEqual(b"as", rw.read(2))
rw.write(b"ddd")
rw.write(b"eee")
self.assertFalse(raw._write_stack)
self.assertEqual(b"ghjk", rw.read())
self.assertEqual(b"dddeee", raw._write_stack[0])
def test_seek_and_tell(self):
raw = self.BytesIO(b"asdfghjkl")
rw = self.tp(raw)
self.assertEqual(b"as", rw.read(2))
self.assertEqual(2, rw.tell())
rw.seek(0, 0)
self.assertEqual(b"asdf", rw.read(4))
rw.write(b"123f")
rw.seek(0, 0)
self.assertEqual(b"asdf123fl", rw.read())
self.assertEqual(9, rw.tell())
rw.seek(-4, 2)
self.assertEqual(5, rw.tell())
rw.seek(2, 1)
self.assertEqual(7, rw.tell())
self.assertEqual(b"fl", rw.read(11))
rw.flush()
self.assertEqual(b"asdf123fl", raw.getvalue())
self.assertRaises(TypeError, rw.seek, 0.0)
def check_flush_and_read(self, read_func):
raw = self.BytesIO(b"abcdefghi")
bufio = self.tp(raw)
self.assertEqual(b"ab", read_func(bufio, 2))
bufio.write(b"12")
self.assertEqual(b"ef", read_func(bufio, 2))
self.assertEqual(6, bufio.tell())
bufio.flush()
self.assertEqual(6, bufio.tell())
self.assertEqual(b"ghi", read_func(bufio))
raw.seek(0, 0)
raw.write(b"XYZ")
bufio.flush()
bufio.seek(0, 0)
self.assertEqual(b"XYZ", read_func(bufio, 3))
def test_flush_and_read(self):
self.check_flush_and_read(lambda bufio, *args: bufio.read(*args))
def test_flush_and_readinto(self):
def _readinto(bufio, n=-1):
b = bytearray(n if n >= 0 else 9999)
n = bufio.readinto(b)
return bytes(b[:n])
self.check_flush_and_read(_readinto)
def test_flush_and_peek(self):
def _peek(bufio, n=-1):
b = bufio.peek(n)
if n != -1:
b = b[:n]
bufio.seek(len(b), 1)
return b
self.check_flush_and_read(_peek)
def test_flush_and_write(self):
raw = self.BytesIO(b"abcdefghi")
bufio = self.tp(raw)
bufio.write(b"123")
bufio.flush()
bufio.write(b"45")
bufio.flush()
bufio.seek(0, 0)
self.assertEqual(b"12345fghi", raw.getvalue())
self.assertEqual(b"12345fghi", bufio.read())
def test_threads(self):
BufferedReaderTest.test_threads(self)
BufferedWriterTest.test_threads(self)
def test_writes_and_peek(self):
def _peek(bufio):
bufio.peek(1)
self.check_writes(_peek)
def _peek(bufio):
pos = bufio.tell()
bufio.seek(-1, 1)
bufio.peek(1)
bufio.seek(pos, 0)
self.check_writes(_peek)
def test_writes_and_reads(self):
def _read(bufio):
bufio.seek(-1, 1)
bufio.read(1)
self.check_writes(_read)
def test_writes_and_read1s(self):
def _read1(bufio):
bufio.seek(-1, 1)
bufio.read1(1)
self.check_writes(_read1)
def test_writes_and_readintos(self):
def _read(bufio):
bufio.seek(-1, 1)
bufio.readinto(bytearray(1))
self.check_writes(_read)
def test_write_after_readahead(self):
w = self.BytesIO(b"A" * 10)
bufio = self.tp(raw, 4)
self.assertEqual(bufio.read(1), b"A")
self.assertEqual(bufio.tell(), 1)
bufio.write(b"B" * overwrite_size)
self.assertEqual(bufio.tell(), overwrite_size + 1)
bufio.flush()
self.assertEqual(bufio.tell(), overwrite_size + 1)
s = raw.getvalue()
self.assertEqual(s,
b"A" + b"B" * overwrite_size + b"A" * (9 - overwrite_size))
def test_write_rewind_write(self):
def mutate(bufio, pos1, pos2):
assert pos2 >= pos1
bufio.seek(pos1)
bufio.read(pos2 - pos1)
bufio.write(b'\x02')
bufio.seek(pos1)
bufio.write(b'\x01')
b = b"\x80\x81\x82\x83\x84"
for i in range(0, len(b)):
for j in range(i, len(b)):
raw = self.BytesIO(b)
bufio = self.tp(raw, 100)
mutate(bufio, i, j)
bufio.flush()
expected = bytearray(b)
expected[j] = 2
expected[i] = 1
self.assertEqual(raw.getvalue(), expected,
"failed result for i=%d, j=%d" % (i, j))
def test_truncate_after_read_or_write(self):
raw = self.BytesIO(b"A" * 10)
bufio = self.tp(raw, 100)
self.assertEqual(bufio.read(2), b"AA")
self.assertEqual(bufio.truncate(), 2)
self.assertEqual(bufio.write(b"BB"), 2)
self.assertEqual(bufio.truncate(), 4)
def test_misbehaved_io(self):
BufferedReaderTest.test_misbehaved_io(self)
BufferedWriterTest.test_misbehaved_io(self)
def test_interleaved_read_write(self):
with self.BytesIO(b'abcdefgh') as raw:
with self.tp(raw, 100) as f:
f.write(b"1")
self.assertEqual(f.read(1), b'b')
f.write(b'2')
self.assertEqual(f.read1(1), b'd')
f.write(b'3')
buf = bytearray(1)
f.readinto(buf)
self.assertEqual(buf, b'f')
f.write(b'4')
self.assertEqual(f.peek(1), b'h')
f.flush()
self.assertEqual(raw.getvalue(), b'1b2d3f4h')
with self.BytesIO(b'abc') as raw:
with self.tp(raw, 100) as f:
self.assertEqual(f.read(1), b'a')
f.write(b"2")
self.assertEqual(f.read(1), b'c')
f.flush()
self.assertEqual(raw.getvalue(), b'a2c')
def test_interleaved_readline_write(self):
with self.BytesIO(b'ab\ncdef\ng\n') as raw:
with self.tp(raw) as f:
f.write(b'1')
self.assertEqual(f.readline(), b'b\n')
f.write(b'2')
self.assertEqual(f.readline(), b'def\n')
f.write(b'3')
self.assertEqual(f.readline(), b'\n')
f.flush()
self.assertEqual(raw.getvalue(), b'1b\n2def\n3\n')
test_unseekable = None
class CBufferedRandomTest(BufferedRandomTest, SizeofTest):
tp = io.BufferedRandom
@unittest.skip("TODO: RUSTPYTHON, fallible allocation")
@unittest.skipIf(MEMORY_SANITIZER, "MSan defaults to crashing "
"instead of returning NULL for malloc failure.")
def test_constructor(self):
BufferedRandomTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
# than 2 GiB RAM and a 64-bit kernel.
if sys.maxsize > 0x7FFFFFFF:
rawio = self.MockRawIO()
bufio = self.tp(rawio)
self.assertRaises((OverflowError, MemoryError, ValueError),
bufio.__init__, rawio, sys.maxsize)
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_garbage_collection(self):
CBufferedReaderTest.test_garbage_collection(self)
CBufferedWriterTest.test_garbage_collection(self)
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_args_error(self):
# Issue #17275
with self.assertRaisesRegex(TypeError, "BufferedRandom"):
self.tp(io.BytesIO(), 1024, 1024, 1024)
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_flush_error_on_close(self):
super().test_flush_error_on_close()
class PyBufferedRandomTest(BufferedRandomTest):
    # Run the BufferedRandomTest suite against the pure-Python implementation.
    tp = pyio.BufferedRandom
# To fully exercise seek/tell, the StatefulIncrementalDecoder has these
# properties:
# - A single output character can correspond to many bytes of input.
# - The number of input bytes to complete the character can be
# undetermined until the last input byte is received.
# - The number of input bytes can vary depending on previous input.
# - A single input byte can correspond to many characters of output.
# - The number of output characters can be undetermined until the
# last input byte is received.
# - The number of output characters can vary depending on previous input.
class StatefulIncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder for a toy, highly stateful test codec.

    Input is split into "words"; a word is processed once complete.
    Control words mutate the decoder's own state:
      - ``i<number>.`` sets the input word length (0 = period-terminated)
      - ``o<number>.`` sets the output width each word is padded/truncated to
    Any other word is echoed in ASCII, padded with ``-`` to the output
    width, and terminated with ``.``.
    """
    def __init__(self, errors='strict'):
        codecs.IncrementalDecoder.__init__(self, errors)
        self.reset()
    def __repr__(self):
        return '<SID %x>' % id(self)
    def reset(self):
        # i: current input word length (0 = variable, period-terminated).
        # o: current output width (0 = no padding/truncation).
        self.i = 1
        self.o = 1
        self.buffer = bytearray()
    def getstate(self):
        i, o = self.i ^ 1, self.o ^ 1 # so that flags = 0 after reset()
        return bytes(self.buffer), i*100 + o
    def setstate(self, state):
        # Inverse of getstate(): unpack the packed i/o flags and buffer.
        buffer, io = state
        self.buffer = bytearray(buffer)
        i, o = divmod(io, 100)
        self.i, self.o = i ^ 1, o ^ 1
    def decode(self, input, final=False):
        output = ''
        for b in input:
            if self.i == 0: # variable-length, terminated with period
                if b == ord('.'):
                    if self.buffer:
                        output += self.process_word()
                else:
                    self.buffer.append(b)
            else: # fixed-length, terminate after self.i bytes
                self.buffer.append(b)
                if len(self.buffer) == self.i:
                    output += self.process_word()
        if final and self.buffer: # EOF terminates the last word
            output += self.process_word()
        return output
    def process_word(self):
        # Consume self.buffer as one complete word; return its decoded text
        # ('' for the control words, which only mutate state).
        output = ''
        if self.buffer[0] == ord('i'):
            self.i = min(99, int(self.buffer[1:] or 0)) # set input length
        elif self.buffer[0] == ord('o'):
            self.o = min(99, int(self.buffer[1:] or 0)) # set output length
        else:
            output = self.buffer.decode('ascii')
            if len(output) < self.o:
                output += '-'*self.o # pad out with hyphens
            if self.o:
                output = output[:self.o] # truncate to output length
            output += '.'
        self.buffer = bytearray()
        return output
    # Codec search hook below answers only when tests flip this flag on.
    codecEnabled = False
    @classmethod
    def lookupTestDecoder(cls, name):
        # Codec search function for codecs.register(): returns a CodecInfo
        # for 'test_decoder' when enabled, otherwise None (keep searching).
        if cls.codecEnabled and name == 'test_decoder':
            latin1 = codecs.lookup('latin-1')
            return codecs.CodecInfo(
                name='test_decoder', encode=latin1.encode, decode=None,
                incrementalencoder=None,
                streamreader=None, streamwriter=None,
                incrementaldecoder=cls)
# Register the test decoder's lookup hook with the codec registry.  The
# hook returns None until a test sets StatefulIncrementalDecoder.codecEnabled,
# so the 'test_decoder' codec is only visible while explicitly enabled.
codecs.register(StatefulIncrementalDecoder.lookupTestDecoder)
class StatefulIncrementalDecoderTest(unittest.TestCase):
    """Sanity checks for StatefulIncrementalDecoder itself."""

    # (input bytes, final flag, expected decoded output) triples.
    test_cases = [
        # I=1, O=1 (fixed-length input == fixed-length output)
        (b'abcd', False, 'a.b.c.d.'),
        # I=0, O=0 (variable-length input, variable-length output)
        (b'oiabcd', True, 'abcd.'),
        # I=0, O=0 (should ignore extra periods)
        (b'oi...abcd...', True, 'abcd.'),
        # I=0, O=6 (variable-length input, fixed-length output)
        (b'i.o6.x.xyz.toolongtofit.', False, 'x-----.xyz---.toolon.'),
        # I=2, O=6 (fixed-length input < fixed-length output)
        (b'i.i2.o6xyz', True, 'xy----.z-----.'),
        # I=6, O=3 (fixed-length input > fixed-length output)
        (b'i.o3.i6.abcdefghijklmnop', True, 'abc.ghi.mno.'),
        # I=0, then 3; O=29, then 15 (with longer output)
        (b'i.o29.a.b.cde.o15.abcdefghijabcdefghij.i3.a.b.c.d.ei00k.l.m', True,
         'a----------------------------.' +
         'b----------------------------.' +
         'cde--------------------------.' +
         'abcdefghijabcde.' +
         'a.b------------.' +
         '.c.------------.' +
         'd.e------------.' +
         'k--------------.' +
         'l--------------.' +
         'm--------------.')
    ]

    def test_decoder(self):
        # One-shot decodes: each table entry on a fresh decoder.
        for data, final, expected in self.test_cases:
            decoder = StatefulIncrementalDecoder()
            self.assertEqual(decoder.decode(data, final), expected)

        # An unfinished decode followed by a forced EOF flush.
        decoder = StatefulIncrementalDecoder()
        self.assertEqual(decoder.decode(b'oiabcd'), '')
        self.assertEqual(decoder.decode(b'', 1), 'abcd.')
class TextIOWrapperTest(unittest.TestCase):
    def setUp(self):
        # Mixed-newline sample and its universal-newlines-translated form,
        # used by the various read/newline tests below.
        self.testdata = b"AAA\r\nBBB\rCCC\r\nDDD\nEEE\r\n"
        self.normalized = b"AAA\nBBB\nCCC\nDDD\nEEE\n".decode("ascii")
        # Make sure no scratch file lingers from a previous test.
        os_helper.unlink(os_helper.TESTFN)
    def tearDown(self):
        # Remove the scratch file a test may have left behind.
        os_helper.unlink(os_helper.TESTFN)
    def test_constructor(self):
        r = self.BytesIO(b"\xc3\xa9\n\n")
        b = self.BufferedReader(r, 1000)
        t = self.TextIOWrapper(b)
        # __init__ may be called again to re-configure an existing wrapper.
        t.__init__(b, encoding="latin-1", newline="\r\n")
        self.assertEqual(t.encoding, "latin-1")
        self.assertEqual(t.line_buffering, False)
        t.__init__(b, encoding="utf-8", line_buffering=True)
        self.assertEqual(t.encoding, "utf-8")
        self.assertEqual(t.line_buffering, True)
        # b"\xc3\xa9" decodes to "\xe9" under utf-8.
        self.assertEqual("\xe9\n", t.readline())
        # Invalid newline arguments are rejected.
        self.assertRaises(TypeError, t.__init__, b, newline=42)
        self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
def test_uninitialized(self):
t = self.TextIOWrapper.__new__(self.TextIOWrapper)
del t
t = self.TextIOWrapper.__new__(self.TextIOWrapper)
self.assertRaises(Exception, repr, t)
self.assertRaisesRegex((ValueError, AttributeError),
'uninitialized|has no attribute',
t.read, 0)
t.__init__(self.MockRawIO())
self.assertEqual(t.read(0), '')
def test_non_text_encoding_codecs_are_rejected(self):
# Ensure the constructor complains if passed a codec that isn't
r = self.BytesIO()
b = self.BufferedWriter(r)
with self.assertRaisesRegex(LookupError, "is not a text encoding"):
self.TextIOWrapper(b, encoding="hex")
def test_detach(self):
r = self.BytesIO()
b = self.BufferedWriter(r)
t = self.TextIOWrapper(b)
self.assertIs(t.detach(), b)
t = self.TextIOWrapper(b, encoding="ascii")
t.write("howdy")
self.assertFalse(r.getvalue())
t.detach()
self.assertEqual(r.getvalue(), b"howdy")
self.assertRaises(ValueError, t.detach)
repr(t)
self.assertEqual(t.encoding, "ascii")
self.assertEqual(t.errors, "strict")
self.assertFalse(t.line_buffering)
self.assertFalse(t.write_through)
def test_repr(self):
raw = self.BytesIO("hello".encode("utf-8"))
b = self.BufferedReader(raw)
t = self.TextIOWrapper(b, encoding="utf-8")
modname = self.TextIOWrapper.__module__
self.assertRegex(repr(t),
r"<(%s\.)?TextIOWrapper encoding='utf-8'>" % modname)
raw.name = "dummy"
self.assertRegex(repr(t),
r"<(%s\.)?TextIOWrapper name='dummy' encoding='utf-8'>" % modname)
t.mode = "r"
self.assertRegex(repr(t),
r"<(%s\.)?TextIOWrapper name='dummy' mode='r' encoding='utf-8'>" % modname)
raw.name = b"dummy"
self.assertRegex(repr(t),
r"<(%s\.)?TextIOWrapper name=b'dummy' mode='r' encoding='utf-8'>" % modname)
t.buffer.detach()
repr(t)
def test_recursive_repr(self):
raw = self.BytesIO()
t = self.TextIOWrapper(raw)
with support.swap_attr(raw, 'name', t):
try:
repr(t)
except RuntimeError:
pass
def test_line_buffering(self):
r = self.BytesIO()
b = self.BufferedWriter(r, 1000)
t = self.TextIOWrapper(b, newline="\n", line_buffering=True)
t.write("X")
self.assertEqual(r.getvalue(), b"")
t.write("Y\nZ")
self.assertEqual(r.getvalue(), b"XY\nZ")
t.write("A\rB")
self.assertEqual(r.getvalue(), b"XY\nZA\rB")
def test_reconfigure_line_buffering(self):
r = self.BytesIO()
b = self.BufferedWriter(r, 1000)
t = self.TextIOWrapper(b, newline="\n", line_buffering=False)
t.write("AB\nC")
self.assertEqual(r.getvalue(), b"")
t.reconfigure(line_buffering=True)
self.assertEqual(r.getvalue(), b"AB\nC")
t.write("DEF\nG")
self.assertEqual(r.getvalue(), b"AB\nCDEF\nG")
t.write("H")
self.assertEqual(r.getvalue(), b"AB\nCDEF\nG")
t.reconfigure(line_buffering=False)
self.assertEqual(r.getvalue(), b"AB\nCDEF\nGH")
t.write("IJ")
self.assertEqual(r.getvalue(), b"AB\nCDEF\nGH")
t.reconfigure()
t.reconfigure(line_buffering=None)
self.assertEqual(t.line_buffering, False)
t.reconfigure(line_buffering=True)
t.reconfigure()
t.reconfigure(line_buffering=None)
self.assertEqual(t.line_buffering, True)
@unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
def test_default_encoding(self):
old_environ = dict(os.environ)
try:
for key in ('LC_ALL', 'LANG', 'LC_CTYPE'):
if key in os.environ:
del os.environ[key]
current_locale_encoding = locale.getpreferredencoding(False)
b = self.BytesIO()
t = self.TextIOWrapper(b)
self.assertEqual(t.encoding, current_locale_encoding)
finally:
os.environ.clear()
os.environ.update(old_environ)
@support.cpython_only
@unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
def test_device_encoding(self):
import _testcapi
b = self.BytesIO()
b.fileno = lambda: _testcapi.INT_MAX + 1
self.assertRaises(OverflowError, self.TextIOWrapper, b)
b.fileno = lambda: _testcapi.UINT_MAX + 1
self.assertRaises(OverflowError, self.TextIOWrapper, b)
def test_encoding(self):
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="utf-8")
self.assertEqual(t.encoding, "utf-8")
t = self.TextIOWrapper(b)
self.assertIsNotNone(t.encoding)
codecs.lookup(t.encoding)
def test_encoding_errors_reading(self):
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.read)
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.read)
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="ignore")
self.assertEqual(t.read(), "abc\n\n")
b = self.BytesIO(b"abc\n\xff\n")
t = self.TextIOWrapper(b, encoding="ascii", errors="replace")
self.assertEqual(t.read(), "abc\n\ufffd\n")
def test_encoding_errors_writing(self):
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii")
self.assertRaises(UnicodeError, t.write, "\xff")
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="strict")
self.assertRaises(UnicodeError, t.write, "\xff")
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="ignore",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEqual(b.getvalue(), b"abcdef\n")
b = self.BytesIO()
t = self.TextIOWrapper(b, encoding="ascii", errors="replace",
newline="\n")
t.write("abc\xffdef\n")
t.flush()
self.assertEqual(b.getvalue(), b"abc?def\n")
def test_newlines(self):
input_lines = [ "unix\n", "windows\r\n", "os9\r", "last\n", "nonl" ]
tests = [
[ None, [ 'unix\n', 'windows\n', 'os9\n', 'last\n', 'nonl' ] ],
[ '', input_lines ],
[ '\n', [ "unix\n", "windows\r\n", "os9\rlast\n", "nonl" ] ],
[ '\r\n', [ "unix\nwindows\r\n", "os9\rlast\nnonl" ] ],
[ '\r', [ "unix\nwindows\r", "\nos9\r", "last\nnonl" ] ],
]
encodings = (
'utf-8', 'latin-1',
'utf-16', 'utf-16-le', 'utf-16-be',
'utf-32', 'utf-32-le', 'utf-32-be',
)
for encoding in encodings:
data = bytes(''.join(input_lines).encode(encoding))
for do_reads in (False, True):
for bufsize in range(1, 10):
for newline, exp_lines in tests:
bufio = self.BufferedReader(self.BytesIO(data), bufsize)
textio = self.TextIOWrapper(bufio, newline=newline,
encoding=encoding)
if do_reads:
got_lines = []
while True:
c2 = textio.read(2)
if c2 == '':
break
self.assertEqual(len(c2), 2)
got_lines.append(c2 + textio.readline())
else:
got_lines = list(textio)
for got_line, exp_line in zip(got_lines, exp_lines):
self.assertEqual(got_line, exp_line)
self.assertEqual(len(got_lines), len(exp_lines))
def test_newlines_input(self):
testdata = b"AAA\nBB\x00B\nCCC\rDDD\rEEE\r\nFFF\r\nGGG"
normalized = testdata.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
for newline, expected in [
(None, normalized.decode("ascii").splitlines(keepends=True)),
("", testdata.decode("ascii").splitlines(keepends=True)),
("\n", ["AAA\n", "BB\x00B\n", "CCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r\n", ["AAA\nBB\x00B\nCCC\rDDD\rEEE\r\n", "FFF\r\n", "GGG"]),
("\r", ["AAA\nBB\x00B\nCCC\r", "DDD\r", "EEE\r", "\nFFF\r", "\nGGG"]),
]:
buf = self.BytesIO(testdata)
txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
self.assertEqual(txt.readlines(), expected)
txt.seek(0)
self.assertEqual(txt.read(), "".join(expected))
def test_newlines_output(self):
testdict = {
"": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
"\n": b"AAA\nBBB\nCCC\nX\rY\r\nZ",
"\r": b"AAA\rBBB\rCCC\rX\rY\r\rZ",
"\r\n": b"AAA\r\nBBB\r\nCCC\r\nX\rY\r\r\nZ",
}
tests = [(None, testdict[os.linesep])] + sorted(testdict.items())
for newline, expected in tests:
buf = self.BytesIO()
txt = self.TextIOWrapper(buf, encoding="ascii", newline=newline)
txt.write("AAA\nB")
txt.write("BB\nCCC\n")
txt.write("X\rY\r\nZ")
txt.flush()
self.assertEqual(buf.closed, False)
self.assertEqual(buf.getvalue(), expected)
def test_destructor(self):
l = []
base = self.BytesIO
class MyBytesIO(base):
def close(self):
l.append(self.getvalue())
base.close(self)
b = MyBytesIO()
t = self.TextIOWrapper(b, encoding="ascii")
t.write("abc")
del t
support.gc_collect()
self.assertEqual([b"abc"], l)
def test_override_destructor(self):
record = []
class MyTextIO(self.TextIOWrapper):
def __del__(self):
record.append(1)
try:
f = super().__del__
except AttributeError:
pass
else:
f()
def close(self):
record.append(2)
super().close()
def flush(self):
record.append(3)
super().flush()
b = self.BytesIO()
t = MyTextIO(b, encoding="ascii")
del t
support.gc_collect()
self.assertEqual(record, [1, 2, 3])
@unittest.expectedFailure
def test_error_through_destructor(self):
rawio = self.CloseFailureIO()
with support.catch_unraisable_exception() as cm:
with self.assertRaises(AttributeError):
self.TextIOWrapper(rawio).xyzzy
if not IOBASE_EMITS_UNRAISABLE:
self.assertIsNone(cm.unraisable)
elif cm.unraisable is not None:
self.assertEqual(cm.unraisable.exc_type, OSError)
def test_basic_io(self):
for chunksize in (1, 2, 3, 4, 5, 15, 16, 17, 31, 32, 33, 63, 64, 65):
for enc in "ascii", "latin-1", "utf-8" :
f = self.open(os_helper.TESTFN, "w+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEqual(f.write("abc"), 3)
f.close()
f = self.open(os_helper.TESTFN, "r+", encoding=enc)
f._CHUNK_SIZE = chunksize
self.assertEqual(f.tell(), 0)
self.assertEqual(f.read(), "abc")
cookie = f.tell()
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.read(None), "abc")
f.seek(0)
self.assertEqual(f.read(2), "ab")
self.assertEqual(f.read(1), "c")
self.assertEqual(f.read(1), "")
self.assertEqual(f.read(), "")
self.assertEqual(f.tell(), cookie)
self.assertEqual(f.seek(0), 0)
self.assertEqual(f.seek(0, 2), cookie)
self.assertEqual(f.write("def"), 3)
self.assertEqual(f.seek(cookie), cookie)
self.assertEqual(f.read(), "def")
if enc.startswith("utf"):
self.multi_line_test(f, enc)
f.close()
def multi_line_test(self, f, enc):
f.seek(0)
f.truncate()
sample = "s\xff\u0fff\uffff"
wlines = []
for size in (0, 1, 2, 3, 4, 5, 30, 31, 32, 33, 62, 63, 64, 65, 1000):
chars = []
for i in range(size):
chars.append(sample[i % len(sample)])
line = "".join(chars) + "\n"
wlines.append((f.tell(), line))
f.write(line)
f.seek(0)
rlines = []
while True:
pos = f.tell()
line = f.readline()
if not line:
break
rlines.append((pos, line))
self.assertEqual(rlines, wlines)
def test_telling(self):
f = self.open(os_helper.TESTFN, "w+", encoding="utf-8")
p0 = f.tell()
f.write("\xff\n")
p1 = f.tell()
f.write("\xff\n")
p2 = f.tell()
f.seek(0)
self.assertEqual(f.tell(), p0)
self.assertEqual(f.readline(), "\xff\n")
self.assertEqual(f.tell(), p1)
self.assertEqual(f.readline(), "\xff\n")
self.assertEqual(f.tell(), p2)
f.seek(0)
for line in f:
self.assertEqual(line, "\xff\n")
self.assertRaises(OSError, f.tell)
self.assertEqual(f.tell(), p2)
f.close()
def test_seeking(self):
chunk_size = _default_chunk_size()
prefix_size = chunk_size - 2
u_prefix = "a" * prefix_size
prefix = bytes(u_prefix.encode("utf-8"))
self.assertEqual(len(u_prefix), len(prefix))
u_suffix = "\u8888\n"
suffix = bytes(u_suffix.encode("utf-8"))
line = prefix + suffix
with self.open(os_helper.TESTFN, "wb") as f:
f.write(line*2)
with self.open(os_helper.TESTFN, "r", encoding="utf-8") as f:
s = f.read(prefix_size)
self.assertEqual(s, str(prefix, "ascii"))
self.assertEqual(f.tell(), prefix_size)
self.assertEqual(f.readline(), u_suffix)
def test_seeking_too(self):
data = b'\xe0\xbf\xbf\n'
with self.open(os_helper.TESTFN, "wb") as f:
f.write(data)
with self.open(os_helper.TESTFN, "r", encoding="utf-8") as f:
f._CHUNK_SIZE
f._CHUNK_SIZE = 2
f.readline()
f.tell()
def test_seek_and_tell(self):
CHUNK_SIZE = 128
def test_seek_and_tell_with_data(data, min_pos=0):
f = self.open(os_helper.TESTFN, 'wb')
f.write(data)
f.close()
f = self.open(os_helper.TESTFN, encoding='test_decoder')
f._CHUNK_SIZE = CHUNK_SIZE
decoded = f.read()
f.close()
for i in range(min_pos, len(decoded) + 1):
for j in [1, 5, len(decoded) - i]:
f = self.open(os_helper.TESTFN, encoding='test_decoder')
self.assertEqual(f.read(i), decoded[:i])
cookie = f.tell()
self.assertEqual(f.read(j), decoded[i:i + j])
f.seek(cookie)
self.assertEqual(f.read(), decoded[i:])
f.close()
StatefulIncrementalDecoder.codecEnabled = 1
try:
for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
test_seek_and_tell_with_data(input)
for input, _, _ in StatefulIncrementalDecoderTest.test_cases:
offset = CHUNK_SIZE - len(input)//2
prefix = b'.'*offset
min_pos = offset*2
test_seek_and_tell_with_data(prefix + input, min_pos)
# Ensure our test decoder won't interfere with subsequent tests.
finally:
StatefulIncrementalDecoder.codecEnabled = 0
@unittest.expectedFailure
def test_multibyte_seek_and_tell(self):
f = self.open(os_helper.TESTFN, "w", encoding="euc_jp")
f.write("AB\n\u3046\u3048\n")
f.close()
f = self.open(os_helper.TESTFN, "r", encoding="euc_jp")
self.assertEqual(f.readline(), "AB\n")
p0 = f.tell()
self.assertEqual(f.readline(), "\u3046\u3048\n")
p1 = f.tell()
f.seek(p0)
self.assertEqual(f.readline(), "\u3046\u3048\n")
self.assertEqual(f.tell(), p1)
f.close()
@unittest.expectedFailure
def test_seek_with_encoder_state(self):
f = self.open(os_helper.TESTFN, "w", encoding="euc_jis_2004")
f.write("\u00e6\u0300")
p0 = f.tell()
f.write("\u00e6")
f.seek(p0)
f.write("\u0300")
f.close()
f = self.open(os_helper.TESTFN, "r", encoding="euc_jis_2004")
self.assertEqual(f.readline(), "\u00e6\u0300\u0300")
f.close()
@unittest.expectedFailure
def test_encoded_writes(self):
data = "1234567890"
tests = ("utf-16",
"utf-16-le",
"utf-16-be",
"utf-32",
"utf-32-le",
"utf-32-be")
for encoding in tests:
buf = self.BytesIO()
f = self.TextIOWrapper(buf, encoding=encoding)
f.write(data)
f.write(data)
f.seek(0)
self.assertEqual(f.read(), data * 2)
f.seek(0)
self.assertEqual(f.read(), data * 2)
self.assertEqual(buf.getvalue(), (data * 2).encode(encoding))
def test_unreadable(self):
class UnReadable(self.BytesIO):
def readable(self):
return False
txt = self.TextIOWrapper(UnReadable())
self.assertRaises(OSError, txt.read)
def test_read_one_by_one(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\r\nBB"))
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEqual(reads, "AA\nBB")
def test_readlines(self):
txt = self.TextIOWrapper(self.BytesIO(b"AA\nBB\nCC"))
self.assertEqual(txt.readlines(), ["AA\n", "BB\n", "CC"])
txt.seek(0)
self.assertEqual(txt.readlines(None), ["AA\n", "BB\n", "CC"])
txt.seek(0)
self.assertEqual(txt.readlines(5), ["AA\n", "BB\n"])
def test_read_by_chunk(self):
txt = self.TextIOWrapper(self.BytesIO(b"A" * 127 + b"\r\nB"))
reads = ""
while True:
c = txt.read(128)
if not c:
break
reads += c
self.assertEqual(reads, "A"*127+"\nB")
def test_writelines(self):
l = ['ab', 'cd', 'ef']
buf = self.BytesIO()
txt = self.TextIOWrapper(buf)
txt.writelines(l)
txt.flush()
self.assertEqual(buf.getvalue(), b'abcdef')
def test_writelines_userlist(self):
l = UserList(['ab', 'cd', 'ef'])
buf = self.BytesIO()
txt = self.TextIOWrapper(buf)
txt.writelines(l)
txt.flush()
self.assertEqual(buf.getvalue(), b'abcdef')
def test_writelines_error(self):
txt = self.TextIOWrapper(self.BytesIO())
self.assertRaises(TypeError, txt.writelines, [1, 2, 3])
self.assertRaises(TypeError, txt.writelines, None)
self.assertRaises(TypeError, txt.writelines, b'abc')
def test_issue1395_1(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
reads = ""
while True:
c = txt.read(1)
if not c:
break
reads += c
self.assertEqual(reads, self.normalized)
def test_issue1395_2(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = ""
while True:
c = txt.read(4)
if not c:
break
reads += c
self.assertEqual(reads, self.normalized)
def test_issue1395_3(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read(4)
reads += txt.readline()
reads += txt.readline()
reads += txt.readline()
self.assertEqual(reads, self.normalized)
def test_issue1395_4(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
reads += txt.read()
self.assertEqual(reads, self.normalized)
def test_issue1395_5(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt._CHUNK_SIZE = 4
reads = txt.read(4)
pos = txt.tell()
txt.seek(0)
txt.seek(pos)
self.assertEqual(txt.read(4), "BBB\n")
def test_issue2282(self):
buffer = self.BytesIO(self.testdata)
txt = self.TextIOWrapper(buffer, encoding="ascii")
self.assertEqual(buffer.seekable(), txt.seekable())
@unittest.expectedFailure
def test_append_bom(self):
filename = os_helper.TESTFN
for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
with self.open(filename, 'w', encoding=charset) as f:
f.write('aaa')
pos = f.tell()
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'aaa'.encode(charset))
with self.open(filename, 'a', encoding=charset) as f:
f.write('xxx')
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'aaaxxx'.encode(charset))
@unittest.expectedFailure
def test_seek_bom(self):
filename = os_helper.TESTFN
for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
with self.open(filename, 'w', encoding=charset) as f:
f.write('aaa')
pos = f.tell()
with self.open(filename, 'r+', encoding=charset) as f:
f.seek(pos)
f.write('zzz')
f.seek(0)
f.write('bbb')
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'bbbzzz'.encode(charset))
@unittest.expectedFailure
def test_seek_append_bom(self):
filename = os_helper.TESTFN
for charset in ('utf-8-sig', 'utf-16', 'utf-32'):
with self.open(filename, 'w', encoding=charset) as f:
f.write('aaa')
with self.open(filename, 'a', encoding=charset) as f:
f.seek(0)
f.seek(0, self.SEEK_END)
f.write('xxx')
with self.open(filename, 'rb') as f:
self.assertEqual(f.read(), 'aaaxxx'.encode(charset))
def test_errors_property(self):
with self.open(os_helper.TESTFN, "w") as f:
self.assertEqual(f.errors, "strict")
with self.open(os_helper.TESTFN, "w", errors="replace") as f:
self.assertEqual(f.errors, "replace")
@support.no_tracing
def test_threads_write(self):
event = threading.Event()
with self.open(os_helper.TESTFN, "w", buffering=1) as f:
def run(n):
text = "Thread%03d\n" % n
event.wait()
f.write(text)
threads = [threading.Thread(target=run, args=(x,))
for x in range(20)]
with support.start_threads(threads, event.set):
time.sleep(0.02)
with self.open(os_helper.TESTFN) as f:
content = f.read()
for n in range(20):
self.assertEqual(content.count("Thread%03d\n" % n), 1)
def test_flush_error_on_close(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
closed = []
def bad_flush():
closed[:] = [txt.closed, txt.buffer.closed]
raise OSError()
txt.flush = bad_flush
self.assertRaises(OSError, txt.close)
self.assertTrue(txt.closed)
self.assertTrue(txt.buffer.closed)
self.assertTrue(closed)
self.assertFalse(closed[0])
self.assertFalse(closed[1])
txt.flush = lambda: None
def test_close_error_on_close(self):
buffer = self.BytesIO(self.testdata)
def bad_flush():
raise OSError('flush')
def bad_close():
raise OSError('close')
buffer.close = bad_close
txt = self.TextIOWrapper(buffer, encoding="ascii")
txt.flush = bad_flush
with self.assertRaises(OSError) as err:
txt.close()
self.assertEqual(err.exception.args, ('close',))
self.assertIsInstance(err.exception.__context__, OSError)
self.assertEqual(err.exception.__context__.args, ('flush',))
self.assertFalse(txt.closed)
buffer.close = lambda: None
txt.flush = lambda: None
def test_nonnormalized_close_error_on_close(self):
buffer = self.BytesIO(self.testdata)
def bad_flush():
raise non_existing_flush
def bad_close():
raise non_existing_close
buffer.close = bad_close
txt = self.TextIOWrapper(buffer, encoding="ascii")
txt.flush = bad_flush
with self.assertRaises(NameError) as err:
txt.close()
self.assertIn('non_existing_close', str(err.exception))
self.assertIsInstance(err.exception.__context__, NameError)
self.assertIn('non_existing_flush', str(err.exception.__context__))
self.assertFalse(txt.closed)
buffer.close = lambda: None
txt.flush = lambda: None
def test_multi_close(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
txt.close()
txt.close()
txt.close()
self.assertRaises(ValueError, txt.flush)
def test_unseekable(self):
txt = self.TextIOWrapper(self.MockUnseekableIO(self.testdata))
self.assertRaises(self.UnsupportedOperation, txt.tell)
self.assertRaises(self.UnsupportedOperation, txt.seek, 0)
def test_readonly_attributes(self):
txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii")
buf = self.BytesIO(self.testdata)
with self.assertRaises(AttributeError):
txt.buffer = buf
def test_rawio(self):
b'def', b'ghi\njkl\nopq\n'])
txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
self.assertEqual(txt.read(4), 'abcd')
self.assertEqual(txt.readline(), 'efghi\n')
self.assertEqual(list(txt), ['jkl\n', 'opq\n'])
def test_rawio_write_through(self):
nopq\n'])
txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n',
write_through=True)
txt.write('1')
txt.write('23\n4')
txt.write('5')
self.assertEqual(b''.join(raw._write_stack), b'123\n45')
def test_bufio_write_through(self):
# Issue #21396: write_through=True doesn't force a flush()
flush_called, write_called = [], []
class BufferedWriter(self.BufferedWriter):
def flush(self, *args, **kwargs):
flush_called.append(True)
return super().flush(*args, **kwargs)
def write(self, *args, **kwargs):
write_called.append(True)
return super().write(*args, **kwargs)
rawio = self.BytesIO()
data = b"a"
bufio = BufferedWriter(rawio, len(data)*2)
textio = self.TextIOWrapper(bufio, encoding='ascii',
write_through=True)
text = data.decode('ascii')
textio.write(text)
# buffer.flush is not called with write_through=True
self.assertFalse(flush_called)
# buffer.write *is* called with write_through=True
self.assertTrue(write_called)
self.assertEqual(rawio.getvalue(), b"") # no flush
write_called = [] # reset
textio.write(text * 10) # total content is larger than bufio buffer
self.assertTrue(write_called)
self.assertEqual(rawio.getvalue(), data * 11) # all flushed
def test_reconfigure_write_through(self):
raw = self.MockRawIO([])
t = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
t.write('1')
t.reconfigure(write_through=True) # implied flush
self.assertEqual(t.write_through, True)
self.assertEqual(b''.join(raw._write_stack), b'1')
t.write('23')
self.assertEqual(b''.join(raw._write_stack), b'123')
t.reconfigure(write_through=False)
self.assertEqual(t.write_through, False)
t.write('45')
t.flush()
self.assertEqual(b''.join(raw._write_stack), b'12345')
# Keeping default value
t.reconfigure()
t.reconfigure(write_through=None)
self.assertEqual(t.write_through, False)
t.reconfigure(write_through=True)
t.reconfigure()
t.reconfigure(write_through=None)
self.assertEqual(t.write_through, True)
def test_read_nonbytes(self):
# Issue #17106
# Crash when underlying read() returns non-bytes
t = self.TextIOWrapper(self.StringIO('a'))
self.assertRaises(TypeError, t.read, 1)
t = self.TextIOWrapper(self.StringIO('a'))
self.assertRaises(TypeError, t.readline)
t = self.TextIOWrapper(self.StringIO('a'))
self.assertRaises(TypeError, t.read)
def test_illegal_encoder(self):
# Issue 31271: Calling write() while the return value of encoder's
rot13 = codecs.lookup("rot13")
with support.swap_attr(rot13, '_is_text_encoding', True):
t = io.TextIOWrapper(io.BytesIO(b'foo'), encoding="rot13")
self.assertRaises(TypeError, t.write, 'bar')
def test_illegal_decoder(self):
# Issue #17106
# Bypass the early encoding check added in issue 20404
def _make_illegal_wrapper():
quopri = codecs.lookup("quopri")
quopri._is_text_encoding = True
try:
t = self.TextIOWrapper(self.BytesIO(b'aaaaaa'),
newline='\n', encoding="quopri")
finally:
quopri._is_text_encoding = False
return t
# Crash when decoder returns non-string
t = _make_illegal_wrapper()
self.assertRaises(TypeError, t.read, 1)
t = _make_illegal_wrapper()
self.assertRaises(TypeError, t.readline)
t = _make_illegal_wrapper()
self.assertRaises(TypeError, t.read)
# Issue 31243: calling read() while the return value of decoder's
def _make_very_illegal_wrapper(getstate_ret_val):
class BadDecoder:
def getstate(self):
return getstate_ret_val
def _get_bad_decoder(dummy):
return BadDecoder()
quopri = codecs.lookup("quopri")
with support.swap_attr(quopri, 'incrementaldecoder',
_get_bad_decoder):
return _make_illegal_wrapper()
t = _make_very_illegal_wrapper(42)
self.assertRaises(TypeError, t.read, 42)
t = _make_very_illegal_wrapper(())
self.assertRaises(TypeError, t.read, 42)
t = _make_very_illegal_wrapper((1, 2))
self.assertRaises(TypeError, t.read, 42)
def _check_create_at_shutdown(self, **kwargs):
code = """if 1:
import codecs
import {iomod} as io
# Avoid looking up codecs at shutdown
codecs.lookup('utf-8')
class C:
def __init__(self):
self.buf = io.BytesIO()
def __del__(self):
io.TextIOWrapper(self.buf, **{kwargs})
print("ok")
c = C()
""".format(iomod=iomod, kwargs=kwargs)
return assert_python_ok("-c", code)
@support.requires_type_collecting
# TODO: RUSTPYTHON
@unittest.expectedFailure
def test_create_at_shutdown_without_encoding(self):
rc, out, err = self._check_create_at_shutdown()
if err:
# Can error out with a RuntimeError if the module state
# isn't found.
self.assertIn(self.shutdown_error, err.decode())
else:
self.assertEqual("ok", out.decode().strip())
@support.requires_type_collecting
@unittest.expectedFailure
def test_create_at_shutdown_with_encoding(self):
rc, out, err = self._check_create_at_shutdown(encoding='utf-8',
errors='strict')
self.assertFalse(err)
self.assertEqual("ok", out.decode().strip())
def test_read_byteslike(self):
r = MemviewBytesIO(b'Just some random string\n')
t = self.TextIOWrapper(r, 'utf-8')
bytes_val = _to_memoryview(r.getvalue()).tobytes()
self.assertEqual(t.read(200), bytes_val.decode('utf-8'))
def test_issue22849(self):
class F(object):
def readable(self): return True
def writable(self): return True
def seekable(self): return True
for i in range(10):
try:
self.TextIOWrapper(F(), encoding='utf-8')
except Exception:
pass
F.tell = lambda x: 0
t = self.TextIOWrapper(F(), encoding='utf-8')
def test_reconfigure_encoding_read(self):
data = 'abc\xe9\n'.encode('latin1') + 'd\xe9f\n'.encode('utf8')
raw = self.BytesIO(data)
txt = self.TextIOWrapper(raw, encoding='latin1', newline='\n')
self.assertEqual(txt.readline(), 'abc\xe9\n')
with self.assertRaises(self.UnsupportedOperation):
txt.reconfigure(encoding='utf-8')
with self.assertRaises(self.UnsupportedOperation):
txt.reconfigure(newline=None)
def test_reconfigure_write_fromascii(self):
raw = self.BytesIO()
txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
txt.write('foo\n')
txt.reconfigure(encoding='utf-8-sig')
txt.write('\xe9\n')
txt.flush()
self.assertEqual(raw.getvalue(), b'foo\n\xc3\xa9\n')
def test_reconfigure_write(self):
raw = self.BytesIO()
txt = self.TextIOWrapper(raw, encoding='latin1', newline='\n')
txt.write('abc\xe9\n')
txt.reconfigure(encoding='utf-8')
self.assertEqual(raw.getvalue(), b'abc\xe9\n')
txt.write('d\xe9f\n')
txt.flush()
self.assertEqual(raw.getvalue(), b'abc\xe9\nd\xc3\xa9f\n')
raw = self.BytesIO()
txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
txt.write('abc\n')
txt.reconfigure(encoding='utf-8-sig')
txt.write('d\xe9f\n')
txt.flush()
self.assertEqual(raw.getvalue(), b'abc\nd\xc3\xa9f\n')
def test_reconfigure_write_non_seekable(self):
raw = self.BytesIO()
raw.seekable = lambda: False
raw.seek = None
txt = self.TextIOWrapper(raw, encoding='ascii', newline='\n')
txt.write('abc\n')
txt.reconfigure(encoding='utf-8-sig')
txt.write('d\xe9f\n')
txt.flush()
self.assertEqual(raw.getvalue(), b'abc\n\xef\xbb\xbfd\xc3\xa9f\n')
def test_reconfigure_defaults(self):
txt = self.TextIOWrapper(self.BytesIO(), 'ascii', 'replace', '\n')
txt.reconfigure(encoding=None)
self.assertEqual(txt.encoding, 'ascii')
self.assertEqual(txt.errors, 'replace')
txt.write('LF\n')
txt.reconfigure(newline='\r\n')
self.assertEqual(txt.encoding, 'ascii')
self.assertEqual(txt.errors, 'replace')
txt.reconfigure(errors='ignore')
self.assertEqual(txt.encoding, 'ascii')
self.assertEqual(txt.errors, 'ignore')
txt.write('CRLF\n')
txt.reconfigure(encoding='utf-8', newline=None)
self.assertEqual(txt.errors, 'strict')
txt.seek(0)
self.assertEqual(txt.read(), 'LF\nCRLF\n')
self.assertEqual(txt.detach().getvalue(), b'LF\nCRLF\r\n')
def test_reconfigure_newline(self):
raw = self.BytesIO(b'CR\rEOF')
txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
txt.reconfigure(newline=None)
self.assertEqual(txt.readline(), 'CR\n')
raw = self.BytesIO(b'CR\rEOF')
txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
txt.reconfigure(newline='')
self.assertEqual(txt.readline(), 'CR\r')
raw = self.BytesIO(b'CR\rLF\nEOF')
txt = self.TextIOWrapper(raw, 'ascii', newline='\r')
txt.reconfigure(newline='\n')
self.assertEqual(txt.readline(), 'CR\rLF\n')
raw = self.BytesIO(b'LF\nCR\rEOF')
txt = self.TextIOWrapper(raw, 'ascii', newline='\n')
txt.reconfigure(newline='\r')
self.assertEqual(txt.readline(), 'LF\nCR\r')
raw = self.BytesIO(b'CR\rCRLF\r\nEOF')
txt = self.TextIOWrapper(raw, 'ascii', newline='\r')
txt.reconfigure(newline='\r\n')
self.assertEqual(txt.readline(), 'CR\rCRLF\r\n')
txt = self.TextIOWrapper(self.BytesIO(), 'ascii', newline='\r')
txt.reconfigure(newline=None)
txt.write('linesep\n')
txt.reconfigure(newline='')
txt.write('LF\n')
txt.reconfigure(newline='\n')
txt.write('LF\n')
txt.reconfigure(newline='\r')
txt.write('CR\n')
txt.reconfigure(newline='\r\n')
txt.write('CRLF\n')
expected = 'linesep' + os.linesep + 'LF\nLF\nCR\rCRLF\r\n'
self.assertEqual(txt.detach().getvalue().decode('ascii'), expected)
def test_issue25862(self):
    # Assertion failures occurred in tell() after read() and write().
    t = self.TextIOWrapper(self.BytesIO(b'test'), encoding='ascii')
    t.read(1)
    t.read()
    t.tell()
    # Same sequence, but with a write between the read and the tell.
    t = self.TextIOWrapper(self.BytesIO(b'test'), encoding='ascii')
    t.read(1)
    t.write('x')
    t.tell()
class MemviewBytesIO(io.BytesIO):
    """A BytesIO whose read()/read1() return memoryviews rather than bytes,
    to exercise consumers against arbitrary buffer objects."""

    def read1(self, len_):
        return _to_memoryview(super().read1(len_))

    def read(self, len_):
        return _to_memoryview(super().read(len_))
def _to_memoryview(buf):
arr = array.array('i')
idx = len(buf) - len(buf) % arr.itemsize
arr.frombytes(buf[:idx])
return memoryview(arr)
class CTextIOWrapperTest(TextIOWrapperTest):
    """TextIOWrapperTest run against the C flavour of io (bound by
    load_tests()).  The long run of expectedFailure overrides below marks
    inherited cases that do not pass yet on RustPython; each one simply
    defers to the shared implementation in TextIOWrapperTest."""
    io = io
    shutdown_error = "RuntimeError: could not find io module state"

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_constructor(self):
        super().test_constructor()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_detach(self):
        super().test_detach()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_encoding_read(self):
        super().test_reconfigure_encoding_read()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_line_buffering(self):
        super().test_reconfigure_line_buffering()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_basic_io(self):
        super().test_basic_io()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_telling(self):
        super().test_telling()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_uninitialized(self):
        super().test_uninitialized()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_non_text_encoding_codecs_are_rejected(self):
        super().test_non_text_encoding_codecs_are_rejected()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_repr(self):
        super().test_repr()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newlines(self):
        super().test_newlines()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newlines_input(self):
        super().test_newlines_input()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_read_one_by_one(self):
        super().test_read_one_by_one()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_read_by_chunk(self):
        super().test_read_by_chunk()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_1(self):
        super().test_issue1395_1()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_2(self):
        super().test_issue1395_2()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_3(self):
        super().test_issue1395_3()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_4(self):
        super().test_issue1395_4()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_issue1395_5(self):
        super().test_issue1395_5()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write_through(self):
        super().test_reconfigure_write_through()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write_fromascii(self):
        super().test_reconfigure_write_fromascii()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write(self):
        super().test_reconfigure_write()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_write_non_seekable(self):
        super().test_reconfigure_write_non_seekable()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_defaults(self):
        super().test_reconfigure_defaults()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_reconfigure_newline(self):
        super().test_reconfigure_newline()

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_initialization(self):
        # A bad newline argument must leave the wrapper unusable, and a
        # never-initialized wrapper must not crash repr().
        r = self.BytesIO(b"\xc3\xa9\n\n")
        b = self.BufferedReader(r, 1000)
        t = self.TextIOWrapper(b)
        self.assertRaises(ValueError, t.__init__, b, newline='xyzzy')
        self.assertRaises(ValueError, t.read)

        t = self.TextIOWrapper.__new__(self.TextIOWrapper)
        self.assertRaises(Exception, repr, t)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_garbage_collection(self):
        # C TextIOWrapper objects are collected, and collecting them flushes
        # all data to disk.
        # The Python version has __del__, so it ends in gc.garbage instead.
        with support.check_warnings(('', ResourceWarning)):
            rawio = io.FileIO(os_helper.TESTFN, "wb")
            b = self.BufferedWriter(rawio)
            t = self.TextIOWrapper(b, encoding="ascii")
            t.write("456def")
            t.x = t
            wr = weakref.ref(t)
            del t
            support.gc_collect()
        self.assertIsNone(wr(), wr)
        with self.open(os_helper.TESTFN, "rb") as f:
            self.assertEqual(f.read(), b"456def")

    def test_rwpair_cleared_before_textio(self):
        # Issue 13070: TextIOWrapper's finalization would crash when called
        # cleared by the GC.
        for i in range(1000):
            b1 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())
            t1 = self.TextIOWrapper(b1, encoding="ascii")
            b2 = self.BufferedRWPair(self.MockRawIO(), self.MockRawIO())
            t2 = self.TextIOWrapper(b2, encoding="ascii")
            # circular references
            t1.buddy = t2
            t2.buddy = t1
        support.gc_collect()

    def test_del__CHUNK_SIZE_SystemError(self):
        # Deleting the chunk-size attribute must raise AttributeError,
        # not crash the interpreter.
        t = self.TextIOWrapper(self.BytesIO(), encoding='ascii')
        with self.assertRaises(AttributeError):
            del t._CHUNK_SIZE
class PyTextIOWrapperTest(TextIOWrapperTest):
    """TextIOWrapperTest run against the pure-Python implementation."""
    io = pyio
    shutdown_error = "LookupError: unknown encoding: ascii"

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newlines(self):
        super().test_newlines()
class IncrementalNewlineDecoderTest(unittest.TestCase):
    """Shared tests for IncrementalNewlineDecoder; the C and pure-Python
    flavours are bound onto the subclasses below by load_tests()."""

    def check_newline_decoding_utf8(self, decoder):
        # UTF-8 specific tests for a newline decoder
        def _check_decode(b, s, **kwargs):
            # We exercise getstate() / setstate() as well as decode()
            state = decoder.getstate()
            self.assertEqual(decoder.decode(b, **kwargs), s)
            decoder.setstate(state)
            self.assertEqual(decoder.decode(b, **kwargs), s)

        _check_decode(b'\xe8\xa2\x88', "\u8888")

        # A multi-byte sequence fed one byte at a time only yields the
        # character once the sequence is complete.
        _check_decode(b'\xe8', "")
        _check_decode(b'\xa2', "")
        _check_decode(b'\x88', "\u8888")

        _check_decode(b'\xe8', "")
        _check_decode(b'\xa2', "")
        _check_decode(b'\x88', "\u8888")

        # An incomplete trailing sequence must fail when final=True.
        _check_decode(b'\xe8', "")
        self.assertRaises(UnicodeDecodeError, decoder.decode, b'', final=True)

        decoder.reset()
        _check_decode(b'\n', "\n")
        _check_decode(b'\r', "")
        _check_decode(b'', "\n", final=True)
        _check_decode(b'\r', "\n", final=True)

        _check_decode(b'\r', "")
        _check_decode(b'a', "\na")

        _check_decode(b'\r\r\n', "\n\n")
        _check_decode(b'\r', "")
        _check_decode(b'\r', "\n")
        _check_decode(b'\na', "\na")

        _check_decode(b'\xe8\xa2\x88\r\n', "\u8888\n")
        _check_decode(b'\xe8\xa2\x88', "\u8888")
        _check_decode(b'\n', "\n")
        _check_decode(b'\xe8\xa2\x88\r', "\u8888")
        _check_decode(b'\n', "\n")

    def check_newline_decoding(self, decoder, encoding):
        result = []
        if encoding is not None:
            encoder = codecs.getincrementalencoder(encoding)()
            def _decode_bytewise(s):
                # Decode one byte at a time
                for b in encoder.encode(s):
                    result.append(decoder.decode(bytes([b])))
        else:
            encoder = None
            def _decode_bytewise(s):
                # Decode one char at a time
                for c in s:
                    result.append(decoder.decode(c))
        # decoder.newlines accumulates the kinds of line endings seen so far.
        self.assertEqual(decoder.newlines, None)
        _decode_bytewise("abc\n\r")
        self.assertEqual(decoder.newlines, '\n')
        _decode_bytewise("\nabc")
        self.assertEqual(decoder.newlines, ('\n', '\r\n'))
        _decode_bytewise("abc\r")
        self.assertEqual(decoder.newlines, ('\n', '\r\n'))
        _decode_bytewise("abc")
        self.assertEqual(decoder.newlines, ('\r', '\n', '\r\n'))
        _decode_bytewise("abc\r")
        self.assertEqual("".join(result), "abc\n\nabcabc\nabcabc")
        # reset() must clear both the pending '\r' and the newlines record.
        decoder.reset()
        input = "abc"
        if encoder is not None:
            encoder.reset()
            input = encoder.encode(input)
        self.assertEqual(decoder.decode(input), "abc")
        self.assertEqual(decoder.newlines, None)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newline_decoder(self):
        encodings = (
            # None meaning the IncrementalNewlineDecoder takes unicode input
            # rather than bytes input
            None, 'utf-8', 'latin-1',
            'utf-16', 'utf-16-le', 'utf-16-be',
            'utf-32', 'utf-32-le', 'utf-32-be',
        )
        for enc in encodings:
            decoder = enc and codecs.getincrementaldecoder(enc)()
            decoder = self.IncrementalNewlineDecoder(decoder, translate=True)
            self.check_newline_decoding(decoder, enc)
        decoder = codecs.getincrementaldecoder("utf-8")()
        decoder = self.IncrementalNewlineDecoder(decoder, translate=True)
        self.check_newline_decoding_utf8(decoder)
        self.assertRaises(TypeError, decoder.setstate, 42)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_newline_bytes(self):
        # Issue 5433: Excessive optimization in IncrementalNewlineDecoder
        def _check(dec):
            self.assertEqual(dec.newlines, None)
            self.assertEqual(dec.decode("\u0D00"), "\u0D00")
            self.assertEqual(dec.newlines, None)
            self.assertEqual(dec.decode("\u0A00"), "\u0A00")
            self.assertEqual(dec.newlines, None)
        dec = self.IncrementalNewlineDecoder(None, translate=False)
        _check(dec)
        dec = self.IncrementalNewlineDecoder(None, translate=True)
        _check(dec)

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_translate(self):
        # issue 35062
        for translate in (-2, -1, 1, 2):
            # any truthy value must enable translation
            decoder = codecs.getincrementaldecoder("utf-8")()
            decoder = self.IncrementalNewlineDecoder(decoder, translate)
            self.check_newline_decoding_utf8(decoder)
        decoder = codecs.getincrementaldecoder("utf-8")()
        decoder = self.IncrementalNewlineDecoder(decoder, translate=0)
        self.assertEqual(decoder.decode(b"\r\r\n"), "\r\r\n")
class CIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest):
    # Exercised against the C io namespace (bound by name prefix in load_tests).
    pass

class PyIncrementalNewlineDecoderTest(IncrementalNewlineDecoderTest):
    # Exercised against the pure-Python io namespace (see load_tests).
    pass
# XXX Tests for open()
class MiscIOTest(unittest.TestCase):
    """Assorted tests of the io module's public surface (attributes,
    pickling, pipes, finalization warnings, ...); run against both the C
    and the pure-Python implementation via the subclasses below."""

    def tearDown(self):
        os_helper.unlink(os_helper.TESTFN)

    def test___all__(self):
        # Every name in io.__all__ must resolve, and every non-open,
        # non-SEEK_* entry must be an IOBase subclass or an exception type.
        for name in self.io.__all__:
            obj = getattr(self.io, name, None)
            self.assertIsNotNone(obj, name)
            if name in ("open", "open_code"):
                continue
            elif "error" in name.lower() or name == "UnsupportedOperation":
                self.assertTrue(issubclass(obj, Exception), name)
            elif not name.startswith("SEEK_"):
                self.assertTrue(issubclass(obj, self.IOBase))

    def test_attributes(self):
        f = self.open(os_helper.TESTFN, "wb", buffering=0)
        self.assertEqual(f.mode, "wb")
        f.close()

        # XXX RUSTPYTHON: universal mode is deprecated anyway, so I
        # feel fine about skipping it
        # with support.check_warnings(('', DeprecationWarning)):
        #     f = self.open(os_helper.TESTFN, "U")
        #     self.assertEqual(f.name, os_helper.TESTFN)
        #     self.assertEqual(f.buffer.name, os_helper.TESTFN)
        #     self.assertEqual(f.buffer.raw.name, os_helper.TESTFN)
        #     self.assertEqual(f.mode, "U")
        #     self.assertEqual(f.buffer.mode, "rb")
        #     self.assertEqual(f.buffer.raw.mode, "rb")
        #     f.close()

        f = self.open(os_helper.TESTFN, "w+")
        self.assertEqual(f.mode, "w+")
        self.assertEqual(f.buffer.mode, "rb+") # Does it really matter?
        self.assertEqual(f.buffer.raw.mode, "rb+")

        g = self.open(f.fileno(), "wb", closefd=False)
        self.assertEqual(g.mode, "wb")
        self.assertEqual(g.raw.mode, "wb")
        # Opened from a file descriptor, name is the fd itself.
        self.assertEqual(g.name, f.fileno())
        self.assertEqual(g.raw.name, f.fileno())
        f.close()
        g.close()

    def test_open_pipe_with_append(self):
        # bpo-27805: Ignore ESPIPE from lseek() in open().
        r, w = os.pipe()
        self.addCleanup(os.close, r)
        f = self.open(w, 'a')
        self.addCleanup(f.close)
        # Check that the file is marked non-seekable. On Windows, however, lseek
        # somehow succeeds on pipes.
        if sys.platform != 'win32':
            self.assertFalse(f.seekable())

    def test_io_after_close(self):
        # Every public operation on a closed file must raise ValueError.
        for kwargs in [
                {"mode": "w"},
                {"mode": "wb"},
                {"mode": "w", "buffering": 1},
                {"mode": "w", "buffering": 2},
                {"mode": "wb", "buffering": 0},
                {"mode": "r"},
                {"mode": "rb"},
                {"mode": "r", "buffering": 1},
                {"mode": "r", "buffering": 2},
                {"mode": "rb", "buffering": 0},
                {"mode": "w+"},
                {"mode": "w+b"},
                {"mode": "w+", "buffering": 1},
                {"mode": "w+", "buffering": 2},
                {"mode": "w+b", "buffering": 0},
            ]:
            f = self.open(os_helper.TESTFN, **kwargs)
            f.close()
            self.assertRaises(ValueError, f.flush)
            self.assertRaises(ValueError, f.fileno)
            self.assertRaises(ValueError, f.isatty)
            self.assertRaises(ValueError, f.__iter__)
            if hasattr(f, "peek"):
                self.assertRaises(ValueError, f.peek, 1)
            self.assertRaises(ValueError, f.read)
            if hasattr(f, "read1"):
                self.assertRaises(ValueError, f.read1, 1024)
                self.assertRaises(ValueError, f.read1)
            if hasattr(f, "readall"):
                self.assertRaises(ValueError, f.readall)
            if hasattr(f, "readinto"):
                self.assertRaises(ValueError, f.readinto, bytearray(1024))
            if hasattr(f, "readinto1"):
                self.assertRaises(ValueError, f.readinto1, bytearray(1024))
            self.assertRaises(ValueError, f.readline)
            self.assertRaises(ValueError, f.readlines)
            self.assertRaises(ValueError, f.readlines, 1)
            self.assertRaises(ValueError, f.seek, 0)
            self.assertRaises(ValueError, f.tell)
            self.assertRaises(ValueError, f.truncate)
            self.assertRaises(ValueError, f.write,
                              b"" if "b" in kwargs['mode'] else "")
            self.assertRaises(ValueError, f.writelines, [])
            self.assertRaises(ValueError, next, f)

    # TODO: RUSTPYTHON, cyclic gc
    @unittest.expectedFailure
    def test_blockingioerror(self):
        # Various BlockingIOError issues
        class C(str):
            pass
        c = C("")
        b = self.BlockingIOError(1, c)
        # circular reference: collecting it must not leak through the error
        c.b = b
        b.c = c
        wr = weakref.ref(c)
        del c, b
        support.gc_collect()
        self.assertIsNone(wr(), wr)

    def test_abcs(self):
        # Test the visible base classes are ABCs.
        self.assertIsInstance(self.IOBase, abc.ABCMeta)
        self.assertIsInstance(self.RawIOBase, abc.ABCMeta)
        self.assertIsInstance(self.BufferedIOBase, abc.ABCMeta)
        self.assertIsInstance(self.TextIOBase, abc.ABCMeta)

    def _check_abc_inheritance(self, abcmodule):
        # Raw, buffered and text files must each register with exactly the
        # matching ABCs from *abcmodule*.
        with self.open(os_helper.TESTFN, "wb", buffering=0) as f:
            self.assertIsInstance(f, abcmodule.IOBase)
            self.assertIsInstance(f, abcmodule.RawIOBase)
            self.assertNotIsInstance(f, abcmodule.BufferedIOBase)
            self.assertNotIsInstance(f, abcmodule.TextIOBase)
        with self.open(os_helper.TESTFN, "wb") as f:
            self.assertIsInstance(f, abcmodule.IOBase)
            self.assertNotIsInstance(f, abcmodule.RawIOBase)
            self.assertIsInstance(f, abcmodule.BufferedIOBase)
            self.assertNotIsInstance(f, abcmodule.TextIOBase)
        with self.open(os_helper.TESTFN, "w") as f:
            self.assertIsInstance(f, abcmodule.IOBase)
            self.assertNotIsInstance(f, abcmodule.RawIOBase)
            self.assertNotIsInstance(f, abcmodule.BufferedIOBase)
            self.assertIsInstance(f, abcmodule.TextIOBase)

    def test_abc_inheritance(self):
        # Test implementations inherit from their respective ABCs
        self._check_abc_inheritance(self)

    def test_abc_inheritance_official(self):
        # Test implementations inherit from the official ABCs of the
        # baseline "io" module.
        self._check_abc_inheritance(io)

    def _check_warn_on_dealloc(self, *args, **kwargs):
        # Dropping the last reference to an open file must emit a
        # ResourceWarning mentioning the file's repr.
        f = open(*args, **kwargs)
        r = repr(f)
        with self.assertWarns(ResourceWarning) as cm:
            f = None
            support.gc_collect()
        self.assertIn(r, str(cm.warning.args[0]))

    # TODO: RUSTPYTHON
    @unittest.expectedFailure
    def test_warn_on_dealloc(self):
        self._check_warn_on_dealloc(os_helper.TESTFN, "wb", buffering=0)
        self._check_warn_on_dealloc(os_helper.TESTFN, "wb")
        self._check_warn_on_dealloc(os_helper.TESTFN, "w")

    def _check_warn_on_dealloc_fd(self, *args, **kwargs):
        fds = []
        def cleanup_fds():
            for fd in fds:
                try:
                    os.close(fd)
                except OSError as e:
                    if e.errno != errno.EBADF:
                        raise
        self.addCleanup(cleanup_fds)
        r, w = os.pipe()
        fds += r, w
        self._check_warn_on_dealloc(r, *args, **kwargs)
        # When using closefd=False, there's no warning
        r, w = os.pipe()
        fds += r, w
        with support.check_no_resource_warning(self):
            open(r, *args, closefd=False, **kwargs)

    @unittest.expectedFailure
    def test_warn_on_dealloc_fd(self):
        self._check_warn_on_dealloc_fd("rb", buffering=0)
        self._check_warn_on_dealloc_fd("rb")
        self._check_warn_on_dealloc_fd("r")

    def test_pickling(self):
        # Pickling file objects is forbidden in every mode and protocol.
        for kwargs in [
                {"mode": "w"},
                {"mode": "wb"},
                {"mode": "wb", "buffering": 0},
                {"mode": "r"},
                {"mode": "rb"},
                {"mode": "rb", "buffering": 0},
                {"mode": "w+"},
                {"mode": "w+b"},
                {"mode": "w+b", "buffering": 0},
            ]:
            for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
                with self.open(os_helper.TESTFN, **kwargs) as f:
                    self.assertRaises(TypeError, pickle.dumps, f, protocol)

    @unittest.expectedFailure
    def test_nonblock_pipe_write_bigbuf(self):
        self._test_nonblock_pipe_write(16*1024)

    @unittest.expectedFailure
    def test_nonblock_pipe_write_smallbuf(self):
        self._test_nonblock_pipe_write(1024)

    @unittest.skipUnless(hasattr(os, 'set_blocking'),
                         'os.set_blocking() required for this test')
    def _test_nonblock_pipe_write(self, bufsize):
        # Drive buffered writes through a non-blocking pipe, collecting the
        # BlockingIOError partial-write bookkeeping, and check that what was
        # sent equals what was received.
        sent = []
        received = []
        r, w = os.pipe()
        os.set_blocking(r, False)
        os.set_blocking(w, False)

        rf = self.open(r, mode='rb', closefd=True, buffering=bufsize)
        wf = self.open(w, mode='wb', closefd=True, buffering=bufsize)

        with rf, wf:
            for N in 9999, 73, 7574:
                try:
                    i = 0
                    while True:
                        msg = bytes([i % 26 + 97]) * N
                        sent.append(msg)
                        wf.write(msg)
                        i += 1
                except self.BlockingIOError as e:
                    self.assertEqual(e.args[0], errno.EAGAIN)
                    self.assertEqual(e.args[2], e.characters_written)
                    # truncate the last message to what actually went through
                    sent[-1] = sent[-1][:e.characters_written]
                    received.append(rf.read())
                    msg = b'BLOCKED'
                    wf.write(msg)
                    sent.append(msg)

            while True:
                try:
                    wf.flush()
                    break
                except self.BlockingIOError as e:
                    self.assertEqual(e.args[0], errno.EAGAIN)
                    self.assertEqual(e.args[2], e.characters_written)
                    self.assertEqual(e.characters_written, 0)
                    received.append(rf.read())

            received += iter(rf.read, None)

        sent, received = b''.join(sent), b''.join(received)
        self.assertEqual(sent, received)
        self.assertTrue(wf.closed)
        self.assertTrue(rf.closed)

    def test_create_fail(self):
        # 'x' mode fails if the file is already present
        with self.open(os_helper.TESTFN, 'w'):
            pass
        self.assertRaises(FileExistsError, self.open, os_helper.TESTFN, 'x')

    def test_create_writes(self):
        # 'x' mode opens for writing
        with self.open(os_helper.TESTFN, 'xb') as f:
            f.write(b"spam")
        with self.open(os_helper.TESTFN, 'rb') as f:
            self.assertEqual(b"spam", f.read())

    def test_open_allargs(self):
        # there used to be a buffer overflow in the parser for rawmode
        self.assertRaises(ValueError, self.open, os_helper.TESTFN, 'rwax+')
class CMiscIOTest(MiscIOTest):
    # Bind the C implementation of the io module (see load_tests).
    io = io

    def test_readinto_buffer_overflow(self):
        # readinto() must not trust a read() result that is larger than the
        # destination buffer.
        class BadReader(self.io.BufferedIOBase):
            def read(self, n=-1):
                return b'x' * 10**6
        bufio = BadReader()
        b = bytearray(2)
        self.assertRaises(ValueError, bufio.readinto, b)
def check_daemon_threads_shutdown_deadlock(self, stream_name):
    """Run a subprocess in which a daemon thread and the main thread both
    write to sys.<stream_name>, and check that interpreter shutdown either
    succeeds cleanly or dies with the expected fatal-error message rather
    than deadlocking.

    NOTE(review): the opening lines of the embedded script were garbled in
    this copy (the stray ``ys`` token was the tail of ``import sys`` and the
    ``code = \"\"\"if 1:`` prefix was lost, leaving a syntax error);
    reconstructed from the upstream CPython test_io test.
    """
    code = """if 1:
        import sys
        import time
        import threading
        from test.support import SuppressCrashReport

        file = sys.{stream_name}

        def run():
            while True:
                file.write('.')
                file.flush()

        crash = SuppressCrashReport()
        crash.__enter__()
        # don't call __exit__(): the crash occurs at Python shutdown

        thread = threading.Thread(target=run)
        thread.daemon = True
        thread.start()

        time.sleep(0.5)
        file.write('!')
        file.flush()
        """.format_map(locals())
    res, _ = run_python_until_end("-c", code)
    err = res.err.decode()
    if res.rc != 0:
        # Failure: should be a fatal error
        pattern = (r"Fatal Python error: could not acquire lock "
                   r"for <(_io\.)?BufferedWriter name='<{stream_name}>'> "
                   r"at interpreter shutdown, possibly due to "
                   r"daemon threads".format_map(locals()))
        self.assertRegex(err, pattern)
    else:
        self.assertFalse(err.strip('.!'))
def test_daemon_threads_shutdown_stdout_deadlock(self):
    # Exercise the shutdown scenario on sys.stdout.
    self.check_daemon_threads_shutdown_deadlock('stdout')

def test_daemon_threads_shutdown_stderr_deadlock(self):
    # Exercise the shutdown scenario on sys.stderr.
    self.check_daemon_threads_shutdown_deadlock('stderr')
class PyMiscIOTest(MiscIOTest):
    # Run the shared MiscIOTest cases against the pure-Python implementation.
    io = pyio
@unittest.skipIf(os.name == 'nt', 'POSIX signals required for this test.')
class SignalsTest(unittest.TestCase):
    """Behaviour of buffered/text I/O when interrupted by POSIX signals
    (partial writes, EINTR retries, reentrant signal handlers)."""

    def setUp(self):
        self.oldalrm = signal.signal(signal.SIGALRM, self.alarm_interrupt)

    def tearDown(self):
        signal.signal(signal.SIGALRM, self.oldalrm)

    def alarm_interrupt(self, sig, frame):
        # Raise ZeroDivisionError from inside the signal handler.
        1/0

    def check_interrupted_write(self, item, bytes, **fdopen_kwargs):
        read_results = []
        def _read():
            s = os.read(r, 1)
            read_results.append(s)

        t = threading.Thread(target=_read)
        t.daemon = True
        r, w = os.pipe()
        fdopen_kwargs["closefd"] = False
        large_data = item * (support.PIPE_MAX_SIZE // len(item) + 1)
        try:
            wio = self.io.open(w, **fdopen_kwargs)
            if hasattr(signal, 'pthread_sigmask'):
                # create the thread with SIGALRM signal blocked
                signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGALRM])
                t.start()
                signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGALRM])
            else:
                t.start()

            # Fill the pipe enough that the write will be blocking.
            # It will be interrupted by the timer armed above. Since the
            # other thread has read one byte, the low-level write will
            # return with a successful (partial) result rather than an EINTR.
            # The buffered IO layer must check for pending signal
            # handlers, which in this case will invoke alarm_interrupt().
            signal.alarm(1)
            try:
                self.assertRaises(ZeroDivisionError, wio.write, large_data)
            finally:
                signal.alarm(0)
                t.join()
            # We got one byte, get another one and check that it isn't a
            # buffered one.
            read_results.append(os.read(r, 1))
            self.assertEqual(read_results, [bytes[0:1], bytes[1:2]])
        finally:
            os.close(w)
            os.close(r)
            # This is deliberate. If we didn't close the file descriptor
            # before closing wio, wio would try to flush its internal
            # buffer, and block again.
            try:
                wio.close()
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise

    def test_interrupted_write_unbuffered(self):
        self.check_interrupted_write(b"xy", b"xy", mode="wb", buffering=0)

    def test_interrupted_write_buffered(self):
        self.check_interrupted_write(b"xy", b"xy", mode="wb")

    def test_interrupted_write_text(self):
        self.check_interrupted_write("xy", b"xy", mode="w", encoding="ascii")

    @support.no_tracing
    def check_reentrant_write(self, data, **fdopen_kwargs):
        def on_alarm(*args):
            # Will be called reentrantly from the same thread
            wio.write(data)
            1/0
        signal.signal(signal.SIGALRM, on_alarm)
        r, w = os.pipe()
        wio = self.io.open(w, **fdopen_kwargs)
        try:
            signal.alarm(1)
            # Either the reentrant call to wio.write() fails with RuntimeError,
            # or the signal handler raises ZeroDivisionError.
            with self.assertRaises((ZeroDivisionError, RuntimeError)) as cm:
                while 1:
                    for i in range(100):
                        wio.write(data)
                        wio.flush()
                    # Make sure the buffer doesn't fill up and block further writes
                    os.read(r, len(data) * 100)
            exc = cm.exception
            if isinstance(exc, RuntimeError):
                self.assertTrue(str(exc).startswith("reentrant call"), str(exc))
        finally:
            signal.alarm(0)
            wio.close()
            os.close(r)

    def test_reentrant_write_buffered(self):
        self.check_reentrant_write(b"xy", mode="wb")

    def test_reentrant_write_text(self):
        self.check_reentrant_write("xy", mode="w", encoding="ascii")

    def check_interrupted_read_retry(self, decode, **fdopen_kwargs):
        # The signal handler writes "bar" into the pipe; the interrupted
        # read() must retry and return the concatenation of both writes.
        r, w = os.pipe()
        fdopen_kwargs["closefd"] = False
        def alarm_handler(sig, frame):
            os.write(w, b"bar")
        signal.signal(signal.SIGALRM, alarm_handler)
        try:
            rio = self.io.open(r, **fdopen_kwargs)
            os.write(w, b"foo")
            signal.alarm(1)
            self.assertEqual(decode(rio.read(6)), "foobar")
        finally:
            signal.alarm(0)
            rio.close()
            os.close(w)
            os.close(r)

    @unittest.expectedFailure
    def test_interrupted_read_retry_buffered(self):
        self.check_interrupted_read_retry(lambda x: x.decode('latin1'),
                                          mode="rb")

    @unittest.expectedFailure
    def test_interrupted_read_retry_text(self):
        self.check_interrupted_read_retry(lambda x: x,
                                          mode="r")

    def check_interrupted_write_retry(self, item, **fdopen_kwargs):
        select = support.import_module("select")

        # A quantity that exceeds the buffer size of an anonymous pipe's
        # write end.
        N = support.PIPE_MAX_SIZE
        r, w = os.pipe()
        fdopen_kwargs["closefd"] = False

        # We need a separate thread to read from the pipe and allow the
        # write() to finish. This thread is started after the SIGALRM is
        # received (forcing a first EINTR in write()).
        read_results = []
        write_finished = False
        error = None
        def _read():
            try:
                while not write_finished:
                    while r in select.select([r], [], [], 1.0)[0]:
                        s = os.read(r, 1024)
                        read_results.append(s)
            except BaseException as exc:
                nonlocal error
                error = exc
        t = threading.Thread(target=_read)
        t.daemon = True
        def alarm1(sig, frame):
            # first alarm interrupts the write; arm a second alarm
            signal.signal(signal.SIGALRM, alarm2)
            signal.alarm(1)
        def alarm2(sig, frame):
            # second alarm starts the reader, unblocking the write
            t.start()

        large_data = item * N
        signal.signal(signal.SIGALRM, alarm1)
        try:
            wio = self.io.open(w, **fdopen_kwargs)
            signal.alarm(1)
            # Expected behaviour:
            # - first raw write() is partial (because of the limited pipe buffer
            #   and the first alarm)
            # - second raw write() returns EINTR (because of the second alarm)
            # - subsequent write()s are successful (either partial or complete)
            written = wio.write(large_data)
            self.assertEqual(N, written)

            wio.flush()
            write_finished = True
            t.join()

            self.assertIsNone(error)
            self.assertEqual(N, sum(len(x) for x in read_results))
        finally:
            signal.alarm(0)
            write_finished = True
            os.close(w)
            os.close(r)
            # This is deliberate. If we didn't close the file descriptor
            # before closing wio, wio would try to flush its internal
            # buffer, and could block.
            try:
                wio.close()
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise

    @unittest.skip("TODO: RUSTPYTHON, thread 'main' panicked at 'already borrowed: BorrowMutError'")
    def test_interrupted_write_retry_buffered(self):
        self.check_interrupted_write_retry(b"x", mode="wb")

    @unittest.skip("TODO: RUSTPYTHON, thread 'main' panicked at 'already borrowed: BorrowMutError'")
    def test_interrupted_write_retry_text(self):
        self.check_interrupted_write_retry("x", mode="w", encoding="latin1")
class CSignalsTest(SignalsTest):
    io = io

class PySignalsTest(SignalsTest):
    io = pyio
    # The reentrant-write checks are disabled for the pure-Python
    # implementation (assigning None removes the inherited test methods).
    test_reentrant_write_buffered = None
    test_reentrant_write_text = None
def load_tests(*args):
    """unittest load_tests hook: bind the C and pure-Python io namespaces
    (plus the matching mock classes) onto each C*/Py* test class by name
    prefix, then build the full suite."""
    tests = (CIOTest, PyIOTest, APIMismatchTest,
             CBufferedReaderTest, PyBufferedReaderTest,
             CBufferedWriterTest, PyBufferedWriterTest,
             CBufferedRWPairTest, PyBufferedRWPairTest,
             CBufferedRandomTest, PyBufferedRandomTest,
             StatefulIncrementalDecoderTest,
             CIncrementalNewlineDecoderTest, PyIncrementalNewlineDecoderTest,
             CTextIOWrapperTest, PyTextIOWrapperTest,
             CMiscIOTest, PyMiscIOTest,
             CSignalsTest, PySignalsTest,
             )

    mocks = (MockRawIO, MisbehavedRawIO, MockFileIO, CloseFailureIO,
             MockNonBlockWriterIO, MockUnseekableIO, MockRawIOWithoutRead,
             SlowFlushRawIO)

    # Put the io namespace under test (every io.__all__ name) and the
    # C-/Py-prefixed mock classes into each test class's attributes.
    all_members = io.__all__
    c_io_ns = {name : getattr(io, name) for name in all_members}
    py_io_ns = {name : getattr(pyio, name) for name in all_members}
    globs = globals()
    c_io_ns.update((x.__name__, globs["C" + x.__name__]) for x in mocks)
    py_io_ns.update((x.__name__, globs["Py" + x.__name__]) for x in mocks)
    # Avoid turning pyio.open into a bound method when set as a class attribute.
    py_io_ns["open"] = pyio.OpenWrapper
    for test in tests:
        if test.__name__.startswith("C"):
            for name, obj in c_io_ns.items():
                setattr(test, name, obj)
        elif test.__name__.startswith("Py"):
            for name, obj in py_io_ns.items():
                setattr(test, name, obj)

    suite = unittest.TestSuite([unittest.makeSuite(test) for test in tests])
    return suite
if __name__ == "__main__":
    # Allow running this test file directly; load_tests() above wires the
    # namespaces onto the test classes.
    unittest.main()
| true | true |
f72dea21ca829e1df65f8d8e6a552085a2c9d8ac | 5,428 | py | Python | isi_sdk_8_1_1/isi_sdk_8_1_1/models/statistics_keys_extended.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_1_1/isi_sdk_8_1_1/models/statistics_keys_extended.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_1_1/isi_sdk_8_1_1/models/statistics_keys_extended.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 6
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_1_1.models.statistics_key import StatisticsKey # noqa: F401,E501
from isi_sdk_8_1_1.models.statistics_keys import StatisticsKeys # noqa: F401,E501
class StatisticsKeysExtended(object):
    """Swagger-generated model holding one page of statistics key definitions.

    Improvements over the generated original: `to_dict` iterates with plain
    dict methods instead of `six.iteritems` (equivalent behaviour, drops the
    runtime dependency on `six`), and the vacuous generated length check on
    `resume` is documented.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """
    swagger_types = {
        'keys': 'list[StatisticsKey]',
        'resume': 'str',
        'total': 'int'
    }

    attribute_map = {
        'keys': 'keys',
        'resume': 'resume',
        'total': 'total'
    }

    def __init__(self, keys=None, resume=None, total=None):
        """StatisticsKeysExtended - a model defined in Swagger

        :param keys: the StatisticsKey items in this page.
        :param resume: pagination token from a previous call.
        :param total: total number of items available (0 .. 2**32 - 1).
        """
        self._keys = None
        self._resume = None
        self._total = None
        self.discriminator = None

        # Only run the validating setters for explicitly supplied values.
        if keys is not None:
            self.keys = keys
        if resume is not None:
            self.resume = resume
        if total is not None:
            self.total = total

    @property
    def keys(self):
        """The list of StatisticsKey items in this page.

        :rtype: list[StatisticsKey]
        """
        return self._keys

    @keys.setter
    def keys(self, keys):
        """Set the list of StatisticsKey items in this page."""
        self._keys = keys

    @property
    def resume(self):
        """Pagination token: continue returning results from a previous call
        using this token (resume cannot be used with other options).

        :rtype: str
        """
        return self._resume

    @resume.setter
    def resume(self, resume):
        """Set the pagination token."""
        # Generated minimum-length constraint; vacuous since len() >= 0,
        # kept for fidelity with the Swagger spec (minLength: 0).
        if resume is not None and len(resume) < 0:
            raise ValueError("Invalid value for `resume`, length must be greater than or equal to `0`")
        self._resume = resume

    @property
    def total(self):
        """Total number of items available.

        :rtype: int
        """
        return self._total

    @total.setter
    def total(self, total):
        """Set the total; must fit an unsigned 32-bit integer.

        :raises ValueError: if total is outside [0, 4294967295].
        """
        if total is not None and total > 4294967295:
            raise ValueError("Invalid value for `total`, must be a value less than or equal to `4294967295`")
        if total is not None and total < 0:
            raise ValueError("Invalid value for `total`, must be a value greater than or equal to `0`")
        self._total = total

    def to_dict(self):
        """Return the model properties as a dict, recursing into values that
        themselves define ``to_dict`` (nested models, lists and dicts)."""
        result = {}

        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, StatisticsKeysExtended):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
| 30.494382 | 170 | 0.590825 |
import pprint
import re
import six
from isi_sdk_8_1_1.models.statistics_key import StatisticsKey
from isi_sdk_8_1_1.models.statistics_keys import StatisticsKeys
class StatisticsKeysExtended(object):
    """Swagger-generated model holding a page of statistics keys.

    Carries a list of ``StatisticsKey`` entries plus pagination state: a
    ``resume`` token and a ``total`` count. Auto-generated code — edits here
    may be overwritten on regeneration.
    """
    # Maps attribute name -> swagger type string (consumed by to_dict()).
    swagger_types = {
        'keys': 'list[StatisticsKey]',
        'resume': 'str',
        'total': 'int'
    }
    # Maps python attribute name -> JSON key name on the wire.
    attribute_map = {
        'keys': 'keys',
        'resume': 'resume',
        'total': 'total'
    }
    def __init__(self, keys=None, resume=None, total=None):
        """Initialize the model; every field is optional and defaults to None."""
        self._keys = None
        self._resume = None
        self._total = None
        self.discriminator = None  # no polymorphic subtypes for this model
        # Only route through the validating setters when a value was given.
        if keys is not None:
            self.keys = keys
        if resume is not None:
            self.resume = resume
        if total is not None:
            self.total = total
    @property
    def keys(self):
        """Get the list of StatisticsKey entries (or None)."""
        return self._keys
    @keys.setter
    def keys(self, keys):
        # No validation is generated for this field.
        self._keys = keys
    @property
    def resume(self):
        """Get the pagination resume token (or None)."""
        return self._resume
    @resume.setter
    def resume(self, resume):
        # NOTE(review): len(resume) < 0 can never be True (len() is always
        # >= 0) — this is a dead minLength=0 guard emitted by the generator.
        if resume is not None and len(resume) < 0:
            raise ValueError("Invalid value for `resume`, length must be greater than or equal to `0`")
        self._resume = resume
    @property
    def total(self):
        """Get the total number of keys (or None)."""
        return self._total
    @total.setter
    def total(self, total):
        # Enforce the unsigned 32-bit range declared in the API spec.
        if total is not None and total > 4294967295:
            raise ValueError("Invalid value for `total`, must be a value less than or equal to `4294967295`")
        if total is not None and total < 0:
            raise ValueError("Invalid value for `total`, must be a value greater than or equal to `0`")
        self._total = total
    def to_dict(self):
        """Return the model as a plain dict, recursing into nested models."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Convert each element that is itself a swagger model.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Convert dict values that are themselves swagger models.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Return the pretty-printed string form of this model's dict."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """Debug representation used by `print` and `pprint`."""
        return self.to_str()
    def __eq__(self, other):
        """Return True when *other* is the same model type with equal state."""
        if not isinstance(other, StatisticsKeysExtended):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Return True when the two objects are not equal."""
        return not self == other
| true | true |
f72deaa2b24b9fcbe3161c2781c93d8552c84e58 | 6,403 | py | Python | src/timeatlas/time_series/component_handler.py | fredmontet/timeatlas | 9a439a913ef9a8a1ef9833b42e5fb4e988d7e35e | [
"MIT"
] | 10 | 2020-08-25T09:23:02.000Z | 2021-01-12T14:00:35.000Z | src/timeatlas/time_series/component_handler.py | fredmontet/timeatlas | 9a439a913ef9a8a1ef9833b42e5fb4e988d7e35e | [
"MIT"
] | 140 | 2020-06-30T11:59:47.000Z | 2021-08-23T20:58:43.000Z | src/timeatlas/time_series/component_handler.py | fredmontet/timeatlas | 9a439a913ef9a8a1ef9833b42e5fb4e988d7e35e | [
"MIT"
] | null | null | null | from typing import List, Union, NoReturn
from copy import deepcopy, copy
from pandas import Index
from .component import Component
class ComponentHandler:
    """ Helper class to manage many components

    The purpose of this class is to make the management of components in a
    time series as simple as possible, with one or many components.

    The underlying data structure is a simple list where components are
    stored.
    """

    def __init__(self, components: Union[List[Component], Component] = None):
        """ Build a handler from zero, one or many Components

        Args:
            components: a single Component or a list of Components;
                None creates an empty handler
        """
        if isinstance(components, Component):
            # normalize the single-component shortcut into a list
            components = [components]
        self.components = components if components is not None else []

    def __getitem__(self, item: Union[int, str, List[int], List[str]]):
        """ Select Components by position, by name, or by a list of either

        Args:
            item: int, str, list of int or list of str

        Returns:
            ComponentHandler wrapping the selected Component(s)
        """
        # handler[0]
        if isinstance(item, int):
            new_components = self.components[item]
        # handler["0_foo"]
        elif isinstance(item, str):
            new_components = self.get_component_by_name(item)
        elif isinstance(item, list):
            # handler[[0,3,5]]
            if all(isinstance(i, int) for i in item):
                new_components = [self.components[i] for i in item]
            # handler[["0_foo","1_bar"]]
            elif all(isinstance(i, str) for i in item):
                new_components = [self.get_component_by_name(i_n)
                                  for i_n in item]
            else:
                raise TypeError(f"ComponentHandler list indices must be int or "
                                f"str, not {type(item)}")
        else:
            raise TypeError(f"ComponentHandler indices must be int, str or list,"
                            f" not {type(item)}")
        return ComponentHandler(new_components)

    def __delitem__(self, key: Union[int, str]) -> NoReturn:
        """ Delete an item from the ComponentHandler

        Args:
            key: int or str of the item to delete
        """
        if isinstance(key, int):
            del self.components[key]
        elif isinstance(key, str):
            i = self.get_component_id_by_name(key)
            del self.components[i]

    def __len__(self) -> int:
        """ Get the number of items in the ComponentHandler

        Returns:
            int
        """
        return len(self.components)

    def __str__(self):
        """ Get the str representation of a ComponentHandler

        Returns:
            str
        """
        return str(self.get_columns().to_list())

    def append(self, component: Component) -> NoReturn:
        """ Append a Component to the ComponentHandler

        Args:
            component: Component to append
        """
        self.components.append(component)

    def clear(self):
        """ Remove all Components from the ComponentHandler """
        self.components.clear()

    def get_component_id_by_name(self, name: str) -> int:
        """ Get a Component ID by its name

        Args:
            name: str of the name of the Component, including the ID
                e.g. "0_temperature"

        Returns:
            int
        """
        for i, c in enumerate(self.get_columns().to_list()):
            if name == c:
                return i
        # if no component are found throughout the for loop
        raise KeyError(f"Component with name '{name}' does not exist.")

    def get_component_by_name(self, name: str):
        """ Get a Component by its name

        Args:
            name: str of the name of the Component, including the ID
                e.g. "0_temperature"

        Returns:
            Component
        """
        for i, c in enumerate(self.components):
            component_name = self.__format_main_series(i, c.get_main())
            if name == component_name:
                return c
        raise KeyError(f"Component with name '{name}' does not exist.")

    def get_column_by_id(self, index: int) -> Index:
        """ Get the name of a column by its Component ID

        Args:
            index: int of the index of the component in the ComponentHandler

        Returns:
            Pandas Index with the formatted name of the component
        """
        c = self.components[index]
        cols = [self.__format_main_series(index, c.get_main())]
        return Index(cols)

    def get_column_by_name(self, name: str) -> Index:
        """ Get the name of a column by its Component name

        Args:
            name: str of the name of the component in the ComponentHandler
                e.g: "0_temperature"

        Returns:
            Pandas Index with the formatted name of the component
        """
        for i, c in enumerate(self.get_columns().to_list()):
            if name == c:
                return self.get_column_by_id(i)
        # if no component are found throughout the for loop
        raise KeyError(f"Component with name '{name}' does not exist.")

    def get_columns(self) -> Index:
        """ Get the names of all the Components' columns

        Returns:
            Pandas Index with one formatted name ("<id>_<main>") per component
        """
        cols = []
        for i, c in enumerate(self.components):
            cols.extend(self.get_column_by_id(i).to_list())
        return Index(cols)

    def copy(self, deep=True) -> 'ComponentHandler':
        """ Copy function, deep by default

        Args:
            deep: bool if deep copy or not

        Returns:
            ComponentHandler
        """
        return deepcopy(self) if deep else copy(self)

    @staticmethod
    def __format_main_series(index: int, value: Union[str, list]):
        """ Format a main series name

        Args:
            index: int of the position of the main series
            value: str or list with the main series name(s)

        Returns:
            formatted str (or list of str) of the series name(s)
        """
        if isinstance(value, str):
            return f"{index}_{value}"
        elif isinstance(value, list):
            return [f"{index}_{v}" for v in value]
        else:
            # BUG FIX: the TypeError was previously constructed but never
            # raised, so unsupported types silently produced a None name.
            raise TypeError(f"Type {value} isn't accepted")
| 31.855721 | 81 | 0.582071 | from typing import List, Union, NoReturn
from copy import deepcopy, copy
from pandas import Index
from .component import Component
class ComponentHandler:
    """ Helper class to manage many components

    Makes working with one or many time-series components simple; the
    underlying data structure is a plain list of Components.
    """

    def __init__(self, components: Union[List[Component], Component] = None):
        """ Build a handler from zero, one or many Components

        Args:
            components: a single Component or a list of Components;
                None creates an empty handler
        """
        if isinstance(components, Component):
            # normalize the single-component shortcut into a list
            components = [components]
        self.components = components if components is not None else []

    def __getitem__(self, item: Union[int, str, List[int], List[str]]):
        """ Select Components by position, by name, or by a list of either

        Returns:
            ComponentHandler wrapping the selected Component(s)
        """
        # handler[0]
        if isinstance(item, int):
            new_components = self.components[item]
        # handler["0_foo"]
        elif isinstance(item, str):
            new_components = self.get_component_by_name(item)
        elif isinstance(item, list):
            # handler[[0,3,5]]
            if all(isinstance(i, int) for i in item):
                new_components = [self.components[i] for i in item]
            # handler[["0_foo","1_bar"]]
            elif all(isinstance(i, str) for i in item):
                new_components = [self.get_component_by_name(i_n)
                                  for i_n in item]
            else:
                raise TypeError(f"ComponentHandler list indices must be int or "
                                f"str, not {type(item)}")
        else:
            raise TypeError(f"ComponentHandler indices must be int, str or list,"
                            f" not {type(item)}")
        return ComponentHandler(new_components)

    def __delitem__(self, key: Union[int, str]) -> NoReturn:
        """ Delete an item, addressed by position (int) or by name (str) """
        if isinstance(key, int):
            del self.components[key]
        elif isinstance(key, str):
            i = self.get_component_id_by_name(key)
            del self.components[i]

    def __len__(self) -> int:
        """ Get the number of Components held """
        return len(self.components)

    def __str__(self):
        """ Get the list of formatted column names as a str """
        return str(self.get_columns().to_list())

    def append(self, component: Component) -> NoReturn:
        """ Append a Component to the handler """
        self.components.append(component)

    def clear(self):
        """ Remove all Components """
        self.components.clear()

    def get_component_id_by_name(self, name: str) -> int:
        """ Get a Component's position by its full name, e.g. "0_temperature"

        Raises:
            KeyError: if no component matches
        """
        for i, c in enumerate(self.get_columns().to_list()):
            if name == c:
                return i
        raise KeyError(f"Component with name '{name}' does not exist.")

    def get_component_by_name(self, name: str):
        """ Get a Component by its full name, e.g. "0_temperature"

        Raises:
            KeyError: if no component matches
        """
        for i, c in enumerate(self.components):
            component_name = self.__format_main_series(i, c.get_main())
            if name == component_name:
                return c
        raise KeyError(f"Component with name '{name}' does not exist.")

    def get_column_by_id(self, index: int) -> Index:
        """ Get a Pandas Index with the formatted name of component *index* """
        c = self.components[index]
        cols = [self.__format_main_series(index, c.get_main())]
        return Index(cols)

    def get_column_by_name(self, name: str) -> Index:
        """ Get a Pandas Index with the formatted name matching *name*

        Raises:
            KeyError: if no component matches
        """
        for i, c in enumerate(self.get_columns().to_list()):
            if name == c:
                return self.get_column_by_id(i)
        raise KeyError(f"Component with name '{name}' does not exist.")

    def get_columns(self) -> Index:
        """ Get a Pandas Index with one "<id>_<main>" name per component """
        cols = []
        for i, c in enumerate(self.components):
            cols.extend(self.get_column_by_id(i).to_list())
        return Index(cols)

    def copy(self, deep=True) -> 'ComponentHandler':
        """ Copy the handler, deeply by default """
        return deepcopy(self) if deep else copy(self)

    @staticmethod
    def __format_main_series(index: int, value: Union[str, list]):
        """ Format "<index>_<value>" for a str value, or one per list item """
        if isinstance(value, str):
            return f"{index}_{value}"
        elif isinstance(value, list):
            return [f"{index}_{v}" for v in value]
        else:
            # BUG FIX: the TypeError was previously constructed but never
            # raised, so unsupported types silently produced a None name.
            raise TypeError(f"Type {value} isn't accepted")
| true | true |
f72dec76524dfa952323f1e88da3ddb20c4e4607 | 757 | py | Python | inheritance/inheritanceapp/migrations/0003_auto_20211227_0338.py | LaTonia-Mertica/inheritance-artifacts | be9d80fc43d3b2f82755c311d4c9c44d0ff508c3 | [
"MIT"
] | 1 | 2022-02-26T04:44:43.000Z | 2022-02-26T04:44:43.000Z | inheritance/inheritanceapp/migrations/0003_auto_20211227_0338.py | Jason-Doze/Inheritance- | d2406edd4bd5ae1b1fedf531d61f56beed23aeef | [
"MIT"
] | 68 | 2021-12-24T15:09:06.000Z | 2022-02-07T22:49:43.000Z | inheritance/inheritanceapp/migrations/0003_auto_20211227_0338.py | LaTonia-Mertica/inheritance-artifacts | be9d80fc43d3b2f82755c311d4c9c44d0ff508c3 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.9 on 2021-12-27 03:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 3.2.9).

    Alters three fields on the ``artifact`` model. NOTE: once applied,
    migration files must not be edited by hand — create a new migration
    instead.
    """
    dependencies = [
        ('inheritanceapp', '0002_remove_artifact_description_remove_artifact_img_link_and_more'),
    ]
    operations = [
        # Store uploaded images under MEDIA_ROOT/images/.
        migrations.AlterField(
            model_name='artifact',
            name='image',
            field=models.ImageField(upload_to='images/'),
        ),
        # Cap the image description at 44 characters.
        migrations.AlterField(
            model_name='artifact',
            name='imgdesc',
            field=models.CharField(max_length=44),
        ),
        # Cap the image title at 13 characters.
        migrations.AlterField(
            model_name='artifact',
            name='imgtitle',
            field=models.CharField(max_length=13),
        ),
    ]
| 26.103448 | 97 | 0.593131 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration altering three ``artifact`` fields.

    NOTE: once applied, migration files must not be edited by hand —
    create a new migration instead.
    """
    dependencies = [
        ('inheritanceapp', '0002_remove_artifact_description_remove_artifact_img_link_and_more'),
    ]
    operations = [
        # Store uploaded images under MEDIA_ROOT/images/.
        migrations.AlterField(
            model_name='artifact',
            name='image',
            field=models.ImageField(upload_to='images/'),
        ),
        # Cap the image description at 44 characters.
        migrations.AlterField(
            model_name='artifact',
            name='imgdesc',
            field=models.CharField(max_length=44),
        ),
        # Cap the image title at 13 characters.
        migrations.AlterField(
            model_name='artifact',
            name='imgtitle',
            field=models.CharField(max_length=13),
        ),
    ]
| true | true |
f72decfb61264fa0bc392c143e7fe12312565b28 | 6,341 | py | Python | dvr_scan/timecode.py | Fitblip/DVR-Scan | 8beb41b27e061f52fd134c67c3eeb012ddf2c36d | [
"BSD-3-Clause-Clear",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | dvr_scan/timecode.py | Fitblip/DVR-Scan | 8beb41b27e061f52fd134c67c3eeb012ddf2c36d | [
"BSD-3-Clause-Clear",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | dvr_scan/timecode.py | Fitblip/DVR-Scan | 8beb41b27e061f52fd134c67c3eeb012ddf2c36d | [
"BSD-3-Clause-Clear",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | #
# DVR-Scan: Find & Export Motion Events in Video Footage
# --------------------------------------------------------------
# [ Site: https://github.com/Breakthrough/DVR-Scan/ ]
# [ Documentation: http://dvr-scan.readthedocs.org/ ]
#
# This file contains all code related to timecode formats, interpreting,
# parsing, and conversion.
#
# Copyright (C) 2016-2020 Brandon Castellano <http://www.bcastell.com>.
#
# DVR-Scan is licensed under the BSD 2-Clause License; see the included
# LICENSE file or visit one of the following pages for details:
# - https://github.com/Breakthrough/DVR-Scan/
#
# This software uses Numpy and OpenCV; see the LICENSE-NUMPY and
# LICENSE-OPENCV files or visit the above URL for details.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
class FrameTimecode(object):
    """ Object for frame-based timecodes, using the video framerate
    to compute back and forth between frame number and second/timecode formats.

    The passed argument is declared valid if it meets one of three valid forms:
      1) Standard timecode HH:MM:SS[.nnn]:
         in string form 'HH:MM:SS' or 'HH:MM:SS.nnn', or
         in list/tuple form [HH, MM, SS] or [HH, MM, SS.nnn]
      2) Number of seconds S[.SSS], where S >= 0.0:
         in string form 'Ss' or 'S.SSSs' (e.g. '5s', '1.234s'), or
         in integer or floating point form S or S.SSS
      3) Exact number of frames N, where N >= 0:
         in either integer or string form N or 'N'

    Raises:
        TypeError, ValueError
    """

    def __init__(self, fps, timecode):
        if not isinstance(fps, (int, float)):
            raise TypeError('Framerate must be of type int/float.')
        self.framerate = float(fps)
        self.frame_num = -1
        # Exact number of frames N
        if isinstance(timecode, int):
            if timecode < 0:
                raise ValueError('Timecode frame number must be positive.')
            self.frame_num = timecode
        # Number of seconds S
        elif isinstance(timecode, float):
            if timecode < 0.0:
                raise ValueError('Timecode value must be positive.')
            self.frame_num = int(timecode * self.framerate)
        # Standard timecode in list format [HH, MM, SS.nnn]
        elif isinstance(timecode, (list, tuple)) and len(timecode) == 3:
            if any(not isinstance(x, (int, float)) for x in timecode):
                raise ValueError('Timecode components must be of type int/float.')
            hrs, mins, secs = timecode
            if not (hrs >= 0 and mins >= 0 and secs >= 0 and mins < 60
                    and secs < 60):
                raise ValueError('Timecode components must be positive.')
            secs += (((hrs * 60.0) + mins) * 60.0)
            self.frame_num = int(secs * self.framerate)
        elif isinstance(timecode, str):
            # Number of seconds S (suffix 's', e.g. '1.234s')
            if timecode.endswith('s'):
                secs = timecode[:-1]
                if not secs.replace('.', '').isdigit():
                    raise ValueError('All characters in timecode seconds string must be digits.')
                secs = float(secs)
                if secs < 0.0:
                    raise ValueError('Timecode seconds value must be positive.')
                self.frame_num = int(secs * self.framerate)
            # Exact number of frames N
            elif timecode.isdigit():
                timecode = int(timecode)
                if timecode < 0:
                    raise ValueError('Timecode frame number must be positive.')
                self.frame_num = timecode
            # Standard timecode in string format 'HH:MM:SS[.nnn]'
            else:
                tc_val = timecode.split(':')
                if not (len(tc_val) == 3 and tc_val[0].isdigit() and tc_val[1].isdigit()
                        and tc_val[2].replace('.', '').isdigit()):
                    raise TypeError('Improperly formatted timecode string.')
                hrs, mins = int(tc_val[0]), int(tc_val[1])
                secs = float(tc_val[2]) if '.' in tc_val[2] else int(tc_val[2])
                if not (hrs >= 0 and mins >= 0 and secs >= 0 and mins < 60
                        and secs < 60):
                    raise ValueError('Invalid timecode range.')
                secs += (((hrs * 60.0) + mins) * 60.0)
                self.frame_num = int(secs * self.framerate)
        else:
            raise TypeError('Timecode format unrecognized.')

    def get_seconds(self):
        """ Get the frame's position in number of seconds.

        Returns:
            A float of the current time/position in seconds.
        """
        return float(self.frame_num) / self.framerate

    def get_timecode(self, precision = 3, use_rounding = True):
        """ Get a formatted timecode string of the form HH:MM:SS[.nnn].

        Args:
            precision: The number of decimal places to include in the output [.nnn].
            use_rounding: True (default) to round the output to the desired precision.

        Returns:
            A string with a formatted timecode (HH:MM:SS[.nnn]).
        """
        # Compute hours and minutes based off of seconds, and update seconds.
        secs = self.get_seconds()
        base = 60.0 * 60.0
        hrs = int(secs / base)
        secs -= (hrs * base)
        base = 60.0
        mins = int(secs / base)
        secs -= (mins * base)
        # Convert seconds into string based on required precision.
        if precision > 0:
            if use_rounding:
                secs = round(secs, precision)
                # BUG FIX: rounding can push secs to exactly 60.0 (e.g.
                # 59.9996 -> 60.000 at precision 3); carry the overflow
                # into minutes instead of emitting an invalid ':60.000'.
                if secs >= 60.0:
                    secs -= 60.0
                    mins += 1
            msec = format(secs, '.%df' % precision)[-precision:]
            secs = '%02d.%s' % (int(secs), msec)
        else:
            if use_rounding:
                whole_secs = int(round(secs, 0))
                # Same rounding carry as above for the integer-seconds form.
                if whole_secs >= 60:
                    whole_secs -= 60
                    mins += 1
            else:
                whole_secs = int(secs)
            secs = '%02d' % whole_secs
        # Propagate a minute carry into hours if the rounding rolled over.
        if mins >= 60:
            mins -= 60
            hrs += 1
        # Return hours, minutes, and seconds as a formatted timecode string.
        return '%02d:%02d:%s' % (hrs, mins, secs)
| 44.971631 | 97 | 0.578142 |
class FrameTimecode(object):
    """ Frame-based timecode tied to a video framerate; converts between
    frame number, seconds, and 'HH:MM:SS[.nnn]' formats.

    Accepts a timecode as: int/str frame count N >= 0; float/'Ss' seconds;
    or 'HH:MM:SS[.nnn]' string / [HH, MM, SS(.nnn)] list.

    Raises:
        TypeError, ValueError
    """

    def __init__(self, fps, timecode):
        if not isinstance(fps, (int, float)):
            raise TypeError('Framerate must be of type int/float.')
        self.framerate = float(fps)
        self.frame_num = -1
        # Exact number of frames N
        if isinstance(timecode, int):
            if timecode < 0:
                raise ValueError('Timecode frame number must be positive.')
            self.frame_num = timecode
        # Number of seconds S
        elif isinstance(timecode, float):
            if timecode < 0.0:
                raise ValueError('Timecode value must be positive.')
            self.frame_num = int(timecode * self.framerate)
        # Timecode in list/tuple format [HH, MM, SS.nnn]
        elif isinstance(timecode, (list, tuple)) and len(timecode) == 3:
            if any(not isinstance(x, (int, float)) for x in timecode):
                raise ValueError('Timecode components must be of type int/float.')
            hrs, mins, secs = timecode
            if not (hrs >= 0 and mins >= 0 and secs >= 0 and mins < 60
                    and secs < 60):
                raise ValueError('Timecode components must be positive.')
            secs += (((hrs * 60.0) + mins) * 60.0)
            self.frame_num = int(secs * self.framerate)
        elif isinstance(timecode, str):
            # Seconds with 's' suffix, e.g. '1.234s'
            if timecode.endswith('s'):
                secs = timecode[:-1]
                if not secs.replace('.', '').isdigit():
                    raise ValueError('All characters in timecode seconds string must be digits.')
                secs = float(secs)
                if secs < 0.0:
                    raise ValueError('Timecode seconds value must be positive.')
                self.frame_num = int(secs * self.framerate)
            # Exact number of frames N
            elif timecode.isdigit():
                timecode = int(timecode)
                if timecode < 0:
                    raise ValueError('Timecode frame number must be positive.')
                self.frame_num = timecode
            # Standard timecode string 'HH:MM:SS[.nnn]'
            else:
                tc_val = timecode.split(':')
                if not (len(tc_val) == 3 and tc_val[0].isdigit() and tc_val[1].isdigit()
                        and tc_val[2].replace('.', '').isdigit()):
                    raise TypeError('Improperly formatted timecode string.')
                hrs, mins = int(tc_val[0]), int(tc_val[1])
                secs = float(tc_val[2]) if '.' in tc_val[2] else int(tc_val[2])
                if not (hrs >= 0 and mins >= 0 and secs >= 0 and mins < 60
                        and secs < 60):
                    raise ValueError('Invalid timecode range.')
                secs += (((hrs * 60.0) + mins) * 60.0)
                self.frame_num = int(secs * self.framerate)
        else:
            raise TypeError('Timecode format unrecognized.')

    def get_seconds(self):
        """ Get the frame's position as a float number of seconds. """
        return float(self.frame_num) / self.framerate

    def get_timecode(self, precision = 3, use_rounding = True):
        """ Get a formatted 'HH:MM:SS[.nnn]' timecode string.

        Args:
            precision: decimal places for the fractional seconds part.
            use_rounding: round (True, default) rather than truncate.
        """
        # Split total seconds into hours/minutes/remaining seconds.
        secs = self.get_seconds()
        base = 60.0 * 60.0
        hrs = int(secs / base)
        secs -= (hrs * base)
        base = 60.0
        mins = int(secs / base)
        secs -= (mins * base)
        if precision > 0:
            if use_rounding:
                secs = round(secs, precision)
                # BUG FIX: rounding can push secs to exactly 60.0; carry
                # into minutes instead of emitting an invalid ':60.000'.
                if secs >= 60.0:
                    secs -= 60.0
                    mins += 1
            msec = format(secs, '.%df' % precision)[-precision:]
            secs = '%02d.%s' % (int(secs), msec)
        else:
            if use_rounding:
                whole_secs = int(round(secs, 0))
                # Same rounding carry for the integer-seconds form.
                if whole_secs >= 60:
                    whole_secs -= 60
                    mins += 1
            else:
                whole_secs = int(secs)
            secs = '%02d' % whole_secs
        # Propagate a minute carry into hours if the rounding rolled over.
        if mins >= 60:
            mins -= 60
            hrs += 1
        return '%02d:%02d:%s' % (hrs, mins, secs)
| true | true |
f72ded038e83af2472f6149ffece96e678944a35 | 9,435 | py | Python | synapse/handlers/admin.py | lukaslihotzki/synapse | 1dfdc87b9bb07cc3c958dde7f41f2af4322477e5 | [
"Apache-2.0"
] | 2 | 2021-07-07T10:21:41.000Z | 2021-12-28T00:13:20.000Z | synapse/handlers/admin.py | lukaslihotzki/synapse | 1dfdc87b9bb07cc3c958dde7f41f2af4322477e5 | [
"Apache-2.0"
] | 2 | 2021-12-17T21:45:54.000Z | 2021-12-29T20:12:09.000Z | synapse/handlers/admin.py | lukaslihotzki/synapse | 1dfdc87b9bb07cc3c958dde7f41f2af4322477e5 | [
"Apache-2.0"
] | 1 | 2021-06-16T23:25:48.000Z | 2021-06-16T23:25:48.000Z | # Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import logging
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set
from synapse.api.constants import Membership
from synapse.events import EventBase
from synapse.types import JsonDict, RoomStreamToken, StateMap, UserID
from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class AdminHandler(BaseHandler):
    """Handler for admin operations: whois lookups, user detail queries,
    and full per-user data exports."""

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.storage = hs.get_storage()
        self.state_store = self.storage.state

    async def get_whois(self, user: UserID) -> JsonDict:
        """Return connection info (IP, last seen, user agent) for a user."""
        sessions = await self.store.get_user_ip_and_agents(user)
        connections = [
            {
                "ip": session["ip"],
                "last_seen": session["last_seen"],
                "user_agent": session["user_agent"],
            }
            for session in sessions
        ]
        ret = {
            "user_id": user.to_string(),
            "devices": {"": {"sessions": [{"connections": connections}]}},
        }
        return ret

    async def get_user(self, user: UserID) -> Optional[JsonDict]:
        """Get user details (profile + threepids), or None if unknown."""
        ret = await self.store.get_user_by_id(user.to_string())
        if ret:
            profile = await self.store.get_profileinfo(user.localpart)
            threepids = await self.store.user_get_threepids(user.to_string())
            ret["displayname"] = profile.display_name
            ret["avatar_url"] = profile.avatar_url
            ret["threepids"] = threepids
        return ret

    async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> Any:
        """Write all data we have on the user to the given writer.

        Args:
            user_id: The user ID to fetch data of.
            writer: The writer to write to.

        Returns:
            Resolves when all data for a user has been written.
            The returned value is that returned by `writer.finished()`.
        """
        # Get all rooms the user is in or has been in
        rooms = await self.store.get_rooms_for_local_user_where_membership_is(
            user_id,
            membership_list=(
                Membership.JOIN,
                Membership.LEAVE,
                Membership.BAN,
                Membership.INVITE,
            ),
        )

        # We only try and fetch events for rooms the user has been in. If
        # they've been e.g. invited to a room without joining then we handle
        # those separately.
        rooms_user_has_been_in = await self.store.get_rooms_user_has_been_in(user_id)

        for index, room in enumerate(rooms):
            room_id = room.room_id

            logger.info(
                "[%s] Handling room %s, %d/%d", user_id, room_id, index + 1, len(rooms)
            )

            forgotten = await self.store.did_forget(user_id, room_id)
            if forgotten:
                # BUG FIX: room_id is a string, so the placeholder must be
                # %s — %d raised a formatting error inside logging and the
                # message was lost.
                logger.info("[%s] User forgot room %s, ignoring", user_id, room_id)
                continue

            if room_id not in rooms_user_has_been_in:
                # If we haven't been in the rooms then the filtering code below
                # won't return anything, so we need to handle these cases
                # explicitly.
                if room.membership == Membership.INVITE:
                    event_id = room.event_id
                    invite = await self.store.get_event(event_id, allow_none=True)
                    if invite:
                        invited_state = invite.unsigned["invite_room_state"]
                        writer.write_invite(room_id, invite, invited_state)

                continue

            # We only want to bother fetching events up to the last time they
            # were joined. We estimate that point by looking at the
            # stream_ordering of the last membership if it wasn't a join.
            if room.membership == Membership.JOIN:
                stream_ordering = self.store.get_room_max_stream_ordering()
            else:
                stream_ordering = room.stream_ordering

            from_key = RoomStreamToken(0, 0)
            to_key = RoomStreamToken(None, stream_ordering)

            # Events that we've processed in this room
            written_events = set()  # type: Set[str]

            # We need to track gaps in the events stream so that we can then
            # write out the state at those events. We do this by keeping track
            # of events whose prev events we haven't seen.

            # Map from event ID to prev events that haven't been processed,
            # dict[str, set[str]].
            event_to_unseen_prevs = {}

            # The reverse mapping to above, i.e. map from unseen event to events
            # that have the unseen event in their prev_events, i.e. the unseen
            # events "children".
            unseen_to_child_events = {}  # type: Dict[str, Set[str]]

            # We fetch events in the room the user could see by fetching *all*
            # events that we have and then filtering, this isn't the most
            # efficient method perhaps but it does guarantee we get everything.
            while True:
                events, _ = await self.store.paginate_room_events(
                    room_id, from_key, to_key, limit=100, direction="f"
                )
                if not events:
                    break

                from_key = events[-1].internal_metadata.after

                events = await filter_events_for_client(self.storage, user_id, events)

                writer.write_events(room_id, events)

                # Update the extremity tracking dicts
                for event in events:
                    # Check if we have any prev events that haven't been
                    # processed yet, and add those to the appropriate dicts.
                    unseen_events = set(event.prev_event_ids()) - written_events
                    if unseen_events:
                        event_to_unseen_prevs[event.event_id] = unseen_events
                        for unseen in unseen_events:
                            unseen_to_child_events.setdefault(unseen, set()).add(
                                event.event_id
                            )

                    # Now check if this event is an unseen prev event, if so
                    # then we remove this event from the appropriate dicts.
                    for child_id in unseen_to_child_events.pop(event.event_id, []):
                        event_to_unseen_prevs[child_id].discard(event.event_id)

                    written_events.add(event.event_id)

                logger.info(
                    "Written %d events in room %s", len(written_events), room_id
                )

            # Extremities are the events who have at least one unseen prev event.
            extremities = (
                event_id
                for event_id, unseen_prevs in event_to_unseen_prevs.items()
                if unseen_prevs
            )
            for event_id in extremities:
                if not event_to_unseen_prevs[event_id]:
                    continue
                state = await self.state_store.get_state_for_event(event_id)
                writer.write_state(room_id, event_id, state)

        return writer.finished()
class ExfiltrationWriter(metaclass=abc.ABCMeta):
    """Interface used to specify how to write exported data."""

    @abc.abstractmethod
    def write_events(self, room_id: str, events: List[EventBase]) -> None:
        """Write a batch of events for a room.

        Args:
            room_id: The room ID the events belong to.
            events: The batch of events to write.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def write_state(
        self, room_id: str, event_id: str, state: StateMap[EventBase]
    ) -> None:
        """Write the state at the given event in the room.

        This only gets called for backward extremities rather than for each
        event.

        Args:
            room_id: The room ID the state belongs to.
            event_id: The event the state is at.
            state: The state map at that event.
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def write_invite(
        self, room_id: str, event: EventBase, state: StateMap[dict]
    ) -> None:
        """Write an invite for the room, with associated invite state.

        Args:
            room_id: The room ID the invite is for.
            event: The invite event.
            state: A subset of the state at the invite, with a subset of the
                event keys (type, state_key content and sender).
        """
        raise NotImplementedError()

    @abc.abstractmethod
    def finished(self) -> Any:
        """Called when all data has successfully been exported and written.

        This functions return value is passed to the caller of
        `export_user_data`.
        """
        raise NotImplementedError()
| 38.510204 | 88 | 0.595866 |
import abc
import logging
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set
from synapse.api.constants import Membership
from synapse.events import EventBase
from synapse.types import JsonDict, RoomStreamToken, StateMap, UserID
from synapse.visibility import filter_events_for_client
from ._base import BaseHandler
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class AdminHandler(BaseHandler):
    """Handler for admin operations: whois lookups, user detail queries,
    and full per-user data exports."""

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.storage = hs.get_storage()
        self.state_store = self.storage.state

    async def get_whois(self, user: UserID) -> JsonDict:
        """Return connection info (IP, last seen, user agent) for a user."""
        sessions = await self.store.get_user_ip_and_agents(user)
        connections = [
            {
                "ip": session["ip"],
                "last_seen": session["last_seen"],
                "user_agent": session["user_agent"],
            }
            for session in sessions
        ]
        ret = {
            "user_id": user.to_string(),
            "devices": {"": {"sessions": [{"connections": connections}]}},
        }
        return ret

    async def get_user(self, user: UserID) -> Optional[JsonDict]:
        """Get user details (profile + threepids), or None if unknown."""
        ret = await self.store.get_user_by_id(user.to_string())
        if ret:
            profile = await self.store.get_profileinfo(user.localpart)
            threepids = await self.store.user_get_threepids(user.to_string())
            ret["displayname"] = profile.display_name
            ret["avatar_url"] = profile.avatar_url
            ret["threepids"] = threepids
        return ret

    async def export_user_data(self, user_id: str, writer: "ExfiltrationWriter") -> Any:
        """Write all data we have on the user to the given writer.

        Returns the value returned by `writer.finished()`.
        """
        # Get all rooms the user is in or has been in.
        rooms = await self.store.get_rooms_for_local_user_where_membership_is(
            user_id,
            membership_list=(
                Membership.JOIN,
                Membership.LEAVE,
                Membership.BAN,
                Membership.INVITE,
            ),
        )

        # Only fetch events for rooms the user has actually been in; e.g.
        # pending invites are handled separately below.
        rooms_user_has_been_in = await self.store.get_rooms_user_has_been_in(user_id)

        for index, room in enumerate(rooms):
            room_id = room.room_id

            logger.info(
                "[%s] Handling room %s, %d/%d", user_id, room_id, index + 1, len(rooms)
            )

            forgotten = await self.store.did_forget(user_id, room_id)
            if forgotten:
                # BUG FIX: room_id is a string, so the placeholder must be
                # %s — %d raised a formatting error inside logging and the
                # message was lost.
                logger.info("[%s] User forgot room %s, ignoring", user_id, room_id)
                continue

            if room_id not in rooms_user_has_been_in:
                # The filtering below would return nothing for these rooms,
                # so handle a pending invite explicitly.
                if room.membership == Membership.INVITE:
                    event_id = room.event_id
                    invite = await self.store.get_event(event_id, allow_none=True)
                    if invite:
                        invited_state = invite.unsigned["invite_room_state"]
                        writer.write_invite(room_id, invite, invited_state)

                continue

            # Only fetch events up to the last time they were joined,
            # estimated by the stream_ordering of the last membership
            # when it wasn't a join.
            if room.membership == Membership.JOIN:
                stream_ordering = self.store.get_room_max_stream_ordering()
            else:
                stream_ordering = room.stream_ordering

            from_key = RoomStreamToken(0, 0)
            to_key = RoomStreamToken(None, stream_ordering)

            # Events we've already written for this room.
            written_events = set()  # type: Set[str]

            # Track gaps in the event stream so we can write the state at
            # those points: map event ID -> prev events not yet processed.
            event_to_unseen_prevs = {}

            # Reverse mapping: unseen event -> events that list it in their
            # prev_events (its "children").
            unseen_to_child_events = {}  # type: Dict[str, Set[str]]

            # Fetch *all* events we have and filter for visibility — not the
            # most efficient approach, but it guarantees completeness.
            while True:
                events, _ = await self.store.paginate_room_events(
                    room_id, from_key, to_key, limit=100, direction="f"
                )
                if not events:
                    break

                from_key = events[-1].internal_metadata.after

                events = await filter_events_for_client(self.storage, user_id, events)

                writer.write_events(room_id, events)

                # Update the extremity tracking dicts.
                for event in events:
                    # Record any prev events we haven't processed yet.
                    unseen_events = set(event.prev_event_ids()) - written_events
                    if unseen_events:
                        event_to_unseen_prevs[event.event_id] = unseen_events
                        for unseen in unseen_events:
                            unseen_to_child_events.setdefault(unseen, set()).add(
                                event.event_id
                            )

                    # If this event was itself an unseen prev event, clear it
                    # from its children's pending sets.
                    for child_id in unseen_to_child_events.pop(event.event_id, []):
                        event_to_unseen_prevs[child_id].discard(event.event_id)

                    written_events.add(event.event_id)

                logger.info(
                    "Written %d events in room %s", len(written_events), room_id
                )

            # Extremities are the events who have at least one unseen prev event.
            extremities = (
                event_id
                for event_id, unseen_prevs in event_to_unseen_prevs.items()
                if unseen_prevs
            )
            for event_id in extremities:
                if not event_to_unseen_prevs[event_id]:
                    continue
                state = await self.state_store.get_state_for_event(event_id)
                writer.write_state(room_id, event_id, state)

        return writer.finished()
class ExfiltrationWriter(metaclass=abc.ABCMeta):
    """Interface used to specify how to write exported data."""
    @abc.abstractmethod
    def write_events(self, room_id: str, events: List[EventBase]) -> None:
        """Write a batch of events for a room."""
        raise NotImplementedError()
    @abc.abstractmethod
    def write_state(
        self, room_id: str, event_id: str, state: StateMap[EventBase]
    ) -> None:
        """Write the state at the given event in the room.

        Only called for backward extremities, not for every event.
        """
        raise NotImplementedError()
    @abc.abstractmethod
    def write_invite(
        self, room_id: str, event: EventBase, state: StateMap[dict]
    ) -> None:
        """Write an invite for the room, with its associated invite state."""
        raise NotImplementedError()
    @abc.abstractmethod
    def finished(self) -> Any:
        """Called when all data has been exported; its return value is
        passed back to the caller of `export_user_data`."""
        raise NotImplementedError()
| true | true |
f72ded5a3c7ec214775edc37bb5d699d24407c13 | 5,940 | py | Python | app/fedgraphnn/ego_networks_link_pred/data/utils.py | ray-ruisun/FedML | 24ff30d636bb70f64e94e9ca205375033597d3dd | [
"Apache-2.0"
] | null | null | null | app/fedgraphnn/ego_networks_link_pred/data/utils.py | ray-ruisun/FedML | 24ff30d636bb70f64e94e9ca205375033597d3dd | [
"Apache-2.0"
] | null | null | null | app/fedgraphnn/ego_networks_link_pred/data/utils.py | ray-ruisun/FedML | 24ff30d636bb70f64e94e9ca205375033597d3dd | [
"Apache-2.0"
] | null | null | null | import numpy as np
import scipy.sparse as sp
import torch
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from torch_geometric.utils import to_networkx, degree
import torch.nn.functional as F
def convert_to_nodeDegreeFeatures(graphs):
    """Replace each graph's node features with one-hot node-degree features.

    The one-hot width is (max degree over *all* graphs) + 1 so every graph in
    the collection shares the same feature dimensionality.

    :param graphs: iterable of graph objects (assumed ``torch_geometric``
        ``Data``-like — they must support ``edge_index``, ``num_nodes`` and
        ``clone()``).
    :return: list of cloned graphs whose ``x`` is the one-hot degree matrix.
    """
    # First pass: find the largest degree anywhere so all graphs can share
    # a single one-hot encoding width.  (The original stored the per-graph
    # degree dicts and shadowed the builtin ``tuple``; both removed.)
    max_degree = 0
    graph_infos = []
    for graph in graphs:
        g = to_networkx(graph, to_undirected=True)
        max_degree = max(max_degree, max(dict(g.degree).values()))
        graph_infos.append((graph, graph.num_nodes))

    # Second pass: build the one-hot degree matrix for each graph.
    new_graphs = []
    for graph, num_nodes in graph_infos:
        source_idx = graph.edge_index[0]
        deg = degree(source_idx, num_nodes, dtype=torch.long)
        deg = F.one_hot(deg, num_classes=max_degree + 1).to(torch.float)
        new_graph = graph.clone()
        new_graph.__setitem__("x", deg)
        new_graphs.append(new_graph)
    return new_graphs
def split_data(graphs, train=None, test=None, shuffle=True, seed=None):
    """Stratified split of a list of graphs into train/val and test parts.

    Stratification uses the concatenated per-graph labels ``graph.y`` so both
    partitions keep the original label distribution.

    :return: tuple ``(train_val_graphs, test_graphs)``.
    """
    labels = torch.cat([g.y for g in graphs])
    tv_split, test_split = train_test_split(
        graphs,
        train_size=train,
        test_size=test,
        stratify=labels,
        shuffle=shuffle,
        random_state=seed,
    )
    return tv_split, test_split
def np_uniform_sample_next(compact_adj, tree, fanout):
    """Uniformly sample ``fanout`` neighbors for every node in the deepest
    level of the walk tree.

    :param compact_adj: adjacency holder exposing ``degrees`` (per-node
        degree array) and ``compact_adj`` (node x neighbor-slot matrix).
    :param tree: list of levels; the last has shape [batch, f^depth].
    :param fanout: number of neighbors to draw per node.
    :return: sampled neighbor ids with shape [batch, f^depth * fanout].
    """
    frontier = tree[-1]  # [batch, f^depth]
    frontier_degrees = compact_adj.degrees[frontier]

    # Repeat every frontier node (and its degree) ``fanout`` times so each
    # column position gets its own independent draw.
    tiled_nodes = np.repeat(frontier, fanout, axis=1)
    tiled_degrees = np.repeat(frontier_degrees, fanout, axis=1)

    # u in [0, 1) scaled by the degree picks a uniform neighbor slot.
    draws = np.random.uniform(size=tiled_degrees.shape, low=0, high=1 - 1e-9)
    neighbor_slots = np.array(draws * tiled_degrees, dtype=frontier.dtype)

    out_shape = neighbor_slots.shape
    sampled = np.array(
        compact_adj.compact_adj[tiled_nodes.reshape(-1), neighbor_slots.reshape(-1)]
    ).reshape(out_shape)
    return sampled
def np_traverse(
    compact_adj, seed_nodes, fanouts=(1,), sample_fn=np_uniform_sample_next
):
    """Grow a walk forest from ``seed_nodes``.

    Level ``i+1`` is produced by asking ``sample_fn`` for ``fanouts[i]``
    neighbors per node of level ``i``.

    :param seed_nodes: 1D or 2D integer numpy array of starting nodes.
    :return: list of levels; level 0 is ``seed_nodes`` reshaped to 2D.
    :raises ValueError: if ``seed_nodes`` is not a 1D/2D integer ndarray.
    """
    if not isinstance(seed_nodes, np.ndarray):
        raise ValueError("Seed must a numpy array")

    rank = len(seed_nodes.shape)
    if rank < 1 or rank > 2 or not str(seed_nodes.dtype).startswith("int"):
        raise ValueError("seed_nodes must be 1D or 2D int array")
    if rank == 1:
        seed_nodes = np.expand_dims(seed_nodes, 1)

    # Build the walk tree level by level.
    forest = [seed_nodes]
    for fanout in fanouts:
        next_level = sample_fn(compact_adj, forest, fanout)
        # Each node of the previous level must contribute exactly ``fanout``
        # samples.
        assert next_level.shape[1] == forest[-1].shape[1] * fanout
        forest.append(next_level)
    return forest
class WalkForestCollator(object):
    """Collate a single molecule into (walk forest, features, label) tensors.

    When ``normalize_features`` is True the feature matrix is row-scaled by
    the inverse of each row's sum; otherwise it is standardized with
    ``sklearn.preprocessing.StandardScaler``.
    """

    def __init__(self, normalize_features=False):
        self.normalize_features = normalize_features

    def __call__(self, molecule):
        comp_adj, features, label, fanouts = molecule[0]
        num_nodes = features.shape[0]
        node_ids = np.array(list(range(num_nodes)), dtype=np.int32)
        forest = np_traverse(comp_adj, node_ids, fanouts)

        # Level 0 is the flat seed list; deeper levels are reshaped so each
        # row holds one node's ``fanouts[depth]`` samples.
        torch_forest = [torch.from_numpy(forest[0]).flatten()]
        for depth in range(len(forest) - 1):
            torch_forest.append(
                torch.from_numpy(forest[depth + 1]).reshape(-1, fanouts[depth])
            )

        # Replace NaN labels with 0.0 before converting to tensors.
        label = np.where(np.isnan(label), 0.0, label)

        if self.normalize_features:
            sparse_feats = sp.csr_matrix(features)
            row_sums = np.array(sparse_feats.sum(1))
            inv = np.power(row_sums, -1).flatten()
            inv[np.isinf(inv)] = 0.0  # zero-sum rows stay all-zero
            normalized_feats = np.array(sp.diags(inv).dot(sparse_feats).todense())
        else:
            scaler = StandardScaler()
            scaler.fit(features)
            normalized_feats = scaler.transform(features)

        return (
            torch_forest,
            torch.as_tensor(normalized_feats, dtype=torch.float32),
            torch.as_tensor(label, dtype=torch.float32),
        )
class DefaultCollator(object):
    """Collate a single molecule into dense (adjacency, features, label)
    tensors.

    Feature normalization: row-scaling by inverse row sum when
    ``normalize_features`` is True, StandardScaler otherwise.  Adjacency
    normalization computes ``(A · D^-1/2)^T · D^-1/2`` — the symmetric
    normalization when ``A`` is symmetric.
    """

    def __init__(self, normalize_features=True, normalize_adj=True):
        self.normalize_features = normalize_features
        self.normalize_adj = normalize_adj

    def __call__(self, molecule):
        adj_matrix, features, label, _ = molecule[0]
        # Replace NaN labels with 0.0 before converting to tensors.
        label = np.where(np.isnan(label), 0.0, label)

        if self.normalize_features:
            sparse_feats = sp.csr_matrix(features)
            row_sums = np.array(sparse_feats.sum(1))
            inv = np.power(row_sums, -1).flatten()
            inv[np.isinf(inv)] = 0.0  # zero-sum rows stay all-zero
            normalized_feats = np.array(sp.diags(inv).dot(sparse_feats).todense())
        else:
            scaler = StandardScaler()
            scaler.fit(features)
            normalized_feats = scaler.transform(features)

        if self.normalize_adj:
            row_sums = np.array(adj_matrix.sum(1))
            inv_sqrt = np.power(row_sums, -0.5).flatten()
            inv_sqrt[np.isinf(inv_sqrt)] = 0.0
            d_inv_sqrt = sp.diags(inv_sqrt)
            normalized_adj = adj_matrix.dot(d_inv_sqrt).transpose().dot(d_inv_sqrt)
        else:
            normalized_adj = adj_matrix

        return (
            torch.as_tensor(
                np.array(normalized_adj.todense()), dtype=torch.float32
            ),
            torch.as_tensor(normalized_feats, dtype=torch.float32),
            torch.as_tensor(label, dtype=torch.float32),
        )
import scipy.sparse as sp
import torch
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from torch_geometric.utils import to_networkx, degree
import torch.nn.functional as F
def convert_to_nodeDegreeFeatures(graphs):
graph_infos = []
maxdegree = 0
for i, graph in enumerate(graphs):
g = to_networkx(graph, to_undirected=True)
gdegree = max(dict(g.degree).values())
if gdegree > maxdegree:
maxdegree = gdegree
graph_infos.append(
(graph, g.degree, graph.num_nodes)
)
new_graphs = []
for i, tuple in enumerate(graph_infos):
idx, x = tuple[0].edge_index[0], tuple[0].x
deg = degree(idx, tuple[2], dtype=torch.long)
deg = F.one_hot(deg, num_classes=maxdegree + 1).to(torch.float)
new_graph = tuple[0].clone()
new_graph.__setitem__("x", deg)
new_graphs.append(new_graph)
return new_graphs
def split_data(graphs, train=None, test=None, shuffle=True, seed=None):
y = torch.cat([graph.y for graph in graphs])
graphs_tv, graphs_test = train_test_split(
graphs,
train_size=train,
test_size=test,
stratify=y,
shuffle=shuffle,
random_state=seed,
)
return graphs_tv, graphs_test
def np_uniform_sample_next(compact_adj, tree, fanout):
last_level = tree[-1]
batch_lengths = compact_adj.degrees[last_level]
nodes = np.repeat(last_level, fanout, axis=1)
batch_lengths = np.repeat(batch_lengths, fanout, axis=1)
batch_next_neighbor_ids = np.random.uniform(
size=batch_lengths.shape, low=0, high=1 - 1e-9
)
batch_next_neighbor_ids = np.array(
batch_next_neighbor_ids * batch_lengths, dtype=last_level.dtype
)
shape = batch_next_neighbor_ids.shape
batch_next_neighbor_ids = np.array(
compact_adj.compact_adj[nodes.reshape(-1), batch_next_neighbor_ids.reshape(-1)]
).reshape(shape)
return batch_next_neighbor_ids
def np_traverse(
compact_adj, seed_nodes, fanouts=(1,), sample_fn=np_uniform_sample_next
):
if not isinstance(seed_nodes, np.ndarray):
raise ValueError("Seed must a numpy array")
if (
len(seed_nodes.shape) > 2
or len(seed_nodes.shape) < 1
or not str(seed_nodes.dtype).startswith("int")
):
raise ValueError("seed_nodes must be 1D or 2D int array")
if len(seed_nodes.shape) == 1:
seed_nodes = np.expand_dims(seed_nodes, 1)
forest_array = [seed_nodes]
for f in fanouts:
next_level = sample_fn(compact_adj, forest_array, f)
assert next_level.shape[1] == forest_array[-1].shape[1] * f
forest_array.append(next_level)
return forest_array
class WalkForestCollator(object):
def __init__(self, normalize_features=False):
self.normalize_features = normalize_features
def __call__(self, molecule):
comp_adj, feature_matrix, label, fanouts = molecule[0]
node_ids = np.array(list(range(feature_matrix.shape[0])), dtype=np.int32)
forest = np_traverse(comp_adj, node_ids, fanouts)
torch_forest = [torch.from_numpy(forest[0]).flatten()]
label = np.where(np.isnan(label), 0.0, label)
for i in range(len(forest) - 1):
torch_forest.append(torch.from_numpy(forest[i + 1]).reshape(-1, fanouts[i]))
if self.normalize_features:
mx = sp.csr_matrix(feature_matrix)
rowsum = np.array(mx.sum(1))
r_inv = np.power(rowsum, -1).flatten()
r_inv[np.isinf(r_inv)] = 0.0
r_mat_inv = sp.diags(r_inv)
normalized_feature_matrix = r_mat_inv.dot(mx)
normalized_feature_matrix = np.array(normalized_feature_matrix.todense())
else:
scaler = StandardScaler()
scaler.fit(feature_matrix)
normalized_feature_matrix = scaler.transform(feature_matrix)
return (
torch_forest,
torch.as_tensor(normalized_feature_matrix, dtype=torch.float32),
torch.as_tensor(label, dtype=torch.float32),
)
class DefaultCollator(object):
def __init__(self, normalize_features=True, normalize_adj=True):
self.normalize_features = normalize_features
self.normalize_adj = normalize_adj
def __call__(self, molecule):
adj_matrix, feature_matrix, label, _ = molecule[0]
label = np.where(np.isnan(label), 0.0, label)
if self.normalize_features:
mx = sp.csr_matrix(feature_matrix)
rowsum = np.array(mx.sum(1))
r_inv = np.power(rowsum, -1).flatten()
r_inv[np.isinf(r_inv)] = 0.0
r_mat_inv = sp.diags(r_inv)
normalized_feature_matrix = r_mat_inv.dot(mx)
normalized_feature_matrix = np.array(normalized_feature_matrix.todense())
else:
scaler = StandardScaler()
scaler.fit(feature_matrix)
normalized_feature_matrix = scaler.transform(feature_matrix)
if self.normalize_adj:
rowsum = np.array(adj_matrix.sum(1))
r_inv_sqrt = np.power(rowsum, -0.5).flatten()
r_inv_sqrt[np.isinf(r_inv_sqrt)] = 0.0
r_mat_inv_sqrt = sp.diags(r_inv_sqrt)
normalized_adj_matrix = (
adj_matrix.dot(r_mat_inv_sqrt).transpose().dot(r_mat_inv_sqrt)
)
else:
normalized_adj_matrix = adj_matrix
return (
torch.as_tensor(
np.array(normalized_adj_matrix.todense()), dtype=torch.float32
),
torch.as_tensor(normalized_feature_matrix, dtype=torch.float32),
torch.as_tensor(label, dtype=torch.float32),
) | true | true |
f72ded8645955a1c689217635dd09438e4636d20 | 3,975 | py | Python | projects/vdk-core/src/vdk/internal/builtin_plugins/connection/recovery_cursor.py | alod83/versatile-data-kit | 9ca672d3929eb3dc6fe5c677e8c8a75e2a0d2be8 | [
"Apache-2.0"
] | 100 | 2021-10-04T09:32:04.000Z | 2022-03-30T11:23:53.000Z | projects/vdk-core/src/vdk/internal/builtin_plugins/connection/recovery_cursor.py | alod83/versatile-data-kit | 9ca672d3929eb3dc6fe5c677e8c8a75e2a0d2be8 | [
"Apache-2.0"
] | 208 | 2021-10-04T16:56:40.000Z | 2022-03-31T10:41:44.000Z | projects/vdk-core/src/vdk/internal/builtin_plugins/connection/recovery_cursor.py | alod83/versatile-data-kit | 9ca672d3929eb3dc6fe5c677e8c8a75e2a0d2be8 | [
"Apache-2.0"
] | 14 | 2021-10-11T14:15:13.000Z | 2022-03-11T13:39:17.000Z | # Copyright 2021 VMware, Inc.
# SPDX-License-Identifier: Apache-2.0
from vdk.internal.builtin_plugins.connection.decoration_cursor import DecorationCursor
from vdk.internal.builtin_plugins.connection.decoration_cursor import ManagedOperation
from vdk.internal.builtin_plugins.connection.pep249.interfaces import PEP249Cursor
class RecoveryCursor(PEP249Cursor):
    """
    PEP249Cursor specialisation handed to recovery hooks when a managed SQL
    operation fails.  It exposes:
        * the query and parameters that were being executed
        * the exception raised during execution
        * helpers (execute / retry_operation) for attempting recovery

    See connection_hook_spec#db_connection_recover_operation for more details
    and examples how to use it.
    """

    def __init__(
        self,
        native_cursor: PEP249Cursor,
        log,
        exception,
        managed_operation: ManagedOperation,
        decoration_operation_callback,
    ):
        super().__init__(native_cursor, log)
        self.__exception = exception
        self.__managed_operation = managed_operation
        self.__decoration_operation_callback = decoration_operation_callback
        self.__retries = 0

    def get_exception(self) -> Exception:
        """Return the original exception with which the SQL operation failed."""
        return self.__exception

    def get_managed_operation(self) -> ManagedOperation:
        """Return the DTO holding the query and parameters of the failed
        operation; it can be used to curate them before retrying."""
        return self.__managed_operation

    def get_retries(self) -> int:
        """Return how many retries were made via retry_operation()."""
        return self.__retries

    def retries_increment(self) -> None:
        """Bump the retry counter by one."""
        self.__retries += 1

    def execute(self, operation, parameters=None) -> None:
        """
        Execute an additional query purposed for the recovery of the
        original operation.

        :param operation: helper query to facilitate operation recovery
        :param parameters: helper query parameters
        """
        recovery_op = ManagedOperation(operation, parameters)
        if self.__decoration_operation_callback:
            self._log.debug("Before executing recovery query:\n%s" % operation)
            cursor = DecorationCursor(self._cursor, self._log, recovery_op)
            self.__decoration_operation_callback(decoration_cursor=cursor)
        self._log.info(
            "Executing recovery query:\n%s" % recovery_op.get_operation()
        )
        try:
            super().execute(*recovery_op.get_operation_parameters_tuple())
            self._log.info("Executing recovery query SUCCEEDED.")
        except Exception as e:
            self.retries_increment()
            self._log.warning(f"Executing recovery query FAILED. Exception: {e}")
            raise e

    def retry_operation(self) -> None:
        """
        Retry the original operation in an attempt to recover.
        """
        # could potentially enforce max retries here globally - in favour of per custom error handler
        self.retries_increment()
        attempt = self.get_retries()
        self._log.info(
            f"Retrying attempt #{attempt} "
            f"for query:\n{self.get_managed_operation().get_operation()}"
        )
        try:
            super().execute(
                *self.get_managed_operation().get_operation_parameters_tuple()
            )
            self._log.info(f"Retrying attempt #{attempt} for query SUCCEEDED.")
        except Exception as e:
            self._log.warning(
                f"Retrying attempt #{attempt} for query FAILED. Exception: {e}"
            )
            raise e
| 35.810811 | 105 | 0.647296 |
from vdk.internal.builtin_plugins.connection.decoration_cursor import DecorationCursor
from vdk.internal.builtin_plugins.connection.decoration_cursor import ManagedOperation
from vdk.internal.builtin_plugins.connection.pep249.interfaces import PEP249Cursor
class RecoveryCursor(PEP249Cursor):
def __init__(
self,
native_cursor: PEP249Cursor,
log,
exception,
managed_operation: ManagedOperation,
decoration_operation_callback,
):
super().__init__(native_cursor, log)
self.__exception = exception
self.__managed_operation = managed_operation
self.__decoration_operation_callback = decoration_operation_callback
self.__retries = 0
def get_exception(self) -> Exception:
return self.__exception
def get_managed_operation(self) -> ManagedOperation:
return self.__managed_operation
def get_retries(self) -> int:
return self.__retries
def execute(self, operation, parameters=None) -> None:
managed_operation = ManagedOperation(operation, parameters)
if self.__decoration_operation_callback:
self._log.debug("Before executing recovery query:\n%s" % operation)
self.__decoration_operation_callback(
decoration_cursor=DecorationCursor(
self._cursor, self._log, managed_operation
)
)
self._log.info(
"Executing recovery query:\n%s" % managed_operation.get_operation()
)
try:
super().execute(*managed_operation.get_operation_parameters_tuple())
self._log.info("Executing recovery query SUCCEEDED.")
except Exception as e:
self.retries_increment()
self._log.warning(f"Executing recovery query FAILED. Exception: {e}")
raise e
def retry_operation(self) -> None:
self.retries_increment()
retry_number = self.get_retries()
self._log.info(
f"Retrying attempt #{retry_number} "
f"for query:\n{self.get_managed_operation().get_operation()}"
)
try:
super().execute(
*self.get_managed_operation().get_operation_parameters_tuple()
)
self._log.info(f"Retrying attempt #{retry_number} for query SUCCEEDED.")
except Exception as e:
self._log.warning(
f"Retrying attempt #{retry_number} for query FAILED. Exception: {e}"
)
raise e
def retries_increment(self) -> None:
self.__retries += 1
| true | true |
f72dedbca91a85778a91704c65c27a57e88c9b11 | 12,504 | py | Python | pkgs/nltk-3.2-py27_0/lib/python2.7/site-packages/nltk/classify/decisiontree.py | wangyum/anaconda | 6e5a0dbead3327661d73a61e85414cf92aa52be6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | pkgs/nltk-3.2-py27_0/lib/python2.7/site-packages/nltk/classify/decisiontree.py | wangyum/anaconda | 6e5a0dbead3327661d73a61e85414cf92aa52be6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | pkgs/nltk-3.2-py27_0/lib/python2.7/site-packages/nltk/classify/decisiontree.py | wangyum/anaconda | 6e5a0dbead3327661d73a61e85414cf92aa52be6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # Natural Language Toolkit: Decision Tree Classifiers
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
A classifier model that decides which label to assign to a token on
the basis of a tree structure, where branches correspond to conditions
on feature values, and leaves correspond to label assignments.
"""
from __future__ import print_function, unicode_literals, division
from collections import defaultdict
from nltk.probability import FreqDist, MLEProbDist, entropy
from nltk.classify.api import ClassifierI
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class DecisionTreeClassifier(ClassifierI):
    """A classifier that assigns labels by walking a tree: each internal
    node tests the value of a single feature and each leaf carries the label
    to assign.  Trees are grown greedily by ``train()``: pick the one-level
    tree ("stump") with the lowest training error, then recursively refine
    its children."""

    def __init__(self, label, feature_name=None, decisions=None, default=None):
        """
        :param label: The most likely label for tokens that reach
            this node in the decision tree.  If this decision tree
            has no children, then this label will be assigned to
            any token that reaches this decision tree.
        :param feature_name: The name of the feature that this
            decision tree selects for.
        :param decisions: A dictionary mapping from feature values
            for the feature identified by ``feature_name`` to
            child decision trees.
        :param default: The child that will be used if the value of
            feature ``feature_name`` does not match any of the keys in
            ``decisions``.  This is used when constructing binary
            decision trees.
        """
        self._label = label
        self._fname = feature_name
        self._decisions = decisions
        self._default = default

    def labels(self):
        """Return every label assignable by this node or any descendant."""
        labels = [self._label]
        if self._decisions is not None:
            for dt in self._decisions.values():
                labels.extend(dt.labels())
        if self._default is not None:
            labels.extend(self._default.labels())
        return list(set(labels))

    def classify(self, featureset):
        """Return the label for ``featureset`` by descending the tree."""
        # Decision leaf:
        if self._fname is None:
            return self._label
        # Decision tree: follow the branch matching this node's feature
        # value, falling back to the default child, then to this node's own
        # label when neither applies.
        fval = featureset.get(self._fname)
        if fval in self._decisions:
            return self._decisions[fval].classify(featureset)
        elif self._default is not None:
            return self._default.classify(featureset)
        else:
            return self._label

    def error(self, labeled_featuresets):
        """Return the fraction of ``labeled_featuresets`` misclassified by
        this tree."""
        errors = 0
        for featureset, label in labeled_featuresets:
            if self.classify(featureset) != label:
                errors += 1
        return errors/len(labeled_featuresets)

    def pretty_format(self, width=70, prefix='', depth=4):
        """
        Return a string containing a pretty-printed version of this
        decision tree.  Each line in this string corresponds to a
        single decision tree node or leaf, and indentation is used to
        display the structure of the decision tree.

        :param width: Total column width of each printed line.
        :param prefix: Indentation prefix for this subtree.
        :param depth: Maximum number of tree levels to print.
        """
        if self._fname is None:
            n = width-len(prefix)-15
            return '%s%s %s\n' % (prefix, '.'*n, self._label)
        s = ''
        for i, (fval, result) in enumerate(sorted(self._decisions.items())):
            hdr = '%s%s=%s? ' % (prefix, self._fname, fval)
            n = width-15-len(hdr)
            s += '%s%s %s\n' % (hdr, '.'*(n), result._label)
            if result._fname is not None and depth>1:
                s += result.pretty_format(width, prefix+' ', depth-1)
        # The binary-tree default child is rendered as an "else:" line.
        if self._default is not None:
            n = width-len(prefix)-21
            s += '%selse: %s %s\n' % (prefix, '.'*n, self._default._label)
            if self._default._fname is not None and depth>1:
                s += self._default.pretty_format(width, prefix+' ', depth-1)
        return s

    def pseudocode(self, prefix='', depth=4):
        """
        Return a string representation of this decision tree that
        expresses the decisions it makes as a nested set of pseudocode
        if statements.

        :param prefix: Indentation prefix for this subtree.
        :param depth: Maximum number of nested levels to expand.
        """
        if self._fname is None:
            return "%sreturn %r\n" % (prefix, self._label)
        s = ''
        for (fval, result) in sorted(self._decisions.items()):
            s += '%sif %s == %r: ' % (prefix, self._fname, fval)
            if result._fname is not None and depth>1:
                s += '\n'+result.pseudocode(prefix+' ', depth-1)
            else:
                s += 'return %r\n' % result._label
        if self._default is not None:
            # With exactly one decision the default reads better as the
            # negated test; otherwise emit a plain "else:".
            if len(self._decisions) == 1:
                s += '%sif %s != %r: '% (prefix, self._fname,
                        list(self._decisions.keys())[0])
            else:
                s += '%selse: ' % (prefix,)
            if self._default._fname is not None and depth>1:
                s += '\n'+self._default.pseudocode(prefix+' ', depth-1)
            else:
                s += 'return %r\n' % self._default._label
        return s

    def __str__(self):
        """Pretty-printed tree (same as ``pretty_format()``)."""
        return self.pretty_format()

    @staticmethod
    def train(labeled_featuresets, entropy_cutoff=0.05, depth_cutoff=100,
              support_cutoff=10, binary=False, feature_values=None,
              verbose=False):
        """
        Build a decision tree: choose the best stump, then recursively
        refine it on each branch's subset of the training data.

        :param entropy_cutoff: Stop refining a branch once its label
            entropy drops to this value or below.
        :param depth_cutoff: Maximum tree depth.
        :param support_cutoff: Stop refining branches with at most this
            many training instances.
        :param binary: If true, then treat all feature/value pairs as
            individual binary features, rather than using a single n-way
            branch for each feature.
        :param feature_values: Optional precomputed map from feature name
            to its set of observed values (binary mode only).
        :param verbose: If true, print the best stump chosen at each step.
        """
        # Collect a list of all feature names.
        feature_names = set()
        for featureset, label in labeled_featuresets:
            for fname in featureset:
                feature_names.add(fname)
        # Collect a list of the values each feature can take.
        if feature_values is None and binary:
            feature_values = defaultdict(set)
            for featureset, label in labeled_featuresets:
                for fname, fval in featureset.items():
                    feature_values[fname].add(fval)
        # Start with a stump.
        if not binary:
            tree = DecisionTreeClassifier.best_stump(
                feature_names, labeled_featuresets, verbose)
        else:
            tree = DecisionTreeClassifier.best_binary_stump(
                feature_names, labeled_featuresets, feature_values, verbose)
        # Refine the stump.
        tree.refine(labeled_featuresets, entropy_cutoff, depth_cutoff-1,
                    support_cutoff, binary, feature_values, verbose)
        # Return it
        return tree

    @staticmethod
    def leaf(labeled_featuresets):
        """Return a leaf labeled with the most frequent training label."""
        label = FreqDist(label for (featureset, label)
                         in labeled_featuresets).max()
        return DecisionTreeClassifier(label)

    @staticmethod
    def stump(feature_name, labeled_featuresets):
        """Return a one-level tree branching n ways on ``feature_name``."""
        label = FreqDist(label for (featureset, label)
                         in labeled_featuresets).max()
        # Find the best label for each value.
        freqs = defaultdict(FreqDist) # freq(label|value)
        for featureset, label in labeled_featuresets:
            feature_value = featureset.get(feature_name)
            freqs[feature_value][label] += 1
        decisions = dict((val, DecisionTreeClassifier(freqs[val].max()))
                         for val in freqs)
        return DecisionTreeClassifier(label, feature_name, decisions)

    def refine(self, labeled_featuresets, entropy_cutoff, depth_cutoff,
               support_cutoff, binary=False, feature_values=None,
               verbose=False):
        """Recursively grow subtrees for every branch (and the default
        child) whose training subset still has label entropy above
        ``entropy_cutoff``, subject to the support and depth cutoffs."""
        if len(labeled_featuresets) <= support_cutoff: return
        if self._fname is None: return
        if depth_cutoff <= 0: return
        for fval in self._decisions:
            # Restrict to the instances that take this branch.
            fval_featuresets = [(featureset, label) for (featureset, label)
                                in labeled_featuresets
                                if featureset.get(self._fname) == fval]
            label_freqs = FreqDist(label for (featureset, label)
                                   in fval_featuresets)
            if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
                self._decisions[fval] = DecisionTreeClassifier.train(
                    fval_featuresets, entropy_cutoff, depth_cutoff,
                    support_cutoff, binary, feature_values, verbose)
        if self._default is not None:
            # Instances whose feature value matched none of the branches.
            default_featuresets = [(featureset, label) for (featureset, label)
                                   in labeled_featuresets
                                   if featureset.get(self._fname) not in
                                   self._decisions]
            label_freqs = FreqDist(label for (featureset, label)
                                   in default_featuresets)
            if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
                self._default = DecisionTreeClassifier.train(
                    default_featuresets, entropy_cutoff, depth_cutoff,
                    support_cutoff, binary, feature_values, verbose)

    @staticmethod
    def best_stump(feature_names, labeled_featuresets, verbose=False):
        """Return the stump (or plain leaf) with the lowest training error
        over all candidate features."""
        best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
        best_error = best_stump.error(labeled_featuresets)
        for fname in feature_names:
            stump = DecisionTreeClassifier.stump(fname, labeled_featuresets)
            stump_error = stump.error(labeled_featuresets)
            if stump_error < best_error:
                best_error = stump_error
                best_stump = stump
        if verbose:
            print(('best stump for %6d toks uses %-20s err=%6.4f' %
                   (len(labeled_featuresets), best_stump._fname, best_error)))
        return best_stump

    @staticmethod
    def binary_stump(feature_name, feature_value, labeled_featuresets):
        """Return a one-level tree testing ``feature_name == feature_value``,
        with the non-matching instances handled by the default child."""
        label = FreqDist(label for (featureset, label)
                         in labeled_featuresets).max()
        # Find the best label for each value.
        pos_fdist = FreqDist()
        neg_fdist = FreqDist()
        for featureset, label in labeled_featuresets:
            if featureset.get(feature_name) == feature_value:
                pos_fdist[label] += 1
            else:
                neg_fdist[label] += 1
        decisions = {}
        default = label
        # But hopefully we have observations!
        # NOTE(review): if neg_fdist is empty, ``default`` stays a raw label
        # rather than a DecisionTreeClassifier — apparently relied on by
        # classify()'s fallback; confirm before changing.
        if pos_fdist.N() > 0:
            decisions = {feature_value: DecisionTreeClassifier(pos_fdist.max())}
        if neg_fdist.N() > 0:
            default = DecisionTreeClassifier(neg_fdist.max())
        return DecisionTreeClassifier(label, feature_name, decisions, default)

    @staticmethod
    def best_binary_stump(feature_names, labeled_featuresets, feature_values,
                          verbose=False):
        """Return the binary stump (or plain leaf) with the lowest training
        error over all (feature, value) pairs."""
        best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
        best_error = best_stump.error(labeled_featuresets)
        for fname in feature_names:
            for fval in feature_values[fname]:
                stump = DecisionTreeClassifier.binary_stump(
                    fname, fval, labeled_featuresets)
                stump_error = stump.error(labeled_featuresets)
                if stump_error < best_error:
                    best_error = stump_error
                    best_stump = stump
        if best_stump._decisions:
            descr = '%s=%s' % (best_stump._fname,
                               list(best_stump._decisions.keys())[0])
        else:
            descr = '(default)'
        if verbose:
            print(('best stump for %6d toks uses %-20s err=%6.4f' %
                   (len(labeled_featuresets), descr, best_error)))
        return best_stump
##//////////////////////////////////////////////////////
## Demo
##//////////////////////////////////////////////////////
def f(x):
    """Demo training wrapper: train a binary decision tree, verbosely."""
    return DecisionTreeClassifier.train(x, binary=True, verbose=True)
def demo():
    """Train a decision tree on the names corpus and print the resulting
    tree and its pseudocode."""
    from nltk.classify.util import names_demo, binary_names_demo_features
    classifier = names_demo(f, binary_names_demo_features)
    # The class defines ``pretty_format``, not ``pp`` — calling ``pp`` here
    # raised AttributeError.
    print(classifier.pretty_format(depth=7))
    print(classifier.pseudocode(depth=7))
print(classifier.pseudocode(depth=7))
# Run the demonstration when this module is executed as a script.
if __name__ == '__main__':
    demo()
| 42.243243 | 81 | 0.581334 |
from __future__ import print_function, unicode_literals, division
from collections import defaultdict
from nltk.probability import FreqDist, MLEProbDist, entropy
from nltk.classify.api import ClassifierI
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class DecisionTreeClassifier(ClassifierI):
def __init__(self, label, feature_name=None, decisions=None, default=None):
self._label = label
self._fname = feature_name
self._decisions = decisions
self._default = default
def labels(self):
labels = [self._label]
if self._decisions is not None:
for dt in self._decisions.values():
labels.extend(dt.labels())
if self._default is not None:
labels.extend(self._default.labels())
return list(set(labels))
def classify(self, featureset):
if self._fname is None:
return self._label
fval = featureset.get(self._fname)
if fval in self._decisions:
return self._decisions[fval].classify(featureset)
elif self._default is not None:
return self._default.classify(featureset)
else:
return self._label
def error(self, labeled_featuresets):
errors = 0
for featureset, label in labeled_featuresets:
if self.classify(featureset) != label:
errors += 1
return errors/len(labeled_featuresets)
def pretty_format(self, width=70, prefix='', depth=4):
if self._fname is None:
n = width-len(prefix)-15
return '%s%s %s\n' % (prefix, '.'*n, self._label)
s = ''
for i, (fval, result) in enumerate(sorted(self._decisions.items())):
hdr = '%s%s=%s? ' % (prefix, self._fname, fval)
n = width-15-len(hdr)
s += '%s%s %s\n' % (hdr, '.'*(n), result._label)
if result._fname is not None and depth>1:
s += result.pretty_format(width, prefix+' ', depth-1)
if self._default is not None:
n = width-len(prefix)-21
s += '%selse: %s %s\n' % (prefix, '.'*n, self._default._label)
if self._default._fname is not None and depth>1:
s += self._default.pretty_format(width, prefix+' ', depth-1)
return s
def pseudocode(self, prefix='', depth=4):
if self._fname is None:
return "%sreturn %r\n" % (prefix, self._label)
s = ''
for (fval, result) in sorted(self._decisions.items()):
s += '%sif %s == %r: ' % (prefix, self._fname, fval)
if result._fname is not None and depth>1:
s += '\n'+result.pseudocode(prefix+' ', depth-1)
else:
s += 'return %r\n' % result._label
if self._default is not None:
if len(self._decisions) == 1:
s += '%sif %s != %r: '% (prefix, self._fname,
list(self._decisions.keys())[0])
else:
s += '%selse: ' % (prefix,)
if self._default._fname is not None and depth>1:
s += '\n'+self._default.pseudocode(prefix+' ', depth-1)
else:
s += 'return %r\n' % self._default._label
return s
def __str__(self):
return self.pretty_format()
@staticmethod
def train(labeled_featuresets, entropy_cutoff=0.05, depth_cutoff=100,
support_cutoff=10, binary=False, feature_values=None,
verbose=False):
feature_names = set()
for featureset, label in labeled_featuresets:
for fname in featureset:
feature_names.add(fname)
if feature_values is None and binary:
feature_values = defaultdict(set)
for featureset, label in labeled_featuresets:
for fname, fval in featureset.items():
feature_values[fname].add(fval)
if not binary:
tree = DecisionTreeClassifier.best_stump(
feature_names, labeled_featuresets, verbose)
else:
tree = DecisionTreeClassifier.best_binary_stump(
feature_names, labeled_featuresets, feature_values, verbose)
tree.refine(labeled_featuresets, entropy_cutoff, depth_cutoff-1,
support_cutoff, binary, feature_values, verbose)
return tree
@staticmethod
def leaf(labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
return DecisionTreeClassifier(label)
@staticmethod
def stump(feature_name, labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
freqs = defaultdict(FreqDist)
for featureset, label in labeled_featuresets:
feature_value = featureset.get(feature_name)
freqs[feature_value][label] += 1
decisions = dict((val, DecisionTreeClassifier(freqs[val].max()))
for val in freqs)
return DecisionTreeClassifier(label, feature_name, decisions)
def refine(self, labeled_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary=False, feature_values=None,
verbose=False):
if len(labeled_featuresets) <= support_cutoff: return
if self._fname is None: return
if depth_cutoff <= 0: return
for fval in self._decisions:
fval_featuresets = [(featureset, label) for (featureset, label)
in labeled_featuresets
if featureset.get(self._fname) == fval]
label_freqs = FreqDist(label for (featureset, label)
in fval_featuresets)
if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
self._decisions[fval] = DecisionTreeClassifier.train(
fval_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary, feature_values, verbose)
if self._default is not None:
default_featuresets = [(featureset, label) for (featureset, label)
in labeled_featuresets
if featureset.get(self._fname) not in
self._decisions]
label_freqs = FreqDist(label for (featureset, label)
in default_featuresets)
if entropy(MLEProbDist(label_freqs)) > entropy_cutoff:
self._default = DecisionTreeClassifier.train(
default_featuresets, entropy_cutoff, depth_cutoff,
support_cutoff, binary, feature_values, verbose)
@staticmethod
def best_stump(feature_names, labeled_featuresets, verbose=False):
best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
best_error = best_stump.error(labeled_featuresets)
for fname in feature_names:
stump = DecisionTreeClassifier.stump(fname, labeled_featuresets)
stump_error = stump.error(labeled_featuresets)
if stump_error < best_error:
best_error = stump_error
best_stump = stump
if verbose:
print(('best stump for %6d toks uses %-20s err=%6.4f' %
(len(labeled_featuresets), best_stump._fname, best_error)))
return best_stump
@staticmethod
def binary_stump(feature_name, feature_value, labeled_featuresets):
label = FreqDist(label for (featureset, label)
in labeled_featuresets).max()
pos_fdist = FreqDist()
neg_fdist = FreqDist()
for featureset, label in labeled_featuresets:
if featureset.get(feature_name) == feature_value:
pos_fdist[label] += 1
else:
neg_fdist[label] += 1
decisions = {}
default = label
if pos_fdist.N() > 0:
decisions = {feature_value: DecisionTreeClassifier(pos_fdist.max())}
if neg_fdist.N() > 0:
default = DecisionTreeClassifier(neg_fdist.max())
return DecisionTreeClassifier(label, feature_name, decisions, default)
@staticmethod
def best_binary_stump(feature_names, labeled_featuresets, feature_values,
verbose=False):
best_stump = DecisionTreeClassifier.leaf(labeled_featuresets)
best_error = best_stump.error(labeled_featuresets)
for fname in feature_names:
for fval in feature_values[fname]:
stump = DecisionTreeClassifier.binary_stump(
fname, fval, labeled_featuresets)
stump_error = stump.error(labeled_featuresets)
if stump_error < best_error:
best_error = stump_error
best_stump = stump
if best_stump._decisions:
descr = '%s=%s' % (best_stump._fname,
list(best_stump._decisions.keys())[0])
else:
descr = '(default)'
if verbose:
print(('best stump for %6d toks uses %-20s err=%6.4f' %
(len(labeled_featuresets), descr, best_error)))
return best_stump
def demo():
    """Train a binary decision tree on the NLTK names corpus and print it.

    Reconstruction note: the original chunk was truncated here — the
    ``def demo():`` header and helper were lost and the import line was
    mangled to ``sify.util import ...``.  Rebuilt from the surviving calls.
    """
    from nltk.classify.util import names_demo, binary_names_demo_features

    def f(x):
        # Trainer callback handed to names_demo.
        return DecisionTreeClassifier.train(x, binary=True, verbose=True)

    classifier = names_demo(f,
                            binary_names_demo_features)
    print(classifier.pp(depth=7))
    print(classifier.pseudocode(depth=7))
if __name__ == '__main__':
demo()
| true | true |
f72dedd88334ea8574c1e9464e0dba75f3bbd9f6 | 135 | py | Python | Klang/lang/__init__.py | asmcos/Klang | 3ba4a1eeb74952101f4631ed3501c5deb5f3f255 | [
"Apache-2.0"
] | 37 | 2021-05-26T05:48:31.000Z | 2022-02-12T15:43:07.000Z | Klang/lang/__init__.py | asmcos/Klang | 3ba4a1eeb74952101f4631ed3501c5deb5f3f255 | [
"Apache-2.0"
] | 2 | 2021-07-07T00:27:13.000Z | 2021-12-23T08:29:16.000Z | Klang/lang/__init__.py | asmcos/Klang | 3ba4a1eeb74952101f4631ed3501c5deb5f3f255 | [
"Apache-2.0"
] | 15 | 2021-06-28T06:11:53.000Z | 2022-01-17T06:55:26.000Z |
# lang是提供股票编程语言,注重语法的翻译
# 利用了python 的ply lex,yacc
# setPY提供klang和python之间桥梁可以同享 函数和变量
from .kparse import *
from .mAST import setPY
| 16.875 | 35 | 0.792593 |
from .kparse import *
from .mAST import setPY
| true | true |
f72deed3b46f822170703b719de22d967f764017 | 1,123 | py | Python | test_project/users/tests/test_forms.py | gtsapelas/test_project | 4ff4542da02130c4428f8a2098a065f1d40c0297 | [
"MIT"
] | null | null | null | test_project/users/tests/test_forms.py | gtsapelas/test_project | 4ff4542da02130c4428f8a2098a065f1d40c0297 | [
"MIT"
] | null | null | null | test_project/users/tests/test_forms.py | gtsapelas/test_project | 4ff4542da02130c4428f8a2098a065f1d40c0297 | [
"MIT"
] | null | null | null | import pytest
from test_project.users.forms import UserCreationForm
from test_project.users.tests.factories import UserFactory
pytestmark = pytest.mark.django_db
class TestUserCreationForm:
    def test_clean_username(self):
        """A username can be registered once; re-registering it must fail
        validation with a single error on the ``username`` field."""
        proto_user = UserFactory.build()
        form_data = {
            "username": proto_user.username,
            "password1": proto_user._password,
            "password2": proto_user._password,
        }

        # No user with these credentials exists yet, so the form validates.
        form = UserCreationForm(form_data)
        assert form.is_valid()
        assert form.clean_username() == proto_user.username

        # Persist the user, then try to create a duplicate.
        form.save()
        form = UserCreationForm(form_data)
        assert not form.is_valid()
        assert len(form.errors) == 1
        assert "username" in form.errors
| 27.390244 | 59 | 0.596616 | import pytest
from test_project.users.forms import UserCreationForm
from test_project.users.tests.factories import UserFactory
pytestmark = pytest.mark.django_db
class TestUserCreationForm:
def test_clean_username(self):
proto_user = UserFactory.build()
form = UserCreationForm(
{
"username": proto_user.username,
"password1": proto_user._password,
"password2": proto_user._password,
}
)
assert form.is_valid()
assert form.clean_username() == proto_user.username
form.save()
form = UserCreationForm(
{
"username": proto_user.username,
"password1": proto_user._password,
"password2": proto_user._password,
}
)
assert not form.is_valid()
assert len(form.errors) == 1
assert "username" in form.errors
| true | true |
f72defa017f146b7b12a46b9b200510a13f7bf80 | 8,728 | py | Python | data/rec_data/rec_nextitem_dataset.py | cyente/OFA | 291a0abb76559a6379f1a7ebbdfdf1350c94a9f4 | [
"Apache-2.0"
] | null | null | null | data/rec_data/rec_nextitem_dataset.py | cyente/OFA | 291a0abb76559a6379f1a7ebbdfdf1350c94a9f4 | [
"Apache-2.0"
] | null | null | null | data/rec_data/rec_nextitem_dataset.py | cyente/OFA | 291a0abb76559a6379f1a7ebbdfdf1350c94a9f4 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from io import BytesIO
import logging
import warnings
import string
import numpy as np
import torch
import base64
from torchvision import transforms
from PIL import Image, ImageFile
from data import data_utils
from data.ofa_dataset import OFADataset
ImageFile.LOAD_TRUNCATED_IMAGES = True
ImageFile.MAX_IMAGE_PIXELS = None
Image.MAX_IMAGE_PIXELS = None
logger = logging.getLogger(__name__)
warnings.filterwarnings("ignore", "(Possibly )?corrupt EXIF data", UserWarning)
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
def collate(samples, pad_idx, eos_idx):
    """Collate per-sample dicts into a padded mini-batch dict.

    Returns an empty dict for an empty sample list.  ``ntokens`` counts
    non-padding target tokens when targets exist, source tokens otherwise.
    """
    if not samples:
        return {}

    def pad_field(field):
        # Pad every sample's tensor for `field` to a common length.
        return data_utils.collate_tokens(
            [sample[field] for sample in samples],
            pad_idx,
            eos_idx=eos_idx,
        )

    def nonpad_lengths(field):
        # Per-sample count of non-padding tokens in `field`.
        return torch.LongTensor(
            [sample[field].ne(pad_idx).long().sum() for sample in samples])

    sample_ids = np.array([sample["id"] for sample in samples])
    src_tokens = pad_field("source")
    src_lengths = nonpad_lengths("source")

    target = None
    prev_output_tokens = None
    if samples[0].get("target", None) is None:
        ntokens = src_lengths.sum().item()
    else:
        target = pad_field("target")
        ntokens = nonpad_lengths("target").sum().item()
        if samples[0].get("prev_output_tokens", None) is not None:
            prev_output_tokens = pad_field("prev_output_tokens")

    return {
        "id": sample_ids,
        "nsentences": len(samples),
        "ntokens": ntokens,
        "net_input": {
            "src_tokens": src_tokens,
            "src_lengths": src_lengths,
            "prev_output_tokens": prev_output_tokens
        },
        "target": target,
    }
class Rec_nextitemDataset(OFADataset):
    """OFA text-to-text dataset for next-item recommendation.

    Each raw record is ``(uniq_id, user_behavior, target_item, rating)``;
    ``__getitem__`` builds a prompt of the form
    ``"If you liked <history>, you will also like "`` with the target item
    text as the decoding target.
    """
    def __init__(
        self,
        split,
        dataset,
        bpe,
        src_dict,
        tgt_dict=None,
        max_src_length=128,
        max_tgt_length=30,
        # patch_image_size=224,
        # imagenet_default_mean_and_std=False,
        scst=False
    ):
        super().__init__(split, dataset, bpe, src_dict, tgt_dict)
        # Token budgets for the prompt and the target, respectively.
        self.max_src_length = max_src_length
        self.max_tgt_length = max_tgt_length
        # self.patch_image_size = patch_image_size
        # Whether self-critical sequence training is enabled (changes how
        # the target text is cleaned in __getitem__).
        self.scst = scst
        # Translation table that deletes all ASCII punctuation.
        self.transtab = str.maketrans({key: None for key in string.punctuation})
        # if imagenet_default_mean_and_std:
        #     mean = IMAGENET_DEFAULT_MEAN
        #     std = IMAGENET_DEFAULT_STD
        # else:
        #     mean = [0.5, 0.5, 0.5]
        #     std = [0.5, 0.5, 0.5]
        # self.patch_resize_transform = transforms.Compose([
        #     lambda image: image.convert("RGB"),
        #     transforms.Resize((patch_image_size, patch_image_size), interpolation=Image.BICUBIC),
        #     transforms.ToTensor(),
        #     transforms.Normalize(mean=mean, std=std),
        # ])
        print("self.max_tgt_length", self.max_tgt_length)
    def __getitem__(self, index):
        """Build one (source, target, prev_output_tokens) training example."""
        uniq_id, user_behavior, target_item, rating = self.dataset[index]
        # Skip punctuation-only targets.
        # NOTE(review): ``index`` never changes inside this loop, so a
        # punctuation-only record would spin forever — confirm upstream data
        # guarantees non-empty targets.
        while target_item.translate(self.transtab).strip() == "":
            uniq_id, user_behavior, target_item, rating = self.dataset[index]
        # Clip the behaviour history, then drop any trailing partial item
        # left behind by the hard character cut.
        if len(user_behavior) >= self.max_src_length - 20:
            user_behavior = user_behavior[:self.max_src_length - 20]
            if user_behavior[-1] != ",":
                user_behavior = ','.join(user_behavior.split(",")[:-1])
            else:
                user_behavior = user_behavior[:-1]
        if self.split == 'train' and not self.scst:
            # Training: strip punctuation, then clip to max_tgt_length tokens.
            target_item = target_item.translate(self.transtab).strip()
            target_item_token_list = target_item.strip().split(" ")
            tgt_explain = ' '.join(target_item_token_list[:self.max_tgt_length])
        else:
            # Eval / SCST: clip first, then clean each '&&'-separated piece.
            target_item = ' '.join(target_item.strip().split(" ")[:self.max_tgt_length])
            target_item_list = [target_item.translate(self.transtab).strip() for explain in target_item.strip().split('&&')]
            tgt_explain = '&&'.join(target_item_list)
        print("user_behavior", user_behavior)
        src_text = "If you liked " + user_behavior + \
            ", you will also like "
        assert len(src_text.split(" ")) <= self.max_src_length
        src_item = self.encode_text(src_text)
        tgt_item = self.encode_text(" {}".format(tgt_explain))
        # Wrap with BOS/EOS; prev_output_tokens is the right-shifted target.
        src_item = torch.cat([self.bos_item, src_item, self.eos_item])
        target_item = torch.cat([tgt_item, self.eos_item])
        prev_output_item = torch.cat([self.bos_item, tgt_item])
        example = {
            "id": uniq_id,
            "source": src_item,
            # "patch_image": patch_image,
            # "patch_mask": patch_mask,
            "target": target_item,
            "prev_output_tokens": prev_output_item
        }
        return example
    def collater(self, samples, pad_to_length=None):
        """Merge a list of samples to form a mini-batch.

        Args:
            samples (List[dict]): samples to collate
        Returns:
            dict: a mini-batch with id/ntokens/net_input/target keys
        """
        return collate(samples, pad_idx=self.pad, eos_idx=self.eos)
    def __getitem__2(self, index):
        """Debug/legacy variant for the explanation-generation task.

        NOTE(review): not the active ``__getitem__``; the unconditional
        ``assert False`` inside the loop below stops it after one pass, so
        this method cannot currently run to completion.  Confirm intent
        before reviving or deleting.
        """
        # uniq_id, user_behavior, explaination, fea, opt = self.dataset[index]
        # print("user_behavior", user_behavior)
        # print("fea", fea, "opt", opt)
        # print("user_behavior", user_behavior)
        tgt_explain = "asdasss ssa"
        while tgt_explain.strip() != "":
            uniq_id, user_behavior, explaination, fea, opt = self.dataset[index]
            # Skip punctuation-only explanations.
            while explaination.translate(self.transtab).strip() == "":
                uniq_id, user_behavior, explaination, fea, opt = self.dataset[index]
            print("explaination begin", explaination)
            # Clip the pre-" Right now, " history so the whole prompt fits.
            tmp_user_beha = user_behavior.split(" Right now, ")
            len_context = len(tmp_user_beha[1].split(" "))
            behavior_list = tmp_user_beha[0].split(" ")[0: self.max_src_length - 40 - len_context]
            behavior_ = " ".join(behavior_list)
            # Ensure the clipped history ends with a full sentence.
            if behavior_[-1] == ",":
                behavior_ = behavior_[:-1] + '.'
            if behavior_[-1] != ".":
                behavior_ = ','.join(behavior_.split(",")[:-1]) + '.'
            user_behavior = " right now, ".join([behavior_, tmp_user_beha[1]])
            user_behavior += \
                " the user cares about {} and the item is {}.".format(fea, opt)
            # image = Image.open(BytesIO(base64.urlsafe_b64decode(image)))
            # patch_image = self.patch_resize_transform(image)
            # patch_mask = torch.tensor([True])
            if self.split == 'train' and not self.scst:
                explaination = explaination.translate(self.transtab).strip()
                print("explaination.translate(self.transtab).strip()", explaination.translate(self.transtab).strip())
                explaination_token_list = explaination.strip().split(" ")
                tgt_explain = ' '.join(explaination_token_list[:self.max_tgt_length])
            else:
                explaination = ' '.join(explaination.strip().split(" ")[:self.max_tgt_length])
                explain_list = [explain.translate(self.transtab).strip() for explain in explaination.strip().split('&&')]
                tgt_explain = '&&'.join(explain_list)
            print("explaination", explaination)
            print("tgt_explain", tgt_explain)
            assert False
        src_text = user_behavior + \
            " how to persuade the user to buy the item?"
        # print("src_text", src_text.split(" ")[0:320])
        assert len(src_text.split(" ")) <= self.max_src_length
        src_item = self.encode_text(src_text)
        tgt_item = self.encode_text(" {}".format(tgt_explain))
        src_item = torch.cat([self.bos_item, src_item, self.eos_item])
        target_item = torch.cat([tgt_item, self.eos_item])
        prev_output_item = torch.cat([self.bos_item, tgt_item])
        example = {
            "id": uniq_id,
            "source": src_item,
            # "patch_image": patch_image,
            # "patch_mask": patch_mask,
            "target": target_item,
            "prev_output_tokens": prev_output_item
        }
        return example
| 37.947826 | 124 | 0.605408 |
from io import BytesIO
import logging
import warnings
import string
import numpy as np
import torch
import base64
from torchvision import transforms
from PIL import Image, ImageFile
from data import data_utils
from data.ofa_dataset import OFADataset
ImageFile.LOAD_TRUNCATED_IMAGES = True
ImageFile.MAX_IMAGE_PIXELS = None
Image.MAX_IMAGE_PIXELS = None
logger = logging.getLogger(__name__)
warnings.filterwarnings("ignore", "(Possibly )?corrupt EXIF data", UserWarning)
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
def collate(samples, pad_idx, eos_idx):
if len(samples) == 0:
return {}
def merge(key):
return data_utils.collate_tokens(
[s[key] for s in samples],
pad_idx,
eos_idx=eos_idx,
)
id = np.array([s["id"] for s in samples])
src_tokens = merge("source")
src_lengths = torch.LongTensor([s["source"].ne(pad_idx).long().sum() for s in samples])
prev_output_tokens = None
target = None
if samples[0].get("target", None) is not None:
target = merge("target")
tgt_lengths = torch.LongTensor([s["target"].ne(pad_idx).long().sum() for s in samples])
ntokens = tgt_lengths.sum().item()
if samples[0].get("prev_output_tokens", None) is not None:
prev_output_tokens = merge("prev_output_tokens")
else:
ntokens = src_lengths.sum().item()
batch = {
"id": id,
"nsentences": len(samples),
"ntokens": ntokens,
"net_input": {
"src_tokens": src_tokens,
"src_lengths": src_lengths,
"prev_output_tokens": prev_output_tokens
},
"target": target,
}
return batch
class Rec_nextitemDataset(OFADataset):
def __init__(
self,
split,
dataset,
bpe,
src_dict,
tgt_dict=None,
max_src_length=128,
max_tgt_length=30,
scst=False
):
super().__init__(split, dataset, bpe, src_dict, tgt_dict)
self.max_src_length = max_src_length
self.max_tgt_length = max_tgt_length
self.scst = scst
self.transtab = str.maketrans({key: None for key in string.punctuation})
print("self.max_tgt_length", self.max_tgt_length)
def __getitem__(self, index):
uniq_id, user_behavior, target_item, rating = self.dataset[index]
while target_item.translate(self.transtab).strip() == "":
uniq_id, user_behavior, target_item, rating = self.dataset[index]
if len(user_behavior) >= self.max_src_length - 20:
user_behavior = user_behavior[:self.max_src_length - 20]
if user_behavior[-1] != ",":
user_behavior = ','.join(user_behavior.split(",")[:-1])
else:
user_behavior = user_behavior[:-1]
if self.split == 'train' and not self.scst:
target_item = target_item.translate(self.transtab).strip()
target_item_token_list = target_item.strip().split(" ")
tgt_explain = ' '.join(target_item_token_list[:self.max_tgt_length])
else:
target_item = ' '.join(target_item.strip().split(" ")[:self.max_tgt_length])
target_item_list = [target_item.translate(self.transtab).strip() for explain in target_item.strip().split('&&')]
tgt_explain = '&&'.join(target_item_list)
print("user_behavior", user_behavior)
src_text = "If you liked " + user_behavior + \
", you will also like "
assert len(src_text.split(" ")) <= self.max_src_length
src_item = self.encode_text(src_text)
tgt_item = self.encode_text(" {}".format(tgt_explain))
src_item = torch.cat([self.bos_item, src_item, self.eos_item])
target_item = torch.cat([tgt_item, self.eos_item])
prev_output_item = torch.cat([self.bos_item, tgt_item])
example = {
"id": uniq_id,
"source": src_item,
"target": target_item,
"prev_output_tokens": prev_output_item
}
return example
def collater(self, samples, pad_to_length=None):
return collate(samples, pad_idx=self.pad, eos_idx=self.eos)
def __getitem__2(self, index):
tgt_explain = "asdasss ssa"
while tgt_explain.strip() != "":
uniq_id, user_behavior, explaination, fea, opt = self.dataset[index]
while explaination.translate(self.transtab).strip() == "":
uniq_id, user_behavior, explaination, fea, opt = self.dataset[index]
print("explaination begin", explaination)
tmp_user_beha = user_behavior.split(" Right now, ")
len_context = len(tmp_user_beha[1].split(" "))
behavior_list = tmp_user_beha[0].split(" ")[0: self.max_src_length - 40 - len_context]
behavior_ = " ".join(behavior_list)
if behavior_[-1] == ",":
behavior_ = behavior_[:-1] + '.'
if behavior_[-1] != ".":
behavior_ = ','.join(behavior_.split(",")[:-1]) + '.'
user_behavior = " right now, ".join([behavior_, tmp_user_beha[1]])
user_behavior += \
" the user cares about {} and the item is {}.".format(fea, opt)
if self.split == 'train' and not self.scst:
explaination = explaination.translate(self.transtab).strip()
print("explaination.translate(self.transtab).strip()", explaination.translate(self.transtab).strip())
explaination_token_list = explaination.strip().split(" ")
tgt_explain = ' '.join(explaination_token_list[:self.max_tgt_length])
else:
explaination = ' '.join(explaination.strip().split(" ")[:self.max_tgt_length])
explain_list = [explain.translate(self.transtab).strip() for explain in explaination.strip().split('&&')]
tgt_explain = '&&'.join(explain_list)
print("explaination", explaination)
print("tgt_explain", tgt_explain)
assert False
src_text = user_behavior + \
" how to persuade the user to buy the item?"
assert len(src_text.split(" ")) <= self.max_src_length
src_item = self.encode_text(src_text)
tgt_item = self.encode_text(" {}".format(tgt_explain))
src_item = torch.cat([self.bos_item, src_item, self.eos_item])
target_item = torch.cat([tgt_item, self.eos_item])
prev_output_item = torch.cat([self.bos_item, tgt_item])
example = {
"id": uniq_id,
"source": src_item,
"target": target_item,
"prev_output_tokens": prev_output_item
}
return example
| true | true |
f72defcb28cd0ceac38dddd76a6b7e0b93009a72 | 1,287 | py | Python | kivy/core/text/text_sdl2.py | Kolandiolaka/kivy | ed2615e542d19a7b2f1a8afa85af5e1b190cd519 | [
"MIT"
] | 13,889 | 2015-01-01T06:43:41.000Z | 2022-03-31T17:37:56.000Z | kivy/core/text/text_sdl2.py | Kolandiolaka/kivy | ed2615e542d19a7b2f1a8afa85af5e1b190cd519 | [
"MIT"
] | 4,570 | 2015-01-01T17:58:52.000Z | 2022-03-31T18:42:16.000Z | kivy/core/text/text_sdl2.py | Kolandiolaka/kivy | ed2615e542d19a7b2f1a8afa85af5e1b190cd519 | [
"MIT"
] | 3,786 | 2015-01-01T09:20:45.000Z | 2022-03-30T21:15:05.000Z | '''
SDL2 text provider
==================
Based on SDL2 + SDL2_ttf
'''
__all__ = ('LabelSDL2', )
from kivy.compat import PY2
from kivy.core.text import LabelBase
try:
from kivy.core.text._text_sdl2 import (_SurfaceContainer, _get_extents,
_get_fontdescent, _get_fontascent)
except ImportError:
from kivy.core import handle_win_lib_import_error
handle_win_lib_import_error(
'text', 'sdl2', 'kivy.core.text._text_sdl2')
raise
class LabelSDL2(LabelBase):

    def _get_font_id(self):
        """Return a cache key built from the options that affect rendering."""
        option_keys = ('font_size', 'font_name_r', 'bold',
                       'italic', 'underline', 'strikethrough')
        return '|'.join(str(self.options[key]) for key in option_keys)

    def get_extents(self, text):
        """Measure the rendered extents of ``text``."""
        if PY2:
            # Best-effort encode for the C extension under Python 2.
            try:
                text = text.encode('UTF-8')
            except:
                pass
        return _get_extents(self, text)

    def get_descent(self):
        return _get_fontdescent(self)

    def get_ascent(self):
        return _get_fontascent(self)

    def _render_begin(self):
        # Allocate the SDL surface sized to the computed label size.
        width, height = self._size
        self._surface = _SurfaceContainer(width, height)

    def _render_text(self, text, x, y):
        self._surface.render(self, text, x, y)

    def _render_end(self):
        return self._surface.get_data()
| 25.235294 | 77 | 0.609946 |
__all__ = ('LabelSDL2', )
from kivy.compat import PY2
from kivy.core.text import LabelBase
try:
from kivy.core.text._text_sdl2 import (_SurfaceContainer, _get_extents,
_get_fontdescent, _get_fontascent)
except ImportError:
from kivy.core import handle_win_lib_import_error
handle_win_lib_import_error(
'text', 'sdl2', 'kivy.core.text._text_sdl2')
raise
class LabelSDL2(LabelBase):
def _get_font_id(self):
return '|'.join([str(self.options[x]) for x
in ('font_size', 'font_name_r', 'bold',
'italic', 'underline', 'strikethrough')])
def get_extents(self, text):
try:
if PY2:
text = text.encode('UTF-8')
except:
pass
return _get_extents(self, text)
def get_descent(self):
return _get_fontdescent(self)
def get_ascent(self):
return _get_fontascent(self)
def _render_begin(self):
self._surface = _SurfaceContainer(self._size[0], self._size[1])
def _render_text(self, text, x, y):
self._surface.render(self, text, x, y)
def _render_end(self):
return self._surface.get_data()
| true | true |
f72df0f5126c719af11e61fcf081f63259466724 | 194 | py | Python | src/mattermostdriver/endpoints/opengraph.py | winstonyeu/python-mattermost-driver | 67dc92ee7b8707d7eeac8a441f829f0479195c3a | [
"MIT"
] | 142 | 2017-06-26T21:35:48.000Z | 2022-02-27T20:28:15.000Z | src/mattermostdriver/endpoints/opengraph.py | winstonyeu/python-mattermost-driver | 67dc92ee7b8707d7eeac8a441f829f0479195c3a | [
"MIT"
] | 90 | 2017-08-11T17:30:53.000Z | 2022-03-30T08:17:23.000Z | src/mattermostdriver/endpoints/opengraph.py | winstonyeu/python-mattermost-driver | 67dc92ee7b8707d7eeac8a441f829f0479195c3a | [
"MIT"
] | 51 | 2017-08-01T08:55:58.000Z | 2022-03-29T05:16:36.000Z | from .base import Base
class Opengraph(Base):
    endpoint = '/opengraph'

    def get_opengraph_metadata_for_url(self, options):
        """POST ``options`` to the opengraph endpoint and return the result."""
        return self.client.post(self.endpoint, options=options)
| 16.166667 | 51 | 0.737113 | from .base import Base
class Opengraph(Base):
endpoint = '/opengraph'
def get_opengraph_metadata_for_url(self, options):
return self.client.post(
self.endpoint,
options=options
)
| true | true |
f72df15a5571bc62a8e0386b911ec48c543485a3 | 3,730 | py | Python | dataloaders/adult_loader.py | Khumayun/FairDeepLearning | e19947c17c282ce1e89ad105cc241ffc07190628 | [
"MIT"
] | 23 | 2021-06-20T07:57:49.000Z | 2022-03-21T05:45:08.000Z | dataloaders/adult_loader.py | Khumayun/FairDeepLearning | e19947c17c282ce1e89ad105cc241ffc07190628 | [
"MIT"
] | 1 | 2022-03-24T14:29:37.000Z | 2022-03-24T14:29:37.000Z | dataloaders/adult_loader.py | Khumayun/FairDeepLearning | e19947c17c282ce1e89ad105cc241ffc07190628 | [
"MIT"
] | 8 | 2021-06-20T08:01:55.000Z | 2022-03-24T14:31:41.000Z | import os
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
from dataloaders.adult_process import get_adult_data
class AdultDataset(Dataset):
    """
    The UCI Adult dataset.

    Holds feature matrix ``X``, one-hot target labels ``Y`` and one-hot
    sensitive attribute ``A`` for one split ('train'/'val'/'test').
    """

    def __init__(self, root_dir, phase, tar_attr, priv_attr, clr_ratio):
        """Load the requested split.

        Args:
            root_dir: unused here; kept for interface compatibility.
            phase: one of 'train', 'val', 'test'.
            tar_attr / priv_attr: target and sensitive attribute names.
            clr_ratio: forwarded to ``get_adult_data``.
        Raises:
            NotImplementedError: for an unknown ``phase``.
        """
        self.tar_attr = tar_attr
        self.priv_attr = priv_attr
        self.data = get_adult_data(tar_attr, priv_attr, clr_ratio)
        if phase not in ["train", "val", "test"]:
            raise NotImplementedError

        # train/val are index subsets of the training arrays; test is its
        # own set of arrays.  (The old trailing ``else: raise`` was
        # unreachable after the membership check above.)
        if phase == "train":
            inds = self.data["train_inds"]
            self.X = self.data["x_train"][inds]
            self.Y = self.data["y_train"][inds]
            self.A = self.data["attr_train"][inds]
        elif phase == "val":
            inds = self.data["valid_inds"]
            self.X = self.data["x_train"][inds]
            self.Y = self.data["y_train"][inds]
            self.A = self.data["attr_train"][inds]
        else:  # "test"
            self.X = self.data["x_test"]
            self.Y = self.data["y_test"]
            self.A = self.data["attr_test"]

        self.input_shape = self.X.shape
        self.num_samples = self.input_shape[0]
        self.xdim = self.X.shape[1]
        self.ydim = 1
        self.adim = 1

    def __len__(self):
        return len(self.X)

    def __getitem__(self, idx):
        """Return (features, one-hot label, one-hot attribute) tensors."""
        if self.ydim == 1 and len(self.Y.shape) == 2:  # binary classification
            return (
                torch.from_numpy(self.X[idx]).float(),
                torch.from_numpy(self.Y[idx]),
                torch.from_numpy(self.A[idx]),
            )
        raise NotImplementedError

    def onehot_2_int(self, ts):
        """Convert a one-hot tensor (1-D or 2-D) to integer class indices."""
        if len(ts.shape) == 2:
            return torch.argmax(ts, dim=1)
        if len(ts.shape) == 1:
            return torch.argmax(ts, dim=0)
        raise NotImplementedError

    @staticmethod
    def _label_proportions(onehot):
        """Per-class frequency fractions of a one-hot (N, C) array.

        Shared by ``get_A_proportions`` / ``get_Y_proportions``, which
        previously duplicated this logic verbatim.
        """
        assert len(onehot.shape) == 2
        labels = np.argmax(onehot, axis=1)
        num_samples = len(labels)
        return [np.sum(labels == cls) * 1.0 / num_samples
                for cls in range(onehot.shape[1])]

    def get_A_proportions(self):
        """Marginal class proportions of the sensitive attribute."""
        return self._label_proportions(self.A)

    def get_Y_proportions(self):
        """Marginal class proportions of the target label."""
        return self._label_proportions(self.Y)

    def get_AY_proportions(self):
        """Joint proportions P(A=a, Y=y) as a nested list indexed [a][y]."""
        assert len(self.Y.shape) == len(self.A.shape) == 2
        A_label = np.argmax(self.A, axis=1)
        Y_label = np.argmax(self.Y, axis=1)
        total = len(Y_label)
        return [
            [np.sum(np.logical_and(Y_label == y, A_label == a)) * 1.0 / total
             for y in range(self.Y.shape[1])]
            for a in range(self.A.shape[1])
        ]
| 35.188679 | 85 | 0.585255 | import os
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
from dataloaders.adult_process import get_adult_data
class AdultDataset(Dataset):
def __init__(self, root_dir, phase, tar_attr, priv_attr, clr_ratio):
self.tar_attr = tar_attr
self.priv_attr = priv_attr
self.data = get_adult_data(tar_attr, priv_attr, clr_ratio)
if phase not in ["train", "val", "test"]:
raise NotImplementedError
if phase == "train":
self.X = self.data[f"x_train"][self.data["train_inds"]]
self.Y = self.data[f"y_train"][self.data["train_inds"]]
self.A = self.data[f"attr_train"][self.data["train_inds"]]
elif phase == "val":
self.X = self.data[f"x_train"][self.data["valid_inds"]]
self.Y = self.data[f"y_train"][self.data["valid_inds"]]
self.A = self.data[f"attr_train"][self.data["valid_inds"]]
elif phase == "test":
self.X = self.data[f"x_test"]
self.Y = self.data[f"y_test"]
self.A = self.data[f"attr_test"]
else:
raise Exception("Wrong phase")
self.input_shape = self.X.shape
self.num_samples = self.input_shape[0]
self.xdim = self.X.shape[1]
self.ydim = 1
self.adim = 1
def __len__(self):
return len(self.X)
def __getitem__(self, idx):
if self.ydim == 1 and len(self.Y.shape) == 2:
return (
torch.from_numpy(self.X[idx]).float(),
torch.from_numpy(self.Y[idx]),
torch.from_numpy(self.A[idx]),
)
raise NotImplementedError
def onehot_2_int(self, ts):
if len(ts.shape) == 2:
return torch.argmax(ts, dim=1)
if len(ts.shape) == 1:
return torch.argmax(ts, dim=0)
raise NotImplementedError
def get_A_proportions(self):
assert len(self.A.shape) == 2
num_class = self.A.shape[1]
A_label = np.argmax(self.A, axis=1)
A_proportions = []
for cls_idx in range(num_class):
A_proportion = np.sum(cls_idx == A_label)
A_proportions.append(A_proportion)
A_proportions = [a_prop * 1.0 / len(A_label) for a_prop in A_proportions]
return A_proportions
def get_Y_proportions(self):
assert len(self.Y.shape) == 2
num_class = self.Y.shape[1]
Y_label = np.argmax(self.Y, axis=1)
Y_proportions = []
for cls_idx in range(num_class):
Y_proportion = np.sum(cls_idx == Y_label)
Y_proportions.append(Y_proportion)
Y_proportions = [y_prop * 1.0 / len(Y_label) for y_prop in Y_proportions]
return Y_proportions
def get_AY_proportions(self):
assert len(self.Y.shape) == len(self.A.shape) == 2
A_num_class = self.A.shape[1]
Y_num_class = self.Y.shape[1]
A_label = np.argmax(self.A, axis=1)
Y_label = np.argmax(self.Y, axis=1)
AY_proportions = []
for A_cls_idx in range(A_num_class):
Y_proportions = []
for Y_cls_idx in range(Y_num_class):
AY_proprtion = np.sum(
np.logical_and(Y_cls_idx == Y_label, A_cls_idx == A_label)
)
Y_proportions.append(AY_proprtion)
Y_proportions = [y_prop * 1.0 / len(Y_label) for y_prop in Y_proportions]
AY_proportions.append(Y_proportions)
return AY_proportions
| true | true |
f72df193278370bad944349250f35f6a9ac335ee | 1,813 | py | Python | Python/npr.py | MarsBighead/mustang | ffbaf109931557e40da2d97e4eb914bc1c0aba0d | [
"MIT"
] | 4 | 2017-04-30T18:28:19.000Z | 2018-12-08T15:46:37.000Z | Python/npr.py | MarsBighead/mustang | ffbaf109931557e40da2d97e4eb914bc1c0aba0d | [
"MIT"
] | 1 | 2021-09-22T20:11:36.000Z | 2021-09-22T20:11:36.000Z | Python/npr.py | MarsBighead/mustang | ffbaf109931557e40da2d97e4eb914bc1c0aba0d | [
"MIT"
] | null | null | null | #!/usr/local/bin/python3
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
print (npr.rand(5,5))
a=5.
b=10.
print (npr.rand(10)*(b-a)+a )
sample_size =500
rn1 = npr.rand(sample_size,3)
rn2 = npr.randint(0,10,sample_size)
rn3 = npr.sample(size=sample_size)
a =[0, 25, 50, 75, 100]
rn4=npr.choice(a, size=sample_size)
fig, ((ax1,ax2),(ax3,ax4))= plt.subplots(
nrows=2,
ncols=2,
figsize=(7,7)
)
ax1.hist(rn1, bins=25, stacked=True)
ax1.set_title('rand')
ax1.set_ylabel('frequency')
ax1.grid(True)
ax2.hist(rn2, bins=25)
ax2.set_title('randint')
ax2.grid(True)
ax3.hist(rn3, bins=25)
ax3.set_title('sample')
ax3.set_ylabel('frequency')
ax3.grid(True)
ax4.hist(rn4, bins=25)
ax4.set_title('choice')
ax4.grid(True)
#print (fig)
#plt.show()
fig.savefig("random-statistics.png", bbox_inches='tight')
plt.close("all")
sample_size =500
rn1 = npr.standard_normal(sample_size)
rn2 = npr.normal(100,20,sample_size)
rn3 = npr.chisquare(df=0.5, size=sample_size)
a =[0, 25, 50, 75, 100]
rn4=npr.poisson(lam=1.0, size=sample_size)
fig, ((ax1,ax2),(ax3,ax4))= plt.subplots(
nrows=2,
ncols=2,
figsize=(7,7)
)
ax1.hist(rn1, bins=25, stacked=True)
ax1.set_title('standard normal')
ax1.set_ylabel('frequency')
ax1.grid(True)
ax2.hist(rn2, bins=25)
ax2.set_title('normal(100, 20)')
ax2.grid(True)
ax3.hist(rn3, bins=25)
ax3.set_title('chi square')
ax3.set_ylabel('frequency')
ax3.grid(True)
ax4.hist(rn4, bins=25)
ax4.set_title('Poisson')
ax4.grid(True)
fig.savefig("high-statistics.png", bbox_inches='tight')
plt.show() | 22.949367 | 138 | 0.606729 |
import numpy as np
import numpy.random as npr
import matplotlib.pyplot as plt
print (npr.rand(5,5))
a=5.
b=10.
print (npr.rand(10)*(b-a)+a )
sample_size =500
rn1 = npr.rand(sample_size,3)
rn2 = npr.randint(0,10,sample_size)
rn3 = npr.sample(size=sample_size)
a =[0, 25, 50, 75, 100]
rn4=npr.choice(a, size=sample_size)
fig, ((ax1,ax2),(ax3,ax4))= plt.subplots(
nrows=2,
ncols=2,
figsize=(7,7)
)
ax1.hist(rn1, bins=25, stacked=True)
ax1.set_title('rand')
ax1.set_ylabel('frequency')
ax1.grid(True)
ax2.hist(rn2, bins=25)
ax2.set_title('randint')
ax2.grid(True)
ax3.hist(rn3, bins=25)
ax3.set_title('sample')
ax3.set_ylabel('frequency')
ax3.grid(True)
ax4.hist(rn4, bins=25)
ax4.set_title('choice')
ax4.grid(True)
fig.savefig("random-statistics.png", bbox_inches='tight')
plt.close("all")
sample_size =500
rn1 = npr.standard_normal(sample_size)
rn2 = npr.normal(100,20,sample_size)
rn3 = npr.chisquare(df=0.5, size=sample_size)
a =[0, 25, 50, 75, 100]
rn4=npr.poisson(lam=1.0, size=sample_size)
fig, ((ax1,ax2),(ax3,ax4))= plt.subplots(
nrows=2,
ncols=2,
figsize=(7,7)
)
ax1.hist(rn1, bins=25, stacked=True)
ax1.set_title('standard normal')
ax1.set_ylabel('frequency')
ax1.grid(True)
ax2.hist(rn2, bins=25)
ax2.set_title('normal(100, 20)')
ax2.grid(True)
ax3.hist(rn3, bins=25)
ax3.set_title('chi square')
ax3.set_ylabel('frequency')
ax3.grid(True)
ax4.hist(rn4, bins=25)
ax4.set_title('Poisson')
ax4.grid(True)
fig.savefig("high-statistics.png", bbox_inches='tight')
plt.show() | true | true |
f72df1d4a7a22d1279f3046f85a92c9be425e84f | 327 | py | Python | yellowbrick/datasets/__init__.py | jabber39/yellowbrick | e9562aebb084bb2b7b58f393837aae0ebddc7742 | [
"Apache-2.0"
] | null | null | null | yellowbrick/datasets/__init__.py | jabber39/yellowbrick | e9562aebb084bb2b7b58f393837aae0ebddc7742 | [
"Apache-2.0"
] | null | null | null | yellowbrick/datasets/__init__.py | jabber39/yellowbrick | e9562aebb084bb2b7b58f393837aae0ebddc7742 | [
"Apache-2.0"
] | null | null | null | from .download import load_concrete
from .download import load_energy
from .download import load_credit
from .download import load_occupancy
from .download import load_mushroom
from .download import load_hobbies
from .download import load_game
from .download import load_bikeshare
from .download import load_spam
| 13.625 | 36 | 0.819572 | from .download import load_concrete
from .download import load_energy
from .download import load_credit
from .download import load_occupancy
from .download import load_mushroom
from .download import load_hobbies
from .download import load_game
from .download import load_bikeshare
from .download import load_spam
| true | true |
f72df239737902c10ccbf5649cd7a2f9c12ace8b | 132 | py | Python | narrow/apps/py.py | idlesign/narrow | bc5f3dd37b93226b3bc7a056cbe2f508c1dc077e | [
"BSD-3-Clause"
] | 3 | 2018-06-23T15:11:37.000Z | 2018-06-25T06:41:09.000Z | narrow/apps/py.py | idlesign/narrow | bc5f3dd37b93226b3bc7a056cbe2f508c1dc077e | [
"BSD-3-Clause"
] | null | null | null | narrow/apps/py.py | idlesign/narrow | bc5f3dd37b93226b3bc7a056cbe2f508c1dc077e | [
"BSD-3-Clause"
] | null | null | null | from ._base import register_app, App
@register_app  # register this class in the shared app registry (see ._base)
class PurePy(App):
    """App wrapper for a pure (framework-less) WSGI application."""

    alias = 'py'  # short identifier used to select this app
    description = 'Pure wsgi application'  # human-readable label
| 14.666667 | 41 | 0.704545 | from ._base import register_app, App
@register_app
class PurePy(App):
alias = 'py'
description = 'Pure wsgi application'
| true | true |
f72df2f9ee4b88883324adc893d27d5deaafa123 | 2,653 | py | Python | fasp/search/kf_fhir_requests_example.py | STRIDES-Codes/Sample-search-based-on-clinical-phenotypic-and-sample-attributes | c677fa7791992692c4a38efebb2bf5d718353c15 | [
"MIT"
] | 4 | 2020-10-30T19:22:16.000Z | 2020-11-02T15:38:45.000Z | fasp/search/kf_fhir_requests_example.py | STRIDES-Codes/Sample-search-based-on-clinical-phenotypic-and-sample-attributes | c677fa7791992692c4a38efebb2bf5d718353c15 | [
"MIT"
] | 5 | 2020-11-07T21:22:55.000Z | 2020-11-18T14:39:02.000Z | fasp/search/kf_fhir_requests_example.py | STRIDES-Codes/Sample-search-based-on-clinical-phenotypic-and-sample-attributes | c677fa7791992692c4a38efebb2bf5d718353c15 | [
"MIT"
] | 3 | 2020-11-02T21:22:45.000Z | 2020-11-07T14:30:13.000Z | import requests
import os.path
import json
import pprint
import sys
import getopt
class FHIRSearchClient:
def __init__(self, hostURL, cookies, debug=False ):
self.hostURL = hostURL
self.debug = debug
self.headers = {
'content-type': 'application/json'
}
self.cookies = cookies
def runQuery(self, query):
next_url = "{}/Patient?gender={}".format(self.hostURL, query)
pageCount = 0
resultRows = []
print ("_Retrieving the query_")
while next_url != None :
if self.debug:
print(next_url)
pageCount += 1
print ("____Page{}_______________".format(pageCount))
response = requests.request("GET", next_url, cookies=self.cookies)
result = (response.json())
if 'link' in result :
nxt = next((sub for sub in result['link'] if sub['relation'] == 'next'), None)
next_url = nxt['url']
else:
next_url = None
for t in result['entry']:
print('patient id :',t['resource']['id'])
# if rowCount > 0:
# resultRows.append(result['data'])
# for r in result['data']:
# resultRows.append([*r.values()])
# return resultRows
def listResources(self):
next_url = self.hostURL + "/StructureDefinition"
pageCount = 0
resultRows = []
print ("_Retrieving the resource list_")
while next_url != None :
if self.debug:
pprint.pprint(next_url)
pageCount += 1
print ("____Page{}_______________".format(pageCount))
#response = requests.get(next_url, headers=self.headers)
response = requests.get(next_url, cookies=self.cookies)
if self.debug:
print(response.content)
result = (response.json())
if 'link' in result :
nxt = next((sub for sub in result['link'] if sub['relation'] == 'next'), None)
next_url = nxt['url']
else:
next_url = None
for t in result['entry']:
print(t['fullUrl'])
return
def usage():
    """Print a one-line usage summary for this script."""
    message = sys.argv[0] + ' -l listResources -r runQuery'
    print(message)
def main(argv):
    """Entry point: load auth cookies, build a client, dispatch CLI options.

    Supported options: ``-h``/``--help``, ``-l``/``--listResources``,
    ``-r <gender>``/``--runQuery=<gender>``.
    """
    endpoint = 'https://ncpi-api-fhir-service-dev.kidsfirstdrc.org'
    # Authentication cookies are expected in a local JSON file.
    full_cookie_path = os.path.expanduser('~/.keys/kf_cookies.json')
    print(full_cookie_path)
    with open(full_cookie_path) as f:
        cookies = json.load(f)
    searchClient = FHIRSearchClient(endpoint, cookies, debug=True)
    try:
        # BUGFIX: 'runQuery' takes an argument, so the long-option spec needs
        # a trailing '=' (mirroring the short-option 'r:').
        opts, args = getopt.getopt(argv, "hlr:", ["help", "listResources", "runQuery="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        # BUGFIX: the handler previously matched '--listTables', an option
        # getopt never produces; it now matches the advertised
        # '--listResources'.
        elif opt in ("-l", "--listResources"):
            searchClient.listResources()
        elif opt in ("-r", "--runQuery"):
            searchClient.runQuery(arg)
# Run only when executed as a script: strip the program name and hand the
# remaining CLI arguments to main().
if __name__ == "__main__":
    main(sys.argv[1:])
| 22.483051 | 82 | 0.644553 | import requests
import os.path
import json
import pprint
import sys
import getopt
class FHIRSearchClient:
def __init__(self, hostURL, cookies, debug=False ):
self.hostURL = hostURL
self.debug = debug
self.headers = {
'content-type': 'application/json'
}
self.cookies = cookies
def runQuery(self, query):
next_url = "{}/Patient?gender={}".format(self.hostURL, query)
pageCount = 0
resultRows = []
print ("_Retrieving the query_")
while next_url != None :
if self.debug:
print(next_url)
pageCount += 1
print ("____Page{}_______________".format(pageCount))
response = requests.request("GET", next_url, cookies=self.cookies)
result = (response.json())
if 'link' in result :
nxt = next((sub for sub in result['link'] if sub['relation'] == 'next'), None)
next_url = nxt['url']
else:
next_url = None
for t in result['entry']:
print('patient id :',t['resource']['id'])
def listResources(self):
next_url = self.hostURL + "/StructureDefinition"
pageCount = 0
resultRows = []
print ("_Retrieving the resource list_")
while next_url != None :
if self.debug:
pprint.pprint(next_url)
pageCount += 1
print ("____Page{}_______________".format(pageCount))
response = requests.get(next_url, cookies=self.cookies)
if self.debug:
print(response.content)
result = (response.json())
if 'link' in result :
nxt = next((sub for sub in result['link'] if sub['relation'] == 'next'), None)
next_url = nxt['url']
else:
next_url = None
for t in result['entry']:
print(t['fullUrl'])
return
def usage():
print (sys.argv[0] +' -l listResources -r runQuery')
def main(argv):
endpoint = 'https://ncpi-api-fhir-service-dev.kidsfirstdrc.org'
full_cookie_path = os.path.expanduser('~/.keys/kf_cookies.json')
print(full_cookie_path)
with open(full_cookie_path) as f:
cookies = json.load(f)
searchClient = FHIRSearchClient(endpoint, cookies, debug=True)
try:
opts, args = getopt.getopt(argv, "hlr:", ["help", "listResources", "runQuery"])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt in ("-l", "--listTables"):
searchClient.listResources()
elif opt in ("-r", "--runQuery"):
searchClient.runQuery(arg)
if __name__ == "__main__":
main(sys.argv[1:])
| true | true |
f72df525cdb3918bf6feced9a860995462e52020 | 2,814 | py | Python | ragas.py | bhansid/raagid | 0234be71ccddba38833520b1403bff8c355ff830 | [
"MIT"
] | 3 | 2019-07-10T04:59:22.000Z | 2020-11-07T04:22:13.000Z | ragas.py | bhansid/raagid | 0234be71ccddba38833520b1403bff8c355ff830 | [
"MIT"
] | 2 | 2016-01-13T15:10:36.000Z | 2021-07-09T18:18:06.000Z | ragas.py | bhansid/raagid | 0234be71ccddba38833520b1403bff8c355ff830 | [
"MIT"
] | 2 | 2019-07-10T04:56:05.000Z | 2022-01-24T05:51:45.000Z | import matplotlib.pyplot as plt
from scipy.io import wavfile # get the api
from scipy.fftpack import fft
from pylab import *
import os
import math
import contextlib # for urllib.urlopen()
import urllib
import os
################################
#### Ragas.py functions ########
################################
def readFile(filename, mode="rt"):
    """Return the entire contents of *filename* opened with *mode* (default: read-text)."""
    with open(filename, mode) as handle:
        contents = handle.read()
    return contents
def addRagasToDict(textfile):
    """Parse a raga database file into {raga name: list of arohanam notes}.

    Each line looks like ``<id>|<name>| <note> <note> ...``: the name sits
    between the first two '|' separators and the notes follow the second.
    The enharmonic spellings G1 and N1 are normalized to R2 and D2.
    """
    ragaDict = dict()
    text = readFile(textfile)
    ragaList = text.splitlines()
    for raga in ragaList:
        nameStartIndex = raga.index("|")
        nameEndIndex = raga.index("|", nameStartIndex + 1)
        name = raga[nameStartIndex + 1:nameEndIndex].strip()
        notes = raga[nameEndIndex + 1:].strip()
        notesList = notes.split()
        # Normalize enharmonic note names: G1 -> R2, N1 -> D2.
        # BUGFIX: the original loop used Python 2's xrange(), which raises
        # NameError on Python 3 (the rest of the file already uses print()).
        for i, note in enumerate(notesList):
            if note == 'G1':
                notesList[i] = 'R2'
            elif note == 'N1':
                notesList[i] = 'D2'
        ragaDict[name] = notesList
    return ragaDict
def isRagam2(notesList, thresholdPercentage, ragam):
    """Return True if at least *thresholdPercentage* (a fraction) of
    *notesList* belongs to the arohanam of *ragam* from RagaDatabase.txt."""
    ragaNotes = addRagasToDict("RagaDatabase.txt")[ragam]
    matches = sum(1 for note in notesList if note in ragaNotes)
    fraction = matches * 1.0 / len(notesList)
    return fraction >= thresholdPercentage
def findPosRagams(notesList, thresholdPercentage):
    """Return every ragam whose notes cover at least *thresholdPercentage*
    (a fraction) of *notesList*, per RagaDatabase.txt."""
    ragaDict = addRagasToDict("RagaDatabase.txt")
    total = len(notesList)
    posRagas = []
    for ragam, ragaNotes in ragaDict.items():
        matches = sum(1 for note in notesList if note in ragaNotes)
        if matches * 1.0 / total >= thresholdPercentage:
            posRagas.append(ragam)
    return posRagas
# NOTE(review): this re-definition is identical to the readFile defined
# earlier in this module; Python silently keeps this later binding. One of
# the two copies should eventually be deleted.
def readFile(filename, mode="rt"):
    # rt = "read text"
    with open(filename, mode) as fin:
        return fin.read()
def frequencyToNote(freq):
    """Map a frequency in Hz to a Carnatic swara name.

    636 Hz is taken as the tonic ("Sa"); each successive note is a factor of
    ~1.058 (an equal-tempered semitone) higher, and octaves wrap via mod 12.
    """
    tonic_hz = 636
    semitone_ratio = 1.057994353
    steps = int(round(math.log(freq * 1.0 / tonic_hz, semitone_ratio)))
    swaras = ["S", "R1", "R2", "G2", "G3",
              "M1", "M2", "P", "D1", "D2", "N2", "N3"]
    return swaras[steps % 12]
def windowFunction(n):
    """Return the end time (in ms) of the *n*-th analysis window.

    NOTE(review): the interval is 250 ms although the original inline
    comment claimed "0.5 s = 500 mS" — confirm which value was intended.
    """
    interval_ms = 250
    return n * interval_ms
#mohanam = ["S", "R2", "G3", "P", "D2"]
#madhyamavathi = ["S", "R2", "M1", "P", "N2"]
#hindolam = ["S", "G1", "M1", "D1", "N1"]
| 30.586957 | 76 | 0.600569 | import matplotlib.pyplot as plt
from scipy.io import wavfile
from scipy.fftpack import fft
from pylab import *
import os
import math
import contextlib
import urllib
import os
def findPosRagams(notesList, thresholdPercentage):
ragaDict = addRagasToDict("RagaDatabase.txt")
posRagas = []
for ragam in ragaDict:
ragaNotes = ragaDict[ragam]
numRagam = 0
for note in notesList:
if note in ragaNotes:
numRagam += 1
percentageRagam = numRagam*1.0/len(notesList)
if percentageRagam >= thresholdPercentage:
posRagas += [ragam]
return posRagas
def readFile(filename, mode="rt"):
# rt = "read text"
with open(filename, mode) as fin:
return fin.read()
def frequencyToNote(freq):
lowSa = 636
a = 1.057994353 #factor to get to new notes
k = math.log(freq*1.0/lowSa, a)
k = int(round(k))
notesList = (["S", "R1", "R2", "G2", "G3",
"M1", "M2", "P", "D1", "D2", "N2", "N3"])
return notesList[k%12]
def windowFunction(n):
timeInterval = 250 #0.5 s = 500 mS
endTime = n*timeInterval
return endTime
#mohanam = ["S", "R2", "G3", "P", "D2"]
#madhyamavathi = ["S", "R2", "M1", "P", "N2"]
#hindolam = ["S", "G1", "M1", "D1", "N1"]
| true | true |
f72df540b3732f5020994cd0ea52aa78a6d4619b | 6,433 | py | Python | feedbackcircuits/NDComponents/AntennalLobe/AlphaSpike.py | mkturkcan/FC.AntennalLobe | 6a0e124f68c249fcb067c571b5170002b3335efc | [
"BSD-3-Clause"
] | null | null | null | feedbackcircuits/NDComponents/AntennalLobe/AlphaSpike.py | mkturkcan/FC.AntennalLobe | 6a0e124f68c249fcb067c571b5170002b3335efc | [
"BSD-3-Clause"
] | null | null | null | feedbackcircuits/NDComponents/AntennalLobe/AlphaSpike.py | mkturkcan/FC.AntennalLobe | 6a0e124f68c249fcb067c571b5170002b3335efc | [
"BSD-3-Clause"
] | null | null | null | # pylint:disable=no-member
import os
from collections import OrderedDict
import numpy as np
import pycuda.gpuarray as garray
from pycuda.tools import dtype_to_ctype
import pycuda.driver as drv
from pycuda.compiler import SourceModule
from neurokernel.LPU.NDComponents.NDComponent import NDComponent
CUDA_SRC = """
#define G_MIN 0.0
#define G_MAX 50000.0
struct States {
double s;
double u;
double g;
};
struct Derivatives {
double s;
double u;
};
__device__ void clip(States &states)
{
states.g = fmax(states.g, G_MIN);
states.g = fmin(states.g, G_MAX);
}
__device__ void forward(
States &states,
Derivatives &gstates,
double dt
)
{
states.s += dt * gstates.s;
states.u += dt * gstates.u;
}
__device__ int ode(
States &states,
Derivatives &gstates,
double AD,
double AR,
double GMAX,
double &spike
)
{
gstates.s = states.u;
gstates.u = (((-(AR + AD)) * states.u) - ((AR * AD) * states.s));
if (spike) {
states.u = (states.u + (AR * AD));
}
states.g = (states.s * GMAX);
return 0;
}
__global__ void run_step (
int num_thread,
double dt,
double *g_state_s,
double *g_state_u,
double *g_state_g,
double *g_param_ad,
double *g_param_ar,
double *g_param_gmax,
double *g_input_spike,
double *g_output_g
)
{
/* TODO: option for 1-D or 2-D */
int tid = blockIdx.x * blockDim.x + threadIdx.x;
int total_threads = gridDim.x * blockDim.x;
for (int nid = tid; nid < num_thread; nid += total_threads) {
States states;
Derivatives gstates;
/* import data */
states.s = g_state_s[nid];
states.u = g_state_u[nid];
states.g = g_state_g[nid];
double param_AD = g_param_ad[nid];
double param_AR = g_param_ar[nid];
double param_GMAX = g_param_gmax[nid];
double input_spike = g_input_spike[nid];
/* compute gradient */
ode(states, gstates, param_AD, param_AR, param_GMAX, input_spike);
/* solve ode */
forward(states, gstates, dt);
/* clip */
clip(states);
/* export state (internals) data */
g_state_s[nid] = states.s;
g_state_u[nid] = states.u;
g_state_g[nid] = states.g;
/* export output (updates) data */
g_output_g[nid] = states.g;
}
return;
}
"""
class AlphaSpike(NDComponent):
    """Alpha-function synapse driven by presynaptic spikes (GPU component).

    Integrates, per component, the second-order ODE defined in CUDA_SRC::

        ds/dt = u
        du/dt = -(ar + ad) * u - ar * ad * s
        u    += ar * ad          (on an input spike)
        g     = s * gmax         (clipped to [0, 50000] by the kernel)

    Attributes:
        accesses (list): input variables read each step ("spike")
        updates (list): output variables written each step ("g")
        params (list): per-component parameter names
        params_default (dict): default values of the parameters
        internals (OrderedDict): internal state variables and initial values
        time_scale (float): scaling factor applied to `dt`
    """
    accesses = [
        "spike",
    ]
    updates = [
        "g",
    ]
    params = [
        "ad",
        "ar",
        "gmax",
    ]
    params_default = dict(
        ar=12.5,
        ad=12.19,
        gmax=0.1,
    )
    internals = OrderedDict(
        [
            ("s", 0.0),
            ("u", 0.0),
            ("g", 0.0),
        ]
    )
    time_scale = 1.0  # scales dt
    _has_rand = False  # this model draws no random numbers on the GPU

    def maximum_dt_allowed(self):
        # No stability limit is enforced for this model.
        return np.inf

    def __init__(
        self,
        params_dict,
        access_buffers,
        dt,
        LPU_id=None,
        debug=False,
        cuda_verbose=False,
    ):
        """Allocate GPU state buffers and compile the update kernel.

        :param params_dict: per-parameter GPU arrays (one value per component)
        :param access_buffers: input buffers, keyed by variable name
        :param dt: integration time step (scaled by ``time_scale``)
        :param LPU_id: identifier of the owning LPU (optional)
        :param debug: debug flag, stored but not otherwise used here
        :param cuda_verbose: if True, pass -v to ptxas when compiling
        """
        if cuda_verbose:
            self.compile_options = ["--ptxas-options=-v", "--expt-relaxed-constexpr"]
        else:
            self.compile_options = ["--expt-relaxed-constexpr"]
        self.debug = debug
        self.LPU_id = LPU_id
        # Component count/dtype are taken from the first parameter array.
        self.num_comps = params_dict[self.params[0]].size
        self.dtype = params_dict[self.params[0]].dtype
        self.dt = dt * self.time_scale
        self.params_dict = params_dict
        self.access_buffers = access_buffers
        # One GPU array per internal state, filled with its initial value.
        self.internal_states = {
            c: garray.zeros(self.num_comps, dtype=self.dtype) + self.internals[c]
            for c in self.internals
        }
        self.inputs = {
            k: garray.empty(self.num_comps, dtype=self.access_buffers[k].dtype)
            for k in self.accesses
        }
        # make all dtypes consistent
        dtypes = {"dt": self.dtype}
        dtypes.update(
            {"state_" + k: self.internal_states[k].dtype for k in self.internals}
        )
        dtypes.update({"param_" + k: self.params_dict[k].dtype for k in self.params})
        dtypes.update(
            # NOTE(review): k.format(k) is a no-op for plain names like
            # "spike" (no braces to substitute) — likely meant to be just k.
            {"input_" + k.format(k): self.inputs[k].dtype for k in self.accesses}
        )
        dtypes.update({"output_" + k: self.dtype for k in self.updates})
        self.update_func = self.get_update_func(dtypes)
        if self._has_rand:
            # Only needed by models that sample random numbers on the GPU.
            import neurokernel.LPU.utils.curand as curand
            self.randState = curand.curand_setup(
                self.num_comps, np.random.randint(10000)
            )
            dtypes.update({"rand": self.dtype})

    def run_step(self, update_pointers, st=None):
        """Advance all components by one time step on the GPU.

        :param update_pointers: output GPU pointers, keyed by update name
        :param st: CUDA stream to launch on (None for the default stream)
        """
        # Accumulate each input variable into its staging buffer.
        for k in self.inputs:
            self.sum_in_variable(k, self.inputs[k], st=st)
        # Argument order must match the run_step kernel signature in
        # CUDA_SRC: states, params, inputs, outputs.
        args = (
            [self.internal_states[k].gpudata for k in self.internals]
            + [self.params_dict[k].gpudata for k in self.params]
            + [self.inputs[k].gpudata for k in self.accesses]
            + [update_pointers[k] for k in self.updates]
        )
        if self._has_rand:
            args += [self.randState.gpudata]
        self.update_func.prepared_async_call(
            self.update_func.grid,
            self.update_func.block,
            st,
            self.num_comps,
            self.dt,
            *args
        )

    def get_update_func(self, dtypes):
        """Compile CUDA_SRC and return the prepared run_step kernel."""
        from pycuda.compiler import SourceModule
        mod = SourceModule(
            CUDA_SRC,
            options=self.compile_options,
            no_extern_c=self._has_rand,
        )
        func = mod.get_function("run_step")
        type_dict = {k: dtype_to_ctype(dtypes[k]) for k in dtypes}
        # Signature: int num_thread, <dtype> dt, then one pointer per
        # remaining argument.
        func.prepare("i" + np.dtype(self.dtype).char + "P" * (len(type_dict) - 1))
        func.block = (256, 1, 1)
        func.grid = (
            min(
                6 * drv.Context.get_device().MULTIPROCESSOR_COUNT,
                (self.num_comps - 1) // 256 + 1,
            ),
            1,
        )
        return func
| 24.553435 | 85 | 0.5649 |
import os
from collections import OrderedDict
import numpy as np
import pycuda.gpuarray as garray
from pycuda.tools import dtype_to_ctype
import pycuda.driver as drv
from pycuda.compiler import SourceModule
from neurokernel.LPU.NDComponents.NDComponent import NDComponent
CUDA_SRC = """
#define G_MIN 0.0
#define G_MAX 50000.0
struct States {
double s;
double u;
double g;
};
struct Derivatives {
double s;
double u;
};
__device__ void clip(States &states)
{
states.g = fmax(states.g, G_MIN);
states.g = fmin(states.g, G_MAX);
}
__device__ void forward(
States &states,
Derivatives &gstates,
double dt
)
{
states.s += dt * gstates.s;
states.u += dt * gstates.u;
}
__device__ int ode(
States &states,
Derivatives &gstates,
double AD,
double AR,
double GMAX,
double &spike
)
{
gstates.s = states.u;
gstates.u = (((-(AR + AD)) * states.u) - ((AR * AD) * states.s));
if (spike) {
states.u = (states.u + (AR * AD));
}
states.g = (states.s * GMAX);
return 0;
}
__global__ void run_step (
int num_thread,
double dt,
double *g_state_s,
double *g_state_u,
double *g_state_g,
double *g_param_ad,
double *g_param_ar,
double *g_param_gmax,
double *g_input_spike,
double *g_output_g
)
{
/* TODO: option for 1-D or 2-D */
int tid = blockIdx.x * blockDim.x + threadIdx.x;
int total_threads = gridDim.x * blockDim.x;
for (int nid = tid; nid < num_thread; nid += total_threads) {
States states;
Derivatives gstates;
/* import data */
states.s = g_state_s[nid];
states.u = g_state_u[nid];
states.g = g_state_g[nid];
double param_AD = g_param_ad[nid];
double param_AR = g_param_ar[nid];
double param_GMAX = g_param_gmax[nid];
double input_spike = g_input_spike[nid];
/* compute gradient */
ode(states, gstates, param_AD, param_AR, param_GMAX, input_spike);
/* solve ode */
forward(states, gstates, dt);
/* clip */
clip(states);
/* export state (internals) data */
g_state_s[nid] = states.s;
g_state_u[nid] = states.u;
g_state_g[nid] = states.g;
/* export output (updates) data */
g_output_g[nid] = states.g;
}
return;
}
"""
class AlphaSpike(NDComponent):
accesses = [
"spike",
]
updates = [
"g",
]
params = [
"ad",
"ar",
"gmax",
]
params_default = dict(
ar=12.5,
ad=12.19,
gmax=0.1,
)
internals = OrderedDict(
[
("s", 0.0),
("u", 0.0),
("g", 0.0),
]
)
time_scale = 1.0
_has_rand = False
def maximum_dt_allowed(self):
return np.inf
def __init__(
self,
params_dict,
access_buffers,
dt,
LPU_id=None,
debug=False,
cuda_verbose=False,
):
if cuda_verbose:
self.compile_options = ["--ptxas-options=-v", "--expt-relaxed-constexpr"]
else:
self.compile_options = ["--expt-relaxed-constexpr"]
self.debug = debug
self.LPU_id = LPU_id
self.num_comps = params_dict[self.params[0]].size
self.dtype = params_dict[self.params[0]].dtype
self.dt = dt * self.time_scale
self.params_dict = params_dict
self.access_buffers = access_buffers
self.internal_states = {
c: garray.zeros(self.num_comps, dtype=self.dtype) + self.internals[c]
for c in self.internals
}
self.inputs = {
k: garray.empty(self.num_comps, dtype=self.access_buffers[k].dtype)
for k in self.accesses
}
dtypes = {"dt": self.dtype}
dtypes.update(
{"state_" + k: self.internal_states[k].dtype for k in self.internals}
)
dtypes.update({"param_" + k: self.params_dict[k].dtype for k in self.params})
dtypes.update(
{"input_" + k.format(k): self.inputs[k].dtype for k in self.accesses}
)
dtypes.update({"output_" + k: self.dtype for k in self.updates})
self.update_func = self.get_update_func(dtypes)
if self._has_rand:
import neurokernel.LPU.utils.curand as curand
self.randState = curand.curand_setup(
self.num_comps, np.random.randint(10000)
)
dtypes.update({"rand": self.dtype})
def run_step(self, update_pointers, st=None):
for k in self.inputs:
self.sum_in_variable(k, self.inputs[k], st=st)
args = (
[self.internal_states[k].gpudata for k in self.internals]
+ [self.params_dict[k].gpudata for k in self.params]
+ [self.inputs[k].gpudata for k in self.accesses]
+ [update_pointers[k] for k in self.updates]
)
if self._has_rand:
args += [self.randState.gpudata]
self.update_func.prepared_async_call(
self.update_func.grid,
self.update_func.block,
st,
self.num_comps,
self.dt,
*args
)
def get_update_func(self, dtypes):
from pycuda.compiler import SourceModule
mod = SourceModule(
CUDA_SRC,
options=self.compile_options,
no_extern_c=self._has_rand,
)
func = mod.get_function("run_step")
type_dict = {k: dtype_to_ctype(dtypes[k]) for k in dtypes}
func.prepare("i" + np.dtype(self.dtype).char + "P" * (len(type_dict) - 1))
func.block = (256, 1, 1)
func.grid = (
min(
6 * drv.Context.get_device().MULTIPROCESSOR_COUNT,
(self.num_comps - 1) // 256 + 1,
),
1,
)
return func
| true | true |
f72df66bdb37dbf01fd33a4b88801f4db2ea03bc | 7,879 | py | Python | tools/test_file_dir/voc_test.py | lizhe960118/CenterNet | d1a0d13974e2316c6d127ca7860866cdd93bcfa7 | [
"Apache-2.0"
] | 92 | 2019-08-12T09:31:38.000Z | 2022-03-17T06:22:41.000Z | tools/test_file_dir/voc_test.py | lizhe960118/CenterNet | d1a0d13974e2316c6d127ca7860866cdd93bcfa7 | [
"Apache-2.0"
] | 4 | 2019-08-15T09:06:01.000Z | 2020-12-25T06:46:36.000Z | tools/test_file_dir/voc_test.py | lizhe960118/CenterNet | d1a0d13974e2316c6d127ca7860866cdd93bcfa7 | [
"Apache-2.0"
] | 18 | 2019-09-05T01:29:14.000Z | 2021-06-29T13:10:11.000Z | import argparse
import os
import os.path as osp
import shutil
import tempfile
import mmcv
import torch
import torch.distributed as dist
from mmcv.runner import load_checkpoint, get_dist_info
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmdet.apis import init_dist
from mmdet.core import results2json
# , coco_eval,
from txt_val import txt_eval
from mmdet.core import wrap_fp16_model
from mmdet.datasets import build_dataloader, build_dataset
from mmdet.models import build_detector
from mmdet import datasets
def single_gpu_test(model, data_loader, show=False):
    """Run inference over *data_loader* on one GPU and collect all results.

    When *show* is True, each prediction is also visualized through the
    model's ``show_result`` and inference runs at the displayed scale.
    """
    model.eval()
    dataset = data_loader.dataset
    progress = mmcv.ProgressBar(len(dataset))
    outputs = []
    for batch in data_loader:
        with torch.no_grad():
            prediction = model(return_loss=False, rescale=not show, **batch)
        outputs.append(prediction)
        if show:
            model.module.show_result(batch, prediction, dataset.img_norm_cfg)
        # One progress tick per batch (the loader is built with one image
        # per GPU in this script).
        progress.update()
    return outputs
def multi_gpu_test(model, data_loader, tmpdir=None):
    """Run distributed inference; rank 0 shows progress and gathers results.

    Each rank evaluates its shard of the data, then all partial results are
    merged via collect_results(); only rank 0 returns the merged list.
    """
    model.eval()
    results = []
    dataset = data_loader.dataset
    rank, world_size = get_dist_info()
    if rank == 0:
        prog_bar = mmcv.ProgressBar(len(dataset))
    for i, data in enumerate(data_loader):
        with torch.no_grad():
            result = model(return_loss=False, rescale=True, **data)
        results.append(result)
        if rank == 0:
            # Rank 0 advances the bar on behalf of every rank; assumes
            # data['img'][0] is a batched tensor whose first dim is the
            # batch size — TODO confirm against the dataloader config.
            batch_size = data['img'][0].size(0)
            for _ in range(batch_size * world_size):
                prog_bar.update()
    # collect results from all ranks
    results = collect_results(results, len(dataset), tmpdir)
    return results
def collect_results(result_part, size, tmpdir=None):
    """Merge per-rank partial results through a shared temporary directory.

    Every rank dumps its partial results to ``tmpdir`` as a pickle; after a
    barrier, rank 0 loads all parts, interleaves them back into dataset
    order, truncates padding, and returns the merged list. Other ranks
    return None.
    """
    rank, world_size = get_dist_info()
    # create a tmp dir if it is not specified
    if tmpdir is None:
        MAX_LEN = 512
        # 32 is whitespace
        # The tmpdir name is chosen by rank 0 and broadcast to the other
        # ranks as a fixed-size uint8 CUDA tensor padded with spaces
        # (rstrip() below removes the padding after decoding).
        dir_tensor = torch.full((MAX_LEN, ),
                                32,
                                dtype=torch.uint8,
                                device='cuda')
        if rank == 0:
            tmpdir = tempfile.mkdtemp()
            tmpdir = torch.tensor(
                bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda')
            dir_tensor[:len(tmpdir)] = tmpdir
        dist.broadcast(dir_tensor, 0)
        tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
    else:
        mmcv.mkdir_or_exist(tmpdir)
    # dump the part result to the dir
    mmcv.dump(result_part, osp.join(tmpdir, 'part_{}.pkl'.format(rank)))
    dist.barrier()
    # collect all parts
    if rank != 0:
        return None
    else:
        # load results of all parts from tmp dir
        part_list = []
        for i in range(world_size):
            part_file = osp.join(tmpdir, 'part_{}.pkl'.format(i))
            part_list.append(mmcv.load(part_file))
        # sort the results: samples were dealt round-robin across ranks, so
        # zipping the parts restores the original dataset order.
        ordered_results = []
        for res in zip(*part_list):
            ordered_results.extend(list(res))
        # the dataloader may pad some samples
        ordered_results = ordered_results[:size]
        # remove tmp dir
        shutil.rmtree(tmpdir)
        return ordered_results
def parse_args():
    """Build and parse the command-line arguments for the test script."""
    parser = argparse.ArgumentParser(description='MMDet test detector')
    # Positional arguments.
    parser.add_argument('config', help='test config file path')
    parser.add_argument('checkpoint', help='checkpoint file')
    # Optional arguments.
    parser.add_argument('--out', help='output result file')
    parser.add_argument('--show', action='store_true', help='show results')
    parser.add_argument('--tmpdir', help='tmp dir for writing some results')
    parser.add_argument('--launcher',
                        choices=['none', 'pytorch', 'slurm', 'mpi'],
                        default='none',
                        help='job launcher')
    parser.add_argument('--local_rank', type=int, default=0)
    parser.add_argument('--iou_thr', type=float, default=0.5)
    args = parser.parse_args()
    # Distributed launchers expect LOCAL_RANK in the environment; keep any
    # value that is already set.
    os.environ.setdefault('LOCAL_RANK', str(args.local_rank))
    return args
def main():
    """Evaluate a detector on the test set described by the config.

    Pipeline: parse args -> build dataset/dataloader -> build model and
    load checkpoint -> run (single- or multi-GPU) inference -> dump the
    pickled results and score them against the dataset with txt_eval().
    """
    #os.environ["CUDA_VISIBLE_DEVICES"] = "1"
    args = parse_args()
    assert args.out or args.show, \
        ('Please specify at least one operation (save or show the results) '
         'with the argument "--out" or "--show"')
    if args.out is not None and not args.out.endswith(('.pkl', '.pickle')):
        raise ValueError('The output file must be a pkl file.')
    cfg = mmcv.Config.fromfile(args.config)
    # set cudnn_benchmark
    if cfg.get('cudnn_benchmark', False):
        torch.backends.cudnn.benchmark = True
    # Test-time settings: no pretrained download, dataset in test mode.
    cfg.model.pretrained = None
    cfg.data.test.test_mode = True
    # init distributed env first, since logger depends on the dist info.
    if args.launcher == 'none':
        distributed = False
    else:
        distributed = True
        init_dist(args.launcher, **cfg.dist_params)
    # build the dataloader
    # TODO: support multiple images per gpu (only minor changes are needed)
    dataset = build_dataset(cfg.data.test)
    data_loader = build_dataloader(
        dataset,
        imgs_per_gpu=1,
        workers_per_gpu=cfg.data.workers_per_gpu,
        dist=distributed,
        shuffle=False)
    # build the model and load checkpoint
    model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
    fp16_cfg = cfg.get('fp16', None)
    if fp16_cfg is not None:
        wrap_fp16_model(model)
    checkpoint = load_checkpoint(model, args.checkpoint, map_location='cpu')
    # old versions did not save class info in checkpoints, this walkaround is
    # for backward compatibility
    if 'CLASSES' in checkpoint['meta']:
        model.CLASSES = checkpoint['meta']['CLASSES']
    else:
        model.CLASSES = dataset.CLASSES
    if not distributed:
        model = MMDataParallel(model, device_ids=[0])
        outputs = single_gpu_test(model, data_loader, args.show)
    else:
        model = MMDistributedDataParallel(model.cuda())
        outputs = multi_gpu_test(model, data_loader, args.tmpdir)
    rank, _ = get_dist_info()
    # Only rank 0 holds the merged results; it dumps and evaluates them.
    if args.out and rank == 0:
        print('\nwriting results to {}'.format(args.out))
        mmcv.dump(outputs, args.out)
        result_file = args.out
        # VOC-style text evaluation (this script's replacement for the
        # COCO-based evaluation used elsewhere in mmdetection).
        txt_eval(result_file, dataset, iou_thr=args.iou_thr)
        # NOTE(review): a large block of commented-out COCO evaluation code
        # (coco_eval / results2json over eval_types) previously sat here;
        # it has been summarized into this note. See tools/test.py in
        # mmdetection for the original logic.
# Script entry point.
if __name__ == '__main__':
    main()
| 35.490991 | 77 | 0.617083 | import argparse
import os
import os.path as osp
import shutil
import tempfile
import mmcv
import torch
import torch.distributed as dist
from mmcv.runner import load_checkpoint, get_dist_info
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmdet.apis import init_dist
from mmdet.core import results2json
from txt_val import txt_eval
from mmdet.core import wrap_fp16_model
from mmdet.datasets import build_dataloader, build_dataset
from mmdet.models import build_detector
from mmdet import datasets
def single_gpu_test(model, data_loader, show=False):
model.eval()
results = []
dataset = data_loader.dataset
prog_bar = mmcv.ProgressBar(len(dataset))
for i, data in enumerate(data_loader):
with torch.no_grad():
result = model(return_loss=False, rescale=not show, **data)
results.append(result)
if show:
model.module.show_result(data, result, dataset.img_norm_cfg)
batch_size = 1
for _ in range(batch_size):
prog_bar.update()
return results
def multi_gpu_test(model, data_loader, tmpdir=None):
model.eval()
results = []
dataset = data_loader.dataset
rank, world_size = get_dist_info()
if rank == 0:
prog_bar = mmcv.ProgressBar(len(dataset))
for i, data in enumerate(data_loader):
with torch.no_grad():
result = model(return_loss=False, rescale=True, **data)
results.append(result)
if rank == 0:
batch_size = data['img'][0].size(0)
for _ in range(batch_size * world_size):
prog_bar.update()
results = collect_results(results, len(dataset), tmpdir)
return results
def collect_results(result_part, size, tmpdir=None):
rank, world_size = get_dist_info()
if tmpdir is None:
MAX_LEN = 512
dir_tensor = torch.full((MAX_LEN, ),
32,
dtype=torch.uint8,
device='cuda')
if rank == 0:
tmpdir = tempfile.mkdtemp()
tmpdir = torch.tensor(
bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda')
dir_tensor[:len(tmpdir)] = tmpdir
dist.broadcast(dir_tensor, 0)
tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip()
else:
mmcv.mkdir_or_exist(tmpdir)
mmcv.dump(result_part, osp.join(tmpdir, 'part_{}.pkl'.format(rank)))
dist.barrier()
if rank != 0:
return None
else:
part_list = []
for i in range(world_size):
part_file = osp.join(tmpdir, 'part_{}.pkl'.format(i))
part_list.append(mmcv.load(part_file))
ordered_results = []
for res in zip(*part_list):
ordered_results.extend(list(res))
ordered_results = ordered_results[:size]
shutil.rmtree(tmpdir)
return ordered_results
def parse_args():
parser = argparse.ArgumentParser(description='MMDet test detector')
parser.add_argument('config', help='test config file path')
parser.add_argument('checkpoint', help='checkpoint file')
parser.add_argument('--out', help='output result file')
parser.add_argument('--show', action='store_true', help='show results')
parser.add_argument('--tmpdir', help='tmp dir for writing some results')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
parser.add_argument('--iou_thr', type=float, default=0.5)
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
return args
def main():
    """Entry point: build dataset/model from config, run (distributed) test,
    optionally dump results to a pickle file and evaluate them.

    Requires at least one of ``--out`` (save results) or ``--show``
    (visualize results) to be given.
    """
    args = parse_args()
    assert args.out or args.show, \
        ('Please specify at least one operation (save or show the results) '
         'with the argument "--out" or "--show"')
    if args.out is not None and not args.out.endswith(('.pkl', '.pickle')):
        raise ValueError('The output file must be a pkl file.')
    cfg = mmcv.Config.fromfile(args.config)
    # set cudnn_benchmark for fixed-size inputs (speeds up convolutions)
    if cfg.get('cudnn_benchmark', False):
        torch.backends.cudnn.benchmark = True
    # weights come from the checkpoint, not from a pretrained backbone
    cfg.model.pretrained = None
    cfg.data.test.test_mode = True
    # init distributed env first, since logger depends on the dist info
    if args.launcher == 'none':
        distributed = False
    else:
        distributed = True
        init_dist(args.launcher, **cfg.dist_params)
    # build the dataloader (one image per GPU, no shuffling at test time)
    dataset = build_dataset(cfg.data.test)
    data_loader = build_dataloader(
        dataset,
        imgs_per_gpu=1,
        workers_per_gpu=cfg.data.workers_per_gpu,
        dist=distributed,
        shuffle=False)
    # build the model and load checkpoint
    model = build_detector(cfg.model, train_cfg=None, test_cfg=cfg.test_cfg)
    fp16_cfg = cfg.get('fp16', None)
    if fp16_cfg is not None:
        wrap_fp16_model(model)
    checkpoint = load_checkpoint(model, args.checkpoint, map_location='cpu')
    # prefer class names saved in the checkpoint; fall back to the dataset's
    if 'CLASSES' in checkpoint['meta']:
        model.CLASSES = checkpoint['meta']['CLASSES']
    else:
        model.CLASSES = dataset.CLASSES
    if not distributed:
        model = MMDataParallel(model, device_ids=[0])
        outputs = single_gpu_test(model, data_loader, args.show)
    else:
        model = MMDistributedDataParallel(model.cuda())
        outputs = multi_gpu_test(model, data_loader, args.tmpdir)
    rank, _ = get_dist_info()
    # only rank 0 writes and evaluates the collected results
    if args.out and rank == 0:
        print('\nwriting results to {}'.format(args.out))
        mmcv.dump(outputs, args.out)
        result_file = args.out
        txt_eval(result_file, dataset, iou_thr=args.iou_thr)


if __name__ == '__main__':
    main()
| true | true |
f72df6d367740c875fe688452a8ac3c3991004e3 | 10,775 | py | Python | AblationExperiment.py | Livioni/Cloud-Workflow-Scheduling-base-on-Deep-Reinforcement-Learning | eb246ebba160567277c9c1aa226e359f48629dac | [
"MIT"
] | 2 | 2022-03-03T08:52:14.000Z | 2022-03-11T02:27:57.000Z | AblationExperiment.py | Livioni/Cloud-Workflow-Scheduling-base-on-Deep-Reinforcement-Learning | eb246ebba160567277c9c1aa226e359f48629dac | [
"MIT"
] | 1 | 2022-03-11T02:51:06.000Z | 2022-03-11T05:02:34.000Z | AblationExperiment.py | Livioni/Cloud-Workflow-Scheduling-base-on-Deep-Reinforcement-Learning | eb246ebba160567277c9c1aa226e359f48629dac | [
"MIT"
] | null | null | null | import gym, torch, copy, os, xlwt, random
import torch.nn as nn
from datetime import datetime
import numpy as np
# Create the (unwrapped) cluster-scheduling environment once at import time;
# all helpers below read and mutate this single shared global instance.
env = gym.make("clusterEnv-v0").unwrapped
state_dim, action_dim = env.return_dim_info()
####### initialize environment hyperparameters ######
max_ep_len = 1000  # max timesteps in one episode
auto_save = 1  # write one Excel row after this many episodes
total_test_episodes = 100 * auto_save  # total num of testing episodes
def initial_excel():
    """Create the global xlwt workbook/worksheet used to record makespans."""
    global worksheet, workbook
    # Create a workbook with the default ASCII character encoding (xlwt).
    workbook = xlwt.Workbook(encoding='ascii')
    # Add one sheet; the argument is the sheet name.
    worksheet = workbook.add_sheet('makespan')
    # Set column widths: 3 columns, width 12 units (256 is xlwt's base unit).
    for i in range(3):
        worksheet.col(i).width = 256 * 12
    # Set row 1's height to 25 units (20 is xlwt's base unit).
    worksheet.row(1).height_mismatch = True
    worksheet.row(1).height = 20 * 25
    # Save the (still empty) Excel file so the output path exists early.
    workbook.save('data/makespan_MCTSAE.xls')
def read_current_state():
    """Snapshot the full state of the global ``env``.

    Mutable members are deep-copied so the snapshot remains valid after the
    environment is stepped or restored later.

    Returns:
        tuple: ``(state, ready_list, done_job, tasks, wait_duration,
        cpu_demand, memory_demand, tasks_remaing_time, cpu_res,
        memory_res, time)``.
    """
    state = copy.deepcopy(env.state)
    ready_list = copy.deepcopy(env.ready_list)
    done_job = copy.deepcopy(env.done_job)
    tasks = copy.deepcopy(env.tasks)
    wait_duration = copy.deepcopy(env.wait_duration)
    cpu_demand = copy.deepcopy(env.cpu_demand)
    memory_demand = copy.deepcopy(env.memory_demand)
    tasks_remaing_time = copy.deepcopy(env.tasks_remaing_time)
    # scalars need no copying
    time = env.time
    cpu_res = env.cpu_res
    memory_res = env.memory_res
    return state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time
def load_current_state(state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
                       cpu_res, memory_res, time):
    """Restore the global ``env`` to a previously captured snapshot.

    List-valued fields are shallow-copied via slicing before being handed to
    the env so later env mutations don't alias the caller's snapshot;
    ``tasks_remaing_time`` and the scalars are passed through as-is.
    """
    env.set_state(state[:])
    env.set_ready_list(ready_list[:])
    env.set_done_job(done_job[:])
    env.set_tasks(tasks[:])
    env.set_wait_duration(wait_duration[:])
    env.set_cpu_demand(cpu_demand[:])
    env.set_memory_demand(memory_demand[:])
    env.set_tasks_remaing_time(tasks_remaing_time)
    env.set_cpu_res(cpu_res)
    env.set_memory_res(memory_res)
    env.set_time(time)
    return
class TreeNode(object):
    """A node in the Monte-Carlo search tree.

    Each node stores a full env snapshot so the global ``env`` can be
    rewound to this point before expanding or rolling out. Makespans are
    stored NEGATED (``_makespan = -makespan``) so that "larger is better"
    throughout the selection code; ``_makespan == 0`` doubles as the
    "no result recorded yet" sentinel.
    """

    def __init__(self, parent, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
                 tasks_remaing_time, cpu_res, memory_res, time):
        self._parent = parent
        self._children = {}  # a map from action to TreeNode
        self._n_visits = 0
        self._makespan = 0  # best (negated) makespan seen through this node; 0 = unset
        self._total_makespan = 0  # NOTE(review): appears unused below — confirm before removing
        # env snapshot for this node
        self._state = state
        self._ready_list = ready_list
        self._done_job = done_job
        self._tasks = tasks
        self._wait_duration = wait_duration
        self._cpu_demand = cpu_demand
        self._memory_demand = memory_demand
        self._tasks_remaing_time = tasks_remaing_time
        self._cpu_res = cpu_res
        self._memory_res = memory_res
        self._time = time
        self._c = 40  # UCB exploration constant
        self._value = 0
        if self._parent != None:
            self.get_value()

    def expand(self):
        """Create one child per legal action from this node's snapshot.

        The global env is reloaded from this node's snapshot before each
        step so every child captures the post-action env state.
        """
        load_current_state(self._state, self._ready_list, self._done_job, self._tasks, self._wait_duration,
                           self._cpu_demand, self._memory_demand, self._tasks_remaing_time, self._cpu_res,
                           self._memory_res, self._time)
        available_action = env.return_action_list()
        if available_action:
            for action in available_action:
                # rewind the env for every candidate action
                load_current_state(self._state, self._ready_list, self._done_job, self._tasks, self._wait_duration,
                                   self._cpu_demand, self._memory_demand, self._tasks_remaing_time, self._cpu_res,
                                   self._memory_res, self._time)
                if action not in self._children:
                    env.step(action)
                    state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
                    self._children[action] = TreeNode(self, state, ready_list, done_job, tasks, wait_duration,
                                                      cpu_demand, memory_demand, tasks_remaing_time, cpu_res,
                                                      memory_res, time)
        else:
            print("done")

    def get_average_makespan(self):
        """Return this node's best recorded (negated) makespan."""
        return self._makespan

    def get_value(self):
        """Recompute and return the UCB-style selection value.

        Exploitation term: best negated makespan. Exploration term:
        ``c * sqrt(ln(parent visits + 1) / (visits + 1))`` (the +1 terms
        keep the formula defined for unvisited nodes).
        """
        self._value = self._makespan + self._c * np.sqrt(np.log(self._parent._n_visits + 1) / (self._n_visits + 1))
        return self._value

    def select(self):
        """Return the child with the highest selection value."""
        return max(self._children.items(), key=lambda act_node: act_node[1].get_value())[1]

    def update(self, makespan):
        """Record one visit and fold in a rollout ``makespan`` (positive)."""
        # Count visit.
        self._n_visits += 1
        # keep the best (i.e. largest negated) makespan seen so far;
        # 0 means "no makespan recorded yet"
        if self._makespan == 0:
            self._makespan = -makespan
        else:
            if -makespan > self._makespan:
                self._makespan = -makespan
        if self._parent != None:
            self._value = self.get_value()

    def update_recursive(self, leaf_value):
        """Propagate a rollout result from the root down to this node."""
        # If it is not root, this node's parent should be updated first.
        if self._parent:
            self._parent.update_recursive(leaf_value)
        self.update(leaf_value)

    def is_leaf(self):
        """True if this node has no expanded children."""
        return self._children == {}

    def is_root(self):
        """True for the tree root (no parent)."""
        return self._parent is None
class MCTS(object):
    """Monte-Carlo Tree Search over the shared scheduling env.

    A fresh tree is built per decision step; the simulation budget shrinks
    with search ``depth`` (``initial_buget / depth``, floored at
    ``min_buget``) so later, cheaper decisions use fewer playouts.
    """

    def __init__(self, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
                 cpu_res, memory_res, time, depth):
        self._root = TreeNode(None, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
                              tasks_remaing_time, cpu_res, memory_res, time)
        self._root.expand()  # expand the root immediately
        self._initial_buget = 100  # playouts at depth 1
        self._min_buget = 10  # lower bound on playouts per decision
        self._depth = depth

    def playout(self):
        """Run the simulation budget and return the best root action.

        Each iteration walks selection down to a leaf; an unvisited leaf is
        rolled out randomly, a visited one is expanded first. Results are
        backed up through ``update_recursive``. The returned action is the
        root child with the best recorded makespan (not the most visited).
        """
        buget = max(self._initial_buget / self._depth, self._min_buget)
        for j in range(int(buget)):
            node = self._root
            while True:
                if node.is_leaf():
                    if node._n_visits == 0:
                        # first visit: random rollout from this leaf's snapshot
                        cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand, cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time = node._state, node._ready_list, node._done_job, node._tasks, node._wait_duration, node._cpu_demand, node._memory_demand, node._tasks_remaing_time, node._cpu_res, node._memory_res, node._time
                        makespan = self._roll_out(cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration,
                                                  cur_cpu_demand, cur_memory_demand, cur_tasks_remaing_time,
                                                  cur_cpu_res, cur_memory_res, cur_time)
                        node.update_recursive(makespan)
                        break
                    else:
                        # already visited: expand and descend one level
                        node.expand()
                        node = node.select()
                else:
                    node = node.select()
        node = self._root
        return max(node._children.items(), key=lambda act_node: act_node[1].get_average_makespan())[0]

    def _roll_out(self, cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand,
                  cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time):
        """Play random actions from the given snapshot until the episode ends.

        Invalid actions (``info[0] == False``) are re-sampled. Returns the
        final makespan, read from ``state[0]`` of the terminal observation.
        """
        load_current_state(cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand,
                           cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time)
        state = cur_state
        max_ep_len = 1000  # max timesteps in one episode
        for t in range(1, max_ep_len + 1):
            # actions are sampled in [-1, action_dim - 2]
            action = random.choice(range(action_dim)) - 1
            state, reward, done, info = env.step(action)
            while (info[0] == False):
                # resample until the env accepts the action
                action = random.choice(range(action_dim)) - 1
                state, reward, done, info = env.step(action)
            next_state, reward, done, _ = state, reward, done, info
            # break; if the episode is over
            state = next_state
            if done:
                makespan = state[0]
                break
        return makespan
if __name__ == '__main__':
    # Run total_test_episodes episodes; at every decision step a fresh MCTS
    # tree picks the next action, and per-episode makespans are averaged
    # every `auto_save` episodes into the Excel sheet.
    initial_excel()
    makespans = []
    line = 0  # next Excel row to write
    start_time = datetime.now().replace(microsecond=0)
    print("Started training at (GMT) : ", start_time)
    print("============================================================================================")
    for ep in range(1, total_test_episodes + 1):
        initial_state = env.reset()
        state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
        for depth in range(1, max_ep_len + 1):
            # build a fresh search tree rooted at the current env snapshot
            tree = MCTS(state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
                        tasks_remaing_time, cpu_res, memory_res, time, depth=depth)
            best_action = tree.playout()
            # restore the env to the root snapshot before committing the action
            load_current_state(tree._root._state, tree._root._ready_list, tree._root._done_job, tree._root._tasks,
                               tree._root._wait_duration, tree._root._cpu_demand, tree._root._memory_demand,
                               tree._root._tasks_remaing_time, tree._root._cpu_res, tree._root._memory_res,
                               tree._root._time)
            observation, reward, done, info = env.step(best_action)
            state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
            del tree
            if done:
                # episode finished; makespan is the first state component
                makespan = observation[0]
                makespans.append(makespan)
                print("Episode:", ep, "Makespan:", makespan)
                if ep % auto_save == 0:
                    # periodically persist the running average to Excel
                    average_makespan = np.mean(makespans)
                    worksheet.write(line, 1, float(average_makespan))
                    workbook.save('data/makespan_MCTSAE.xls')
                    print('MCTS : Episode: {}, Makespan: {:.3f}s'.format((line + 1) * auto_save, average_makespan))
                    line += 1
                    makespans = []
                    end_time = datetime.now().replace(microsecond=0)
                    print("Finished testing at (GMT) : ", end_time)
                    print("Total testing time  : ", end_time - start_time)
                    start_time = end_time
                break
    workbook.save('data/makespan_MCTSAE.xls')
    env.close()
| 44.709544 | 383 | 0.610673 | import gym, torch, copy, os, xlwt, random
import torch.nn as nn
from datetime import datetime
import numpy as np
env = gym.make("clusterEnv-v0").unwrapped
state_dim, action_dim = env.return_dim_info()
orksheet.row(1).height_mismatch = True
worksheet.row(1).height = 20 * 25
workbook.save('data/makespan_MCTSAE.xls')
def read_current_state():
state = copy.deepcopy(env.state)
ready_list = copy.deepcopy(env.ready_list)
done_job = copy.deepcopy(env.done_job)
tasks = copy.deepcopy(env.tasks)
wait_duration = copy.deepcopy(env.wait_duration)
cpu_demand = copy.deepcopy(env.cpu_demand)
memory_demand = copy.deepcopy(env.memory_demand)
tasks_remaing_time = copy.deepcopy(env.tasks_remaing_time)
time = env.time
cpu_res = env.cpu_res
memory_res = env.memory_res
return state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time
def load_current_state(state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
cpu_res, memory_res, time):
env.set_state(state[:])
env.set_ready_list(ready_list[:])
env.set_done_job(done_job[:])
env.set_tasks(tasks[:])
env.set_wait_duration(wait_duration[:])
env.set_cpu_demand(cpu_demand[:])
env.set_memory_demand(memory_demand[:])
env.set_tasks_remaing_time(tasks_remaing_time)
env.set_cpu_res(cpu_res)
env.set_memory_res(memory_res)
env.set_time(time)
return
class TreeNode(object):
def __init__(self, parent, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
tasks_remaing_time, cpu_res, memory_res, time):
self._parent = parent
self._children = {}
self._n_visits = 0
self._makespan = 0
self._total_makespan = 0
self._state = state
self._ready_list = ready_list
self._done_job = done_job
self._tasks = tasks
self._wait_duration = wait_duration
self._cpu_demand = cpu_demand
self._memory_demand = memory_demand
self._tasks_remaing_time = tasks_remaing_time
self._cpu_res = cpu_res
self._memory_res = memory_res
self._time = time
self._c = 40
self._value = 0
if self._parent != None:
self.get_value()
def expand(self):
load_current_state(self._state, self._ready_list, self._done_job, self._tasks, self._wait_duration,
self._cpu_demand, self._memory_demand, self._tasks_remaing_time, self._cpu_res,
self._memory_res, self._time)
available_action = env.return_action_list()
if available_action:
for action in available_action:
load_current_state(self._state, self._ready_list, self._done_job, self._tasks, self._wait_duration,
self._cpu_demand, self._memory_demand, self._tasks_remaing_time, self._cpu_res,
self._memory_res, self._time)
if action not in self._children:
env.step(action)
state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
self._children[action] = TreeNode(self, state, ready_list, done_job, tasks, wait_duration,
cpu_demand, memory_demand, tasks_remaing_time, cpu_res,
memory_res, time)
else:
print("done")
def get_average_makespan(self):
return self._makespan
def get_value(self):
self._value = self._makespan + self._c * np.sqrt(np.log(self._parent._n_visits + 1) / (self._n_visits + 1))
return self._value
def select(self):
return max(self._children.items(), key=lambda act_node: act_node[1].get_value())[1]
def update(self, makespan):
self._n_visits += 1
if self._makespan == 0:
self._makespan = -makespan
else:
if -makespan > self._makespan:
self._makespan = -makespan
if self._parent != None:
self._value = self.get_value()
def update_recursive(self, leaf_value):
if self._parent:
self._parent.update_recursive(leaf_value)
self.update(leaf_value)
def is_leaf(self):
return self._children == {}
def is_root(self):
return self._parent is None
class MCTS(object):
def __init__(self, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
cpu_res, memory_res, time, depth):
self._root = TreeNode(None, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
tasks_remaing_time, cpu_res, memory_res, time)
self._root.expand() # 初始化扩展
self._initial_buget = 100
self._min_buget = 10
self._depth = depth
def playout(self):
buget = max(self._initial_buget / self._depth, self._min_buget)
for j in range(int(buget)):
node = self._root
while True:
if node.is_leaf():
if node._n_visits == 0:
cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand, cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time = node._state, node._ready_list, node._done_job, node._tasks, node._wait_duration, node._cpu_demand, node._memory_demand, node._tasks_remaing_time, node._cpu_res, node._memory_res, node._time
makespan = self._roll_out(cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration,
cur_cpu_demand, cur_memory_demand, cur_tasks_remaing_time,
cur_cpu_res, cur_memory_res, cur_time)
node.update_recursive(makespan)
break
else:
node.expand()
node = node.select()
else:
node = node.select()
node = self._root
return max(node._children.items(), key=lambda act_node: act_node[1].get_average_makespan())[0]
def _roll_out(self, cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand,
cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time):
load_current_state(cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand,
cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time)
state = cur_state
max_ep_len = 1000 # max timesteps in one episode
for t in range(1, max_ep_len + 1):
action = random.choice(range(action_dim)) - 1
state, reward, done, info = env.step(action)
while (info[0] == False):
action = random.choice(range(action_dim)) - 1
state, reward, done, info = env.step(action) # 输入step的都是
next_state, reward, done, _ = state, reward, done, info
# break; if the episode is over
state = next_state
if done:
makespan = state[0]
break
return makespan
if __name__ == '__main__':
initial_excel()
makespans = []
line = 0
start_time = datetime.now().replace(microsecond=0)
print("Started training at (GMT) : ", start_time)
print("============================================================================================")
for ep in range(1, total_test_episodes + 1):
initial_state = env.reset()
state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
for depth in range(1, max_ep_len + 1):
tree = MCTS(state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
tasks_remaing_time, cpu_res, memory_res, time, depth=depth)
best_action = tree.playout()
load_current_state(tree._root._state, tree._root._ready_list, tree._root._done_job, tree._root._tasks,
tree._root._wait_duration, tree._root._cpu_demand, tree._root._memory_demand,
tree._root._tasks_remaing_time, tree._root._cpu_res, tree._root._memory_res,
tree._root._time)
observation, reward, done, info = env.step(best_action)
state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
del tree
if done:
makespan = observation[0]
makespans.append(makespan)
print("Episode:", ep, "Makespan:", makespan)
if ep % auto_save == 0:
average_makespan = np.mean(makespans)
worksheet.write(line, 1, float(average_makespan))
workbook.save('data/makespan_MCTSAE.xls')
print('MCTS : Episode: {}, Makespan: {:.3f}s'.format((line + 1) * auto_save, average_makespan))
line += 1
makespans = []
end_time = datetime.now().replace(microsecond=0)
print("Finished testing at (GMT) : ", end_time)
print("Total testing time : ", end_time - start_time)
start_time = end_time
break
workbook.save('data/makespan_MCTSAE.xls')
env.close()
| true | true |
f72df71b8672cbad4027a0bdcc6e49cf80336049 | 7,159 | py | Python | habari/apps/crawl/crawlers/dncrawler2/__init__.py | ppolle/habari | 671b98c361ce593f708bc15f69dd3aa6fe72b128 | [
"MIT"
] | 3 | 2020-06-08T08:39:06.000Z | 2020-07-30T10:46:22.000Z | habari/apps/crawl/crawlers/dncrawler2/__init__.py | ppolle/habari | 671b98c361ce593f708bc15f69dd3aa6fe72b128 | [
"MIT"
] | 9 | 2021-03-19T11:18:58.000Z | 2022-02-10T15:48:35.000Z | habari/apps/crawl/crawlers/dncrawler2/__init__.py | ppolle/habari | 671b98c361ce593f708bc15f69dd3aa6fe72b128 | [
"MIT"
] | 1 | 2021-09-22T07:23:03.000Z | 2021-09-22T07:23:03.000Z | import pytz
import logging
from datetime import datetime
from bs4 import BeautifulSoup
from habari.apps.crawl.models import Article
from habari.apps.crawl.crawlers import AbstractBaseCrawler
from habari.apps.utils.error_utils import error_to_string, http_error_to_string
logger = logging.getLogger(__name__)
class DNCrawler(AbstractBaseCrawler):
	"""Crawler for the Daily Nation (nation.africa) news source.

	Discovers category pages from the site footer, collects links to new
	stories, scrapes per-article details (title, date, authors, summary,
	lead image) and bulk-inserts them as :class:`Article` rows.
	"""

	def __init__(self):
		"""Initialize with the 'DN' news source and discover category links."""
		super().__init__('DN')
		self.url = self.news_source.url
		self.categories = self.get_category_links()

	def oped_articles(self, url):
		"""Return True when ``url`` belongs to a section parsed as op-ed."""
		links = ('https://nation.africa/kenya/blogs-opinion/',
			'https://nation.africa/kenya/photos/',
			)
		if url.startswith(links):
			return True
		else:
			return False

	def links_to_avoid(self, url):
		"""Return False for sections that should not be crawled (cartoons)."""
		links = ('https://nation.africa/kenya/blogs-opinion/cartoons/',
			)
		if url.startswith(links):
			return False
		else:
			return True

	def get_category_links(self):
		"""Collect absolute URLs of every category listed in the site footer.

		Returns:
			list: the home URL plus each discovered category URL. Request
			errors are logged and recorded in ``self.errors``; the list is
			still returned.
		"""
		logger.info('Getting links to all categories and sub-categories')
		categories = [self.url, ]
		try:
			get_categories = self.requests(self.url)
		except Exception as e:
			logger.exception('Error: {0} while getting categories from {1}'.format(e, self.url))
			self.errors.append(error_to_string(e))
		else:
			if get_categories.status_code == 200:
				soup = BeautifulSoup(get_categories.content, 'html.parser')
				main_categories = soup.select('footer ul.categories-nav_categories a')
				for cat in main_categories:
					if cat.get('href') is not None:
						link = self.make_relative_links_absolute(cat.get('href'))
						categories.append(link)
		return categories

	def get_top_stories(self):
		"""Return the set of story URLs not yet stored in the database.

		Scans every category page for teaser links, skipping URLs already in
		the DB, off-domain links and avoided sections. Per-category failures
		are logged and recorded without aborting the scan.
		"""
		logger.info('Getting the latest stories')
		story_links = []
		for category in self.categories:
			try:
				top_stories = self.requests(category)
				if top_stories.status_code == 200:
					soup = BeautifulSoup(top_stories.content, 'html.parser')
					stories = soup.select('a.teaser-image-large') + soup.select('a.article-collection-teaser')
					for story in stories:
						story = self.make_relative_links_absolute(story.get('href').strip())
						if not Article.objects.filter(article_url=story).exists() and \
							self.check_for_top_level_domain(story) and self.links_to_avoid(story):
							story_links.append(story)
			except Exception as e:
				logger.exception(
					'{0} error while getting top stories for {1}'.format(e, category))
				self.errors.append(error_to_string(e))
		return set(story_links)

	def get_story_details(self, link):
		"""Scrape one article page into a detail dict.

		Falls back through several selectors and og: meta tags because
		article templates vary across sections.

		Returns:
			dict: article_url, article_title, publication_date (timezone
			aware, Africa/Nairobi), author, summary and image_url.
			Implicitly returns None when the page is not HTTP 200.
		"""
		story = self.requests(link)
		if story.status_code == 200:
			soup = BeautifulSoup(story.content, 'html.parser')
			try:
				title = soup.select_one('h1.title-medium').get_text().strip()
			except AttributeError:
				title = soup.select_one('h1.title-large').get_text().strip()
			try:
				publication_date = soup.select_one('time.date').get('datetime')
			except AttributeError:
				publication_date = soup.find("meta", property="og:article:published_time").get('content').strip()
			# site timestamps are parsed as local Nairobi time
			date = pytz.timezone("Africa/Nairobi").localize(datetime.strptime(publication_date, '%Y-%m-%dT%H:%M:%SZ'), is_dst=None)
			author_list = soup.select('.article-authors_texts .article-authors_authors')
			authors = self.sanitize_author_iterable(author_list)
			try:
				summary = soup.select_one('.article-content_summary .text-block').get_text().strip()
			except AttributeError:
				summary = soup.find("meta", property="og:description").get('content').strip()
			# image: figure img -> lazy iframe -> plain iframe -> og:image
			try:
				image_url = self.make_relative_links_absolute(
					soup.select_one('figure.article-picture img').get('data-src'))
			except AttributeError:
				try:
					image_url = soup.select_one('figure iframe.lazy-iframe_iframe').get('data-src')
				except AttributeError:
					try:
						image_url = soup.select_one('figure iframe').get('src')
					except AttributeError:
						image_url = soup.find("meta", property="og:image").get('content').strip()
			return {'article_url': link,
				'article_title': title,
				'publication_date': date,
				'author': authors,
				'summary': summary,
				'image_url': image_url}

	def get_oped_article_details(self, url):
		"""Scrape an op-ed/photos article.

		The op-ed markup currently matches regular stories, so this
		delegates to :meth:`get_story_details`; kept as a separate hook so
		op-ed specific parsing can diverge later without touching callers.
		"""
		return self.get_story_details(url)

	def update_article_details(self, article):
		"""Dispatch scraping based on the article's section."""
		if self.oped_articles(article):
			return self.get_oped_article_details(article)
		else:
			return self.get_story_details(article)

	def update_top_stories(self):
		"""Scrape all newly discovered stories and bulk-insert them.

		Per-article scraping failures are logged and recorded in
		``self.errors`` without stopping the run; the crawl record's article
		count is updated after a successful bulk insert.
		"""
		articles = self.get_top_stories()
		article_info = []
		for article in articles:
			try:
				logger.info('Updating article details for {}'.format(article))
				story = self.update_article_details(article)
				article_info.append(Article(title=story['article_title'],
					article_url=story['article_url'],
					article_image_url=story['image_url'],
					author=story['author'],
					publication_date=story['publication_date'],
					summary=story['summary'],
					news_source=self.news_source
					))
			except Exception as e:
				logger.exception('Crawling Error: {0} while getting data from: {1}'.format(e, article))
				self.errors.append(error_to_string(e))
		try:
			Article.objects.bulk_create(article_info)
			logger.info('')
			logger.info('Succesfully updated Daily Nation Latest Articles.{} new articles added'.format(
				len(article_info)))
			self.crawl.total_articles = len(article_info)
			self.crawl.save()
		except Exception as e:
			logger.exception('Error!!!{}'.format(e))
			self.errors.append(error_to_string(e))
| 36.340102 | 122 | 0.684453 | import pytz
import logging
from datetime import datetime
from bs4 import BeautifulSoup
from habari.apps.crawl.models import Article
from habari.apps.crawl.crawlers import AbstractBaseCrawler
from habari.apps.utils.error_utils import error_to_string, http_error_to_string
logger = logging.getLogger(__name__)
class DNCrawler(AbstractBaseCrawler):
def __init__(self):
super().__init__('DN')
self.url = self.news_source.url
self.categories = self.get_category_links()
def oped_articles(self, url):
links = ('https://nation.africa/kenya/blogs-opinion/',
'https://nation.africa/kenya/photos/',
)
if url.startswith(links):
return True
else:
return False
def links_to_avoid(self, url):
links = ('https://nation.africa/kenya/blogs-opinion/cartoons/',
)
if url.startswith(links):
return False
else:
return True
def get_category_links(self):
logger.info('Getting links to all categories and sub-categories')
categories = [self.url, ]
try:
get_categories = self.requests(self.url)
except Exception as e:
logger.exception('Error: {0} while getting categories from {1}'.format(e,self.url))
self.errors.append(error_to_string(e))
else:
if get_categories.status_code == 200:
soup = BeautifulSoup(get_categories.content, 'html.parser')
main_categories = soup.select('footer ul.categories-nav_categories a')
for cat in main_categories:
if cat.get('href') is not None:
link = self.make_relative_links_absolute(cat.get('href'))
categories.append(link)
return categories
def get_top_stories(self):
logger.info('Getting the latest stories')
story_links = []
for category in self.categories:
try:
top_stories = self.requests(category)
if top_stories.status_code == 200:
soup = BeautifulSoup(top_stories.content, 'html.parser')
stories = soup.select('a.teaser-image-large') + soup.select('a.article-collection-teaser')
for story in stories:
story = self.make_relative_links_absolute(story.get('href').strip())
if not Article.objects.filter(article_url=story).exists() and \
self.check_for_top_level_domain(story) and self.links_to_avoid(story):
story_links.append(story)
except Exception as e:
logger.exception(
'{0} error while getting top stories for {1}'.format(e, category))
self.errors.append(error_to_string(e))
return set(story_links)
def get_story_details(self, link):
story = self.requests(link)
if story.status_code == 200:
soup = BeautifulSoup(story.content, 'html.parser')
try:
title = soup.select_one('h1.title-medium').get_text().strip()
except AttributeError:
title = soup.select_one('h1.title-large').get_text().strip()
try:
publication_date = soup.select_one('time.date').get('datetime')
except AttributeError:
publication_date = soup.find("meta", property="og:article:published_time").get('content').strip()
date = pytz.timezone("Africa/Nairobi").localize(datetime.strptime(publication_date, '%Y-%m-%dT%H:%M:%SZ'), is_dst=None)
author_list = soup.select('.article-authors_texts .article-authors_authors')
authors = self.sanitize_author_iterable(author_list)
try:
summary = soup.select_one('.article-content_summary .text-block').get_text().strip()
except AttributeError:
summary = soup.find("meta", property="og:description").get('content').strip()
try:
image_url = self.make_relative_links_absolute(\
soup.select_one('figure.article-picture img').get('data-src'))
except AttributeError:
try:
image_url = soup.select_one('figure iframe.lazy-iframe_iframe').get('data-src')
except AttributeError:
try:
image_url = soup.select_one('figure iframe').get('src')
except AttributeError:
image_url = soup.find("meta", property="og:image").get('content').strip()
return {'article_url':link,
'article_title':title,
'publication_date':date,
'author':authors,
'summary':summary,
'image_url':image_url}
def get_oped_article_details(self, url):
story = self.requests(url)
if story.status_code == 200:
soup = BeautifulSoup(story.content, 'html.parser')
try:
title = soup.select_one('h1.title-medium').get_text().strip()
except AttributeError:
title = soup.select_one('h1.title-large').get_text().strip()
try:
publication_date = soup.select_one('time.date').get('datetime')
except AttributeError:
publication_date = soup.find("meta", property="og:article:published_time").get('content').strip()
date = pytz.timezone("Africa/Nairobi").localize(datetime.strptime(publication_date, '%Y-%m-%dT%H:%M:%SZ'), is_dst=None)
author_list = soup.select('.article-authors_texts .article-authors_authors')
authors = self.sanitize_author_iterable(author_list)
try:
summary = soup.select_one('.article-content_summary .text-block').get_text().strip()
except AttributeError:
summary = soup.find("meta", property="og:description").get('content').strip()
try:
image_url = self.make_relative_links_absolute(\
soup.select_one('figure.article-picture img').get('data-src'))
except AttributeError:
try:
image_url = soup.select_one('figure iframe.lazy-iframe_iframe').get('data-src')
except AttributeError:
try:
image_url = soup.select_one('figure iframe').get('src')
except AttributeError:
image_url = soup.find("meta", property="og:image").get('content').strip()
return {'article_url':url,
'article_title':title,
'publication_date':date,
'author':authors,
'summary':summary,
'image_url':image_url}
def update_article_details(self, article):
if self.oped_articles(article):
return self.get_oped_article_details(article)
else:
return self.get_story_details(article)
def update_top_stories(self):
articles = self.get_top_stories()
article_info = []
for article in articles:
try:
logger.info('Updating article details for {}'.format(article))
story = self.update_article_details(article)
article_info.append(Article(title=story['article_title'],
article_url=story['article_url'],
article_image_url=story['image_url'],
author=story['author'],
publication_date=story['publication_date'],
summary=story['summary'],
news_source=self.news_source
))
except Exception as e:
logger.exception('Crawling Error: {0} while getting data from: {1}'.format(e, article))
self.errors.append(error_to_string(e))
try:
Article.objects.bulk_create(article_info)
logger.info('')
logger.info('Succesfully updated Daily Nation Latest Articles.{} new articles added'.format(
len(article_info)))
self.crawl.total_articles=len(article_info)
self.crawl.save()
except Exception as e:
logger.exception('Error!!!{}'.format(e))
self.errors.append(error_to_string(e))
| true | true |
f72df764203ddd17af340ec550ae35e43d2207f8 | 6,706 | py | Python | clients/python/airavata_custos/admin/iam_admin_client.py | smarru/airavata-custos | d12028ac939b74f77691cecc89f7f6592b7ac3d1 | [
"Apache-2.0"
] | 1 | 2020-02-16T17:17:40.000Z | 2020-02-16T17:17:40.000Z | clients/python/airavata_custos/admin/iam_admin_client.py | isururanawaka/airavata-custos | 4cd85e1923a0d5c180ddc9c719e6739715563743 | [
"Apache-2.0"
] | null | null | null | clients/python/airavata_custos/admin/iam_admin_client.py | isururanawaka/airavata-custos | 4cd85e1923a0d5c180ddc9c719e6739715563743 | [
"Apache-2.0"
] | 1 | 2020-05-04T03:15:40.000Z | 2020-05-04T03:15:40.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import configparser
from airavata_custos import utils
from airavata_custos.settings import ProfileSettings
logger = logging.getLogger(__name__)
class IAMAdminClient(object):
    """Keycloak user-administration client backed by a pooled Thrift service.

    The profile-service endpoint is read from an ini file whose
    ``ProfileServerSettings`` section supplies host and port; every public
    method forwards to the pooled IAM-admin client.
    """

    def __init__(self, configuration_file_location):
        """Load server settings from *configuration_file_location* and open the pool.

        :param configuration_file_location: path to the ini file containing a
            ``ProfileServerSettings`` section
        """
        self.profile_settings = ProfileSettings()
        self._load_settings(configuration_file_location)
        host = self.profile_settings.PROFILE_SERVICE_HOST
        port = self.profile_settings.PROFILE_SERVICE_PORT
        self.iamadmin_client_pool = utils.initialize_iamadmin_client_pool(host, port)

    def is_username_available(self, authz_token, username):
        """Tell whether *username* is still free.

        :param authz_token: AuthzToken of the active user
        :param username: username to probe
        :return: bool
        """
        return self.iamadmin_client_pool.isUsernameAvailable(authz_token, username)

    def register_user(self, authz_token, username, email_address, first_name, last_name, password):
        """Register a new user with the IAM service; True on success."""
        return self.iamadmin_client_pool.registerUser(
            authz_token, username, email_address, first_name, last_name, password)

    def is_user_enabled(self, authz_token, username):
        """Tell whether the account is enabled (only enabled users can log in)."""
        return self.iamadmin_client_pool.isUserEnabled(authz_token, username)

    def enable_user(self, authz_token, username):
        """Enable a previously disabled account and return its user profile."""
        return self.iamadmin_client_pool.enableUser(authz_token, username)

    def delete_user(self, authz_token, username):
        """Remove the user from the IAM service; True when the deletion succeeded."""
        return self.iamadmin_client_pool.deleteUser(authz_token, username)

    def is_user_exist(self, authz_token, username):
        """Tell whether the user exists; None when the lookup raised."""
        try:
            exists = self.iamadmin_client_pool.isUserExist(authz_token, username)
        except Exception:
            exists = None
        return exists

    def get_user(self, authz_token, username):
        """Fetch the UserProfile for *username*; None when the lookup raised."""
        try:
            profile = self.iamadmin_client_pool.getUser(authz_token, username)
        except Exception:
            profile = None
        return profile

    def get_users(self, authz_token, offset=0, limit=-1, search=None):
        """List user profiles starting at *offset*, up to *limit*, optionally
        filtered by *search*; None when the call raised."""
        try:
            profiles = self.iamadmin_client_pool.getUsers(authz_token, offset, limit, search)
        except Exception:
            profiles = None
        return profiles

    def reset_user_password(self, authz_token, username, new_password):
        """Set *new_password* for *username*; None when the call raised."""
        try:
            outcome = self.iamadmin_client_pool.resetUserPassword(
                authz_token, username, new_password)
        except Exception:
            outcome = None
        return outcome

    def _load_settings(self, configuration_file_location):
        """Populate ``self.profile_settings`` from the ini file's
        ``ProfileServerSettings`` section."""
        parser = configparser.ConfigParser()
        parser.read(configuration_file_location)
        section = parser['ProfileServerSettings']
        self.profile_settings.PROFILE_SERVICE_HOST = section['PROFILE_SERVICE_HOST']
        self.profile_settings.PROFILE_SERVICE_PORT = section['PROFILE_SERVICE_PORT']
| 44.410596 | 125 | 0.694602 |
import logging
import configparser
from airavata_custos import utils
from airavata_custos.settings import ProfileSettings
logger = logging.getLogger(__name__)
class IAMAdminClient(object):
    """Administrative client for IAM user management.

    Reads the profile-service host/port from an ini configuration file and
    delegates every call to a pooled IAM-admin Thrift client.
    """

    def __init__(self, configuration_file_location):
        """Read server settings from *configuration_file_location* and build the client pool."""
        self.profile_settings = ProfileSettings()
        self._load_settings(configuration_file_location)
        self.iamadmin_client_pool = utils.initialize_iamadmin_client_pool(self.profile_settings.PROFILE_SERVICE_HOST,
                                                                          self.profile_settings.PROFILE_SERVICE_PORT)

    def is_username_available(self, authz_token, username):
        """Return True when *username* is not yet taken."""
        return self.iamadmin_client_pool.isUsernameAvailable(authz_token, username)

    def register_user(self, authz_token, username, email_address, first_name, last_name, password):
        """Register a new user; returns True on success."""
        return self.iamadmin_client_pool.registerUser(
            authz_token,
            username,
            email_address,
            first_name,
            last_name,
            password)

    def is_user_enabled(self, authz_token, username):
        """Return True when the account is enabled."""
        return self.iamadmin_client_pool.isUserEnabled(authz_token, username)

    def enable_user(self, authz_token, username):
        """Enable a disabled account; returns the user profile."""
        return self.iamadmin_client_pool.enableUser(authz_token, username)

    def delete_user(self, authz_token, username):
        """Delete the user; returns True on success."""
        return self.iamadmin_client_pool.deleteUser(authz_token, username)

    def is_user_exist(self, authz_token, username):
        """Return whether the user exists, or None when the lookup raised."""
        try:
            return self.iamadmin_client_pool.isUserExist(authz_token, username)
        except Exception:
            # Best-effort: any failure is reported as None rather than raised.
            return None

    def get_user(self, authz_token, username):
        """Return the user profile for *username*, or None when the lookup raised."""
        try:
            return self.iamadmin_client_pool.getUser(authz_token, username)
        except Exception:
            return None

    def get_users(self, authz_token, offset=0, limit=-1, search=None):
        """Return a page of user profiles (optionally filtered), or None when the call raised."""
        try:
            return self.iamadmin_client_pool.getUsers(authz_token, offset, limit, search)
        except Exception:
            return None

    def reset_user_password(self, authz_token, username, new_password):
        """Set a new password for *username*, or None when the call raised."""
        try:
            return self.iamadmin_client_pool.resetUserPassword(
                authz_token, username, new_password)
        except Exception:
            return None

    def _load_settings(self, configuration_file_location):
        """Populate profile_settings from the ini file's ProfileServerSettings section."""
        config = configparser.ConfigParser()
        config.read(configuration_file_location)
        settings = config['ProfileServerSettings']
        self.profile_settings.PROFILE_SERVICE_HOST = settings['PROFILE_SERVICE_HOST']
        self.profile_settings.PROFILE_SERVICE_PORT = settings['PROFILE_SERVICE_PORT']
| true | true |
f72df87b6b7de4d36648f57dc9c5e87a38639da1 | 1,877 | py | Python | spoticly/spotify.py | onhernandes/spoticly | ed109f4ea118c57557af1e7a10120a120fd29fa4 | [
"MIT"
] | 6 | 2018-12-23T16:16:36.000Z | 2019-01-11T12:10:49.000Z | spoticly/spotify.py | onhernandes/spoticly | ed109f4ea118c57557af1e7a10120a120fd29fa4 | [
"MIT"
] | null | null | null | spoticly/spotify.py | onhernandes/spoticly | ed109f4ea118c57557af1e7a10120a120fd29fa4 | [
"MIT"
] | null | null | null | import sys
import requests
from . import settings
spotify_base = "https://api.spotify.com/v1"
def get_spotipy_token():
    """Prompt the configured user for an OAuth token via spotipy's helper.

    Builds the authorization parameters with :func:`get_spotify_auth_params`.
    """
    # spotipy is imported lazily, only when a token is actually requested.
    import spotipy.util as util
    params = get_spotify_auth_params()
    return util.prompt_for_user_token(**params)
def get_headers(token):
    """Build the HTTP ``Authorization`` header dict for a bearer *token*."""
    header_value = "Bearer %s" % (token)
    return {"Authorization": header_value}
def get_spotify_auth_params():
    """Assemble the keyword arguments spotipy needs for user authorization.

    Values come from the stored app settings; the scope list is joined into
    a single space-separated string.
    """
    params = {}
    params["client_id"] = settings.get("SPOTIPY_CLIENT_ID")
    params["client_secret"] = settings.get("SPOTIPY_CLIENT_SECRET")
    params["redirect_uri"] = settings.get("SPOTIPY_REDIRECT_URI")
    params["username"] = settings.get("SPOTIPY_USERNAME")
    params["scope"] = " ".join(list(settings.get("SPOTIPY_SCOPES")))
    return params
def ensure_settings():
    """Abort the process unless the Spotify API settings are complete.

    Prints a hint and exits with a non-zero status when any required
    configuration value is missing; returns None otherwise.
    """
    if not settings.ensure_all():
        print("You must set your Spotify's app config!")
        print("Run spotify-cli config")
        # Exit non-zero so shells/scripts can detect the missing configuration
        # (bare sys.exit() would report success with status 0).
        sys.exit(1)
def get_spotipy():
    """Return an authenticated ``spotipy.Spotify`` client.

    Exits the process via :func:`ensure_settings` when the app configuration
    is incomplete.
    """
    ensure_settings()
    # spotipy is imported lazily, only when a client is actually built.
    import spotipy
    token = get_spotipy_token()
    return spotipy.Spotify(auth=token)
def previous_track(token):
    """Skip back to the previous track; True when the API answers 204 No Content."""
    endpoint = "%s/me/player/previous" % (spotify_base)
    response = requests.post(endpoint, headers=get_headers(token))
    return response.status_code == 204
def next_track(token):
    """Skip forward to the next track; True when the API answers 204 No Content."""
    endpoint = "%s/me/player/next" % (spotify_base)
    response = requests.post(endpoint, headers=get_headers(token))
    return response.status_code == 204
def get_current_playback(token):
    """Return the currently-playing object decoded from the JSON response."""
    endpoint = "%s/me/player/currently-playing" % (spotify_base)
    response = requests.get(endpoint, headers=get_headers(token))
    return response.json()
def pause_playback(token):
    """Pause the active playback; True when the API answers 204 No Content."""
    endpoint = "%s/me/player/pause" % (spotify_base)
    response = requests.put(endpoint, headers=get_headers(token))
    return response.status_code == 204
def resume_playback(token):
    """Resume paused playback; True when the API answers 204 No Content."""
    endpoint = "%s/me/player/play" % (spotify_base)
    response = requests.put(endpoint, headers=get_headers(token))
    return response.status_code == 204
| 23.17284 | 64 | 0.670751 | import sys
import requests
from . import settings
spotify_base = "https://api.spotify.com/v1"
def get_spotipy_token():
    """Prompt the configured user for an OAuth token via spotipy's helper."""
    import spotipy.util as util
    params = get_spotify_auth_params()
    return util.prompt_for_user_token(**params)


def get_headers(token):
    """Return the HTTP Authorization header dict for a bearer *token*."""
    return {"Authorization": "Bearer %s" % (token)}


def get_spotify_auth_params():
    """Collect the spotipy authorization kwargs from the stored app settings."""
    return {
        "client_id": settings.get("SPOTIPY_CLIENT_ID"),
        "client_secret": settings.get("SPOTIPY_CLIENT_SECRET"),
        "redirect_uri": settings.get("SPOTIPY_REDIRECT_URI"),
        "username": settings.get("SPOTIPY_USERNAME"),
        "scope": " ".join(list(settings.get("SPOTIPY_SCOPES"))),
    }


def ensure_settings():
    """Exit the process when the Spotify app configuration is incomplete."""
    if not settings.ensure_all():
        print("You must set your Spotify's app config!")
        print("Run spotify-cli config")
        sys.exit()


def get_spotipy():
    """Return an authenticated spotipy.Spotify client (exits if config missing)."""
    ensure_settings()
    import spotipy
    token = get_spotipy_token()
    return spotipy.Spotify(auth=token)


def previous_track(token):
    """POST .../previous; True when the player accepted (HTTP 204)."""
    url = "%s/me/player/previous" % (spotify_base)
    r = requests.post(url, headers=get_headers(token))
    return r.status_code == 204


def next_track(token):
    """POST .../next; True when the player accepted (HTTP 204)."""
    url = "%s/me/player/next" % (spotify_base)
    r = requests.post(url, headers=get_headers(token))
    return r.status_code == 204


def get_current_playback(token):
    """Return the currently-playing object decoded from JSON."""
    url = "%s/me/player/currently-playing" % (spotify_base)
    r = requests.get(url, headers=get_headers(token))
    return r.json()


def pause_playback(token):
    """PUT .../pause; True when playback was paused (HTTP 204)."""
    url = "%s/me/player/pause" % (spotify_base)
    r = requests.put(url, headers=get_headers(token))
    return r.status_code == 204


def resume_playback(token):
    """PUT .../play; True when playback resumed (HTTP 204)."""
    url = "%s/me/player/play" % (spotify_base)
    r = requests.put(url, headers=get_headers(token))
    return r.status_code == 204
| true | true |
f72df8cbe4640941d014b310325a8bb56d8af65f | 9,111 | py | Python | python/paddle/fluid/tests/unittests/test_transpose_op.py | OuyangChao/Paddle | cac9635a6733ffbbd816b33e21c3054e0cd81ab1 | [
"Apache-2.0"
] | 10 | 2021-05-12T07:20:32.000Z | 2022-03-04T08:21:56.000Z | python/paddle/fluid/tests/unittests/test_transpose_op.py | AFLee/Paddle | 311b3b44fc7d51d4d66d90ab8a3fc0d42231afda | [
"Apache-2.0"
] | 1 | 2021-01-25T09:40:19.000Z | 2021-01-25T09:40:19.000Z | python/paddle/fluid/tests/unittests/test_transpose_op.py | AFLee/Paddle | 311b3b44fc7d51d4d66d90ab8a3fc0d42231afda | [
"Apache-2.0"
] | 18 | 2021-05-19T08:01:49.000Z | 2022-02-11T03:11:32.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
paddle.enable_static()
class TestTransposeOp(OpTest):
    """Operator-level test for ``transpose2``: forward output and X gradient."""

    def setUp(self):
        # Build the inputs/attrs/outputs dictionaries consumed by OpTest.
        self.init_op_type()
        self.initTestCase()
        self.inputs = {'X': np.random.random(self.shape).astype("float64")}
        self.attrs = {
            'axis': list(self.axis),
            'use_mkldnn': self.use_mkldnn,
        }
        self.outputs = {
            # XShape is filled with random data; it is excluded from the
            # output comparison below.
            'XShape': np.random.random(self.shape).astype("float64"),
            'Out': self.inputs['X'].transpose(self.axis)
        }

    def init_op_type(self):
        self.op_type = "transpose2"
        self.use_mkldnn = False

    def test_check_output(self):
        # Skip XShape in the comparison (see setUp).
        self.check_output(no_check_set=['XShape'])

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')

    def initTestCase(self):
        # Base case: 2-D swap. Subclasses override shape/axis.
        self.shape = (3, 40)
        self.axis = (1, 0)
class TestCase0(TestTransposeOp):
    # Rank-1 input with the identity permutation.
    def initTestCase(self):
        self.shape = (100, )
        self.axis = (0, )


class TestCase1(TestTransposeOp):
    # Rank-3 input, permutation (0, 2, 1).
    def initTestCase(self):
        self.shape = (3, 4, 10)
        self.axis = (0, 2, 1)


class TestCase2(TestTransposeOp):
    # Rank-4 input, permutation (0, 2, 3, 1).
    def initTestCase(self):
        self.shape = (2, 3, 4, 5)
        self.axis = (0, 2, 3, 1)


class TestCase3(TestTransposeOp):
    # Rank-5 input, permutation (4, 2, 3, 1, 0).
    def initTestCase(self):
        self.shape = (2, 3, 4, 5, 6)
        self.axis = (4, 2, 3, 1, 0)


class TestCase4(TestTransposeOp):
    # Rank-6 input including a size-1 trailing dimension.
    def initTestCase(self):
        self.shape = (2, 3, 4, 5, 6, 1)
        self.axis = (4, 2, 3, 1, 0, 5)


class TestCase5(TestTransposeOp):
    # Rank-3 input, permutation (0, 2, 1).
    def initTestCase(self):
        self.shape = (2, 16, 96)
        self.axis = (0, 2, 1)


class TestCase6(TestTransposeOp):
    # Rank-4 input, full reversal of outer/inner axes.
    def initTestCase(self):
        self.shape = (2, 10, 12, 16)
        self.axis = (3, 1, 2, 0)


class TestCase7(TestTransposeOp):
    # Rank-4 input, swap of the last two axes only.
    def initTestCase(self):
        self.shape = (2, 10, 2, 16)
        self.axis = (0, 1, 3, 2)


class TestCase8(TestTransposeOp):
    # Rank-8 input, near-identity permutation.
    def initTestCase(self):
        self.shape = (2, 3, 2, 3, 2, 4, 3, 3)
        self.axis = (0, 1, 3, 2, 4, 5, 6, 7)


class TestCase9(TestTransposeOp):
    # Rank-8 input, heavily shuffled permutation.
    def initTestCase(self):
        self.shape = (2, 3, 2, 3, 2, 4, 3, 3)
        self.axis = (6, 1, 3, 5, 0, 2, 4, 7)
class TestTransposeOpError(unittest.TestCase):
    """Argument validation of ``fluid.layers.transpose`` (type and value errors)."""

    def test_errors(self):
        paddle.enable_static()
        with program_guard(Program(), Program()):
            x = fluid.layers.data(name='x', shape=[10, 5, 3], dtype='float64')

            def test_x_Variable_check():
                # the Input(x)'s type must be Variable
                fluid.layers.transpose("not_variable", perm=[1, 0, 2])

            self.assertRaises(TypeError, test_x_Variable_check)

            def test_x_dtype_check():
                # the Input(x)'s dtype must be one of [float16, float32, float64, int32, int64]
                x1 = fluid.layers.data(
                    name='x1', shape=[10, 5, 3], dtype='bool')
                fluid.layers.transpose(x1, perm=[1, 0, 2])

            self.assertRaises(TypeError, test_x_dtype_check)

            def test_perm_list_check():
                # Input(perm)'s type must be list
                fluid.layers.transpose(x, perm="[1, 0, 2]")

            self.assertRaises(TypeError, test_perm_list_check)

            def test_perm_length_and_x_dim_check():
                # Input(perm) is the permutation of dimensions of Input(input)
                # its length should be equal to dimensions of Input(input)
                fluid.layers.transpose(x, perm=[1, 0, 2, 3, 4])

            self.assertRaises(ValueError, test_perm_length_and_x_dim_check)

            def test_each_elem_value_check():
                # Each element in Input(perm) should be less than Input(x)'s dimension
                fluid.layers.transpose(x, perm=[3, 5, 7])

            self.assertRaises(ValueError, test_each_elem_value_check)
class TestTransposeApi(unittest.TestCase):
    """API-level checks of ``paddle.transpose`` against numpy, in both modes."""

    def test_static_out(self):
        # Static-graph mode: list and tuple perms must both work.
        paddle.enable_static()
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.static.data(name='x', shape=[2, 3, 4], dtype='float32')
            x_trans1 = paddle.transpose(x, perm=[1, 0, 2])
            x_trans2 = paddle.transpose(x, perm=(2, 1, 0))
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            x_np = np.random.random([2, 3, 4]).astype("float32")
            result1, result2 = exe.run(feed={"x": x_np}, fetch_list=[x_trans1, x_trans2])
            expected_result1 = np.transpose(x_np, [1, 0, 2])
            expected_result2 = np.transpose(x_np, (2, 1, 0))
            np.testing.assert_array_equal(result1, expected_result1)
            np.testing.assert_array_equal(result2, expected_result2)

    def test_dygraph_out(self):
        # This is an old test before 2.0 API so we need to disable static
        # to trigger dygraph
        paddle.disable_static()
        x = paddle.randn([2, 3, 4])
        x_trans1 = paddle.transpose(x, perm=[1, 0, 2])
        x_trans2 = paddle.transpose(x, perm=(2, 1, 0))
        x_np = x.numpy()
        expected_result1 = np.transpose(x_np, [1, 0, 2])
        expected_result2 = np.transpose(x_np, (2, 1, 0))
        np.testing.assert_array_equal(x_trans1.numpy(), expected_result1)
        np.testing.assert_array_equal(x_trans2.numpy(), expected_result2)
        # This is an old test before 2.0 API so we enable static again after
        # dygraph test
        paddle.enable_static()
class TestTAPI(unittest.TestCase):
    """Tests for ``paddle.t`` (0/1/2-D transpose) in static and dygraph modes."""

    def _check_static(self, shape):
        # Run paddle.t in a static program on random data of *shape* and
        # compare against np.transpose.
        with fluid.program_guard(fluid.Program()):
            data = fluid.data(shape=shape, dtype="float64", name="data")
            data_t = paddle.t(data)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            data_np = np.random.random(shape).astype("float64")
            result, = exe.run(feed={"data": data_np}, fetch_list=[data_t])
            expected_result = np.transpose(data_np)
            self.assertEqual((result == expected_result).all(), True)

    def _check_dygraph(self, shape):
        # Run paddle.t eagerly on random data of *shape* and compare against
        # np.transpose.
        with fluid.dygraph.guard():
            np_x = np.random.random(shape).astype("float64")
            data = fluid.dygraph.to_variable(np_x)
            z = paddle.t(data)
            np_z = z.numpy()
            z_expected = np.array(np.transpose(np_x))
            self.assertEqual((np_z == z_expected).all(), True)

    def test_out(self):
        """paddle.t matches numpy transpose for 1-D and 2-D inputs.

        The same shape set is exercised in both static and dygraph modes,
        replacing the previous six copy-pasted blocks.
        """
        for shape in ([10], [10, 5], [1, 5]):
            self._check_static(shape)
            self._check_dygraph(shape)

    def test_errors(self):
        """paddle.t rejects inputs with more than two dimensions."""
        with fluid.program_guard(fluid.Program()):
            x = fluid.data(name='x', shape=[10, 5, 3], dtype='float64')

            def test_x_dimension_check():
                paddle.t(x)

            self.assertRaises(ValueError, test_x_dimension_check)
if __name__ == '__main__':
unittest.main()
| 35.589844 | 95 | 0.597629 |
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
paddle.enable_static()
class TestTransposeOp(OpTest):
    """Operator test for transpose2: forward output and X-gradient checks."""

    def setUp(self):
        self.init_op_type()
        self.initTestCase()
        self.inputs = {'X': np.random.random(self.shape).astype("float64")}
        self.attrs = {
            'axis': list(self.axis),
            'use_mkldnn': self.use_mkldnn,
        }
        self.outputs = {
            # XShape holds random filler; it is excluded from comparison below.
            'XShape': np.random.random(self.shape).astype("float64"),
            'Out': self.inputs['X'].transpose(self.axis)
        }

    def init_op_type(self):
        self.op_type = "transpose2"
        self.use_mkldnn = False

    def test_check_output(self):
        self.check_output(no_check_set=['XShape'])

    def test_check_grad(self):
        self.check_grad(['X'], 'Out')

    def initTestCase(self):
        # Base 2-D case; subclasses override shape/axis.
        self.shape = (3, 40)
        self.axis = (1, 0)


class TestCase0(TestTransposeOp):
    # Rank-1, identity permutation.
    def initTestCase(self):
        self.shape = (100, )
        self.axis = (0, )


class TestCase1(TestTransposeOp):
    # Rank-3 permutation.
    def initTestCase(self):
        self.shape = (3, 4, 10)
        self.axis = (0, 2, 1)


class TestCase2(TestTransposeOp):
    # Rank-4 permutation.
    def initTestCase(self):
        self.shape = (2, 3, 4, 5)
        self.axis = (0, 2, 3, 1)


class TestCase3(TestTransposeOp):
    # Rank-5 permutation.
    def initTestCase(self):
        self.shape = (2, 3, 4, 5, 6)
        self.axis = (4, 2, 3, 1, 0)


class TestCase4(TestTransposeOp):
    # Rank-6 permutation with a size-1 dimension.
    def initTestCase(self):
        self.shape = (2, 3, 4, 5, 6, 1)
        self.axis = (4, 2, 3, 1, 0, 5)


class TestCase5(TestTransposeOp):
    # Rank-3 permutation.
    def initTestCase(self):
        self.shape = (2, 16, 96)
        self.axis = (0, 2, 1)


class TestCase6(TestTransposeOp):
    # Rank-4 permutation.
    def initTestCase(self):
        self.shape = (2, 10, 12, 16)
        self.axis = (3, 1, 2, 0)


class TestCase7(TestTransposeOp):
    # Rank-4, swap of the last two axes only.
    def initTestCase(self):
        self.shape = (2, 10, 2, 16)
        self.axis = (0, 1, 3, 2)


class TestCase8(TestTransposeOp):
    # Rank-8, near-identity permutation.
    def initTestCase(self):
        self.shape = (2, 3, 2, 3, 2, 4, 3, 3)
        self.axis = (0, 1, 3, 2, 4, 5, 6, 7)


class TestCase9(TestTransposeOp):
    # Rank-8, shuffled permutation.
    def initTestCase(self):
        self.shape = (2, 3, 2, 3, 2, 4, 3, 3)
        self.axis = (6, 1, 3, 5, 0, 2, 4, 7)
class TestTransposeOpError(unittest.TestCase):
    """Argument validation of fluid.layers.transpose (type and value errors)."""

    def test_errors(self):
        paddle.enable_static()
        with program_guard(Program(), Program()):
            x = fluid.layers.data(name='x', shape=[10, 5, 3], dtype='float64')

            def test_x_Variable_check():
                # Non-Variable input must be rejected.
                fluid.layers.transpose("not_variable", perm=[1, 0, 2])

            self.assertRaises(TypeError, test_x_Variable_check)

            def test_x_dtype_check():
                # the Input(x)'s dtype must be one of [float16, float32, float64, int32, int64]
                x1 = fluid.layers.data(
                    name='x1', shape=[10, 5, 3], dtype='bool')
                fluid.layers.transpose(x1, perm=[1, 0, 2])

            self.assertRaises(TypeError, test_x_dtype_check)

            def test_perm_list_check():
                # A string perm must be rejected.
                fluid.layers.transpose(x, perm="[1, 0, 2]")

            self.assertRaises(TypeError, test_perm_list_check)

            def test_perm_length_and_x_dim_check():
                # Input(perm) is the permutation of dimensions of Input(input)
                # its length should be equal to dimensions of Input(input)
                fluid.layers.transpose(x, perm=[1, 0, 2, 3, 4])

            self.assertRaises(ValueError, test_perm_length_and_x_dim_check)

            def test_each_elem_value_check():
                # Each element in Input(perm) should be less than Input(x)'s dimension
                fluid.layers.transpose(x, perm=[3, 5, 7])

            self.assertRaises(ValueError, test_each_elem_value_check)


class TestTransposeApi(unittest.TestCase):
    """API-level checks of paddle.transpose against numpy in both modes."""

    def test_static_out(self):
        paddle.enable_static()
        with paddle.static.program_guard(paddle.static.Program()):
            x = paddle.static.data(name='x', shape=[2, 3, 4], dtype='float32')
            x_trans1 = paddle.transpose(x, perm=[1, 0, 2])
            x_trans2 = paddle.transpose(x, perm=(2, 1, 0))
            place = paddle.CPUPlace()
            exe = paddle.static.Executor(place)
            x_np = np.random.random([2, 3, 4]).astype("float32")
            result1, result2 = exe.run(feed={"x": x_np}, fetch_list=[x_trans1, x_trans2])
            expected_result1 = np.transpose(x_np, [1, 0, 2])
            expected_result2 = np.transpose(x_np, (2, 1, 0))
            np.testing.assert_array_equal(result1, expected_result1)
            np.testing.assert_array_equal(result2, expected_result2)

    def test_dygraph_out(self):
        # Temporarily switch to dygraph mode for this check.
        paddle.disable_static()
        x = paddle.randn([2, 3, 4])
        x_trans1 = paddle.transpose(x, perm=[1, 0, 2])
        x_trans2 = paddle.transpose(x, perm=(2, 1, 0))
        x_np = x.numpy()
        expected_result1 = np.transpose(x_np, [1, 0, 2])
        expected_result2 = np.transpose(x_np, (2, 1, 0))
        np.testing.assert_array_equal(x_trans1.numpy(), expected_result1)
        np.testing.assert_array_equal(x_trans2.numpy(), expected_result2)
        # Restore static mode for the remaining tests.
        paddle.enable_static()


class TestTAPI(unittest.TestCase):
    """Tests for paddle.t (1/2-D transpose) in static and dygraph modes."""

    def test_out(self):
        with fluid.program_guard(fluid.Program()):
            data = fluid.data(shape=[10], dtype="float64", name="data")
            data_t = paddle.t(data)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            data_np = np.random.random([10]).astype("float64")
            result, = exe.run(feed={"data": data_np}, fetch_list=[data_t])
            expected_result = np.transpose(data_np)
            self.assertEqual((result == expected_result).all(), True)
        with fluid.program_guard(fluid.Program()):
            data = fluid.data(shape=[10, 5], dtype="float64", name="data")
            data_t = paddle.t(data)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            data_np = np.random.random([10, 5]).astype("float64")
            result, = exe.run(feed={"data": data_np}, fetch_list=[data_t])
            expected_result = np.transpose(data_np)
            self.assertEqual((result == expected_result).all(), True)
        with fluid.program_guard(fluid.Program()):
            data = fluid.data(shape=[1, 5], dtype="float64", name="data")
            data_t = paddle.t(data)
            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            data_np = np.random.random([1, 5]).astype("float64")
            result, = exe.run(feed={"data": data_np}, fetch_list=[data_t])
            expected_result = np.transpose(data_np)
            self.assertEqual((result == expected_result).all(), True)
        with fluid.dygraph.guard():
            np_x = np.random.random([10]).astype("float64")
            data = fluid.dygraph.to_variable(np_x)
            z = paddle.t(data)
            np_z = z.numpy()
            z_expected = np.array(np.transpose(np_x))
            self.assertEqual((np_z == z_expected).all(), True)
        with fluid.dygraph.guard():
            np_x = np.random.random([10, 5]).astype("float64")
            data = fluid.dygraph.to_variable(np_x)
            z = paddle.t(data)
            np_z = z.numpy()
            z_expected = np.array(np.transpose(np_x))
            self.assertEqual((np_z == z_expected).all(), True)
        with fluid.dygraph.guard():
            np_x = np.random.random([1, 5]).astype("float64")
            data = fluid.dygraph.to_variable(np_x)
            z = paddle.t(data)
            np_z = z.numpy()
            z_expected = np.array(np.transpose(np_x))
            self.assertEqual((np_z == z_expected).all(), True)

    def test_errors(self):
        with fluid.program_guard(fluid.Program()):
            x = fluid.data(name='x', shape=[10, 5, 3], dtype='float64')

            def test_x_dimension_check():
                # paddle.t only supports tensors up to rank 2.
                paddle.t(x)

            self.assertRaises(ValueError, test_x_dimension_check)
if __name__ == '__main__':
unittest.main()
| true | true |
f72df908e98fa9abb2dd62e353cfd2a20a3d8d02 | 13,347 | py | Python | models/breast_cycle_gan/custom/conv/contrib.py | BreastGAN/augmentation | 0e1bcb7175e2b2a45cd8084bb14521e26b68caea | [
"Apache-2.0"
] | 13 | 2019-03-03T23:51:56.000Z | 2022-02-26T10:14:15.000Z | models/breast_cycle_gan/custom/conv/contrib.py | mrymaltin/augmentation | 0e1bcb7175e2b2a45cd8084bb14521e26b68caea | [
"Apache-2.0"
] | 2 | 2021-01-31T12:13:52.000Z | 2022-02-09T23:29:10.000Z | models/breast_cycle_gan/custom/conv/contrib.py | mrymaltin/augmentation | 0e1bcb7175e2b2a45cd8084bb14521e26b68caea | [
"Apache-2.0"
] | 5 | 2019-04-11T11:12:02.000Z | 2021-12-22T13:29:37.000Z | # Copyright 2019 Lukas Jendele and Ondrej Skopek.
# Adapted from The TensorFlow Authors, under the ASL 2.0.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# This part is copied from:
# https://github.com/tensorflow/tensorflow/blob/r1.11/tensorflow/contrib/layers/python/layers/layers.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.ops import add_arg_scope
# from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import utils
# from tensorflow.python.eager import context
# from tensorflow.python.framework import constant_op
# from tensorflow.python.framework import dtypes
# from tensorflow.python.framework import function
from tensorflow.python.framework import ops
# from tensorflow.python.framework import sparse_tensor
from tensorflow.python.layers import convolutional as convolutional_layers
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope
# My imports
from tensorflow.contrib.layers.python.layers.layers import _build_variable_getter, _add_variable_to_collections
from models.breast_cycle_gan.custom.conv.layers import MyConv2D
import tensorflow as tf
# This part is copied from:
# https://github.com/tensorflow/tensorflow/blob/r1.11/tensorflow/contrib/layers/python/layers/layers.py
@add_arg_scope
def convolution2d(inputs,
                  num_outputs,
                  kernel_size,
                  stride=1,
                  padding='SAME',
                  data_format=None,
                  rate=1,
                  activation_fn=nn.relu,
                  normalizer_fn=None,
                  normalizer_params=None,
                  weights_initializer=initializers.xavier_initializer(),
                  weights_regularizer=None,
                  biases_initializer=init_ops.zeros_initializer(),
                  biases_regularizer=None,
                  reuse=None,
                  variables_collections=None,
                  outputs_collections=None,
                  trainable=True,
                  use_spectral_norm=False,
                  is_training=False,
                  self_attention=False,
                  scope=None):
    """2-D convolution with optional spectral norm and self-attention.

    Mirrors tf.contrib.layers.convolution2d but adds ``use_spectral_norm``
    and ``self_attention`` switches. When ``self_attention`` is False the
    result of the main convolution is returned unchanged; otherwise a
    SAGAN-style attention map is computed from two auxiliary 1/8-width
    convolutions (f, g) and blended into the input via a learned scalar
    ``gamma`` (initialized to 0, so the block starts as identity + 0).

    NOTE(review): the final ``tf.reshape(o, shape=inputs.shape)`` and the
    residual ``gamma * o + inputs`` only type-check when ``num_outputs``
    equals the input channel count — confirm for self_attention callers.
    """
    # Main convolution; `scope` is applied here only.
    h = convolution(
        inputs,
        num_outputs,
        kernel_size,
        stride,
        padding,
        data_format,
        rate,
        activation_fn,
        normalizer_fn,
        normalizer_params,
        weights_initializer,
        weights_regularizer,
        biases_initializer,
        biases_regularizer,
        reuse,
        variables_collections,
        outputs_collections,
        trainable,
        use_spectral_norm,
        is_training,
        scope,
        conv_dims=2)
    if not self_attention:
        return h
    with tf.variable_scope("self_attention"):
        # f and g are channel-reduced (num_outputs // 8) projections used to
        # form the attention logits; both share the main conv's settings but
        # get their own variable scopes ("f", "g") and no outer `scope`.
        with tf.variable_scope("f"):
            f = convolution(
                inputs,
                num_outputs // 8,
                kernel_size,
                stride,
                padding,
                data_format,
                rate,
                activation_fn,
                normalizer_fn,
                normalizer_params,
                weights_initializer,
                weights_regularizer,
                biases_initializer,
                biases_regularizer,
                reuse,
                variables_collections,
                outputs_collections,
                trainable,
                use_spectral_norm,
                is_training,
                None,
                conv_dims=2)
        with tf.variable_scope("g"):
            g = convolution(
                inputs,
                num_outputs // 8,
                kernel_size,
                stride,
                padding,
                data_format,
                rate,
                activation_fn,
                normalizer_fn,
                normalizer_params,
                weights_initializer,
                weights_regularizer,
                biases_initializer,
                biases_regularizer,
                reuse,
                variables_collections,
                outputs_collections,
                trainable,
                use_spectral_norm,
                is_training,
                None,
                conv_dims=2)

        def hw_flatten(x):
            # Collapse the spatial dimensions: [bs, h, w, c] -> [bs, h*w, c].
            return tf.reshape(x, shape=[x.shape[0], -1, x.shape[-1]])

        # N = h * w
        s = tf.matmul(hw_flatten(g), hw_flatten(f), transpose_b=True)  # # [bs, N, N]
        beta = tf.nn.softmax(s, axis=-1)  # attention map
        o = tf.matmul(beta, hw_flatten(h))  # [bs, N, C]
        # Learned blending weight, starts at 0 so attention is phased in.
        gamma = tf.get_variable("gamma", [1], initializer=tf.constant_initializer(0.0))
        o = tf.reshape(o, shape=inputs.shape)  # [bs, h, w, C]
        x = gamma * o + inputs
        return x
@add_arg_scope
def convolution(inputs,
                num_outputs,
                kernel_size,
                stride=1,
                padding='SAME',
                data_format=None,
                rate=1,
                activation_fn=nn.relu,
                normalizer_fn=None,
                normalizer_params=None,
                weights_initializer=initializers.xavier_initializer(),
                weights_regularizer=None,
                biases_initializer=init_ops.zeros_initializer(),
                biases_regularizer=None,
                reuse=None,
                variables_collections=None,
                outputs_collections=None,
                trainable=True,
                use_spectral_norm=False,
                is_training=False,
                scope=None,
                conv_dims=None):
  """Adds an N-D convolution followed by an optional batch_norm layer.

  It is required that 1 <= N <= 3.

  `convolution` creates a variable called `weights`, representing the
  convolutional kernel, that is convolved (actually cross-correlated) with the
  `inputs` to produce a `Tensor` of activations. If a `normalizer_fn` is
  provided (such as `batch_norm`), it is then applied. Otherwise, if
  `normalizer_fn` is None and a `biases_initializer` is provided then a `biases`
  variable would be created and added the activations. Finally, if
  `activation_fn` is not `None`, it is applied to the activations as well.

  Performs atrous convolution with input stride/dilation rate equal to `rate`
  if a value > 1 for any dimension of `rate` is specified. In this case
  `stride` values != 1 are not supported.

  Args:
    inputs: A Tensor of rank N+2 of shape
      `[batch_size] + input_spatial_shape + [in_channels]` if data_format does
      not start with "NC" (default), or
      `[batch_size, in_channels] + input_spatial_shape` if data_format starts
      with "NC".
    num_outputs: Integer, the number of output filters.
    kernel_size: A sequence of N positive integers specifying the spatial
      dimensions of the filters. Can be a single integer to specify the same
      value for all spatial dimensions.
    stride: A sequence of N positive integers specifying the stride at which to
      compute output. Can be a single integer to specify the same value for all
      spatial dimensions. Specifying any `stride` value != 1 is incompatible
      with specifying any `rate` value != 1.
    padding: One of `"VALID"` or `"SAME"`.
    data_format: A string or None. Specifies whether the channel dimension of
      the `input` and output is the last dimension (default, or if `data_format`
      does not start with "NC"), or the second dimension (if `data_format`
      starts with "NC"). For N=1, the valid values are "NWC" (default) and
      "NCW". For N=2, the valid values are "NHWC" (default) and "NCHW".
      For N=3, the valid values are "NDHWC" (default) and "NCDHW".
    rate: A sequence of N positive integers specifying the dilation rate to use
      for atrous convolution. Can be a single integer to specify the same
      value for all spatial dimensions. Specifying any `rate` value != 1 is
      incompatible with specifying any `stride` value != 1.
    activation_fn: Activation function. The default value is a ReLU function.
      Explicitly set it to None to skip it and maintain a linear activation.
    normalizer_fn: Normalization function to use instead of `biases`. If
      `normalizer_fn` is provided then `biases_initializer` and
      `biases_regularizer` are ignored and `biases` are not created nor added.
      default set to None for no normalizer function
    normalizer_params: Normalization function parameters.
    weights_initializer: An initializer for the weights.
    weights_regularizer: Optional regularizer for the weights.
    biases_initializer: An initializer for the biases. If None skip biases.
    biases_regularizer: Optional regularizer for the biases.
    reuse: Whether or not the layer and its variables should be reused. To be
      able to reuse the layer scope must be given.
    variables_collections: Optional list of collections for all the variables or
      a dictionary containing a different list of collection per variable.
    outputs_collections: Collection to add the outputs.
    trainable: If `True` also add variables to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable).
    use_spectral_norm: Forwarded to the underlying layer class's constructor.
    is_training: Forwarded to the underlying layer class's constructor.
    scope: Optional scope for `variable_scope`.
    conv_dims: Optional convolution dimensionality, when set it would use the
      corresponding convolution (e.g. 2 for Conv 2D, 3 for Conv 3D, ..). When
      leaved to None it would select the convolution dimensionality based on
      the input rank (i.e. Conv ND, with N = input_rank - 2).

  Returns:
    A tensor representing the output of the operation.

  Raises:
    ValueError: If `data_format` is invalid.
    ValueError: Both 'rate' and `stride` are not uniformly 1.
  """
  if data_format not in [None, 'NWC', 'NCW', 'NHWC', 'NCHW', 'NDHWC', 'NCDHW']:
    raise ValueError('Invalid data_format: %r' % (data_format,))
  # Map Keras-style variable names to the contrib-layers convention so that
  # variables show up as `weights`/`biases` in collections and checkpoints.
  layer_variable_getter = _build_variable_getter({'bias': 'biases', 'kernel': 'weights'})
  with variable_scope.variable_scope(scope, 'Conv', [inputs], reuse=reuse, custom_getter=layer_variable_getter) as sc:
    inputs = ops.convert_to_tensor(inputs)
    input_rank = inputs.get_shape().ndims
    if conv_dims is not None and conv_dims + 2 != input_rank:
      raise ValueError('Convolution expects input with rank %d, got %d' % (conv_dims + 2, input_rank))
    # Dispatch on input rank: 2-D inputs go through the project's MyConv2D,
    # 1-D/3-D fall back to the stock TF layer classes.
    # NOTE(review): use_spectral_norm/is_training are passed to all three
    # classes below, but stock Convolution1D/Convolution3D may not accept
    # these kwargs — confirm rank-3/rank-5 paths are actually exercised.
    if input_rank == 3:
      layer_class = convolutional_layers.Convolution1D
    elif input_rank == 4:
      layer_class = MyConv2D
    elif input_rank == 5:
      layer_class = convolutional_layers.Convolution3D
    else:
      raise ValueError('Convolution not supported for input with rank', input_rank)
    df = ('channels_first' if data_format and data_format.startswith('NC') else 'channels_last')
    layer = layer_class(
        filters=num_outputs,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format=df,
        dilation_rate=rate,
        activation=None,
        # Biases are dropped when a normalizer is used (it supplies the shift).
        use_bias=not normalizer_fn and biases_initializer,
        kernel_initializer=weights_initializer,
        bias_initializer=biases_initializer,
        kernel_regularizer=weights_regularizer,
        bias_regularizer=biases_regularizer,
        activity_regularizer=None,
        use_spectral_norm=use_spectral_norm,
        is_training=is_training,
        trainable=trainable,
        name=sc.name,
        dtype=inputs.dtype.base_dtype,
        _scope=sc,
        _reuse=reuse)
    outputs = layer.apply(inputs)
    # Add variables to collections.
    _add_variable_to_collections(layer.kernel, variables_collections, 'weights')
    if layer.use_bias:
      _add_variable_to_collections(layer.bias, variables_collections, 'biases')
    # Normalization (if any) runs before the activation function.
    if normalizer_fn is not None:
      normalizer_params = normalizer_params or {}
      outputs = normalizer_fn(outputs, **normalizer_params)
    if activation_fn is not None:
      outputs = activation_fn(outputs)
    return utils.collect_named_outputs(outputs_collections, sc.name, outputs)
| 43.760656 | 120 | 0.622912 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.contrib.layers.python.layers import initializers
from tensorflow.contrib.layers.python.layers import utils
from tensorflow.python.framework import ops
from tensorflow.python.layers import convolutional as convolutional_layers
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope
from tensorflow.contrib.layers.python.layers.layers import _build_variable_getter, _add_variable_to_collections
from models.breast_cycle_gan.custom.conv.layers import MyConv2D
import tensorflow as tf
@add_arg_scope
def convolution2d(inputs,
num_outputs,
kernel_size,
stride=1,
padding='SAME',
data_format=None,
rate=1,
activation_fn=nn.relu,
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
use_spectral_norm=False,
is_training=False,
self_attention=False,
scope=None):
h = convolution(
inputs,
num_outputs,
kernel_size,
stride,
padding,
data_format,
rate,
activation_fn,
normalizer_fn,
normalizer_params,
weights_initializer,
weights_regularizer,
biases_initializer,
biases_regularizer,
reuse,
variables_collections,
outputs_collections,
trainable,
use_spectral_norm,
is_training,
scope,
conv_dims=2)
if not self_attention:
return h
with tf.variable_scope("self_attention"):
with tf.variable_scope("f"):
f = convolution(
inputs,
num_outputs // 8,
kernel_size,
stride,
padding,
data_format,
rate,
activation_fn,
normalizer_fn,
normalizer_params,
weights_initializer,
weights_regularizer,
biases_initializer,
biases_regularizer,
reuse,
variables_collections,
outputs_collections,
trainable,
use_spectral_norm,
is_training,
None,
conv_dims=2)
with tf.variable_scope("g"):
g = convolution(
inputs,
num_outputs // 8,
kernel_size,
stride,
padding,
data_format,
rate,
activation_fn,
normalizer_fn,
normalizer_params,
weights_initializer,
weights_regularizer,
biases_initializer,
biases_regularizer,
reuse,
variables_collections,
outputs_collections,
trainable,
use_spectral_norm,
is_training,
None,
conv_dims=2)
def hw_flatten(x):
return tf.reshape(x, shape=[x.shape[0], -1, x.shape[-1]])
s = tf.matmul(hw_flatten(g), hw_flatten(f), transpose_b=True) ta = tf.nn.softmax(s, axis=-1)
o = tf.matmul(beta, hw_flatten(h))
gamma = tf.get_variable("gamma", [1], initializer=tf.constant_initializer(0.0))
o = tf.reshape(o, shape=inputs.shape)
x = gamma * o + inputs
return x
@add_arg_scope
def convolution(inputs,
num_outputs,
kernel_size,
stride=1,
padding='SAME',
data_format=None,
rate=1,
activation_fn=nn.relu,
normalizer_fn=None,
normalizer_params=None,
weights_initializer=initializers.xavier_initializer(),
weights_regularizer=None,
biases_initializer=init_ops.zeros_initializer(),
biases_regularizer=None,
reuse=None,
variables_collections=None,
outputs_collections=None,
trainable=True,
use_spectral_norm=False,
is_training=False,
scope=None,
conv_dims=None):
if data_format not in [None, 'NWC', 'NCW', 'NHWC', 'NCHW', 'NDHWC', 'NCDHW']:
raise ValueError('Invalid data_format: %r' % (data_format,))
layer_variable_getter = _build_variable_getter({'bias': 'biases', 'kernel': 'weights'})
with variable_scope.variable_scope(scope, 'Conv', [inputs], reuse=reuse, custom_getter=layer_variable_getter) as sc:
inputs = ops.convert_to_tensor(inputs)
input_rank = inputs.get_shape().ndims
if conv_dims is not None and conv_dims + 2 != input_rank:
raise ValueError('Convolution expects input with rank %d, got %d' % (conv_dims + 2, input_rank))
if input_rank == 3:
layer_class = convolutional_layers.Convolution1D
elif input_rank == 4:
layer_class = MyConv2D
elif input_rank == 5:
layer_class = convolutional_layers.Convolution3D
else:
raise ValueError('Convolution not supported for input with rank', input_rank)
df = ('channels_first' if data_format and data_format.startswith('NC') else 'channels_last')
layer = layer_class(
filters=num_outputs,
kernel_size=kernel_size,
strides=stride,
padding=padding,
data_format=df,
dilation_rate=rate,
activation=None,
use_bias=not normalizer_fn and biases_initializer,
kernel_initializer=weights_initializer,
bias_initializer=biases_initializer,
kernel_regularizer=weights_regularizer,
bias_regularizer=biases_regularizer,
activity_regularizer=None,
use_spectral_norm=use_spectral_norm,
is_training=is_training,
trainable=trainable,
name=sc.name,
dtype=inputs.dtype.base_dtype,
_scope=sc,
_reuse=reuse)
outputs = layer.apply(inputs)
_add_variable_to_collections(layer.kernel, variables_collections, 'weights')
if layer.use_bias:
_add_variable_to_collections(layer.bias, variables_collections, 'biases')
if normalizer_fn is not None:
normalizer_params = normalizer_params or {}
outputs = normalizer_fn(outputs, **normalizer_params)
if activation_fn is not None:
outputs = activation_fn(outputs)
return utils.collect_named_outputs(outputs_collections, sc.name, outputs)
| true | true |
f72df96125bc0cc1494b9380623f84d39d519b84 | 2,177 | py | Python | src/usb/core.py | latonaio/check-usb-storage-connection-kube | 38cd98f90756dc57f71ea3d5f60df8695324dc3e | [
"MIT"
] | 9 | 2021-09-22T07:16:38.000Z | 2021-11-05T01:25:53.000Z | src/usb/core.py | latonaio/check-usb-storage-connection-kube | 38cd98f90756dc57f71ea3d5f60df8695324dc3e | [
"MIT"
] | null | null | null | src/usb/core.py | latonaio/check-usb-storage-connection-kube | 38cd98f90756dc57f71ea3d5f60df8695324dc3e | [
"MIT"
] | null | null | null | # coding: utf-8
# Copyright (c) 2019-2020 Latona. All rights reserved.
import time
from aion.logger import lprint
from aion.microservice import Options, main_decorator
from .check import UpdateUsbStateToDB, UsbConnectionMonitor, DATABASE
SERVICE_NAME = "check-usb-storage-connection"  # service name registered with the aion framework
EXECUTE_INTERVAL = 5  # seconds to sleep between USB-connection polls
def fillter_new_mountpoint(mountpoints, connected_usbs):
    """Return the mountpoints that are not yet recorded as connected.

    :param mountpoints: iterable of currently mounted paths (strings),
        order is preserved in the result.
    :param connected_usbs: iterable of dict-like rows, each with a
        'mountpoint' key, describing USBs already marked connected in the DB.
    :return: list of mountpoints with no existing DB record.
    """
    # Build the lookup set once so each membership test is O(1) instead of
    # scanning a list per mountpoint.
    exist_mountpoints = {usb['mountpoint'] for usb in connected_usbs}
    return [mount for mount in mountpoints if mount not in exist_mountpoints]
@main_decorator(SERVICE_NAME)
def main_without_kanban(opt: Options):
    """Poll USB mountpoints forever and mirror their state into the DB.

    Every EXECUTE_INTERVAL seconds the currently visible mountpoints are
    compared against the DB's list of connected USBs; newly mounted devices
    are marked connected, vanished ones are marked unmounted, and a kanban
    message is emitted whenever anything changed.

    :param opt: aion microservice options providing the kanban connection
        and this process's number.
    """
    # NOTE(review): log message says "main_with_kanban" but the function is
    # main_without_kanban — runtime string left untouched here.
    lprint("start main_with_kanban()")
    conn = opt.get_conn()
    num = opt.get_number()
    # kanban = conn.get_one_kanban(SERVICE_NAME, num)
    # NOTE(review): `kanban` is never used afterwards; the call is presumably
    # kept for its registration side effect — TODO confirm.
    kanban = conn.set_kanban(SERVICE_NAME, num)
    usb = UsbConnectionMonitor()
    while True:  # service loop: runs until the process is terminated
        is_change = False
        mountpoints = usb.get_mount_points()
        with UpdateUsbStateToDB() as db:
            con_usbs = db.get_connected_usb_list()
            # Newly mounted devices: present on disk, absent from the DB.
            new_mountpoints = fillter_new_mountpoint(mountpoints, con_usbs)
            for mount in new_mountpoints:
                db.update_usb_state(mount, 1)
                lprint(f"found usb at:{mount}")
                is_change = True
            db.commit_query()
            # Unmounted devices: recorded in the DB, no longer mounted.
            for conneted in con_usbs:
                if conneted['mountpoint'] not in mountpoints:
                    db.update_unmounted_usb_state(conneted['usb_id'])
                    lprint(f"unconnected usb at: {conneted['mountpoint']}")
                    is_change = True
            db.commit_query()
        if is_change:
            # Notify downstream services that the `usbs` table changed.
            conn.output_kanban(
                result=True,
                process_number=num,
                metadata={"mountpoints": mountpoints, "mode": "all",
                          "database": DATABASE, "table": "usbs"},
            )
        time.sleep(EXECUTE_INTERVAL)
if __name__ == "__main__":
    # Entry point; the polling loop inside never returns.
    main_without_kanban()
| 32.492537 | 76 | 0.624713 |
import time
from aion.logger import lprint
from aion.microservice import Options, main_decorator
from .check import UpdateUsbStateToDB, UsbConnectionMonitor, DATABASE
SERVICE_NAME = "check-usb-storage-connection"
EXECUTE_INTERVAL = 5
def fillter_new_mountpoint(mountpoints, connected_usbs):
exist_mountpoints = list(map(lambda x: x['mountpoint'], connected_usbs))
new_mountpoints = []
for mount in mountpoints:
if mount not in exist_mountpoints:
new_mountpoints.append(mount)
return new_mountpoints
@main_decorator(SERVICE_NAME)
def main_without_kanban(opt: Options):
lprint("start main_with_kanban()")
conn = opt.get_conn()
num = opt.get_number()
kanban = conn.set_kanban(SERVICE_NAME, num)
usb = UsbConnectionMonitor()
while True:
is_change = False
mountpoints = usb.get_mount_points()
with UpdateUsbStateToDB() as db:
con_usbs = db.get_connected_usb_list()
new_mountpoints = fillter_new_mountpoint(mountpoints, con_usbs)
for mount in new_mountpoints:
db.update_usb_state(mount, 1)
lprint(f"found usb at:{mount}")
is_change = True
db.commit_query()
for conneted in con_usbs:
if conneted['mountpoint'] not in mountpoints:
db.update_unmounted_usb_state(conneted['usb_id'])
lprint(f"unconnected usb at: {conneted['mountpoint']}")
is_change = True
db.commit_query()
if is_change:
conn.output_kanban(
result=True,
process_number=num,
metadata={"mountpoints": mountpoints, "mode": "all",
"database": DATABASE, "table": "usbs"},
)
time.sleep(EXECUTE_INTERVAL)
if __name__ == "__main__":
main_without_kanban()
| true | true |
f72dfbc4e55cb3c44cbb43cba7284fbe42bebe64 | 8,416 | py | Python | sdk/python/pulumi_azure_nextgen/devtestlab/v20150521preview/virtual_network_resource.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/devtestlab/v20150521preview/virtual_network_resource.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/devtestlab/v20150521preview/virtual_network_resource.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['VirtualNetworkResource']
class VirtualNetworkResource(pulumi.CustomResource):
    """A DevTest Labs virtual network (auto-generated Pulumi SDK wrapper)."""
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 allowed_subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 external_provider_resource_id: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None,
                 lab_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 provisioning_state: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 subnet_overrides: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetOverrideArgs']]]]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        A virtual network.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]] allowed_subnets: The allowed subnets of the virtual network.
        :param pulumi.Input[str] description: The description of the virtual network.
        :param pulumi.Input[str] external_provider_resource_id: The Microsoft.Network resource identifier of the virtual network.
        :param pulumi.Input[str] id: The identifier of the resource.
        :param pulumi.Input[str] lab_name: The name of the lab.
        :param pulumi.Input[str] location: The location of the resource.
        :param pulumi.Input[str] name: The name of the resource.
        :param pulumi.Input[str] provisioning_state: The provisioning status of the resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetOverrideArgs']]]] subnet_overrides: The subnet overrides of the virtual network.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The tags of the resource.
        :param pulumi.Input[str] type: The type of the resource.
        """
        # Legacy __name__/__opts__ arguments are remapped for backwards compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource": inputs must not be given.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            __props__['allowed_subnets'] = allowed_subnets
            __props__['description'] = description
            __props__['external_provider_resource_id'] = external_provider_resource_id
            __props__['id'] = id
            if lab_name is None:
                raise TypeError("Missing required property 'lab_name'")
            __props__['lab_name'] = lab_name
            __props__['location'] = location
            if name is None:
                raise TypeError("Missing required property 'name'")
            __props__['name'] = name
            __props__['provisioning_state'] = provisioning_state
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            __props__['subnet_overrides'] = subnet_overrides
            __props__['tags'] = tags
            __props__['type'] = type
        # Aliases let states created under other API versions of this type resolve here.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:devtestlab/latest:VirtualNetworkResource"), pulumi.Alias(type_="azure-nextgen:devtestlab/v20160515:VirtualNetworkResource"), pulumi.Alias(type_="azure-nextgen:devtestlab/v20180915:VirtualNetworkResource")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(VirtualNetworkResource, __self__).__init__(
            'azure-nextgen:devtestlab/v20150521preview:VirtualNetworkResource',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualNetworkResource':
        """
        Get an existing VirtualNetworkResource resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # NOTE(review): no input properties are set here; state appears to be
        # resolved by the engine from the resource id — confirm against the
        # Pulumi provider docs before relying on output values.
        __props__ = dict()
        return VirtualNetworkResource(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="allowedSubnets")
    def allowed_subnets(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetResponse']]]:
        """
        The allowed subnets of the virtual network.
        """
        return pulumi.get(self, "allowed_subnets")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The description of the virtual network.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="externalProviderResourceId")
    def external_provider_resource_id(self) -> pulumi.Output[Optional[str]]:
        """
        The Microsoft.Network resource identifier of the virtual network.
        """
        return pulumi.get(self, "external_provider_resource_id")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[Optional[str]]:
        """
        The location of the resource.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[Optional[str]]:
        """
        The name of the resource.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[Optional[str]]:
        """
        The provisioning status of the resource.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="subnetOverrides")
    def subnet_overrides(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetOverrideResponse']]]:
        """
        The subnet overrides of the virtual network.
        """
        return pulumi.get(self, "subnet_overrides")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        The tags of the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[Optional[str]]:
        """
        The type of the resource.
        """
        return pulumi.get(self, "type")

    # Engine hooks translating between camelCase wire names and snake_case Python names.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 43.833333 | 293 | 0.649715 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['VirtualNetworkResource']
class VirtualNetworkResource(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allowed_subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]]] = None,
description: Optional[pulumi.Input[str]] = None,
external_provider_resource_id: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
lab_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
subnet_overrides: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetOverrideArgs']]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['allowed_subnets'] = allowed_subnets
__props__['description'] = description
__props__['external_provider_resource_id'] = external_provider_resource_id
__props__['id'] = id
if lab_name is None:
raise TypeError("Missing required property 'lab_name'")
__props__['lab_name'] = lab_name
__props__['location'] = location
if name is None:
raise TypeError("Missing required property 'name'")
__props__['name'] = name
__props__['provisioning_state'] = provisioning_state
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['subnet_overrides'] = subnet_overrides
__props__['tags'] = tags
__props__['type'] = type
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:devtestlab/latest:VirtualNetworkResource"), pulumi.Alias(type_="azure-nextgen:devtestlab/v20160515:VirtualNetworkResource"), pulumi.Alias(type_="azure-nextgen:devtestlab/v20180915:VirtualNetworkResource")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(VirtualNetworkResource, __self__).__init__(
'azure-nextgen:devtestlab/v20150521preview:VirtualNetworkResource',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualNetworkResource':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return VirtualNetworkResource(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowedSubnets")
def allowed_subnets(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetResponse']]]:
return pulumi.get(self, "allowed_subnets")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "description")
@property
@pulumi.getter(name="externalProviderResourceId")
def external_provider_resource_id(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "external_provider_resource_id")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="subnetOverrides")
def subnet_overrides(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetOverrideResponse']]]:
return pulumi.get(self, "subnet_overrides")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
f72dfbd00b36b323b598b7f2898878590d2fac42 | 5,199 | py | Python | bin/02-preprocess-data.py | csp-inc/fluvius | 8eb8c3caee2b98720ae17bef384302d6fa88c828 | [
"Apache-2.0"
] | null | null | null | bin/02-preprocess-data.py | csp-inc/fluvius | 8eb8c3caee2b98720ae17bef384302d6fa88c828 | [
"Apache-2.0"
] | 3 | 2022-02-24T15:07:42.000Z | 2022-02-25T23:59:31.000Z | bin/02-preprocess-data.py | csp-inc/fluvius | 8eb8c3caee2b98720ae17bef384302d6fa88c828 | [
"Apache-2.0"
] | 2 | 2022-03-17T19:59:33.000Z | 2022-03-17T20:00:41.000Z | import os
import pandas as pd
import fsspec
import argparse
from src.defaults import args_info
# --- Azure storage setup (runs at import time) ---
# Read "KEY = value" lines from the mounted credentials file into the process
# environment. The file ends with a newline, so split('\n') leaves a trailing
# empty string which the [:-1] slice drops. Use a context manager so the file
# handle is closed deterministically (the original left it open).
with open("/content/credentials", "r") as cred_file:
    env_vars = cred_file.read().split('\n')
for var in env_vars[:-1]:
    key, value = var.split(' = ')
    os.environ[key] = value
# Credentials handed to pandas/fsspec for every "az://" URL below.
storage_options = {'account_name': os.environ['ACCOUNT_NAME'],
                   'account_key': os.environ['BLOB_KEY']}
fs = fsspec.filesystem('az', account_name=storage_options['account_name'],
                       account_key=storage_options['account_key'])
def return_parser():
    """Build the command-line parser for the preprocessing script.

    Flag types/actions and help strings come from the shared ``args_info``
    table in ``src.defaults`` so every script stays consistent.
    """
    parser = argparse.ArgumentParser()
    # Data-driven registration: one (flag, kwargs) pair per CLI option.
    for flag, kwargs in (
            ('--data-src', {'type': args_info["data_src"]["type"],
                            'help': args_info["data_src"]["help"]}),
            ('--write-to-csv', {'action': args_info["write_to_csv"]["action"],
                                'help': args_info["write_to_csv"]["help"]}),
    ):
        parser.add_argument(flag, **kwargs)
    return parser
if __name__ == "__main__":
    args = return_parser().parse_args()

    if args.data_src == 'usgs':
        # USGS: rebuild the station metadata table, keeping only stations for
        # which a per-station time-series CSV exists in blob storage.
        container = 'usgs-data'
        station_url = f'az://{container}/{args.data_src}_station_metadata_raw.csv'
        station_df = pd.read_csv(station_url, storage_options=storage_options)
        # USGS site numbers are zero-padded to 8 characters.
        sites_str = [str(f).zfill(8) for f in station_df.site_no]
        station_df['sites_str'] = sites_str
        query = []
        for f in fs.ls(f'{container}/stations'):
            station = os.path.basename(f).split('_')[0]
            query.append(station)
        q = pd.DataFrame({'sites_str': query})
        # Inner merge drops metadata rows without a matching data file.
        out = station_df.merge(q, on='sites_str')
        out['site_no'] = out['sites_str']
        out = out[['site_no', 'site_name', 'Latitude', 'Longitude', 'geometry']]
        if args.write_to_csv:
            out.to_csv(f'az://{container}/usgs_station_metadata.csv', index=False, storage_options=storage_options)

    if args.data_src == 'ana':
        # ANA: merge each station's two raw CSVs (concentration/discharge and
        # turbidity), translating Portuguese column names to English.
        container = 'ana-data'
        station_url = f'az://{container}/ana_station_metadata.csv'
        station_df = pd.read_csv(station_url, storage_options=storage_options)
        # Loop-invariant lookups hoisted out of the per-station loop.
        translation = pd.read_csv(f'az://{container}/ana_translations.csv', storage_options=storage_options)
        trans = {p: e for p, e in zip(translation.Portuguese, translation.English)}
        translation2 = {'Data': 'Date', 'Hora': 'Hour', 'Turbidez': 'Turbidity'}
        for site_no in station_df.site_no:
            station_url = f'az://{container}/{site_no}.csv'
            station_url2 = f'az://{container}/{site_no}_2.csv'
            site_df1_raw = pd.read_csv(station_url, delimiter=',', skiprows=10, storage_options=storage_options)
            site_df1 = site_df1_raw.rename(columns=trans)
            site_df1 = site_df1.dropna(subset=['Date'])
            # '01/01/1900 01:00' is the sentinel for a missing local time.
            site_df1['TimeL'] = site_df1['TimeL'].fillna('01/01/1900 01:00')
            site_df1['Date-Time'] = [d for d in site_df1['Date']]
            site_df1['Date-Time'] = pd.to_datetime(site_df1['Date-Time'],\
                                                   format='%d/%m/%Y')
            site_df2_raw = pd.read_csv(station_url2, delimiter=',', skiprows=14, storage_options=storage_options)
            site_df2_raw = site_df2_raw.replace('01/01/1900', '01/01/1900 01:00')
            site_df2 = site_df2_raw.rename(columns=translation2)
            site_df2 = site_df2.dropna(subset=['Date'])
            site_df2['Date-Time-HM'] = [f"{d} {t.split(' ')[1]}" for d, t in zip(site_df2['Date'], site_df2['Hour'])]
            site_df2['Date-Time'] = [d for d in site_df2['Date']]
            site_df2['Date-Time'] = pd.to_datetime(site_df2['Date-Time'],\
                                                   format='%d/%m/%Y')
            site_df2 = site_df2[['Date', 'Hour', 'Date-Time', 'Turbidity']]
            selection = ['Date-Time', 'Discharge', 'Suspended Sediment Concentration (mg/L)', 'Turbidity']
            # Outer merge keeps dates present in either file; prefer the
            # turbidity file's Date-Time, falling back to the first file's.
            site_df = site_df1.merge(site_df2, on='Date', how='outer', suffixes=('_', ''))
            site_df['Date-Time'] = site_df['Date-Time'].fillna(site_df['Date-Time_'])
            site_df = site_df[selection]
            write_filename = f'az://{container}/stations/{str(site_no)}.csv'
            print(f'writing to {write_filename}')
            if args.write_to_csv:
                site_df.to_csv(write_filename, index=False, storage_options=storage_options)

    if args.data_src == 'itv':
        # ITV: per-station CSVs only need a parsed Date-Time column added.
        container = 'itv-data'
        station_url = f'az://{container}/itv_station_metadata.csv'
        station_df = pd.read_csv(station_url, storage_options=storage_options)
        for site_no in station_df.site_no:
            station_url = f'az://{container}/{site_no}.csv'
            site_df = pd.read_csv(station_url,\
                                  storage_options=storage_options,\
                                  delimiter=',')
            site_df['Date-Time'] = pd.to_datetime(site_df['Campaign Date'], \
                                                  format='%d/%m/%Y')
            if args.write_to_csv:
                write_filename = f'az://{container}/stations/{site_no}.csv'
                site_df.to_csv(write_filename, storage_options=storage_options,\
                               index=False)
| 46.837838 | 118 | 0.607425 | import os
import pandas as pd
import fsspec
import argparse
from src.defaults import args_info
env_vars = open("/content/credentials","r").read().split('\n')
for var in env_vars[:-1]:
key, value = var.split(' = ')
os.environ[key] = value
storage_options={'account_name':os.environ['ACCOUNT_NAME'],\
'account_key':os.environ['BLOB_KEY']}
fs = fsspec.filesystem('az', account_name=storage_options['account_name'], account_key=storage_options['account_key'])
r():
parser = argparse.ArgumentParser()
parser.add_argument('--data-src',
type=args_info["data_src"]["type"],
help=args_info["data_src"]["help"])
parser.add_argument('--write-to-csv',
action=args_info["write_to_csv"]["action"],
help=args_info["write_to_csv"]["help"])
return parser
if __name__ == "__main__":
args = return_parser().parse_args()
if args.data_src == 'usgs':
data_src = 'usgs'
container = 'usgs-data'
station_url = f'az://{container}/{args.data_src}_station_metadata_raw.csv'
station_df = pd.read_csv(station_url, storage_options=storage_options)
sites_str = [str(f).zfill(8) for f in station_df.site_no]
station_df['sites_str'] = sites_str
query = []
for f in fs.ls(f'{container}/stations'):
station = os.path.basename(f).split('_')[0]
query.append(station)
q = pd.DataFrame({'sites_str':query})
out = station_df.merge(q, on='sites_str')
out['site_no'] = out['sites_str']
out = out[['site_no','site_name', 'Latitude', 'Longitude','geometry']]
if args.write_to_csv:
out.to_csv(f'az://{container}/usgs_station_metadata.csv',index=False, storage_options=storage_options)
if args.data_src == 'ana':
container = 'ana-data'
station_url = f'az://{container}/ana_station_metadata.csv'
station_df = pd.read_csv(station_url, storage_options=storage_options)
for site_no in station_df.site_no:
station_url = f'az://{container}/{site_no}.csv'
station_url2 = f'az://{container}/{site_no}_2.csv'
site_df1_raw = pd.read_csv(station_url, delimiter=',', skiprows=10, storage_options=storage_options)
translation = pd.read_csv(f'az://{container}/ana_translations.csv', storage_options=storage_options)
trans = {p:e for p,e in zip(translation.Portuguese, translation.English)}
site_df1 = site_df1_raw.rename(columns=trans)
site_df1 = site_df1.dropna(subset=['Date'])
site_df1['TimeL'] = site_df1['TimeL'].fillna('01/01/1900 01:00')
site_df1['Date-Time'] = [d for d in site_df1['Date']]
site_df1['Date-Time'] = pd.to_datetime(site_df1['Date-Time'],\
format='%d/%m/%Y')
site_df2_raw = pd.read_csv(station_url2, delimiter=',', skiprows=14, storage_options=storage_options)
site_df2_raw = site_df2_raw.replace('01/01/1900', '01/01/1900 01:00')
translation2 = {'Data':'Date','Hora':'Hour','Turbidez':'Turbidity'}
site_df2 = site_df2_raw.rename(columns=translation2)
site_df2 = site_df2.dropna(subset=['Date'])
site_df2['Date-Time-HM'] = [f"{d} {t.split(' ')[1]}" for d,t in zip(site_df2['Date'],site_df2['Hour'])]
site_df2['Date-Time'] = [d for d in site_df2['Date']]
site_df2['Date-Time'] = pd.to_datetime(site_df2['Date-Time'],\
format='%d/%m/%Y')
site_df2 = site_df2[['Date', 'Hour', 'Date-Time','Turbidity']]
selection = ['Date-Time', 'Discharge', 'Suspended Sediment Concentration (mg/L)', 'Turbidity']
site_df = site_df1.merge(site_df2, on='Date', how='outer', suffixes=('_',''))
site_df['Date-Time'] = site_df['Date-Time'].fillna(site_df['Date-Time_'])
site_df = site_df[selection]
s = str(site_no).zfill(8)
write_filename = f'az://{container}/stations/{str(site_no)}.csv'
print(f'writing to {write_filename}')
if args.write_to_csv:
site_df.to_csv(write_filename, index=False, storage_options=storage_options)
if args.data_src == 'itv':
container = 'itv-data'
station_url = f'az://{container}/itv_station_metadata.csv'
station_df = pd.read_csv(station_url, storage_options=storage_options)
for site_no in station_df.site_no:
station_url = f'az://{container}/{site_no}.csv'
site_df = pd.read_csv(station_url,\
storage_options=storage_options,\
delimiter=',')
site_df['Date-Time'] = pd.to_datetime(site_df['Campaign Date'], \
format='%d/%m/%Y')
if args.write_to_csv:
write_filename = f'az://{container}/stations/{site_no}.csv'
site_df.to_csv(write_filename, storage_options=storage_options,\
index=False)
| true | true |
f72dfc067dd21a90b958beff455fb03217bc081d | 502 | py | Python | src/securityinsight/azext_sentinel/manual/_help.py | ravithanneeru/azure-cli-extensions | e0de87f3563ae39525370e9912589aac33e7bded | [
"MIT"
] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/securityinsight/azext_sentinel/manual/_help.py | ravithanneeru/azure-cli-extensions | e0de87f3563ae39525370e9912589aac33e7bded | [
"MIT"
] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/securityinsight/azext_sentinel/manual/_help.py | ravithanneeru/azure-cli-extensions | e0de87f3563ae39525370e9912589aac33e7bded | [
"MIT"
] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=too-many-lines
from knack.help_files import helps
helps['sentinel'] = """
type: group
short-summary: Manage Security Insight
"""
| 38.615385 | 94 | 0.464143 |
from knack.help_files import helps
helps['sentinel'] = """
type: group
short-summary: Manage Security Insight
"""
| true | true |
f72dfcee28fa8947697777ba9bbb16c046528be1 | 5,870 | py | Python | BTrees/_module_builder.py | azmeuk/BTrees | 74f01d5de2f32f85c806b13b59cfbf7aa3bf5aa9 | [
"ZPL-2.1"
] | null | null | null | BTrees/_module_builder.py | azmeuk/BTrees | 74f01d5de2f32f85c806b13b59cfbf7aa3bf5aa9 | [
"ZPL-2.1"
] | null | null | null | BTrees/_module_builder.py | azmeuk/BTrees | 74f01d5de2f32f85c806b13b59cfbf7aa3bf5aa9 | [
"ZPL-2.1"
] | null | null | null | ##############################################################################
#
# Copyright Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
Support functions to eliminate the boilerplate involved in defining
BTree modules.
"""
import sys
from zope.interface import directlyProvides
def _create_classes(
module_name, key_datatype, value_datatype,
):
from ._base import Bucket
from ._base import MERGE # Won't always want this.
from ._base import Set
from ._base import Tree
from ._base import TreeSet
from ._base import _TreeIterator
from ._base import _fix_pickle
classes = {}
prefix = key_datatype.prefix_code + value_datatype.prefix_code
for base in (
Bucket,
Set,
(Tree, 'BTree'),
TreeSet,
(_TreeIterator, 'TreeIterator'),
):
if isinstance(base, tuple):
base, base_name = base
else:
base_name = base.__name__
# XXX: Consider defining these with their natural names
# now and only aliasing them to 'Py' instead of the
# opposite. That should make pickling easier.
name = prefix + base_name + 'Py'
cls = type(name, (base,), dict(
_to_key=key_datatype,
_to_value=value_datatype,
MERGE=MERGE,
MERGE_WEIGHT=value_datatype.apply_weight,
MERGE_DEFAULT=value_datatype.multiplication_identity,
max_leaf_size=key_datatype.bucket_size_for_value(value_datatype),
max_internal_size=key_datatype.tree_size,
))
cls.__module__ = module_name
classes[cls.__name__] = cls
# Importing the C extension does this for the non-py
# classes.
# TODO: Unify that.
classes[base_name + 'Py'] = cls
for cls in classes.values():
cls._mapping_type = classes['BucketPy']
cls._set_type = classes['SetPy']
if 'Set' in cls.__name__:
cls._bucket_type = classes['SetPy']
else:
cls._bucket_type = classes['BucketPy']
return classes
def _create_set_operations(module_name, key_type, value_type, set_type):
from ._base import set_operation
from ._base import difference
from ._base import intersection
from ._base import multiunion
from ._base import union
from ._base import weightedIntersection
from ._base import weightedUnion
ops = {
op.__name__ + 'Py': set_operation(op, set_type)
for op in (
difference, intersection,
union,
) + (
(weightedIntersection, weightedUnion,)
if value_type.supports_value_union()
else ()
) + (
(multiunion,)
if key_type.supports_value_union()
else ()
)
}
for key, op in ops.items():
op.__module__ = module_name
op.__name__ = key
# TODO: Pickling. These things should be looked up by name.
return ops
def _create_globals(module_name, key_datatype, value_datatype):
classes = _create_classes(module_name, key_datatype, value_datatype)
set_type = classes['SetPy']
set_ops = _create_set_operations(module_name, key_datatype, value_datatype, set_type)
classes.update(set_ops)
return classes
def populate_module(mod_globals,
key_datatype, value_datatype,
interface, module=None):
from ._compat import import_c_extension
from ._base import _fix_pickle
module_name = mod_globals['__name__']
# Define the Python implementations
mod_globals.update(_create_globals(module_name, key_datatype, value_datatype))
# Import the C versions, if possible. Whether or not this is possible,
# this currently makes the non-`Py' suffixed names available. This should change
# if we start defining the Python classes with their natural name, only aliased
# to the 'Py` suffix (which simplifies pickling)
import_c_extension(mod_globals)
# Next, define __all__ after all the name aliasing is done.
# XXX: Maybe derive this from the values we create.
mod_all = (
'Bucket', 'Set', 'BTree', 'TreeSet',
'union', 'intersection', 'difference',
'weightedUnion', 'weightedIntersection', 'multiunion',
)
prefix = key_datatype.prefix_code + value_datatype.prefix_code
mod_all += tuple(prefix + c for c in ('Bucket', 'Set', 'BTree', 'TreeSet'))
mod_globals['__all__'] = tuple(c for c in mod_all if c in mod_globals)
mod_globals['using64bits'] = key_datatype.using64bits or value_datatype.using64bits
# XXX: We can probably do better than fix_pickle now;
# we can know if we're going to be renaming classes
# ahead of time. See above.
_fix_pickle(mod_globals, module_name)
directlyProvides(module or sys.modules[module_name], interface)
def create_module(prefix):
import types
from . import _datatypes as datatypes
from . import Interfaces
mod = types.ModuleType('BTrees.' + prefix + 'BTree')
key_type = getattr(datatypes, prefix[0])()
val_type = getattr(datatypes, prefix[1])().as_value_type()
iface_name = 'I' + key_type.long_name + val_type.long_name + 'BTreeModule'
iface = getattr(Interfaces, iface_name)
populate_module(vars(mod), key_type, val_type, iface, mod)
return mod
| 33.542857 | 89 | 0.648893 | true | true | |
f72dfed2300295b2d390ece47ba74ea65e9119bc | 407 | py | Python | algebra/quadratic.py | fanying2015/algebra | 73ee1782a2e6fe9d012dc44ecf121bcd93865973 | [
"BSD-3-Clause"
] | null | null | null | algebra/quadratic.py | fanying2015/algebra | 73ee1782a2e6fe9d012dc44ecf121bcd93865973 | [
"BSD-3-Clause"
] | null | null | null | algebra/quadratic.py | fanying2015/algebra | 73ee1782a2e6fe9d012dc44ecf121bcd93865973 | [
"BSD-3-Clause"
] | null | null | null |
def poly(*args):
    """Evaluate a polynomial with no constant term at a point.

    f(x) = a * x + b * x**2 + c * x**3 + ...

    Args:
        *args: ``(x, a, b, c, ...)`` -- the evaluation point followed by the
            coefficients of ``x**1``, ``x**2``, ``x**3``, ...

    Returns:
        The value of the polynomial at ``x``.

    Raises:
        Exception: if fewer than two arguments are given (no coefficients).
    """
    # Guard both the "only x" and the empty call; previously an empty call
    # raised a bare IndexError instead of this explanatory message.
    if len(args) < 2:
        raise Exception("You have only entered a value for x, and no coefficients.")
    x = args[0]  # evaluation point
    coef = args[1:]  # coefficients for powers 1..len(coef)
    # Sum c * x**power with power starting at 1 (no constant term).
    return sum(c * (x ** power) for power, c in enumerate(coef, start=1))
def poly(*args):
if len(args) == 1:
raise Exception("You have only entered a value for x, and no cofficients.")
x = args[0]
coef = args[1:]
results = 0
for power, c in enumerate(coef):
results += c * (x ** (power + 1))
return results | true | true |
f72dff361c570be4f17198f4b86c39fc82cf4786 | 11,003 | py | Python | aws_saml_auth/__init__.py | ekreative/aws-google-auth | a2a3e0f588067cfe90efb01f28d35562e591ee8d | [
"MIT"
] | 3 | 2021-09-09T14:36:17.000Z | 2022-02-17T05:52:35.000Z | aws_saml_auth/__init__.py | ekreative/aws-google-auth | a2a3e0f588067cfe90efb01f28d35562e591ee8d | [
"MIT"
] | null | null | null | aws_saml_auth/__init__.py | ekreative/aws-google-auth | a2a3e0f588067cfe90efb01f28d35562e591ee8d | [
"MIT"
] | 2 | 2022-01-07T10:09:54.000Z | 2022-03-16T17:05:28.000Z | #!/usr/bin/env python
from __future__ import print_function
import argparse
import base64
import os
import sys
import logging
from six import print_ as print
from tzlocal import get_localzone
from aws_saml_auth import amazon
from aws_saml_auth import configuration
from aws_saml_auth import saml
from aws_saml_auth import util
# Read the packaged VERSION file once at import time so --version can report
# the release string (used by parse_args below).
with open(
    os.path.join(os.path.abspath(os.path.dirname(__file__)), "VERSION"),
    encoding="utf-8",
) as version_file:
    version = version_file.read().strip()
def parse_args(args):
    """Build the CLI parser and parse *args* (e.g. sys.argv[1:]).

    Returns the populated argparse.Namespace. Mutually exclusive groups:
    --redirect-server vs --login-url, --duration vs --auto-duration,
    --print-creds vs --credential-process, --no-ask-role vs --role-arn.
    Most options fall back to the environment variable named in their help
    text (resolution happens later in resolve_config, not here).
    """
    parser = argparse.ArgumentParser(
        prog="aws-saml-auth",
        description="Acquire temporary AWS credentials via SAML",
    )
    # Either run the local OAuth-style redirect listener, or do a login.
    main_group = parser.add_mutually_exclusive_group()
    main_group.add_argument(
        "--redirect-server",
        action="store_true",
        help="Run the redirect server on port ($PORT)",
    )
    main_group.add_argument(
        "-L", "--login-url", help="SAML Provider login url ($ASA_LOGIN_URL)"
    )
    parser.add_argument(
        "-R", "--region", help="AWS region endpoint ($AWS_DEFAULT_REGION)"
    )
    duration_group = parser.add_mutually_exclusive_group()
    duration_group.add_argument(
        "-d",
        "--duration",
        type=int,
        help="Credential duration in seconds (defaults to value of $ASA_DURATION, then falls back to 43200)",
    )
    duration_group.add_argument(
        "--auto-duration",
        action="store_true",
        help="Tries to use the longest allowed duration ($ASA_AUTO_DURATION=1)",
    )
    parser.add_argument(
        "-p",
        "--profile",
        help="AWS profile (defaults to value of $AWS_PROFILE, then falls back to 'default')",
    )
    parser.add_argument(
        "-A", "--account", help="Filter for specific AWS account ($ASA_AWS_ACCOUNT)"
    )
    parser.add_argument("-q", "--quiet", action="store_true", help="Quiet output")
    parser.add_argument(
        "--saml-assertion",
        dest="saml_assertion",
        help="Base64 encoded SAML assertion to use",
    )
    # store_false: flag PRESENT means "do not cache" (use_saml_cache=False).
    parser.add_argument(
        "--no-saml-cache",
        dest="use_saml_cache",
        action="store_false",
        help="Do not cache the SAML Assertion ($ASA_NO_SAML_CACHE=1)",
    )
    print_group = parser.add_mutually_exclusive_group()
    print_group.add_argument(
        "--print-creds", action="store_true", help="Print Credentials"
    )
    print_group.add_argument(
        "--credential-process",
        action="store_true",
        help="Output suitable for aws cli credential_process ($ASA_CREDENTIAL_PROCESS=1)",
    )
    parser.add_argument(
        "--no-resolve-aliases",
        dest="resolve_aliases",
        action="store_false",
        help="Do not resolve AWS account aliases. ($ASA_NO_RESOLVE_ALIASES=1)",
    )
    parser.add_argument("--port", type=int, help="Port for the redirect server ($PORT)")
    role_group = parser.add_mutually_exclusive_group()
    role_group.add_argument(
        "--no-ask-role",
        dest="ask_role",
        action="store_false",
        help="Never ask to pick the role ($ASA_NO_ASK_ROLE=1)",
    )
    role_group.add_argument(
        "-r", "--role-arn", help="The ARN of the role to assume ($ASA_ROLE_ARN)"
    )
    parser.add_argument(
        "-l",
        "--log",
        dest="log_level",
        choices=["debug", "info", "warn"],
        default="warn",
        help="Select log level (default: %(default)s)",
    )
    # `version` read from the packaged VERSION file at module import.
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version="%(prog)s {version}".format(version=version),
    )
    return parser.parse_args(args)
def exit_if_unsupported_python():
    """Abort the process when running on Python 2.6 or older.

    On any supported interpreter (Python 2.7+, or any Python 3) this is a
    no-op; otherwise it logs diagnostics and exits with status 1.
    """
    info = sys.version_info
    if info.major != 2 or info.minor >= 7:
        # Supported interpreter; nothing to do.
        return
    logging.critical(
        "%s requires Python 2.7 or higher. Please consider "
        "upgrading. Support for Python 2.6 and lower was "
        "dropped because this tool's dependencies dropped "
        "support.",
        __name__,
    )
    logging.critical(
        "For debugging, it appears you're running: %s", sys.version_info
    )
    logging.critical(
        "For more information, see: "
        "https://github.com/cevoaustralia/aws-google-auth/"
        "issues/41"
    )
    sys.exit(1)
def cli(cli_args):
    """Top-level command entry: parse args, resolve config, run the workflow.

    Expected provider errors (Amazon/SAML) print a message and exit 1;
    Ctrl-C exits quietly; anything else is logged with a traceback. This is
    the only place exceptions are allowed to escape to the user.
    """
    try:
        exit_if_unsupported_python()
        args = parse_args(args=cli_args)
        # Set up logging
        logging.getLogger().setLevel(getattr(logging, args.log_level.upper(), None))
        config = resolve_config(args)
        if args.redirect_server:
            # Import lazily so the normal auth path never pays for it.
            from aws_saml_auth.redirect_server import start_redirect_server
            start_redirect_server(config.port)
            return
        process_auth(args, config)
    except amazon.ExpectedAmazonException as ex:
        print(ex)
        sys.exit(1)
    except saml.ExpectedSamlException as ex:
        print(ex)
        sys.exit(1)
    except KeyboardInterrupt:
        pass
    except Exception as ex:
        # Boundary catch-all: log with traceback rather than crash silently.
        logging.exception(ex)
def resolve_config(args):
    """Build the effective Configuration from CLI args, env vars and profile.

    For every option the priority is: command-line argument first, then the
    matching environment variable, then the value already read from the AWS
    profile (or the built-in default).

    :param args: parsed argparse.Namespace produced by parse_args()
    :return: a fully populated configuration.Configuration
    """
    # Shortening Convenience functions
    coalesce = util.Util.coalesce

    # Create a blank configuration object (has the defaults pre-filled)
    config = configuration.Configuration()

    # Profile (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.profile = coalesce(args.profile, os.getenv("AWS_PROFILE"), config.profile)

    # Now that we've established the profile, we can read the configuration
    # (~/.aws/config on disk) and fill in all the other variables.
    config.read(config.profile)

    # Ask Role (Option priority = ARGS, ENV_VAR, DEFAULT)
    # NOTE: env-var presence is tested with "is not None" (PEP 8 identity
    # comparison) instead of the previous "!= None"; a variable set to the
    # empty string still counts as present, as before.
    config.ask_role = coalesce(
        (False if os.getenv("ASA_NO_ASK_ROLE") is not None else None),
        args.ask_role,
        config.ask_role,
    )

    # Do not cache the SAML Assertion (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.use_saml_cache = coalesce(
        (False if os.getenv("ASA_NO_SAML_CACHE") is not None else None),
        args.use_saml_cache,
        config.use_saml_cache,
    )

    # Duration (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.duration = int(
        coalesce(args.duration, os.getenv("ASA_DURATION"), config.duration)
    )

    # Automatic duration (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.auto_duration = args.auto_duration or os.getenv("ASA_AUTO_DURATION") is not None

    # Login URL (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.login_url = coalesce(
        args.login_url, os.getenv("ASA_LOGIN_URL"), config.login_url
    )

    # Region (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.region = coalesce(
        args.region, os.getenv("AWS_DEFAULT_REGION"), config.region
    )

    # ROLE ARN (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.role_arn = coalesce(
        args.role_arn, os.getenv("ASA_ROLE_ARN"), config.role_arn
    )

    # Resolve AWS aliases enabled (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.resolve_aliases = coalesce(
        (False if os.getenv("ASA_NO_RESOLVE_ALIASES") is not None else None),
        args.resolve_aliases,
        config.resolve_aliases,
    )

    # Account (Option priority = ARGS, ENV_VAR, DEFAULT)
    config.account = coalesce(
        args.account, os.getenv("ASA_AWS_ACCOUNT"), config.account
    )

    config.print_creds = coalesce(args.print_creds, config.print_creds)

    # Quiet
    config.quiet = coalesce(args.quiet, config.quiet)

    config.port = int(coalesce(args.port, os.getenv("PORT"), config.port))

    config.credential_process = (
        args.credential_process or os.getenv("ASA_CREDENTIAL_PROCESS") is not None
    )
    if config.credential_process:
        # credential_process mode must emit pure JSON on stdout, so suppress
        # all interactive output/role prompting and rely on the token cache.
        config.quiet = True
        config.ask_role = False
        config.read_token_cache()

    if config.use_saml_cache:
        config.read_saml_cache()

    return config
def process_auth(args, config):
    """Run the SAML login workflow and emit/store the AWS credentials.

    Obtains a SAML assertion (CLI arg, token cache, SAML cache, or a fresh
    browser login -- in that order), exchanges it for a role via STS, then
    writes credentials per the configured output mode (credential_process,
    print, or AWS profile).
    """
    if config.region is None:
        config.region = util.Util.get_input("AWS Region: ")
        logging.debug("%s: region is: %s", __name__, config.region)
    if config.login_url is None:
        config.login_url = util.Util.get_input("Login URL: ")
        logging.debug("%s: login url is: %s", __name__, config.login_url)
    # If there is a valid cache and the user opted to use it, use that instead
    # of prompting the user for input (it will also ignore any set variables
    # such as username or sp_id and idp_id, as those are built into the SAML
    # response). The user does not need to be prompted for a password if the
    # SAML cache is used.
    if args.saml_assertion:
        saml_xml = base64.b64decode(args.saml_assertion)
    elif config.token_cache:
        # Cached STS token still valid: no assertion needed at all.
        saml_xml = None
    elif config.saml_cache:
        saml_xml = config.saml_cache
        logging.info("%s: SAML cache found", __name__)
    else:
        saml_client = saml.Saml(config)
        saml_xml = saml_client.do_browser_saml()
    # We now have a new SAML value that can get cached (If the user asked
    # for it to be)
    if config.use_saml_cache:
        config.saml_cache = saml_xml
    # The amazon_client now has the SAML assertion it needed (Either via the
    # cache or freshly generated). From here, we can get the roles and continue
    # the rest of the workflow regardless of cache.
    amazon_client = amazon.Amazon(config, saml_xml)
    if saml_xml is not None:
        roles = amazon_client.roles
        # Determine the provider and the role arn (if the the user provided isn't an option)
        if config.role_arn in roles and not config.ask_role:
            config.provider = roles[config.role_arn]
        else:
            # Narrow/label the role list per the account filter and alias
            # resolution settings before prompting the user to pick one.
            if config.account and config.resolve_aliases:
                aliases = amazon_client.resolve_aws_aliases(roles)
                config.role_arn, config.provider = util.Util.pick_a_role(
                    roles, aliases, config.account
                )
            elif config.account:
                config.role_arn, config.provider = util.Util.pick_a_role(
                    roles, account=config.account
                )
            elif config.resolve_aliases:
                aliases = amazon_client.resolve_aws_aliases(roles)
                config.role_arn, config.provider = util.Util.pick_a_role(roles, aliases)
            else:
                config.role_arn, config.provider = util.Util.pick_a_role(roles)
    if not config.quiet:
        print("Assuming " + config.role_arn)
        print(
            "Credentials Expiration: "
            + format(amazon_client.expiration.astimezone(get_localzone()))
        )
    # Output mode: credential_process JSON, shell export lines, or profile write.
    if config.credential_process:
        amazon_client.print_credential_process()
        config.write_token_cache(amazon_client)
    elif config.print_creds:
        amazon_client.print_export_line()
    elif config.profile:
        config.write(amazon_client)
    config.write_saml_cache()
def main():
    """Console-script entry point: hand the process arguments to cli()."""
    cli(sys.argv[1:])
| 32.844776 | 109 | 0.644551 |
from __future__ import print_function
import argparse
import base64
import os
import sys
import logging
from six import print_ as print
from tzlocal import get_localzone
from aws_saml_auth import amazon
from aws_saml_auth import configuration
from aws_saml_auth import saml
from aws_saml_auth import util
with open(
os.path.join(os.path.abspath(os.path.dirname(__file__)), "VERSION"),
encoding="utf-8",
) as version_file:
version = version_file.read().strip()
def parse_args(args):
parser = argparse.ArgumentParser(
prog="aws-saml-auth",
description="Acquire temporary AWS credentials via SAML",
)
main_group = parser.add_mutually_exclusive_group()
main_group.add_argument(
"--redirect-server",
action="store_true",
help="Run the redirect server on port ($PORT)",
)
main_group.add_argument(
"-L", "--login-url", help="SAML Provider login url ($ASA_LOGIN_URL)"
)
parser.add_argument(
"-R", "--region", help="AWS region endpoint ($AWS_DEFAULT_REGION)"
)
duration_group = parser.add_mutually_exclusive_group()
duration_group.add_argument(
"-d",
"--duration",
type=int,
help="Credential duration in seconds (defaults to value of $ASA_DURATION, then falls back to 43200)",
)
duration_group.add_argument(
"--auto-duration",
action="store_true",
help="Tries to use the longest allowed duration ($ASA_AUTO_DURATION=1)",
)
parser.add_argument(
"-p",
"--profile",
help="AWS profile (defaults to value of $AWS_PROFILE, then falls back to 'default')",
)
parser.add_argument(
"-A", "--account", help="Filter for specific AWS account ($ASA_AWS_ACCOUNT)"
)
parser.add_argument("-q", "--quiet", action="store_true", help="Quiet output")
parser.add_argument(
"--saml-assertion",
dest="saml_assertion",
help="Base64 encoded SAML assertion to use",
)
parser.add_argument(
"--no-saml-cache",
dest="use_saml_cache",
action="store_false",
help="Do not cache the SAML Assertion ($ASA_NO_SAML_CACHE=1)",
)
print_group = parser.add_mutually_exclusive_group()
print_group.add_argument(
"--print-creds", action="store_true", help="Print Credentials"
)
print_group.add_argument(
"--credential-process",
action="store_true",
help="Output suitable for aws cli credential_process ($ASA_CREDENTIAL_PROCESS=1)",
)
parser.add_argument(
"--no-resolve-aliases",
dest="resolve_aliases",
action="store_false",
help="Do not resolve AWS account aliases. ($ASA_NO_RESOLVE_ALIASES=1)",
)
parser.add_argument("--port", type=int, help="Port for the redirect server ($PORT)")
role_group = parser.add_mutually_exclusive_group()
role_group.add_argument(
"--no-ask-role",
dest="ask_role",
action="store_false",
help="Never ask to pick the role ($ASA_NO_ASK_ROLE=1)",
)
role_group.add_argument(
"-r", "--role-arn", help="The ARN of the role to assume ($ASA_ROLE_ARN)"
)
parser.add_argument(
"-l",
"--log",
dest="log_level",
choices=["debug", "info", "warn"],
default="warn",
help="Select log level (default: %(default)s)",
)
parser.add_argument(
"-V",
"--version",
action="version",
version="%(prog)s {version}".format(version=version),
)
return parser.parse_args(args)
def exit_if_unsupported_python():
if sys.version_info.major == 2 and sys.version_info.minor < 7:
logging.critical(
"%s requires Python 2.7 or higher. Please consider "
"upgrading. Support for Python 2.6 and lower was "
"dropped because this tool's dependencies dropped "
"support.",
__name__,
)
logging.critical(
"For debugging, it appears you're running: %s", sys.version_info
)
logging.critical(
"For more information, see: "
"https://github.com/cevoaustralia/aws-google-auth/"
"issues/41"
)
sys.exit(1)
def cli(cli_args):
try:
exit_if_unsupported_python()
args = parse_args(args=cli_args)
logging.getLogger().setLevel(getattr(logging, args.log_level.upper(), None))
config = resolve_config(args)
if args.redirect_server:
from aws_saml_auth.redirect_server import start_redirect_server
start_redirect_server(config.port)
return
process_auth(args, config)
except amazon.ExpectedAmazonException as ex:
print(ex)
sys.exit(1)
except saml.ExpectedSamlException as ex:
print(ex)
sys.exit(1)
except KeyboardInterrupt:
pass
except Exception as ex:
logging.exception(ex)
def resolve_config(args):
coalesce = util.Util.coalesce
config = configuration.Configuration()
config.profile = coalesce(args.profile, os.getenv("AWS_PROFILE"), config.profile)
# fill in all the other variables.
config.read(config.profile)
# Ask Role (Option priority = ARGS, ENV_VAR, DEFAULT)
config.ask_role = coalesce(
(False if os.getenv("ASA_NO_ASK_ROLE") != None else None),
args.ask_role,
config.ask_role,
)
# Do not cache the SAML Assertion (Option priority = ARGS, ENV_VAR, DEFAULT)
config.use_saml_cache = coalesce(
(False if os.getenv("ASA_NO_SAML_CACHE") != None else None),
args.use_saml_cache,
config.use_saml_cache,
)
# Duration (Option priority = ARGS, ENV_VAR, DEFAULT)
config.duration = int(
coalesce(args.duration, os.getenv("ASA_DURATION"), config.duration)
)
# Automatic duration (Option priority = ARGS, ENV_VAR, DEFAULT)
config.auto_duration = args.auto_duration or os.getenv("ASA_AUTO_DURATION") != None
# Login URL (Option priority = ARGS, ENV_VAR, DEFAULT)
config.login_url = coalesce(
args.login_url, os.getenv("ASA_LOGIN_URL"), config.login_url
)
# Region (Option priority = ARGS, ENV_VAR, DEFAULT)
config.region = coalesce(
args.region, os.getenv("AWS_DEFAULT_REGION"), config.region
)
# ROLE ARN (Option priority = ARGS, ENV_VAR, DEFAULT)
config.role_arn = coalesce(
args.role_arn, os.getenv("ASA_ROLE_ARN"), config.role_arn
)
# Resolve AWS aliases enabled (Option priority = ARGS, ENV_VAR, DEFAULT)
config.resolve_aliases = coalesce(
(False if os.getenv("ASA_NO_RESOLVE_ALIASES") != None else None),
args.resolve_aliases,
config.resolve_aliases,
)
# Account (Option priority = ARGS, ENV_VAR, DEFAULT)
config.account = coalesce(
args.account, os.getenv("ASA_AWS_ACCOUNT"), config.account
)
config.print_creds = coalesce(args.print_creds, config.print_creds)
# Quiet
config.quiet = coalesce(args.quiet, config.quiet)
config.port = int(coalesce(args.port, os.getenv("PORT"), config.port))
config.credential_process = (
args.credential_process or os.getenv("ASA_CREDENTIAL_PROCESS") != None
)
if config.credential_process:
config.quiet = True
config.ask_role = False
config.read_token_cache()
if config.use_saml_cache:
config.read_saml_cache()
return config
def process_auth(args, config):
if config.region is None:
config.region = util.Util.get_input("AWS Region: ")
logging.debug("%s: region is: %s", __name__, config.region)
if config.login_url is None:
config.login_url = util.Util.get_input("Login URL: ")
logging.debug("%s: login url is: %s", __name__, config.login_url)
# If there is a valid cache and the user opted to use it, use that instead
# of prompting the user for input (it will also ignroe any set variables
# such as username or sp_id and idp_id, as those are built into the SAML
# response). The user does not need to be prompted for a password if the
# SAML cache is used.
if args.saml_assertion:
saml_xml = base64.b64decode(args.saml_assertion)
elif config.token_cache:
saml_xml = None
elif config.saml_cache:
saml_xml = config.saml_cache
logging.info("%s: SAML cache found", __name__)
else:
saml_client = saml.Saml(config)
saml_xml = saml_client.do_browser_saml()
# We now have a new SAML value that can get cached (If the user asked
# for it to be)
if config.use_saml_cache:
config.saml_cache = saml_xml
# The amazon_client now has the SAML assertion it needed (Either via the
# cache or freshly generated). From here, we can get the roles and continue
# the rest of the workflow regardless of cache.
amazon_client = amazon.Amazon(config, saml_xml)
if saml_xml is not None:
roles = amazon_client.roles
# Determine the provider and the role arn (if the the user provided isn't an option)
if config.role_arn in roles and not config.ask_role:
config.provider = roles[config.role_arn]
else:
if config.account and config.resolve_aliases:
aliases = amazon_client.resolve_aws_aliases(roles)
config.role_arn, config.provider = util.Util.pick_a_role(
roles, aliases, config.account
)
elif config.account:
config.role_arn, config.provider = util.Util.pick_a_role(
roles, account=config.account
)
elif config.resolve_aliases:
aliases = amazon_client.resolve_aws_aliases(roles)
config.role_arn, config.provider = util.Util.pick_a_role(roles, aliases)
else:
config.role_arn, config.provider = util.Util.pick_a_role(roles)
if not config.quiet:
print("Assuming " + config.role_arn)
print(
"Credentials Expiration: "
+ format(amazon_client.expiration.astimezone(get_localzone()))
)
if config.credential_process:
amazon_client.print_credential_process()
config.write_token_cache(amazon_client)
elif config.print_creds:
amazon_client.print_export_line()
elif config.profile:
config.write(amazon_client)
config.write_saml_cache()
def main():
    """Console entry point: hand the process arguments to the CLI driver."""
    cli(sys.argv[1:])


if __name__ == "__main__":
    main()
| true | true |
f72dffe598e2a61b515ccd2a4c09fe00e02505aa | 9,887 | py | Python | contrib/spendfrom/spendfrom.py | everestcoin/dash | dd8fc6e1b912010cf980bf40ddd7686fa5c7b4b3 | [
"MIT"
] | null | null | null | contrib/spendfrom/spendfrom.py | everestcoin/dash | dd8fc6e1b912010cf980bf40ddd7686fa5c7b4b3 | [
"MIT"
] | null | null | null | contrib/spendfrom/spendfrom.py | everestcoin/dash | dd8fc6e1b912010cf980bf40ddd7686fa5c7b4b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Use the raw transactions API to spend eves received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a eved or Eve-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
# Threshold below which change is ignored and a transaction counts as fee-less.
BASE_FEE = Decimal("0.001")

def check_json_precision():
    """Fail fast if the json module cannot round-trip 8-decimal coin amounts."""
    probe = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(probe)))
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
    """Return the platform-specific default location of the eve data directory."""
    system = platform.system()
    if system == "Darwin":
        return os.path.expanduser("~/Library/Application Support/Eve/")
    if system == "Windows":
        return os.path.join(os.environ['APPDATA'], "Eve")
    # Everything else (Linux/BSD/...) uses a dot-directory in $HOME.
    return os.path.expanduser("~/.eve")
def read_bitcoin_config(dbdir):
    """Read the eve.conf file from dbdir, returns dictionary of settings"""
    # Python 2 module name; under Python 3 this would be "configparser".
    from ConfigParser import SafeConfigParser
    class FakeSecHead(object):
        # File-like wrapper that injects a fake "[all]" section header so
        # ConfigParser can parse the section-less eve.conf format.
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            # First call yields the injected header exactly once: the
            # finally-clause clears it even though the return runs first.
            if self.sechead:
                try: return self.sechead
                finally: self.sechead = None
            else:
                s = self.fp.readline()
                # Strip trailing "#" comments from each config line.
                if s.find('#') != -1:
                    s = s[0:s.find('#')].strip() +"\n"
                return s
    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "eve.conf"))))
    return dict(config_parser.items("all"))
def connect_JSON(config):
    """Connect to a eve JSON-RPC server.

    Reads rpcuser/rpcpassword/rpcport (and testnet) from ``config``,
    returns a connected ServiceProxy, or exits the process on failure.
    """
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0)  # 0/1 in config file, convert to True/False
    if not 'rpcport' in config:
        config['rpcport'] = 19886 if testnet else 9886
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the eved we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except Exception:
        # Was a bare "except:", which also caught the SystemExit raised just
        # above on a testnet mismatch and re-reported it as a connect error.
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)
def unlock_wallet(eved):
    """Prompt for the wallet passphrase if the wallet is locked.

    Returns True if the wallet is unencrypted or currently unlocked,
    False if it is still locked (the caller retries the prompt).
    """
    info = eved.getinfo()
    if 'unlocked_until' not in info:
        return True # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            eved.walletpassphrase(passphrase, 5)
        except Exception:
            # Narrowed from a bare "except:", which also swallowed
            # KeyboardInterrupt and made Ctrl-C unable to abort the prompt.
            sys.stderr.write("Wrong passphrase\n")

    # Re-check: walletpassphrase above unlocks for only 5 seconds.
    info = eved.getinfo()
    return int(info['unlocked_until']) > time.time()
def list_available(eved):
    """Map address -> {"total", "outputs", "account"} over all unspent outputs.

    Resolves each unspent output's address by fetching the raw transaction
    it belongs to, since listunspent itself does not report addresses.
    """
    address_summary = dict()
    address_to_account = dict()
    for info in eved.listreceivedbyaddress(0):
        address_to_account[info["address"]] = info["account"]
    unspent = eved.listunspent(0)
    for output in unspent:
        # listunspent doesn't give addresses, so:
        rawtx = eved.getrawtransaction(output['txid'], 1)
        vout = rawtx["vout"][output['vout']]
        pk = vout["scriptPubKey"]

        # This code only deals with ordinary pay-to-eve-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
            continue

        address = pk["addresses"][0]
        if address in address_summary:
            address_summary[address]["total"] += vout["value"]
            address_summary[address]["outputs"].append(output)
        else:
            address_summary[address] = {
                "total" : vout["value"],
                "outputs" : [output],
                "account" : address_to_account.get(address, "")
                }
    return address_summary
def select_coins(needed, inputs):
    """Greedily take inputs, in order, until their total covers ``needed``.

    Returns (selected, change): ``selected`` is a list of {"txid", "vout"}
    dicts and ``change`` is total-taken minus ``needed`` (negative when the
    supplied inputs are insufficient).
    """
    selected = []
    gathered = Decimal("0.0")
    for entry in inputs:
        if gathered >= needed:
            break
        selected.append({"txid": entry["txid"], "vout": entry["vout"]})
        gathered += entry["amount"]
    return (selected, gathered - needed)
def create_tx(eved, fromaddresses, toaddress, amount, fee):
    """Build and sign a raw transaction spending from ``fromaddresses``.

    Sends ``amount`` to ``toaddress``; non-trivial change is returned to the
    last of ``fromaddresses``. Returns the signed transaction hex string,
    or exits the process if funds are insufficient or signing fails.
    """
    all_coins = list_available(eved)

    total_available = Decimal("0.0")
    needed = amount+fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]

    if total_available < needed:
        sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
        sys.exit(1)

    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to eved.
    #
    outputs = { toaddress : float(amount) }
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE: # don't bother with zero or tiny change
        # Change goes back to the last of the from-addresses.
        change_address = fromaddresses[-1]
        if change_address in outputs:
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)

    rawtx = eved.createrawtransaction(inputs, outputs)
    signed_rawtx = eved.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]

    return txdata
def compute_amount_in(eved, txinfo):
    """Sum the values of all inputs of a decoded transaction.

    Each input is resolved by fetching the transaction it spends from
    ``eved`` and reading the referenced output's value.
    """
    total = Decimal("0.0")
    for txin in txinfo['vin']:
        funding = eved.getrawtransaction(txin['txid'], 1)
        total += funding['vout'][txin['vout']]['value']
    return total
def compute_amount_out(txinfo):
    """Sum the values of all outputs of a decoded transaction."""
    return sum((txout['value'] for txout in txinfo['vout']), Decimal("0.0"))
def sanity_test_fee(eved, txdata_hex, max_fee):
    """Abort (sys.exit) unless the transaction's implied fee looks sane.

    Decodes ``txdata_hex`` via ``eved`` and checks that the fee
    (inputs minus outputs) does not exceed ``max_fee``, and that a
    large or tiny-amount transaction is not sent without a fee.
    """
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = eved.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(eved, txinfo)
        total_out = compute_amount_out(txinfo)
        # BUGFIX: the actual fee paid is what the inputs leave behind;
        # the original referenced an undefined name `fee` (a local of
        # main()), so the two checks below raised NameError at runtime.
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))

        tx_size = len(txdata_hex)//2  # two hex digits per byte
        kb = tx_size//1000  # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction

    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)
def main():
    """Command-line entry point.

    With no --amount, lists spendable funds per address; otherwise builds,
    sanity-checks and (unless --dry_run) broadcasts a send transaction.
    """
    import optparse

    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get eves from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send eves to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of eve.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")

    (options, args) = parser.parse_args()

    check_json_precision()
    config = read_bitcoin_config(options.datadir)
    if options.testnet: config['testnet'] = True
    eved = connect_JSON(config)

    # No amount given: just report spendable funds per address and exit.
    if options.amount is None:
        address_summary = list_available(eved)
        # NOTE: Python 2 idiom (dict.iteritems); under Python 3 this would
        # be .items().
        for address,info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s"%(address, info['total'], info['account']))
    else:
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(eved) == False:
            pass # Keep asking for passphrase until they get it right
        txdata = create_tx(eved, options.fromaddresses.split(","), options.to, amount, fee)

        # Refuse fees above 1% of the amount sent.
        sanity_test_fee(eved, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = eved.sendrawtransaction(txdata)
            print(txid)

if __name__ == '__main__':
    main()
| 36.891791 | 111 | 0.627086 |
mport *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Eve/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Eve")
return os.path.expanduser("~/.eve")
def read_bitcoin_config(dbdir):
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "eve.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0)
if not 'rpcport' in config:
config['rpcport'] = 19886 if testnet else 9886
connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(eved):
info = eved.getinfo()
if 'unlocked_until' not in info:
return True
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
eved.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = eved.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(eved):
address_summary = dict()
address_to_account = dict()
for info in eved.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = eved.listunspent(0)
for output in unspent:
rawtx = eved.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-eve-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
def create_tx(eved, fromaddresses, toaddress, amount, fee):
all_coins = list_available(eved)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = eved.createrawtransaction(inputs, outputs)
signed_rawtx = eved.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(eved, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = eved.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(eved, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = eved.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(eved, txinfo)
total_out = compute_amount_out(txinfo)
if total_in-total_out > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(total_in-total_out))
tx_size = len(txdata_hex)/2
kb = tx_size/1000
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get eves from")
parser.add_option("--to", dest="to", default=None,
help="address to get send eves to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of eve.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
eved = connect_JSON(config)
if options.amount is None:
address_summary = list_available(eved)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(eved) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(eved, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(eved, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = eved.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| true | true |
f72e000d03d30ffdeca71b61cc02bd46d5c3bfb7 | 1,746 | py | Python | test/test_data_source_rule.py | iserko/lookml-tools | caa46e489e789f59987965be98038cab10c0fdf0 | [
"Apache-2.0"
] | null | null | null | test/test_data_source_rule.py | iserko/lookml-tools | caa46e489e789f59987965be98038cab10c0fdf0 | [
"Apache-2.0"
] | null | null | null | test/test_data_source_rule.py | iserko/lookml-tools | caa46e489e789f59987965be98038cab10c0fdf0 | [
"Apache-2.0"
] | null | null | null | import pytest
import json
import os
from lkmltools.linter.rules.filerules.data_source_rule import DataSourceRule
from conftest import get_lookml_from_raw_lookml
def test_run1():
    """A view declaring sql_table_name is relevant to the rule and passes."""
    lkml_text = """
      view: aview {
        sql_table_name: bqdw.engagement_score ;;
      }
    """
    parsed = get_lookml_from_raw_lookml(lkml_text, "aview.view")
    rule = DataSourceRule()
    relevant, passed = rule.run(parsed)
    assert relevant
    assert passed
    tmp_path = parsed.infilepath
    if os.path.exists(tmp_path):
        os.remove(tmp_path)
def test_run2():
    """A view with only dimensions (no data source) is relevant but fails."""
    lkml_text = """
      view: aview {
        dimension: memberID {
          type: string
        }
      }
    """
    parsed = get_lookml_from_raw_lookml(lkml_text, "aview.view")
    rule = DataSourceRule()
    relevant, passed = rule.run(parsed)
    assert relevant
    assert not passed
    tmp_path = parsed.infilepath
    if os.path.exists(tmp_path):
        os.remove(tmp_path)
def test_run3():
    """A derived_table also counts as a data source, so the rule passes."""
    lkml_text = """
      view: aview {
        derived_table: {
          sql: SELECT * from table ;;
        }
        dimension: memberID {
          type: string
        }
      }
    """
    parsed = get_lookml_from_raw_lookml(lkml_text, "aview.view")
    rule = DataSourceRule()
    relevant, passed = rule.run(parsed)
    assert relevant
    assert passed
    tmp_path = parsed.infilepath
    if os.path.exists(tmp_path):
        os.remove(tmp_path)
def test_run4():
    """Model files are out of scope: the rule is not relevant and not passed."""
    lkml_text = """
      connection: "datawarehouse"
      include: "*.view.lkml"
      explore: an_explore {
      }
    """
    parsed = get_lookml_from_raw_lookml(lkml_text, "amodel.model")
    rule = DataSourceRule()
    relevant, passed = rule.run(parsed)
    assert not relevant
    assert not passed
    tmp_path = parsed.infilepath
    if os.path.exists(tmp_path):
        os.remove(tmp_path)
| 24.591549 | 76 | 0.638603 | import pytest
import json
import os
from lkmltools.linter.rules.filerules.data_source_rule import DataSourceRule
from conftest import get_lookml_from_raw_lookml
def test_run1():
raw_lookml = """
view: aview {
sql_table_name: bqdw.engagement_score ;;
}
"""
lookml = get_lookml_from_raw_lookml(raw_lookml, "aview.view")
relevant, passed = DataSourceRule().run(lookml)
assert relevant
assert passed
if os.path.exists(lookml.infilepath):
os.remove(lookml.infilepath)
def test_run2():
raw_lookml = """
view: aview {
dimension: memberID {
type: string
}
}
"""
lookml = get_lookml_from_raw_lookml(raw_lookml, "aview.view")
relevant, passed = DataSourceRule().run(lookml)
assert relevant
assert not passed
if os.path.exists(lookml.infilepath):
os.remove(lookml.infilepath)
def test_run3():
raw_lookml = """
view: aview {
derived_table: {
sql: SELECT * from table ;;
}
dimension: memberID {
type: string
}
}
"""
lookml = get_lookml_from_raw_lookml(raw_lookml, "aview.view")
relevant, passed = DataSourceRule().run(lookml)
assert relevant
assert passed
if os.path.exists(lookml.infilepath):
os.remove(lookml.infilepath)
def test_run4():
raw_lookml = """
connection: "datawarehouse"
include: "*.view.lkml"
explore: an_explore {
}
"""
lookml = get_lookml_from_raw_lookml(raw_lookml, "amodel.model")
relevant, passed = DataSourceRule().run(lookml)
assert not relevant
assert not passed
if os.path.exists(lookml.infilepath):
os.remove(lookml.infilepath)
| true | true |
f72e0137ccbf46b8c0b2faeda97c181af3ac8660 | 464 | py | Python | setup.py | sbrudenell/sb6183_exporter | ec6028dfff3fbe79de750893ca122b7c62fbd6e0 | [
"MIT"
] | 6 | 2018-05-19T17:50:41.000Z | 2020-05-18T05:02:51.000Z | setup.py | sbrudenell/sb6183_exporter | ec6028dfff3fbe79de750893ca122b7c62fbd6e0 | [
"MIT"
] | null | null | null | setup.py | sbrudenell/sb6183_exporter | ec6028dfff3fbe79de750893ca122b7c62fbd6e0 | [
"MIT"
] | null | null | null | import setuptools
# Packaging metadata for the sb6183_exporter distribution: a Prometheus
# exporter that scrapes an SB6183 cable modem's status page (hence the
# requests + beautifulsoup4 + prometheus_client dependencies).
setuptools.setup(
    name="sb6183_exporter",
    version="0.0.1",
    author="Steven Brudenell",
    author_email="steven.brudenell@gmail.com",
    # Pick up every package directory automatically.
    packages=setuptools.find_packages(),
    install_requires=[
        "requests>=2.18.4",
        "beautifulsoup4>=4.6.0",
        "prometheus_client>=0.2.0",
    ],
    entry_points={
        # Installs an "sb6183_exporter" executable that calls exporter_main().
        "console_scripts": [
            "sb6183_exporter = sb6183_exporter:exporter_main",
        ],
    },
)
| 22.095238 | 62 | 0.607759 | import setuptools
setuptools.setup(
name="sb6183_exporter",
version="0.0.1",
author="Steven Brudenell",
author_email="steven.brudenell@gmail.com",
packages=setuptools.find_packages(),
install_requires=[
"requests>=2.18.4",
"beautifulsoup4>=4.6.0",
"prometheus_client>=0.2.0",
],
entry_points={
"console_scripts": [
"sb6183_exporter = sb6183_exporter:exporter_main",
],
},
)
| true | true |
f72e015e9fd8f980e66cc50a3e581e21706dbdfc | 62,499 | py | Python | google/cloud/translate_v3/services/translation_service/client.py | LaudateCorpus1/python-translate | 4b08cd56ce230b843ced78a3f81c2e6511ac2a4f | [
"Apache-2.0"
] | null | null | null | google/cloud/translate_v3/services/translation_service/client.py | LaudateCorpus1/python-translate | 4b08cd56ce230b843ced78a3f81c2e6511ac2a4f | [
"Apache-2.0"
] | null | null | null | google/cloud/translate_v3/services/translation_service/client.py | LaudateCorpus1/python-translate | 4b08cd56ce230b843ced78a3f81c2e6511ac2a4f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.translate_v3.services.translation_service import pagers
from google.cloud.translate_v3.types import translation_service
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import TranslationServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import TranslationServiceGrpcTransport
from .transports.grpc_asyncio import TranslationServiceGrpcAsyncIOTransport
class TranslationServiceClientMeta(type):
    """Metaclass giving TranslationService clients class-level transport lookup.

    A registry of available transport implementations lives on the class,
    together with a helper for choosing one, so client instances stay free
    of transport bookkeeping.
    """

    _transport_registry = OrderedDict(
        [
            ("grpc", TranslationServiceGrpcTransport),
            ("grpc_asyncio", TranslationServiceGrpcAsyncIOTransport),
        ]
    )  # type: Dict[str, Type[TranslationServiceTransport]]

    def get_transport_class(
        cls, label: str = None,
    ) -> Type[TranslationServiceTransport]:
        """Return the transport class registered under ``label``.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        if label:
            return cls._transport_registry[label]
        # No explicit choice: fall back to the first registered transport.
        return next(iter(cls._transport_registry.values()))
class TranslationServiceClient(metaclass=TranslationServiceClientMeta):
"""Provides natural language translation operations."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "translate.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
TranslationServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
TranslationServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> TranslationServiceTransport:
"""Returns the transport used by the client instance.
Returns:
TranslationServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def glossary_path(project: str, location: str, glossary: str,) -> str:
"""Returns a fully-qualified glossary string."""
return "projects/{project}/locations/{location}/glossaries/{glossary}".format(
project=project, location=location, glossary=glossary,
)
@staticmethod
def parse_glossary_path(path: str) -> Dict[str, str]:
"""Parses a glossary path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/glossaries/(?P<glossary>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[client_options_lib.ClientOptions] = None
    ):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        # Both switches accept only a fixed vocabulary; reject anything else
        # up front rather than silently mis-configuring TLS.
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, TranslationServiceTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the translation service client.
        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, TranslationServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Accept a plain dict of options for convenience; normalize to a
        # ClientOptions instance before reading attributes off it.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Resolve the effective endpoint and (optional) mTLS client cert
        # source from the options plus environment variables.
        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
            client_options
        )
        # ``api_key`` may not exist on older client_options versions, hence
        # getattr rather than attribute access.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError(
                "client_options.api_key and credentials are mutually exclusive"
            )
        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, TranslationServiceTransport):
            # transport is a TranslationServiceTransport instance.
            # A ready-made transport already carries its own credentials and
            # scopes, so supplying them here again is ambiguous -> error out.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            # Convert an API key into credentials when the installed
            # google-auth supports it (hasattr guard for older versions).
            if api_key_value and hasattr(
                google.auth._default, "get_api_key_credentials"
            ):
                credentials = google.auth._default.get_api_key_credentials(
                    api_key_value
                )
            # ``transport`` is a string (or None); look up the transport class
            # and build it from the resolved endpoint/credentials/options.
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
            )
def translate_text(
self,
request: Union[translation_service.TranslateTextRequest, dict] = None,
*,
parent: str = None,
target_language_code: str = None,
contents: Sequence[str] = None,
model: str = None,
mime_type: str = None,
source_language_code: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> translation_service.TranslateTextResponse:
r"""Translates input text and returns translated text.
Args:
request (Union[google.cloud.translate_v3.types.TranslateTextRequest, dict]):
The request object. The request message for synchronous
translation.
parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
Format: ``projects/{project-number-or-id}`` or
``projects/{project-number-or-id}/locations/{location-id}``.
For global calls, use
``projects/{project-number-or-id}/locations/global`` or
``projects/{project-number-or-id}``.
Non-global location is required for requests using
AutoML models or custom glossaries.
Models and glossaries must be within the same region
(have same location-id), otherwise an INVALID_ARGUMENT
(400) error is returned.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
target_language_code (str):
Required. The BCP-47 language code to
use for translation of the input text,
set to one of the language codes listed
in Language Support.
This corresponds to the ``target_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
contents (Sequence[str]):
Required. The content of the input in
string format. We recommend the total
content be less than 30k codepoints. The
max length of this field is 1024.
Use BatchTranslateText for larger text.
This corresponds to the ``contents`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
model (str):
Optional. The ``model`` type requested for this
translation.
The format depends on model type:
- AutoML Translation models:
``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
For global (non-regionalized) requests, use
``location-id`` ``global``. For example,
``projects/{project-number-or-id}/locations/global/models/general/nmt``.
If not provided, the default Google model (NMT) will be
used.
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
mime_type (str):
Optional. The format of the source
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
source_language_code (str):
Optional. The BCP-47 language code of
the input text if known, for example,
"en-US" or "sr-Latn". Supported language
codes are listed in Language Support. If
the source language isn't specified, the
API attempts to identify the source
language automatically and returns the
source language within the response.
This corresponds to the ``source_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.translate_v3.types.TranslateTextResponse:
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[
parent,
target_language_code,
contents,
model,
mime_type,
source_language_code,
]
)
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.TranslateTextRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.TranslateTextRequest):
request = translation_service.TranslateTextRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if target_language_code is not None:
request.target_language_code = target_language_code
if contents is not None:
request.contents = contents
if model is not None:
request.model = model
if mime_type is not None:
request.mime_type = mime_type
if source_language_code is not None:
request.source_language_code = source_language_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.translate_text]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def detect_language(
self,
request: Union[translation_service.DetectLanguageRequest, dict] = None,
*,
parent: str = None,
model: str = None,
mime_type: str = None,
content: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> translation_service.DetectLanguageResponse:
r"""Detects the language of text within a request.
Args:
request (Union[google.cloud.translate_v3.types.DetectLanguageRequest, dict]):
The request object. The request message for language
detection.
parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
Format:
``projects/{project-number-or-id}/locations/{location-id}``
or ``projects/{project-number-or-id}``.
For global calls, use
``projects/{project-number-or-id}/locations/global`` or
``projects/{project-number-or-id}``.
Only models within the same region (has same
location-id) can be used. Otherwise an INVALID_ARGUMENT
(400) error is returned.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
model (str):
Optional. The language detection model to be used.
Format:
``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}``
Only one language detection model is currently
supported:
``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``.
If not specified, the default model is used.
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
mime_type (str):
Optional. The format of the source
text, for example, "text/html",
"text/plain". If left blank, the MIME
type defaults to "text/html".
This corresponds to the ``mime_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
content (str):
The content of the input stored as a
string.
This corresponds to the ``content`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.translate_v3.types.DetectLanguageResponse:
The response message for language
detection.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, model, mime_type, content])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.DetectLanguageRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.DetectLanguageRequest):
request = translation_service.DetectLanguageRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if model is not None:
request.model = model
if mime_type is not None:
request.mime_type = mime_type
if content is not None:
request.content = content
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.detect_language]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def get_supported_languages(
self,
request: Union[translation_service.GetSupportedLanguagesRequest, dict] = None,
*,
parent: str = None,
model: str = None,
display_language_code: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> translation_service.SupportedLanguages:
r"""Returns a list of supported languages for
translation.
Args:
request (Union[google.cloud.translate_v3.types.GetSupportedLanguagesRequest, dict]):
The request object. The request message for discovering
supported languages.
parent (str):
Required. Project or location to make a call. Must refer
to a caller's project.
Format: ``projects/{project-number-or-id}`` or
``projects/{project-number-or-id}/locations/{location-id}``.
For global calls, use
``projects/{project-number-or-id}/locations/global`` or
``projects/{project-number-or-id}``.
Non-global location is required for AutoML models.
Only models within the same region (have same
location-id) can be used, otherwise an INVALID_ARGUMENT
(400) error is returned.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
model (str):
Optional. Get supported languages of this model.
The format depends on model type:
- AutoML Translation models:
``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}``
- General (built-in) models:
``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``,
Returns languages supported by the specified model. If
missing, we get supported languages of Google general
NMT model.
This corresponds to the ``model`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
display_language_code (str):
Optional. The language to use to
return localized, human readable names
of supported languages. If missing, then
display names are not returned in a
response.
This corresponds to the ``display_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.translate_v3.types.SupportedLanguages:
The response message for discovering
supported languages.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, model, display_language_code])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.GetSupportedLanguagesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.GetSupportedLanguagesRequest):
request = translation_service.GetSupportedLanguagesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if model is not None:
request.model = model
if display_language_code is not None:
request.display_language_code = display_language_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_supported_languages]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def translate_document(
self,
request: Union[translation_service.TranslateDocumentRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> translation_service.TranslateDocumentResponse:
r"""Translates documents in synchronous mode.
Args:
request (Union[google.cloud.translate_v3.types.TranslateDocumentRequest, dict]):
The request object. A document translation request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.translate_v3.types.TranslateDocumentResponse:
A translated document response
message.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.TranslateDocumentRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.TranslateDocumentRequest):
request = translation_service.TranslateDocumentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.translate_document]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def batch_translate_text(
self,
request: Union[translation_service.BatchTranslateTextRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Translates a large volume of text in asynchronous
batch mode. This function provides real-time output as
the inputs are being processed. If caller cancels a
request, the partial results (for an input file, it's
all or nothing) may still be available on the specified
output location.
This call returns immediately and you can
use google.longrunning.Operation.name to poll the status
of the call.
Args:
request (Union[google.cloud.translate_v3.types.BatchTranslateTextRequest, dict]):
The request object. The batch translation request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.translate_v3.types.BatchTranslateResponse` Stored in the
[google.longrunning.Operation.response][google.longrunning.Operation.response]
field returned by BatchTranslateText if at least one
sentence is translated successfully.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.BatchTranslateTextRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.BatchTranslateTextRequest):
request = translation_service.BatchTranslateTextRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.batch_translate_text]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
translation_service.BatchTranslateResponse,
metadata_type=translation_service.BatchTranslateMetadata,
)
# Done; return the response.
return response
def batch_translate_document(
self,
request: Union[translation_service.BatchTranslateDocumentRequest, dict] = None,
*,
parent: str = None,
source_language_code: str = None,
target_language_codes: Sequence[str] = None,
input_configs: Sequence[translation_service.BatchDocumentInputConfig] = None,
output_config: translation_service.BatchDocumentOutputConfig = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Translates a large volume of document in asynchronous
batch mode. This function provides real-time output as
the inputs are being processed. If caller cancels a
request, the partial results (for an input file, it's
all or nothing) may still be available on the specified
output location.
This call returns immediately and you can use
google.longrunning.Operation.name to poll the status of
the call.
Args:
request (Union[google.cloud.translate_v3.types.BatchTranslateDocumentRequest, dict]):
The request object. The BatchTranslateDocument request.
parent (str):
Required. Location to make a regional call.
Format:
``projects/{project-number-or-id}/locations/{location-id}``.
The ``global`` location is not supported for batch
translation.
Only AutoML Translation models or glossaries within the
same region (have the same location-id) can be used,
otherwise an INVALID_ARGUMENT (400) error is returned.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
source_language_code (str):
Required. The BCP-47 language code of
the input document if known, for
example, "en-US" or "sr-Latn". Supported
language codes are listed in Language
Support
(https://cloud.google.com/translate/docs/languages).
This corresponds to the ``source_language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
target_language_codes (Sequence[str]):
Required. The BCP-47 language code to
use for translation of the input
document. Specify up to 10 language
codes here.
This corresponds to the ``target_language_codes`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
input_configs (Sequence[google.cloud.translate_v3.types.BatchDocumentInputConfig]):
Required. Input configurations.
The total number of files matched should
be <= 100. The total content size to
translate should be <= 100M Unicode
codepoints. The files must use UTF-8
encoding.
This corresponds to the ``input_configs`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
output_config (google.cloud.translate_v3.types.BatchDocumentOutputConfig):
Required. Output configuration.
If 2 input configs match to the same
file (that is, same input path), we
don't generate output for duplicate
inputs.
This corresponds to the ``output_config`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.translate_v3.types.BatchTranslateDocumentResponse` Stored in the
[google.longrunning.Operation.response][google.longrunning.Operation.response]
field returned by BatchTranslateDocument if at least
one document is translated successfully.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[
parent,
source_language_code,
target_language_codes,
input_configs,
output_config,
]
)
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.BatchTranslateDocumentRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.BatchTranslateDocumentRequest):
request = translation_service.BatchTranslateDocumentRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if source_language_code is not None:
request.source_language_code = source_language_code
if target_language_codes is not None:
request.target_language_codes = target_language_codes
if input_configs is not None:
request.input_configs = input_configs
if output_config is not None:
request.output_config = output_config
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.batch_translate_document]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
translation_service.BatchTranslateDocumentResponse,
metadata_type=translation_service.BatchTranslateDocumentMetadata,
)
# Done; return the response.
return response
def create_glossary(
self,
request: Union[translation_service.CreateGlossaryRequest, dict] = None,
*,
parent: str = None,
glossary: translation_service.Glossary = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Creates a glossary and returns the long-running operation.
Returns NOT_FOUND, if the project doesn't exist.
Args:
request (Union[google.cloud.translate_v3.types.CreateGlossaryRequest, dict]):
The request object. Request message for CreateGlossary.
parent (str):
Required. The project name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
glossary (google.cloud.translate_v3.types.Glossary):
Required. The glossary to create.
This corresponds to the ``glossary`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.translate_v3.types.Glossary`
Represents a glossary built from user provided data.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, glossary])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.CreateGlossaryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.CreateGlossaryRequest):
request = translation_service.CreateGlossaryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if glossary is not None:
request.glossary = glossary
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_glossary]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
translation_service.Glossary,
metadata_type=translation_service.CreateGlossaryMetadata,
)
# Done; return the response.
return response
def list_glossaries(
self,
request: Union[translation_service.ListGlossariesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListGlossariesPager:
r"""Lists glossaries in a project. Returns NOT_FOUND, if the project
doesn't exist.
Args:
request (Union[google.cloud.translate_v3.types.ListGlossariesRequest, dict]):
The request object. Request message for ListGlossaries.
parent (str):
Required. The name of the project
from which to list all of the
glossaries.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.translate_v3.services.translation_service.pagers.ListGlossariesPager:
Response message for ListGlossaries.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.ListGlossariesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.ListGlossariesRequest):
request = translation_service.ListGlossariesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_glossaries]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListGlossariesPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
def get_glossary(
self,
request: Union[translation_service.GetGlossaryRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> translation_service.Glossary:
r"""Gets a glossary. Returns NOT_FOUND, if the glossary doesn't
exist.
Args:
request (Union[google.cloud.translate_v3.types.GetGlossaryRequest, dict]):
The request object. Request message for GetGlossary.
name (str):
Required. The name of the glossary to
retrieve.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.translate_v3.types.Glossary:
Represents a glossary built from user
provided data.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.GetGlossaryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.GetGlossaryRequest):
request = translation_service.GetGlossaryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_glossary]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def delete_glossary(
self,
request: Union[translation_service.DeleteGlossaryRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Deletes a glossary, or cancels glossary construction if the
glossary isn't created yet. Returns NOT_FOUND, if the glossary
doesn't exist.
Args:
request (Union[google.cloud.translate_v3.types.DeleteGlossaryRequest, dict]):
The request object. Request message for DeleteGlossary.
name (str):
Required. The name of the glossary to
delete.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.translate_v3.types.DeleteGlossaryResponse` Stored in the
[google.longrunning.Operation.response][google.longrunning.Operation.response]
field returned by DeleteGlossary.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a translation_service.DeleteGlossaryRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, translation_service.DeleteGlossaryRequest):
request = translation_service.DeleteGlossaryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_glossary]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
translation_service.DeleteGlossaryResponse,
metadata_type=translation_service.DeleteGlossaryMetadata,
)
# Done; return the response.
return response
    def __enter__(self):
        # Context-manager entry: the client itself is the managed object.
        return self
    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        # Closes the channel/connections owned by the transport.
        self.transport.close()
# Populate the reported gapic version from the installed distribution's
# metadata; fall back to an unversioned ClientInfo when the package metadata
# is unavailable (e.g. running from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-translate",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

__all__ = ("TranslationServiceClient",)
| 43.341886 | 143 | 0.621722 |
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.transport import mtls
from google.auth.transport.grpc import SslCredentials
from google.auth.exceptions import MutualTLSChannelError
from google.oauth2 import service_account
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:
OptionalRetry = Union[retries.Retry, object]
from google.api_core import operation
from google.api_core import operation_async
from google.cloud.translate_v3.services.translation_service import pagers
from google.cloud.translate_v3.types import translation_service
from google.protobuf import timestamp_pb2
from .transports.base import TranslationServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import TranslationServiceGrpcTransport
from .transports.grpc_asyncio import TranslationServiceGrpcAsyncIOTransport
class TranslationServiceClientMeta(type):
    """Metaclass for the TranslationService client.

    Keeps a registry of available transport classes and returns the
    appropriate one for a given ``transport`` label, defaulting to the
    first registered entry (gRPC) when no label is supplied.
    """

    # Ordered so the first entry ("grpc") acts as the default transport.
    _transport_registry = (
        OrderedDict()
    )
    _transport_registry["grpc"] = TranslationServiceGrpcTransport
    _transport_registry["grpc_asyncio"] = TranslationServiceGrpcAsyncIOTransport

    def get_transport_class(
        cls, label: str = None,
    ) -> Type[TranslationServiceTransport]:
        """Return the transport class registered under ``label``.

        Falls back to the first registered transport when ``label`` is
        falsy (None or empty string).
        """
        if label:
            return cls._transport_registry[label]
        return next(iter(cls._transport_registry.values()))
class TranslationServiceClient(metaclass=TranslationServiceClientMeta):
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "translate.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
    @property
    def transport(self) -> TranslationServiceTransport:
        """The transport instance this client sends requests through."""
        return self._transport
@staticmethod
def glossary_path(project: str, location: str, glossary: str,) -> str:
return "projects/{project}/locations/{location}/glossaries/{glossary}".format(
project=project, location=location, glossary=glossary,
)
@staticmethod
def parse_glossary_path(path: str) -> Dict[str, str]:
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/glossaries/(?P<glossary>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[client_options_lib.ClientOptions] = None
    ):
        """Return the (api_endpoint, client_cert_source) pair to use.

        The decision is driven by ``client_options`` and the environment
        variables ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` ("true"/"false",
        default "false") and ``GOOGLE_API_USE_MTLS_ENDPOINT``
        ("always"/"never"/"auto", default "auto").

        Raises:
            ValueError: if GOOGLE_API_USE_CLIENT_CERTIFICATE is not
                "true" or "false".
            MutualTLSChannelError: if GOOGLE_API_USE_MTLS_ENDPOINT is not
                "auto", "never", or "always".
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )

        # Client certificate: explicit option wins, otherwise fall back to
        # the default cert source when one is available on this machine.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Endpoint: explicit option wins; otherwise use the mTLS endpoint
        # when forced ("always") or when "auto" and a cert source exists.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, TranslationServiceTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the translation service client.

        Args:
            credentials: Authorization credentials; when None, credentials
                are resolved by the transport (ambient/default).
            transport: A transport label ("grpc"/"grpc_asyncio"), an
                already-built transport instance, or None for the default.
            client_options: Endpoint/mTLS/credentials-file options; a dict
                is accepted and coerced.
            client_info: Client metadata sent with requests.
        Raises:
            ValueError: on mutually exclusive argument combinations (see
                the checks below).
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        # Resolve endpoint and client certificate from options + env vars.
        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
            client_options
        )
        # getattr: older client_options versions may lack `api_key`.
        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError(
                "client_options.api_key and credentials are mutually exclusive"
            )
        if isinstance(transport, TranslationServiceTransport):
            # A pre-built transport already carries its own credentials and
            # scopes; conflicting options are rejected.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            # Local import: google.auth._default is a private module that may
            # not expose get_api_key_credentials on older google-auth versions.
            import google.auth._default
            if api_key_value and hasattr(
                google.auth._default, "get_api_key_credentials"
            ):
                credentials = google.auth._default.get_api_key_credentials(
                    api_key_value
                )
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
            )
    def translate_text(
        self,
        request: Union[translation_service.TranslateTextRequest, dict] = None,
        *,
        parent: str = None,
        target_language_code: str = None,
        contents: Sequence[str] = None,
        model: str = None,
        mime_type: str = None,
        source_language_code: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> translation_service.TranslateTextResponse:
        """Send a TranslateText RPC built from ``request`` or from the
        flattened field arguments (the two forms are mutually exclusive)."""
        # Reject mixing a full request object with flattened fields.
        has_flattened_params = any(
            [
                parent,
                target_language_code,
                contents,
                model,
                mime_type,
                source_language_code,
            ]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply flattened fields.
        if not isinstance(request, translation_service.TranslateTextRequest):
            request = translation_service.TranslateTextRequest(request)
        if parent is not None:
            request.parent = parent
        if target_language_code is not None:
            request.target_language_code = target_language_code
        if contents is not None:
            request.contents = contents
        if model is not None:
            request.model = model
        if mime_type is not None:
            request.mime_type = mime_type
        if source_language_code is not None:
            request.source_language_code = source_language_code
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.translate_text]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        return response
    def detect_language(
        self,
        request: Union[translation_service.DetectLanguageRequest, dict] = None,
        *,
        parent: str = None,
        model: str = None,
        mime_type: str = None,
        content: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> translation_service.DetectLanguageResponse:
        """Send a DetectLanguage RPC built from ``request`` or from the
        flattened field arguments (the two forms are mutually exclusive)."""
        has_flattened_params = any([parent, model, mime_type, content])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply flattened fields.
        if not isinstance(request, translation_service.DetectLanguageRequest):
            request = translation_service.DetectLanguageRequest(request)
        if parent is not None:
            request.parent = parent
        if model is not None:
            request.model = model
        if mime_type is not None:
            request.mime_type = mime_type
        if content is not None:
            request.content = content
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.detect_language]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        return response
    def get_supported_languages(
        self,
        request: Union[translation_service.GetSupportedLanguagesRequest, dict] = None,
        *,
        parent: str = None,
        model: str = None,
        display_language_code: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> translation_service.SupportedLanguages:
        """Send a GetSupportedLanguages RPC built from ``request`` or from
        the flattened field arguments (mutually exclusive forms)."""
        has_flattened_params = any([parent, model, display_language_code])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply flattened fields.
        if not isinstance(request, translation_service.GetSupportedLanguagesRequest):
            request = translation_service.GetSupportedLanguagesRequest(request)
        if parent is not None:
            request.parent = parent
        if model is not None:
            request.model = model
        if display_language_code is not None:
            request.display_language_code = display_language_code
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.get_supported_languages]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        return response
def translate_document(
self,
request: Union[translation_service.TranslateDocumentRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> translation_service.TranslateDocumentResponse:
if not isinstance(request, translation_service.TranslateDocumentRequest):
request = translation_service.TranslateDocumentRequest(request)
rpc = self._transport._wrapped_methods[self._transport.translate_document]
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
return response
    def batch_translate_text(
        self,
        request: Union[translation_service.BatchTranslateTextRequest, dict] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        """Send a BatchTranslateText RPC and return a long-running
        Operation future wrapping the raw operation proto."""
        # Normalize dict/None input into a proto request object.
        if not isinstance(request, translation_service.BatchTranslateTextRequest):
            request = translation_service.BatchTranslateTextRequest(request)
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.batch_translate_text]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the raw long-running operation in an Operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            translation_service.BatchTranslateResponse,
            metadata_type=translation_service.BatchTranslateMetadata,
        )
        return response
    def batch_translate_document(
        self,
        request: Union[translation_service.BatchTranslateDocumentRequest, dict] = None,
        *,
        parent: str = None,
        source_language_code: str = None,
        target_language_codes: Sequence[str] = None,
        input_configs: Sequence[translation_service.BatchDocumentInputConfig] = None,
        output_config: translation_service.BatchDocumentOutputConfig = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        """Send a BatchTranslateDocument RPC built from ``request`` or the
        flattened fields (mutually exclusive) and return an Operation
        future wrapping the long-running result."""
        # Reject mixing a full request object with flattened fields.
        has_flattened_params = any(
            [
                parent,
                source_language_code,
                target_language_codes,
                input_configs,
                output_config,
            ]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply flattened fields.
        if not isinstance(request, translation_service.BatchTranslateDocumentRequest):
            request = translation_service.BatchTranslateDocumentRequest(request)
        if parent is not None:
            request.parent = parent
        if source_language_code is not None:
            request.source_language_code = source_language_code
        if target_language_codes is not None:
            request.target_language_codes = target_language_codes
        if input_configs is not None:
            request.input_configs = input_configs
        if output_config is not None:
            request.output_config = output_config
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.batch_translate_document]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the raw long-running operation in an Operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            translation_service.BatchTranslateDocumentResponse,
            metadata_type=translation_service.BatchTranslateDocumentMetadata,
        )
        return response
    def create_glossary(
        self,
        request: Union[translation_service.CreateGlossaryRequest, dict] = None,
        *,
        parent: str = None,
        glossary: translation_service.Glossary = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        """Send a CreateGlossary RPC built from ``request`` or the flattened
        fields (mutually exclusive) and return an Operation future."""
        has_flattened_params = any([parent, glossary])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply flattened fields.
        if not isinstance(request, translation_service.CreateGlossaryRequest):
            request = translation_service.CreateGlossaryRequest(request)
        if parent is not None:
            request.parent = parent
        if glossary is not None:
            request.glossary = glossary
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.create_glossary]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the raw long-running operation in an Operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            translation_service.Glossary,
            metadata_type=translation_service.CreateGlossaryMetadata,
        )
        return response
    def list_glossaries(
        self,
        request: Union[translation_service.ListGlossariesRequest, dict] = None,
        *,
        parent: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.ListGlossariesPager:
        """Send a ListGlossaries RPC and return a pager that resolves
        additional result pages on iteration."""
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply the flattened field.
        if not isinstance(request, translation_service.ListGlossariesRequest):
            request = translation_service.ListGlossariesRequest(request)
        if parent is not None:
            request.parent = parent
        # Wrapped method adds retry/timeout; routing header names the parent.
        rpc = self._transport._wrapped_methods[self._transport.list_glossaries]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the first page in a pager so iteration fetches further pages.
        response = pagers.ListGlossariesPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )
        return response
    def get_glossary(
        self,
        request: Union[translation_service.GetGlossaryRequest, dict] = None,
        *,
        name: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> translation_service.Glossary:
        """Send a GetGlossary RPC built from ``request`` or the flattened
        ``name`` argument (mutually exclusive)."""
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply the flattened field.
        if not isinstance(request, translation_service.GetGlossaryRequest):
            request = translation_service.GetGlossaryRequest(request)
        if name is not None:
            request.name = name
        # Wrapped method adds retry/timeout; routing header names the resource.
        rpc = self._transport._wrapped_methods[self._transport.get_glossary]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        return response
    def delete_glossary(
        self,
        request: Union[translation_service.DeleteGlossaryRequest, dict] = None,
        *,
        name: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation.Operation:
        """Send a DeleteGlossary RPC built from ``request`` or the flattened
        ``name`` argument (mutually exclusive) and return an Operation
        future."""
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coerce dict/None into a proto request and apply the flattened field.
        if not isinstance(request, translation_service.DeleteGlossaryRequest):
            request = translation_service.DeleteGlossaryRequest(request)
        if name is not None:
            request.name = name
        # Wrapped method adds retry/timeout; routing header names the resource.
        rpc = self._transport._wrapped_methods[self._transport.delete_glossary]
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the raw long-running operation in an Operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            translation_service.DeleteGlossaryResponse,
            metadata_type=translation_service.DeleteGlossaryMetadata,
        )
        return response
    def __enter__(self):
        # Context-manager entry: the client itself is the managed object.
        return self
    def __exit__(self, type, value, traceback):
        """Release the underlying transport's resources.

        .. warning::
            Only use as a context manager if the transport is not shared
            with other clients — exiting the ``with`` block closes the
            transport.
        """
        self.transport.close()
# Populate the reported gapic version from the installed distribution's
# metadata; fall back to an unversioned ClientInfo when the package metadata
# is unavailable (e.g. running from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-translate",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

__all__ = ("TranslationServiceClient",)
| true | true |
f72e01c36d606cd161f67fa38b987c08519c5866 | 105,431 | py | Python | octavia/tests/functional/api/v2/test_pool.py | lingxiankong/octavia | 4a5c24ef6fcd3b5f198a20d780dedd7f7976296d | [
"Apache-2.0"
] | null | null | null | octavia/tests/functional/api/v2/test_pool.py | lingxiankong/octavia | 4a5c24ef6fcd3b5f198a20d780dedd7f7976296d | [
"Apache-2.0"
] | null | null | null | octavia/tests/functional/api/v2/test_pool.py | lingxiankong/octavia | 4a5c24ef6fcd3b5f198a20d780dedd7f7976296d | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from octavia.common import constants
import octavia.common.context
from octavia.common import data_models
from octavia.common import exceptions
from octavia.db import api as db_api
from octavia.tests.common import sample_certs
from octavia.tests.functional.api.v2 import base
class TestPool(base.BaseAPITest):
root_tag = 'pool'
root_tag_list = 'pools'
root_tag_links = 'pools_links'
    def setUp(self):
        # Common fixtures: one HTTP load balancer + listener, plus a
        # separate UDP load balancer + listener for UDP pool tests.
        super(TestPool, self).setUp()
        self.lb = self.create_load_balancer(
            uuidutils.generate_uuid()).get('loadbalancer')
        self.lb_id = self.lb.get('id')
        self.project_id = self.lb.get('project_id')
        # Each create_* call leaves the LB in PENDING_UPDATE; simulate the
        # driver completing by forcing ACTIVE before the next operation.
        self.set_lb_status(self.lb_id)
        self.listener = self.create_listener(
            constants.PROTOCOL_HTTP, 80,
            self.lb_id).get('listener')
        self.listener_id = self.listener.get('id')
        self.set_lb_status(self.lb_id)
        self._setup_udp_lb_resources()
    def _setup_udp_lb_resources(self):
        # Build a dedicated UDP load balancer and listener (port 8888),
        # activating the LB after each create call.
        self.udp_lb = self.create_load_balancer(uuidutils.generate_uuid()).get(
            'loadbalancer')
        self.udp_lb_id = self.udp_lb.get('id')
        self.set_lb_status(self.udp_lb_id)
        self.udp_listener = self.create_listener(
            constants.PROTOCOL_UDP, 8888,
            self.udp_lb_id).get('listener')
        self.udp_listener_id = self.udp_listener.get('id')
        self.set_lb_status(self.udp_lb_id)
    def test_get(self):
        # GET of a created pool returns the same representation it was
        # created with (modulo the volatile updated_at timestamp).
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id, tags=['test_tag']).get(self.root_tag)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
    def test_get_authorized(self):
        # A member of the owning project can GET the pool when keystone-style
        # auth is enforced (TESTING strategy + patched policy credentials).
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        self.set_lb_status(lb_id=self.lb_id)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        # Remember the configured strategy so it can be restored afterwards.
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Minimal policy credentials for a load-balancer_member of the
            # pool's own project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                response = self.get(self.POOL_PATH.format(
                    pool_id=api_pool.get('id'))).json.get(self.root_tag)
                response.pop('updated_at')
                self.assertEqual(api_pool, response)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
    def test_get_not_authorized(self):
        # A request from a different project must be rejected with 403.
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        self.set_lb_status(lb_id=self.lb_id)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # Impersonate a random (non-owning) project for the GET.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            response = self.get(self.POOL_PATH.format(
                pool_id=api_pool.get('id')), status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
    def test_get_deleted_gives_404(self):
        # A pool marked DELETED in the DB must not be retrievable.
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)

        self.set_object_status(self.pool_repo, api_pool.get('id'),
                               provisioning_status=constants.DELETED)
        self.get(self.POOL_PATH.format(pool_id=api_pool.get('id')), status=404)
    def test_bad_get(self):
        # GET of a pool id that was never created returns 404.
        self.get(self.POOL_PATH.format(pool_id=uuidutils.generate_uuid()),
                 status=404)
    def test_get_all(self):
        # Listing pools returns the single created pool, tags included.
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id, tags=['test_tag']).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.assertIsInstance(pools, list)
        self.assertEqual(1, len(pools))
        self.assertEqual(api_pool.get('id'), pools[0].get('id'))
        self.assertEqual(['test_tag'], pools[0]['tags'])
    def test_get_all_hides_deleted(self):
        # DELETED pools are filtered out of list results.
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)

        # Visible immediately after creation...
        response = self.get(self.POOLS_PATH)
        objects = response.json.get(self.root_tag_list)
        self.assertEqual(len(objects), 1)
        # ...but gone once marked DELETED in the DB.
        self.set_object_status(self.pool_repo, api_pool.get('id'),
                               provisioning_status=constants.DELETED)
        response = self.get(self.POOLS_PATH)
        objects = response.json.get(self.root_tag_list)
        self.assertEqual(len(objects), 0)
    def test_get_all_admin(self):
        """Admin (unscoped) listing returns pools across all projects.

        Creates HTTP, HTTPS, and TCP pools on a load balancer owned by a
        freshly generated project, then verifies all three appear in an
        unscoped list call.
        """
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        pool1 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        # The LB must return to ACTIVE before the next mutating call.
        self.set_lb_status(lb1_id)
        pool2 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool3 = self.create_pool(
            lb1_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.assertEqual(3, len(pools))
        # Compare (id, protocol) pairs so list ordering does not matter.
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool1.get('id'), pool1.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool2.get('id'), pool2.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
    def test_get_all_non_admin(self):
        """A plain member sees only pools belonging to their own project.

        Two pools live under a foreign project, one under the caller's
        project; only the latter may appear in the list response.
        """
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        # Two pools owned by the foreign project.
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        # One pool owned by the test's own project.
        pool3 = self.create_pool(
            self.lb_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        # NOTE(review): this test enables KEYSTONE rather than the TESTING
        # strategy used by siblings — presumably intentional; confirm.
        self.conf.config(group='api_settings',
                         auth_strategy=constants.KEYSTONE)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               pool3['project_id']):
            # Minimal non-admin member credential set for policy checks.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        # Restore the original auth strategy for subsequent tests.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(1, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
    def test_get_all_non_admin_global_observer(self):
        """A global observer can list pools of every project.

        All three pools live under a foreign project; the caller holds the
        load-balancer_global_observer role and must still see all of them.
        """
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        pool1 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool2 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool3 = self.create_pool(
            lb1_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin credentials carrying the global observer role.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_global_observer'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(3, len(pools))
        # Compare (id, protocol) pairs so list ordering does not matter.
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool1.get('id'), pool1.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool2.get('id'), pool2.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
    def test_get_all_not_authorized(self):
        """Listing pools as an unrelated project is rejected with 403."""
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # Impersonate a project that owns none of the pools above.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            pools = self.get(self.POOLS_PATH, status=403).json
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, pools)
    def test_get_by_project_id(self):
        """The ?project_id= filter returns only that project's pools.

        Project 1 owns two pools (HTTP, HTTPS); project 2 owns one (TCP).
        """
        project1_id = uuidutils.generate_uuid()
        project2_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project1_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        lb2 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb2',
                                        project_id=project2_id)
        lb2_id = lb2.get('loadbalancer').get('id')
        self.set_lb_status(lb2_id)
        pool1 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool2 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool3 = self.create_pool(
            lb2_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb2_id)
        # Filter by project 1: expect exactly its two pools.
        pools = self.get(
            self.POOLS_PATH,
            params={'project_id': project1_id}).json.get(self.root_tag_list)
        self.assertEqual(2, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool1.get('id'), pool1.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool2.get('id'), pool2.get('protocol')),
                      pool_id_protocols)
        # Filter by project 2: expect only its single pool.
        pools = self.get(
            self.POOLS_PATH,
            params={'project_id': project2_id}).json.get(self.root_tag_list)
        self.assertEqual(1, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
def test_get_all_with_listener(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
response = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
self.assertIsInstance(response, list)
self.assertEqual(1, len(response))
self.assertEqual(api_pool.get('id'), response[0].get('id'))
def test_get_all_sorted(self):
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool1')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool2')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool3')
self.set_lb_status(lb_id=self.lb_id)
response = self.get(self.POOLS_PATH,
params={'sort': 'name:desc'})
pools_desc = response.json.get(self.root_tag_list)
response = self.get(self.POOLS_PATH,
params={'sort': 'name:asc'})
pools_asc = response.json.get(self.root_tag_list)
self.assertEqual(3, len(pools_desc))
self.assertEqual(3, len(pools_asc))
pool_id_names_desc = [(pool.get('id'), pool.get('name'))
for pool in pools_desc]
pool_id_names_asc = [(pool.get('id'), pool.get('name'))
for pool in pools_asc]
self.assertEqual(pool_id_names_asc,
list(reversed(pool_id_names_desc)))
def test_get_all_limited(self):
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool1')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool2')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool3')
self.set_lb_status(lb_id=self.lb_id)
# First two -- should have 'next' link
first_two = self.get(self.POOLS_PATH, params={'limit': 2}).json
objs = first_two[self.root_tag_list]
links = first_two[self.root_tag_links]
self.assertEqual(2, len(objs))
self.assertEqual(1, len(links))
self.assertEqual('next', links[0]['rel'])
# Third + off the end -- should have previous link
third = self.get(self.POOLS_PATH, params={
'limit': 2,
'marker': first_two[self.root_tag_list][1]['id']}).json
objs = third[self.root_tag_list]
links = third[self.root_tag_links]
self.assertEqual(1, len(objs))
self.assertEqual(1, len(links))
self.assertEqual('previous', links[0]['rel'])
# Middle -- should have both links
middle = self.get(self.POOLS_PATH, params={
'limit': 1,
'marker': first_two[self.root_tag_list][0]['id']}).json
objs = middle[self.root_tag_list]
links = middle[self.root_tag_links]
self.assertEqual(1, len(objs))
self.assertEqual(2, len(links))
self.assertItemsEqual(['previous', 'next'], [l['rel'] for l in links])
def test_get_all_fields_filter(self):
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool1')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool2')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool3')
self.set_lb_status(lb_id=self.lb_id)
pools = self.get(self.POOLS_PATH, params={
'fields': ['id', 'project_id']}).json
for pool in pools['pools']:
self.assertIn(u'id', pool)
self.assertIn(u'project_id', pool)
self.assertNotIn(u'description', pool)
def test_get_one_fields_filter(self):
pool1 = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool1').get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
pool = self.get(
self.POOL_PATH.format(pool_id=pool1.get('id')),
params={'fields': ['id', 'project_id']}).json.get(self.root_tag)
self.assertIn(u'id', pool)
self.assertIn(u'project_id', pool)
self.assertNotIn(u'description', pool)
    def test_get_all_filter(self):
        """Filtering by id and healthmonitor_id returns only the match."""
        po1 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Attach a health monitor so the pool can be matched on its id.
        hm = self.create_health_monitor(po1['id'],
                                        constants.HEALTH_MONITOR_HTTP,
                                        1, 1, 1, 1).get('healthmonitor')
        self.set_lb_status(lb_id=self.lb_id)
        # Two more pools that must NOT match the filter.
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool2').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool3').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        pools = self.get(self.POOLS_PATH, params={
            'id': po1['id'], 'healthmonitor_id': hm['id']}).json
        self.assertEqual(1, len(pools['pools']))
        self.assertEqual(po1['id'],
                         pools['pools'][0]['id'])
    def test_get_all_tags_filter(self):
        """Exercise every tag query mode: tags, tags-any, not-tags,
        not-tags-any, and their combinations.
        """
        po1 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1',
            tags=['test_tag1', 'test_tag2']
        ).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        po2 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool2',
            tags=['test_tag2', 'test_tag3']
        ).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        po3 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool3',
            tags=['test_tag4', 'test_tag5']
        ).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # 'tags' with one tag: every listed pool carries it (po1, po2).
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(2, len(pos))
        self.assertEqual(
            [po1.get('id'), po2.get('id')],
            [po.get('id') for po in pos]
        )
        # 'tags' with a list: pool must carry ALL tags (only po2).
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': ['test_tag2', 'test_tag3']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(1, len(pos))
        self.assertEqual(
            [po2.get('id')],
            [po.get('id') for po in pos]
        )
        # 'tags-any': pool must carry at least one of the tags.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags-any': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(2, len(pos))
        self.assertEqual(
            [po1.get('id'), po2.get('id')],
            [po.get('id') for po in pos]
        )
        # 'not-tags': pool must NOT carry the tag (only po3).
        pos = self.get(
            self.POOLS_PATH,
            params={'not-tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(1, len(pos))
        self.assertEqual(
            [po3.get('id')],
            [po.get('id') for po in pos]
        )
        # 'not-tags-any': pool must carry none of the tags (no match).
        pos = self.get(
            self.POOLS_PATH,
            params={'not-tags-any': ['test_tag2', 'test_tag4']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(0, len(pos))
        # Combined 'tags' + 'tags-any' narrows conjunctively.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': 'test_tag2',
                    'tags-any': ['test_tag1', 'test_tag3']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(2, len(pos))
        self.assertEqual(
            [po1.get('id'), po2.get('id')],
            [po.get('id') for po in pos]
        )
        # Contradictory 'tags' + 'not-tags' can never match.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': 'test_tag2', 'not-tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(0, len(pos))
def test_empty_get_all(self):
response = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
self.assertIsInstance(response, list)
self.assertEqual(0, len(response))
    def test_create(self):
        """Happy-path create: statuses cascade and fields round-trip.

        Immediately after create the LB/listener are PENDING_UPDATE and
        the pool PENDING_CREATE/OFFLINE; after the LB returns to ACTIVE
        all objects settle into their default healthy statuses.
        """
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tags=['test_tag']).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.PROTOCOL_HTTP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        self.assertEqual(['test_tag'], api_pool['tags'])
        # A brand-new pool has a created_at but no updated_at yet.
        self.assertIsNotNone(api_pool.get('created_at'))
        self.assertIsNone(api_pool.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
    def test_create_authorized(self):
        """A member of the owning project may create a pool.

        Same flow as test_create, but run under explicit non-admin
        member credentials for the pool's own project.
        """
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Minimal non-admin member credential set for policy checks.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                api_pool = self.create_pool(
                    self.lb_id,
                    constants.PROTOCOL_HTTP,
                    constants.LB_ALGORITHM_ROUND_ROBIN,
                    listener_id=self.listener_id).get(self.root_tag)
        # Restore the original auth strategy for subsequent tests.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.PROTOCOL_HTTP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        # A brand-new pool has a created_at but no updated_at yet.
        self.assertIsNotNone(api_pool.get('created_at'))
        self.assertIsNone(api_pool.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_not_authorized(self):
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
with mock.patch.object(octavia.common.context.Context, 'project_id',
uuidutils.generate_uuid()):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id, status=403)
self.conf.config(group='api_settings', auth_strategy=auth_strategy)
self.assertEqual(self.NOT_AUTHORIZED_BODY, api_pool)
    def test_create_with_proxy_protocol(self):
        """A PROXY-protocol pool creates successfully with normal cascade."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_PROXY,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.PROTOCOL_PROXY, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        # A brand-new pool has a created_at but no updated_at yet.
        self.assertIsNotNone(api_pool.get('created_at'))
        self.assertIsNone(api_pool.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
    def test_create_sans_listener(self):
        """Creating a pool without a listener leaves the listener ACTIVE."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.assertEqual(constants.PROTOCOL_HTTP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        # Make sure listener status is unchanged, but LB status is changed.
        # LB should still be locked even with pool and subordinate object
        # updates.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.ACTIVE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
    def test_create_sans_loadbalancer_id(self):
        """The LB can be inferred from the listener when lb_id is omitted."""
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.assertEqual(constants.PROTOCOL_HTTP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        # Both LB and listener still cascade to PENDING_UPDATE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
    def test_create_with_listener_id_in_pool_dict(self):
        """Passing listener_id inside the pool body works like test_create."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.PROTOCOL_HTTP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_with_project_id(self):
optionals = {
'listener_id': self.listener_id,
'project_id': self.project_id}
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
**optionals).get(self.root_tag)
self.assertEqual(self.project_id, api_pool.get('project_id'))
    def test_create_udp_case_source_ip(self):
        """UDP pool with SOURCE_IP persistence accepts timeout/granularity.

        cookie_name must come back as None — cookies do not apply to UDP.
        """
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_UDP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.udp_listener_id,
            session_persistence=sp).get(self.root_tag)
        self.assertEqual(constants.PROTOCOL_UDP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        self.assertEqual(constants.SESSION_PERSISTENCE_SOURCE_IP,
                         api_pool.get('session_persistence')['type'])
        self.assertEqual(3, api_pool.get(
            'session_persistence')['persistence_timeout'])
        self.assertEqual('255.255.255.0', api_pool.get(
            'session_persistence')['persistence_granularity'])
        self.assertIsNone(api_pool.get(
            'session_persistence')['cookie_name'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
    def test_create_with_tls_enabled_only(self):
        """tls_enabled=True round-trips without any certificate refs."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_enabled=True).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertTrue(api_pool.get('tls_enabled'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_create_with_tls_container_ref(self, mock_cert_data):
        """A valid client-cert container ref round-trips on the pool.

        Certificate loading is mocked so no real secret store is needed.
        """
        tls_container_ref = uuidutils.generate_uuid()
        pool_cert = data_models.TLSContainer(certificate='pool cert')
        mock_cert_data.return_value = {'tls_cert': pool_cert,
                                       'sni_certs': [],
                                       'client_ca_cert': None}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_container_ref=tls_container_ref).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(tls_container_ref, api_pool.get('tls_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_create_with_ca_and_crl(self, mock_cert_data):
        """CA cert plus CRL refs round-trip on the created pool."""
        # get_secret is called twice per validation pass (CA then CRL),
        # hence four queued return values.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(ca_tls_container_ref,
                         api_pool.get('ca_tls_container_ref'))
        self.assertEqual(crl_container_ref,
                         api_pool.get('crl_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_with_bad_tls_container_ref(self):
tls_container_ref = uuidutils.generate_uuid()
self.cert_manager_mock().get_cert.side_effect = [Exception(
"bad cert")]
self.cert_manager_mock().get_secret.side_effect = [Exception(
"bad secret")]
api_pool = self.create_pool(
self.lb_id, constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id,
tls_container_ref=tls_container_ref, status=400)
self.assertIn(tls_container_ref, api_pool['faultstring'])
def test_create_with_bad_ca_tls_container_ref(self):
ca_tls_container_ref = uuidutils.generate_uuid()
self.cert_manager_mock().get_cert.side_effect = [Exception(
"bad ca cert")]
self.cert_manager_mock().get_secret.side_effect = [Exception(
"bad ca secret")]
api_pool = self.create_pool(
self.lb_id, constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id,
ca_tls_container_ref=ca_tls_container_ref, status=400)
self.assertIn(ca_tls_container_ref, api_pool['faultstring'])
    def test_create_with_unreachable_crl(self):
        """A valid CA ref with an unreachable CRL ref yields a 400 fault."""
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        # First lookup (CA) succeeds; second (CRL) fails.
        self.cert_manager_mock().get_cert.side_effect = [
            'cert 1', Exception('unknow/bad cert')]
        self.cert_manager_mock().get_secret.side_effect = [Exception(
            'bad secret')]
        api_pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref, status=400)
        # The fault string must name the CRL ref, not the CA ref.
        self.assertIn(crl_container_ref, api_pool['faultstring'])
def test_create_with_crl_only(self):
crl_container_ref = uuidutils.generate_uuid()
api_pool = self.create_pool(
self.lb_id, constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id,
crl_container_ref=crl_container_ref, status=400)
self.assertIn(
'A CA certificate reference is required to specify a '
'revocation list.', api_pool['faultstring'])
def test_negative_create_udp_case(self):
# Error create pool with udp protocol but non-udp-type
sp = {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE,
"cookie_name": 'test-cookie-name'}
req_dict = {
'listener_id': self.udp_listener_id,
'protocol': constants.PROTOCOL_UDP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
expect_error_msg = ("Validation failure: Cookie names are not "
"supported for %s pools.") % constants.PROTOCOL_UDP
res = self.post(self.POOLS_PATH, self._build_body(req_dict),
status=400, expect_errors=True)
self.assertEqual(expect_error_msg, res.json['faultstring'])
self.assert_correct_status(
lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
# Error create pool with any non-udp-types and udp session persistence
# options.
sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
"persistence_timeout": 3,
"persistence_granularity": '255.255.255.0'}
req_dict = {
'listener_id': self.udp_listener_id,
'protocol': constants.PROTOCOL_UDP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': None}
for type in [constants.SESSION_PERSISTENCE_HTTP_COOKIE,
constants.SESSION_PERSISTENCE_APP_COOKIE]:
expect_error_msg = ("Validation failure: Session persistence of "
"type %s is not supported for %s protocol "
"pools.") % (type, constants.PROTOCOL_UDP)
sp.update({'type': type})
req_dict['session_persistence'] = sp
res = self.post(self.POOLS_PATH, self._build_body(req_dict),
status=400,
expect_errors=True)
self.assertEqual(expect_error_msg, res.json['faultstring'])
self.assert_correct_status(
lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
# Error create pool with source ip session persistence and wrong
# options.
sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
"persistence_timeout": 3,
"persistence_granularity": '255.255.255.0',
"cookie_name": 'test-cookie-name'}
req_dict = {
'listener_id': self.udp_listener_id,
'protocol': constants.PROTOCOL_UDP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
expect_error_msg = (
"Validation failure: session_persistence %s type for %s "
"protocol only accepts: type, persistence_timeout, "
"persistence_granularity.") % (
constants.SESSION_PERSISTENCE_SOURCE_IP, constants.PROTOCOL_UDP)
res = self.post(self.POOLS_PATH, self._build_body(req_dict),
status=400, expect_errors=True)
self.assertEqual(expect_error_msg, res.json['faultstring'])
self.assert_correct_status(
lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
# Error create non-udp pool with udp session persistence
sps = [{"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
"persistence_timeout": 3,
"persistence_granularity": '255.255.255.0'},
{"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
"persistence_timeout": 3,
"persistence_granularity": '255.255.255.0'}]
req_dict = {
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN}
expect_error_msg = ("Validation failure: persistence_timeout and "
"persistence_granularity is only for %s protocol "
"pools.") % constants.PROTOCOL_UDP
for s in sps:
req_dict.update({'session_persistence': s})
res = self.post(self.POOLS_PATH, self._build_body(req_dict),
status=400, expect_errors=True)
self.assertEqual(expect_error_msg, res.json['faultstring'])
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id)
def test_bad_create(self):
pool = {'name': 'test1'}
self.post(self.POOLS_PATH, self._build_body(pool), status=400)
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id)
    def test_create_with_listener_with_default_pool_id_set(self):
        """A listener that already has a default pool rejects another (409)."""
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id)
        self.set_lb_status(self.lb_id)
        # Second pool targeting the same listener must conflict.
        lb_pool = {
            'loadbalancer_id': self.lb_id,
            'listener_id': self.listener_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'project_id': self.project_id}
        self.post(self.POOLS_PATH, self._build_body(lb_pool), status=409)
def test_create_bad_protocol(self):
lb_pool = {
'loadbalancer_id': self.lb_id,
'protocol': 'STUPID_PROTOCOL',
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
    @mock.patch('octavia.api.drivers.utils.call_provider')
    def test_create_with_bad_provider(self, mock_provider):
        """A provider-driver failure surfaces as a 500 with its message."""
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        lb_pool = {
            'loadbalancer_id': self.lb_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'project_id': self.project_id}
        response = self.post(self.POOLS_PATH, self._build_body(lb_pool),
                             status=500)
        self.assertIn('Provider \'bad_driver\' reports error: broken',
                      response.json.get('faultstring'))
def test_create_over_quota(self):
self.start_quota_mock(data_models.Pool)
lb_pool = {
'loadbalancer_id': self.lb_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'project_id': self.project_id}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=403)
def test_update(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id, tags=['old_tag']).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
new_pool = {'name': 'new_name', 'tags': ['new_tag']}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'),
lb_prov_status=constants.PENDING_UPDATE,
listener_prov_status=constants.PENDING_UPDATE,
pool_prov_status=constants.PENDING_UPDATE)
self.set_lb_status(self.lb_id)
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
self.assertEqual('new_name', response.get('name'))
self.assertEqual(['new_tag'], response['tags'])
self.assertIsNotNone(response.get('created_at'))
self.assertIsNotNone(response.get('updated_at'))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=response.get('id'))
    def test_update_authorized(self):
        """Verify a member of the owning project may update the pool."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'name': 'new_name'}
        # Temporarily enable real auth checks (saved/restored below).
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin credentials scoped to the pool's own project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                # The member role on the owning project must be allowed.
                self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                         self._build_body(new_pool))
        # Restore the original auth strategy for subsequent tests.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual('new_name', response.get('name'))
        self.assertIsNotNone(response.get('created_at'))
        self.assertIsNotNone(response.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
    def test_update_not_authorized(self):
        """Verify an update from a foreign project is rejected with 403."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'name': 'new_name'}
        # Temporarily enable real auth checks (saved/restored below),
        # then act as a random, unrelated project.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            api_pool = self.put(
                self.POOL_PATH.format(pool_id=api_pool.get('id')),
                self._build_body(new_pool), status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, api_pool.json)
        # A rejected update must leave the LB untouched.
        self.assert_correct_lb_status(self.lb_id, constants.ONLINE,
                                      constants.ACTIVE)
    def test_update_get_session_persistence_from_db_if_no_request(self):
        """Omitted SP 'type' on update is back-filled from the database."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        optionals = {"listener_id": self.udp_listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_UDP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.udp_lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        # Drop 'type' from the request body; the API should fetch the
        # stored type from the DB instead of failing validation.
        ty = sess_p.pop('type')
        sess_p['persistence_timeout'] = 4
        sess_p['persistence_granularity'] = "255.255.0.0"
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        # Re-add the type so the expected dict matches the stored result.
        sess_p['type'] = ty
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sess_p, response.get('session_persistence'))
        self.assert_correct_status(
            listener_id=self.udp_listener_id,
            pool_id=api_pool.get('id'),
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
    def test_update_udp_case_source_ip(self):
        """Exercise SOURCE_IP session persistence updates on a UDP pool.

        Positive case first (timeout/granularity change succeeds), then
        several validation failures reusing the same sess_p dict.
        """
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        optionals = {"listener_id": self.udp_listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_UDP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.udp_lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['persistence_timeout'] = 4
        sess_p['persistence_granularity'] = "255.255.0.0"
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sess_p, response.get('session_persistence'))
        self.assert_correct_status(
            listener_id=self.udp_listener_id,
            pool_id=api_pool.get('id'),
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        # Reset provisioning statuses so the negative cases below start
        # from ACTIVE objects.
        self.set_lb_status(self.udp_lb_id)
        self.set_object_status(self.pool_repo, api_pool.get('id'))
        # Negative cases
        # Error during update pool with non-UDP type and cookie_name.
        expect_error_msg = (
            "Validation failure: Cookie names are not supported for %s"
            " pools.") % constants.PROTOCOL_UDP
        sess_p['type'] = constants.SESSION_PERSISTENCE_HTTP_COOKIE
        sess_p['cookie_name'] = 'test-cookie-name'
        new_pool = {'session_persistence': sess_p}
        res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                       self._build_body(new_pool), status=400,
                       expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # Error during update pool with source ip type and more options.
        expect_error_msg = (
            "Validation failure: session_persistence %s type for %s protocol "
            "only accepts: type, persistence_timeout, "
            "persistence_granularity.") % (
            constants.SESSION_PERSISTENCE_SOURCE_IP, constants.PROTOCOL_UDP)
        sess_p['type'] = constants.SESSION_PERSISTENCE_SOURCE_IP
        sess_p['cookie_name'] = 'test-cookie-name'
        sess_p['persistence_timeout'] = 4
        sess_p['persistence_granularity'] = "255.255.0.0"
        res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                       self._build_body(new_pool), status=400,
                       expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # Error during update pool with non-UDP session persistence type.
        sess_p['cookie_name'] = None
        for ty in [constants.SESSION_PERSISTENCE_APP_COOKIE,
                   constants.SESSION_PERSISTENCE_HTTP_COOKIE]:
            expect_error_msg = ("Validation failure: Session persistence of "
                                "type %s is not supported for %s protocol "
                                "pools.") % (ty, constants.PROTOCOL_UDP)
            sess_p['type'] = ty
            res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                           self._build_body(new_pool), status=400,
                           expect_errors=True)
            self.assertEqual(expect_error_msg, res.json['faultstring'])
            self.assert_correct_status(
                lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
def test_update_with_tls_enabled_only(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
self.assertFalse(api_pool['tls_enabled'])
new_pool = {'tls_enabled': True}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'),
lb_prov_status=constants.PENDING_UPDATE,
listener_prov_status=constants.PENDING_UPDATE,
pool_prov_status=constants.PENDING_UPDATE)
self.set_lb_status(self.lb_id)
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
self.assertTrue(response.get('tls_enabled'))
self.assertIsNotNone(response.get('created_at'))
self.assertIsNotNone(response.get('updated_at'))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=response.get('id'))
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_with_tls_enabled_only_on_pool_certs_exist(
            self, mock_cert_data):
        """Enable TLS on a pool that already has cert/CA/CRL refs set."""
        tls_container_ref = uuidutils.generate_uuid()
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        pool_cert = data_models.TLSContainer(certificate='pool cert')
        mock_cert_data.return_value = {'tls_cert': pool_cert,
                                       'sni_certs': [],
                                       'client_ca_cert': None}
        # Queue enough CA/CRL secret lookups for every validation pass;
        # the list must match the call order exactly.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_container_ref=tls_container_ref,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        self.assertFalse(api_pool['tls_enabled'])
        new_pool = {'tls_enabled': True}
        # Clear call records left over from the create before the PUT.
        self.cert_manager_mock().get_cert.reset_mock()
        self.cert_manager_mock().get_secret.reset_mock()
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertTrue(response.get('tls_enabled'))
        self.assertIsNotNone(response.get('created_at'))
        self.assertIsNotNone(response.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
@mock.patch(
'octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_with_tls_container_ref(self, mock_cert_data):
tls_container_ref = uuidutils.generate_uuid()
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
new_pool = {'tls_container_ref': tls_container_ref}
pool_cert = data_models.TLSContainer(certificate='pool cert')
mock_cert_data.return_value = {'tls_cert': pool_cert,
'sni_certs': [],
'client_ca_cert': None}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'),
lb_prov_status=constants.PENDING_UPDATE,
listener_prov_status=constants.PENDING_UPDATE,
pool_prov_status=constants.PENDING_UPDATE)
self.set_lb_status(self.lb_id)
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
self.assertEqual(tls_container_ref, response.get('tls_container_ref'))
self.assertIsNotNone(response.get('created_at'))
self.assertIsNotNone(response.get('updated_at'))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=response.get('id'))
def test_bad_update(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
new_pool = {'enabled': 'one'}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400)
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'))
@mock.patch('octavia.api.drivers.utils.call_provider')
def test_update_with_bad_provider(self, mock_provider):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
new_pool = {'name': 'new_name'}
mock_provider.side_effect = exceptions.ProviderDriverError(
prov='bad_driver', user_msg='broken')
response = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=500)
self.assertIn('Provider \'bad_driver\' reports error: broken',
response.json.get('faultstring'))
def test_bad_update_non_udp_pool_with_udp_fields(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
"persistence_timeout": 3,
"persistence_granularity": '255.255.255.0'}
self.set_lb_status(self.lb_id)
new_pool = {'session_persistence': sp}
expect_error_msg = ("Validation failure: persistence_timeout and "
"persistence_granularity is only for %s "
"protocol pools.") % constants.PROTOCOL_UDP
res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400,
expect_errors=True)
self.assertEqual(expect_error_msg, res.json['faultstring'])
self.assert_correct_status(
lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
    def test_update_with_bad_tls_container_ref(self):
        """An unresolvable pool TLS certificate ref fails the update (400)."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        tls_container_ref = uuidutils.generate_uuid()
        new_pool = {'tls_container_ref': tls_container_ref}
        # Make every cert-manager lookup fail so validation rejects the ref.
        self.cert_manager_mock().get_cert.side_effect = [Exception(
            "bad cert")]
        self.cert_manager_mock().get_secret.side_effect = [Exception(
            "bad secret")]
        resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                        self._build_body(new_pool), status=400).json
        # The offending ref should be named in the error message.
        self.assertIn(tls_container_ref, resp['faultstring'])
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_with_ca_and_crl(self, mock_cert_data):
        """Adding CA and CRL refs together via PUT stores both refs."""
        # Queue CA/CRL secret lookups; the list must match call order.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'ca_tls_container_ref': ca_tls_container_ref,
                    'crl_container_ref': crl_container_ref}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(ca_tls_container_ref,
                         response.get('ca_tls_container_ref'))
        self.assertEqual(crl_container_ref,
                         response.get('crl_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
def test_update_with_bad_ca_tls_container_ref(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
ca_tls_container_ref = uuidutils.generate_uuid()
new_pool = {'ca_tls_container_ref': ca_tls_container_ref}
self.cert_manager_mock().get_cert.side_effect = [Exception(
"bad cert")]
self.cert_manager_mock().get_secret.side_effect = [Exception(
"bad secret")]
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(ca_tls_container_ref, resp['faultstring'])
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_with_crl(self, mock_cert_data):
        """Replacing an existing CRL ref via PUT stores the new ref."""
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        # Queue CA/CRL secret lookups for the create and the update;
        # the list must match the call order exactly.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_crl_container_ref = uuidutils.generate_uuid()
        new_pool = {'crl_container_ref': new_crl_container_ref}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(new_crl_container_ref,
                         response.get('crl_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
def test_update_with_crl_only_negative_case(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
crl_container_ref = uuidutils.generate_uuid()
new_pool = {'crl_container_ref': crl_container_ref}
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(
'A CA reference is required to specify a certificate revocation '
'list.', resp['faultstring'])
def test_update_with_crl_only_none_ca(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
crl_container_ref = uuidutils.generate_uuid()
new_pool = {'ca_tls_container_ref': None,
'crl_container_ref': crl_container_ref}
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(
'A CA reference is required to specify a certificate revocation '
'list.', resp['faultstring'])
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_with_unreachable_crl(self, mock_cert_data):
crl_container_ref = uuidutils.generate_uuid()
new_crl_container_ref = uuidutils.generate_uuid()
ca_tls_container_ref = uuidutils.generate_uuid()
self.cert_manager_mock().get_secret.side_effect = [
sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id,
ca_tls_container_ref=ca_tls_container_ref,
crl_container_ref=crl_container_ref).get(self.root_tag)
self.set_lb_status(self.lb_id)
new_crl_container_ref = uuidutils.generate_uuid()
new_pool = {'crl_container_ref': new_crl_container_ref}
self.cert_manager_mock().get_secret.side_effect = [
exceptions.CertificateRetrievalException(
ref=new_crl_container_ref)]
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(new_crl_container_ref, resp['faultstring'])
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_unset_ca_cert(self, mock_cert_data):
self.cert_manager_mock().get_secret.return_value = (
sample_certs.X509_CA_CERT)
ca_tls_uuid = uuidutils.generate_uuid()
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id,
ca_tls_container_ref=ca_tls_uuid).get(self.root_tag)
self.set_lb_status(self.lb_id)
new_pool = {'ca_tls_container_ref': None}
body = self._build_body(new_pool)
listener_path = self.POOL_PATH.format(
pool_id=api_pool['id'])
api_pool = self.put(listener_path, body).json.get(self.root_tag)
self.assertIsNone(api_pool.get('ca_tls_container_ref'))
self.assertIsNone(api_pool.get('crl_container_ref'))
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_unset_ca_cert_with_crl(self, mock_cert_data):
        """Clearing the CA ref while a CRL ref remains must fail (400)."""
        # Queue CA/CRL secret lookups; the list must match call order.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_uuid = uuidutils.generate_uuid()
        crl_uuid = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_uuid,
            crl_container_ref=crl_uuid).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_pool = {'ca_tls_container_ref': None}
        body = self._build_body(new_pool)
        listener_path = self.POOL_PATH.format(
            pool_id=api_pool['id'])
        response = self.put(listener_path, body, status=400).json
        self.assertIn('A CA reference cannot be removed when a certificate '
                      'revocation list is present.', response['faultstring'])
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_unset_crl(self, mock_cert_data):
        """Clearing only the CRL ref succeeds and keeps the CA ref."""
        # Queue CA/CRL secret lookups; the list must match call order.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_uuid = uuidutils.generate_uuid()
        crl_uuid = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_uuid,
            crl_container_ref=crl_uuid).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_pool = {'crl_container_ref': None}
        body = self._build_body(new_pool)
        listener_path = self.POOL_PATH.format(
            pool_id=api_pool['id'])
        update_pool = self.put(listener_path, body).json.get(self.root_tag)
        # The CA ref survives; only the CRL ref is cleared.
        self.assertEqual(api_pool.get('ca_tls_container_ref'),
                         update_pool.get('ca_tls_container_ref'))
        self.assertIsNone(update_pool.get('crl_container_ref'))
def test_delete(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
# Set status to ACTIVE/ONLINE because set_lb_status did it in the db
api_pool['provisioning_status'] = constants.ACTIVE
api_pool['operating_status'] = constants.ONLINE
api_pool.pop('updated_at')
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
response.pop('updated_at')
self.assertEqual(api_pool, response)
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'),
lb_prov_status=constants.PENDING_UPDATE,
listener_prov_status=constants.PENDING_UPDATE,
pool_prov_status=constants.PENDING_DELETE)
    # Problems with TLS certs should not block a delete
    def test_delete_with_bad_tls_ref(self):
        """A pool with a dangling TLS cert ref must still be deletable."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
        # Plant an unresolvable cert ref directly in the DB, bypassing
        # API validation, then verify the delete still goes through.
        tls_uuid = uuidutils.generate_uuid()
        self.pool_repo.update(db_api.get_session(),
                              api_pool.get('id'),
                              tls_certificate_id=tls_uuid)
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_DELETE)
    def test_delete_authorize(self):
        """Verify a member of the owning project may delete the pool."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
        # Temporarily enable real auth checks (saved/restored below).
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin credentials scoped to the pool's own project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
        # Restore the original auth strategy for subsequent tests.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_DELETE)
    def test_delete_not_authorize(self):
        """Verify a delete from a foreign project is rejected with 403."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Set status to ACTIVE/ONLINE because set_lb_status did it in the db
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
        # Temporarily enable real auth checks (saved/restored below),
        # then act as a random, unrelated project.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                        status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        # A rejected delete must leave every status ACTIVE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.ACTIVE,
            listener_prov_status=constants.ACTIVE,
            pool_prov_status=constants.ACTIVE)
def test_bad_delete(self):
self.delete(self.POOL_PATH.format(
pool_id=uuidutils.generate_uuid()), status=404)
def test_delete_with_l7policy(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
self.create_l7policy(
self.listener_id,
constants.L7POLICY_ACTION_REDIRECT_TO_POOL,
redirect_pool_id=api_pool.get('id'))
self.set_lb_status(self.lb_id)
self.delete(self.POOL_PATH.format(
pool_id=api_pool.get('id')), status=409)
@mock.patch('octavia.api.drivers.utils.call_provider')
def test_delete_with_bad_provider(self, mock_provider):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
# Set status to ACTIVE/ONLINE because set_lb_status did it in the db
api_pool['provisioning_status'] = constants.ACTIVE
api_pool['operating_status'] = constants.ONLINE
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
self.assertIsNone(api_pool.pop('updated_at'))
self.assertIsNotNone(response.pop('updated_at'))
self.assertEqual(api_pool, response)
mock_provider.side_effect = exceptions.ProviderDriverError(
prov='bad_driver', user_msg='broken')
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
status=500)
    def test_create_with_session_persistence(self):
        """Creating a pool with APP_COOKIE persistence stores the SP data."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        # Create transitions the chain; pool starts OFFLINE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        self.assertIsNotNone(sess_p)
        self.assertEqual(constants.SESSION_PERSISTENCE_APP_COOKIE,
                         sess_p.get('type'))
        self.assertEqual('test_cookie_name', sess_p.get('cookie_name'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_with_bad_session_persistence(self):
sp = {"type": "persistence_type",
"cookie_name": "test_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
def test_create_with_bad_SP_type_HTTP_cookie(self):
sp = {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE,
"cookie_name": "test_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
def test_create_with_bad_SP_type_IP_cookie(self):
sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
"cookie_name": "test_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
    def test_create_with_bad_SP_cookie_name(self):
        """APP_COOKIE persistence with an invalid cookie_name is a 400."""
        # '@' is not a valid cookie-name character.
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "b@d_cookie_name"}
        lb_pool = {
            'loadbalancer_id': self.lb_id,
            'listener_id': self.listener_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'session_persistence': sp}
        self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
    def test_create_with_missing_cookie_name(self):
        """APP_COOKIE persistence without a cookie_name is rejected (400)."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE}
        lb_pool = {
            'loadbalancer_id': self.lb_id,
            'listener_id': self.listener_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'session_persistence': sp}
        self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
    def test_add_session_persistence(self):
        """PUT can add session persistence to a pool created without one."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name",
              'persistence_granularity': None,
              'persistence_timeout': None}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'session_persistence': sp}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sp, response.get('session_persistence'))
        # The update cascades PENDING_UPDATE to LB, listener and pool.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
    def test_update_session_persistence(self):
        """PUT can switch persistence from APP_COOKIE to SOURCE_IP."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        # SOURCE_IP persistence carries no cookie, so clear the name too.
        sess_p['cookie_name'] = None
        sess_p['type'] = constants.SESSION_PERSISTENCE_SOURCE_IP
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sess_p, response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
    def test_update_preserve_session_persistence(self):
        """Updating an unrelated field leaves session persistence intact."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name",
              'persistence_granularity': None,
              'persistence_timeout': None}
        optionals = {"listener_id": self.listener_id,
                     "name": "name", "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Only the name changes; session_persistence is not in the request.
        new_pool = {'name': 'update_name'}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sp, response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
    def test_update_bad_session_persistence(self):
        """PUT with an unknown session persistence type is rejected (400)."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['type'] = 'fake_type'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_bad_SP_type_HTTP_cookie(self):
        """Switching to HTTP_COOKIE with a cookie_name is rejected (400)."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_HTTP_COOKIE
        # cookie_name is not allowed for HTTP_COOKIE persistence.
        sess_p['cookie_name'] = 'test_cookie_name'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_bad_SP_type_IP_cookie(self):
        """Switching to SOURCE_IP with a cookie_name is rejected (400)."""
        sp = {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_SOURCE_IP
        # cookie_name is not allowed for SOURCE_IP persistence.
        sess_p['cookie_name'] = 'test_cookie_name'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_bad_SP_cookie_name(self):
        """Switching to APP_COOKIE with an invalid cookie_name is a 400."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_APP_COOKIE
        # '@' is not a valid cookie-name character.
        sess_p['cookie_name'] = 'b@d_cookie_name'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_missing_SP_cookie_name(self):
        """Switching to APP_COOKIE without a cookie_name is a 400."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_APP_COOKIE
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_delete_with_session_persistence(self):
        """Deleting a pool that has session persistence cascades statuses."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
        # Pool goes PENDING_DELETE; its parents go PENDING_UPDATE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_DELETE)
    def test_delete_session_persistence(self):
        """PUT with session_persistence=None removes persistence."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Explicit null clears the sub-resource.
        new_sp = {"pool": {"session_persistence": None}}
        response = self.put(self.POOL_PATH.format(
            pool_id=api_pool.get('id')), new_sp).json.get(self.root_tag)
        self.assertIsNone(response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
def test_create_when_lb_pending_update(self):
self.put(self.LB_PATH.format(lb_id=self.lb_id),
{'loadbalancer': {'name': 'test_name_change'}})
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'project_id': self.project_id}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=409)
    def test_update_when_lb_pending_update(self):
        """Pool update is rejected (409) while the LB is PENDING_UPDATE."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Put the load balancer into a mutating state via a name change.
        self.put(self.LB_PATH.format(lb_id=self.lb_id),
                 {'loadbalancer': {'name': 'test_name_change'}})
        new_pool = {'admin_state_up': False}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=409)
    def test_delete_when_lb_pending_update(self):
        """Pool delete is rejected (409) while the LB is PENDING_UPDATE."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Put the load balancer into a mutating state via a name change.
        self.put(self.LB_PATH.format(lb_id=self.lb_id),
                 {"loadbalancer": {'name': 'test_name_change'}})
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                    status=409)
def test_create_when_lb_pending_delete(self):
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
params={'cascade': "true"})
new_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'project_id': self.project_id}
self.post(self.POOLS_PATH, self._build_body(new_pool), status=409)
    def test_update_when_lb_pending_delete(self):
        """Pool update is rejected (409) while the LB is PENDING_DELETE."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Cascade-delete the load balancer to move it to a deleting state.
        self.delete(self.LB_PATH.format(lb_id=self.lb_id),
                    params={'cascade': "true"})
        new_pool = {'admin_state_up': False}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=409)
    def test_delete_when_lb_pending_delete(self):
        """Pool delete is rejected (409) while the LB is PENDING_DELETE."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # Cascade-delete the load balancer to move it to a deleting state.
        self.delete(self.LB_PATH.format(lb_id=self.lb_id),
                    params={'cascade': "true"})
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                    status=409)
    def test_update_already_deleted(self):
        """Updating a pool under a DELETED load balancer returns 404."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        # This updates the child objects
        self.set_lb_status(self.lb_id, status=constants.DELETED)
        new_pool = {'admin_state_up': False}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=404)
    def test_delete_already_deleted(self):
        """Deleting a pool under a DELETED load balancer returns 404."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        # This updates the child objects
        self.set_lb_status(self.lb_id, status=constants.DELETED)
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                    status=404)
| 45.799739 | 79 | 0.632233 |
import mock
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_utils import uuidutils
from octavia.common import constants
import octavia.common.context
from octavia.common import data_models
from octavia.common import exceptions
from octavia.db import api as db_api
from octavia.tests.common import sample_certs
from octavia.tests.functional.api.v2 import base
class TestPool(base.BaseAPITest):
    """Functional tests for the v2 pool API endpoints."""
    # JSON root keys used by v2 API responses for single/list/link payloads.
    root_tag = 'pool'
    root_tag_list = 'pools'
    root_tag_links = 'pools_links'
    def setUp(self):
        """Create an HTTP load balancer + listener, then UDP resources."""
        super(TestPool, self).setUp()
        self.lb = self.create_load_balancer(
            uuidutils.generate_uuid()).get('loadbalancer')
        self.lb_id = self.lb.get('id')
        self.project_id = self.lb.get('project_id')
        # Simulate the worker finishing so the LB becomes ACTIVE.
        self.set_lb_status(self.lb_id)
        self.listener = self.create_listener(
            constants.PROTOCOL_HTTP, 80,
            self.lb_id).get('listener')
        self.listener_id = self.listener.get('id')
        self.set_lb_status(self.lb_id)
        self._setup_udp_lb_resources()
    def _setup_udp_lb_resources(self):
        """Create a UDP load balancer and listener for UDP-specific tests."""
        self.udp_lb = self.create_load_balancer(uuidutils.generate_uuid()).get(
            'loadbalancer')
        self.udp_lb_id = self.udp_lb.get('id')
        self.set_lb_status(self.udp_lb_id)
        self.udp_listener = self.create_listener(
            constants.PROTOCOL_UDP, 8888,
            self.udp_lb_id).get('listener')
        self.udp_listener_id = self.udp_listener.get('id')
        self.set_lb_status(self.udp_lb_id)
    def test_get(self):
        """GET returns the pool as created, ACTIVE/ONLINE after LB sync."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id, tags=['test_tag']).get(self.root_tag)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        # updated_at changes when the status is synced, so exclude it.
        api_pool.pop('updated_at')
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
    def test_get_authorized(self):
        """GET succeeds for a member of the pool's own project."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        self.set_lb_status(lb_id=self.lb_id)
        # Enable real policy enforcement for the duration of the request.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                response = self.get(self.POOL_PATH.format(
                    pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # Restore the original auth strategy for subsequent tests.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
    def test_get_not_authorized(self):
        """GET from a different project is rejected with 403."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        self.set_lb_status(lb_id=self.lb_id)
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # Impersonate a random, unrelated project.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            response = self.get(self.POOL_PATH.format(
                pool_id=api_pool.get('id')), status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response.json)
def test_get_deleted_gives_404(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_object_status(self.pool_repo, api_pool.get('id'),
provisioning_status=constants.DELETED)
self.get(self.POOL_PATH.format(pool_id=api_pool.get('id')), status=404)
def test_bad_get(self):
self.get(self.POOL_PATH.format(pool_id=uuidutils.generate_uuid()),
status=404)
    def test_get_all(self):
        """Listing returns the single created pool including its tags."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id, tags=['test_tag']).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.assertIsInstance(pools, list)
        self.assertEqual(1, len(pools))
        self.assertEqual(api_pool.get('id'), pools[0].get('id'))
        self.assertEqual(['test_tag'], pools[0]['tags'])
    def test_get_all_hides_deleted(self):
        """Pools with DELETED provisioning status drop out of listings."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        response = self.get(self.POOLS_PATH)
        objects = response.json.get(self.root_tag_list)
        self.assertEqual(len(objects), 1)
        # Mark the pool deleted directly in the DB repository.
        self.set_object_status(self.pool_repo, api_pool.get('id'),
                               provisioning_status=constants.DELETED)
        response = self.get(self.POOLS_PATH)
        objects = response.json.get(self.root_tag_list)
        self.assertEqual(len(objects), 0)
    def test_get_all_admin(self):
        """Admin listing returns pools regardless of owning project."""
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        pool1 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool2 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool3 = self.create_pool(
            lb1_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.assertEqual(3, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool1.get('id'), pool1.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool2.get('id'), pool2.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
    def test_get_all_non_admin(self):
        """A project member only sees the pools owned by their project."""
        # Two pools in a foreign project...
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        # ...and one pool in the caller's own project.
        pool3 = self.create_pool(
            self.lb_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings',
                         auth_strategy=constants.KEYSTONE)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               pool3['project_id']):
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(1, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
    def test_get_all_non_admin_global_observer(self):
        """The global_observer role can list pools of every project."""
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        pool1 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool2 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool3 = self.create_pool(
            lb1_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_global_observer'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                pools = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        # All three pools are visible even though they belong to a
        # different project than the caller.
        self.assertEqual(3, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool1.get('id'), pool1.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool2.get('id'), pool2.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
    def test_get_all_not_authorized(self):
        """Listing from an unrelated project is rejected with 403."""
        project_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        self.create_pool(
            lb1_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # Impersonate a random project with no credential overrides.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            pools = self.get(self.POOLS_PATH, status=403).json
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, pools)
    def test_get_by_project_id(self):
        """Listing filtered by project_id returns only that project's pools."""
        project1_id = uuidutils.generate_uuid()
        project2_id = uuidutils.generate_uuid()
        lb1 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb1',
                                        project_id=project1_id)
        lb1_id = lb1.get('loadbalancer').get('id')
        self.set_lb_status(lb1_id)
        lb2 = self.create_load_balancer(uuidutils.generate_uuid(), name='lb2',
                                        project_id=project2_id)
        lb2_id = lb2.get('loadbalancer').get('id')
        self.set_lb_status(lb2_id)
        # Two pools in project 1, one in project 2.
        pool1 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool2 = self.create_pool(
            lb1_id, constants.PROTOCOL_HTTPS,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb1_id)
        pool3 = self.create_pool(
            lb2_id, constants.PROTOCOL_TCP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.set_lb_status(lb2_id)
        pools = self.get(
            self.POOLS_PATH,
            params={'project_id': project1_id}).json.get(self.root_tag_list)
        self.assertEqual(2, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool1.get('id'), pool1.get('protocol')),
                      pool_id_protocols)
        self.assertIn((pool2.get('id'), pool2.get('protocol')),
                      pool_id_protocols)
        pools = self.get(
            self.POOLS_PATH,
            params={'project_id': project2_id}).json.get(self.root_tag_list)
        self.assertEqual(1, len(pools))
        pool_id_protocols = [(p.get('id'), p.get('protocol')) for p in pools]
        self.assertIn((pool3.get('id'), pool3.get('protocol')),
                      pool_id_protocols)
def test_get_all_with_listener(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
response = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
self.assertIsInstance(response, list)
self.assertEqual(1, len(response))
self.assertEqual(api_pool.get('id'), response[0].get('id'))
    def test_get_all_sorted(self):
        """The sort parameter orders results; asc is the reverse of desc."""
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1')
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool2')
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool3')
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOLS_PATH,
                            params={'sort': 'name:desc'})
        pools_desc = response.json.get(self.root_tag_list)
        response = self.get(self.POOLS_PATH,
                            params={'sort': 'name:asc'})
        pools_asc = response.json.get(self.root_tag_list)
        self.assertEqual(3, len(pools_desc))
        self.assertEqual(3, len(pools_asc))
        pool_id_names_desc = [(pool.get('id'), pool.get('name'))
                              for pool in pools_desc]
        pool_id_names_asc = [(pool.get('id'), pool.get('name'))
                             for pool in pools_asc]
        self.assertEqual(pool_id_names_asc,
                         list(reversed(pool_id_names_desc)))
def test_get_all_limited(self):
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool1')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool2')
self.set_lb_status(lb_id=self.lb_id)
self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
name='pool3')
self.set_lb_status(lb_id=self.lb_id)
first_two = self.get(self.POOLS_PATH, params={'limit': 2}).json
objs = first_two[self.root_tag_list]
links = first_two[self.root_tag_links]
self.assertEqual(2, len(objs))
self.assertEqual(1, len(links))
self.assertEqual('next', links[0]['rel'])
third = self.get(self.POOLS_PATH, params={
'limit': 2,
'marker': first_two[self.root_tag_list][1]['id']}).json
objs = third[self.root_tag_list]
links = third[self.root_tag_links]
self.assertEqual(1, len(objs))
self.assertEqual(1, len(links))
self.assertEqual('previous', links[0]['rel'])
middle = self.get(self.POOLS_PATH, params={
'limit': 1,
'marker': first_two[self.root_tag_list][0]['id']}).json
objs = middle[self.root_tag_list]
links = middle[self.root_tag_links]
self.assertEqual(1, len(objs))
self.assertEqual(2, len(links))
self.assertItemsEqual(['previous', 'next'], [l['rel'] for l in links])
    def test_get_all_fields_filter(self):
        """The fields parameter restricts which attributes are returned."""
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1')
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool2')
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool3')
        self.set_lb_status(lb_id=self.lb_id)
        pools = self.get(self.POOLS_PATH, params={
            'fields': ['id', 'project_id']}).json
        for pool in pools['pools']:
            # Only the requested fields may appear in each element.
            self.assertIn(u'id', pool)
            self.assertIn(u'project_id', pool)
            self.assertNotIn(u'description', pool)
    def test_get_one_fields_filter(self):
        """The fields parameter also filters a single-pool GET response."""
        pool1 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        pool = self.get(
            self.POOL_PATH.format(pool_id=pool1.get('id')),
            params={'fields': ['id', 'project_id']}).json.get(self.root_tag)
        self.assertIn(u'id', pool)
        self.assertIn(u'project_id', pool)
        self.assertNotIn(u'description', pool)
    def test_get_all_filter(self):
        """Listing can filter on id and healthmonitor_id simultaneously."""
        po1 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Attach a health monitor to the first pool only.
        hm = self.create_health_monitor(po1['id'],
                                        constants.HEALTH_MONITOR_HTTP,
                                        1, 1, 1, 1).get('healthmonitor')
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool2').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool3').get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        pools = self.get(self.POOLS_PATH, params={
            'id': po1['id'], 'healthmonitor_id': hm['id']}).json
        self.assertEqual(1, len(pools['pools']))
        self.assertEqual(po1['id'],
                         pools['pools'][0]['id'])
    def test_get_all_tags_filter(self):
        """tags/tags-any/not-tags/not-tags-any list filters work together."""
        po1 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool1',
            tags=['test_tag1', 'test_tag2']
        ).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        po2 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool2',
            tags=['test_tag2', 'test_tag3']
        ).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        po3 = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            name='pool3',
            tags=['test_tag4', 'test_tag5']
        ).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # 'tags' (single): pools carrying the tag.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(2, len(pos))
        self.assertEqual(
            [po1.get('id'), po2.get('id')],
            [po.get('id') for po in pos]
        )
        # 'tags' (list): pools carrying ALL listed tags.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': ['test_tag2', 'test_tag3']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(1, len(pos))
        self.assertEqual(
            [po2.get('id')],
            [po.get('id') for po in pos]
        )
        # 'tags-any': pools carrying ANY of the tags.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags-any': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(2, len(pos))
        self.assertEqual(
            [po1.get('id'), po2.get('id')],
            [po.get('id') for po in pos]
        )
        # 'not-tags': pools NOT carrying the tag.
        pos = self.get(
            self.POOLS_PATH,
            params={'not-tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(1, len(pos))
        self.assertEqual(
            [po3.get('id')],
            [po.get('id') for po in pos]
        )
        # 'not-tags-any': pools carrying NONE of the listed tags.
        pos = self.get(
            self.POOLS_PATH,
            params={'not-tags-any': ['test_tag2', 'test_tag4']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(0, len(pos))
        # Positive filters combine (AND semantics between parameters).
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': 'test_tag2',
                    'tags-any': ['test_tag1', 'test_tag3']}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(2, len(pos))
        self.assertEqual(
            [po1.get('id'), po2.get('id')],
            [po.get('id') for po in pos]
        )
        # Contradictory filters yield an empty result.
        pos = self.get(
            self.POOLS_PATH,
            params={'tags': 'test_tag2', 'not-tags': 'test_tag2'}
        ).json.get(self.root_tag_list)
        self.assertIsInstance(pos, list)
        self.assertEqual(0, len(pos))
def test_empty_get_all(self):
response = self.get(self.POOLS_PATH).json.get(self.root_tag_list)
self.assertIsInstance(response, list)
self.assertEqual(0, len(response))
def test_create(self):
        """Create a pool on a listener and verify response and statuses."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tags=['test_tag']).get(self.root_tag)
        # Parent objects go pending while the pool is still being created.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(['test_tag'], pool['tags'])
        self.assertEqual(constants.PROTOCOL_HTTP, pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         pool.get('lb_algorithm'))
        self.assertIsNone(pool.get('updated_at'))
        self.assertIsNotNone(pool.get('created_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'))
def test_create_authorized(self):
        """An authorized member of the owning project can create a pool."""
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin member credentials for the pool's own project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=override_credentials):
                api_pool = self.create_pool(
                    self.lb_id,
                    constants.PROTOCOL_HTTP,
                    constants.LB_ALGORITHM_ROUND_ROBIN,
                    listener_id=self.listener_id).get(self.root_tag)
        # Restore the original auth strategy before asserting results.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.PROTOCOL_HTTP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        self.assertIsNotNone(api_pool.get('created_at'))
        self.assertIsNone(api_pool.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_not_authorized(self):
        """Pool creation from a foreign project is rejected with 403."""
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        saved_auth = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            response = self.create_pool(
                self.lb_id, constants.PROTOCOL_HTTP,
                constants.LB_ALGORITHM_ROUND_ROBIN,
                listener_id=self.listener_id, status=403)
        self.conf.config(group='api_settings', auth_strategy=saved_auth)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, response)
def test_create_with_proxy_protocol(self):
        """PROXY-protocol pools are created like any other pool."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_PROXY,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         pool.get('lb_algorithm'))
        self.assertEqual(constants.PROTOCOL_PROXY, pool.get('protocol'))
        self.assertIsNone(pool.get('updated_at'))
        self.assertIsNotNone(pool.get('created_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'))
def test_create_sans_listener(self):
        """A pool created without a listener leaves the listener ACTIVE."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN).get(self.root_tag)
        self.assertEqual(constants.PROTOCOL_HTTP, pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         pool.get('lb_algorithm'))
        # The listener stays ACTIVE because the pool was not attached to it.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.ACTIVE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
def test_create_sans_loadbalancer_id(self):
        """The LB is derived from the listener when no LB id is supplied."""
        pool = self.create_pool(
            None, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.assertEqual(constants.PROTOCOL_HTTP, pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         pool.get('lb_algorithm'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
def test_create_with_listener_id_in_pool_dict(self):
        """Passing the listener id inside the pool body attaches the pool."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         pool.get('lb_algorithm'))
        self.assertEqual(constants.PROTOCOL_HTTP, pool.get('protocol'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'))
def test_create_with_project_id(self):
        """A project_id supplied at create time is stored on the pool."""
        pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            project_id=self.project_id).get(self.root_tag)
        self.assertEqual(self.project_id, pool.get('project_id'))
def test_create_udp_case_source_ip(self):
        """UDP pool accepts SOURCE_IP persistence with timeout/granularity."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_UDP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.udp_listener_id,
            session_persistence=sp).get(self.root_tag)
        self.assertEqual(constants.PROTOCOL_UDP, api_pool.get('protocol'))
        self.assertEqual(constants.LB_ALGORITHM_ROUND_ROBIN,
                         api_pool.get('lb_algorithm'))
        self.assertEqual(constants.SESSION_PERSISTENCE_SOURCE_IP,
                         api_pool.get('session_persistence')['type'])
        self.assertEqual(3, api_pool.get(
            'session_persistence')['persistence_timeout'])
        self.assertEqual('255.255.255.0', api_pool.get(
            'session_persistence')['persistence_granularity'])
        # cookie_name is not part of SOURCE_IP persistence, so it is None.
        self.assertIsNone(api_pool.get(
            'session_persistence')['cookie_name'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
def test_create_with_tls_enabled_only(self):
        """tls_enabled can be set at create time without any cert refs."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_enabled=True).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertTrue(pool.get('tls_enabled'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'))
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_create_with_tls_container_ref(self, mock_cert_data):
        """Pool create stores a client TLS container reference.

        Certificate parsing is mocked so no real certificate is loaded.
        """
        tls_container_ref = uuidutils.generate_uuid()
        pool_cert = data_models.TLSContainer(certificate='pool cert')
        mock_cert_data.return_value = {'tls_cert': pool_cert,
                                       'sni_certs': [],
                                       'client_ca_cert': None}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_container_ref=tls_container_ref).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(tls_container_ref, api_pool.get('tls_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_create_with_ca_and_crl(self, mock_cert_data):
        """Pool create stores CA certificate and CRL references.

        get_secret is primed with enough CA cert / CRL values for the
        request to pass certificate validation.
        """
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref).get(self.root_tag)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        self.assertEqual(ca_tls_container_ref,
                         api_pool.get('ca_tls_container_ref'))
        self.assertEqual(crl_container_ref,
                         api_pool.get('crl_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_with_bad_tls_container_ref(self):
        """An unresolvable TLS container ref fails create with 400."""
        tls_container_ref = uuidutils.generate_uuid()
        # Make every cert-manager lookup fail for this reference.
        self.cert_manager_mock().get_cert.side_effect = [Exception(
            "bad cert")]
        self.cert_manager_mock().get_secret.side_effect = [Exception(
            "bad secret")]
        api_pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_container_ref=tls_container_ref, status=400)
        self.assertIn(tls_container_ref, api_pool['faultstring'])
def test_create_with_bad_ca_tls_container_ref(self):
        """An unresolvable CA certificate ref fails create with 400."""
        ca_tls_container_ref = uuidutils.generate_uuid()
        # Make every cert-manager lookup fail for this reference.
        self.cert_manager_mock().get_cert.side_effect = [Exception(
            "bad ca cert")]
        self.cert_manager_mock().get_secret.side_effect = [Exception(
            "bad ca secret")]
        api_pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref, status=400)
        self.assertIn(ca_tls_container_ref, api_pool['faultstring'])
def test_create_with_unreachable_crl(self):
        """A CRL reference that cannot be fetched fails create with 400."""
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        # First get_cert call succeeds; the next one raises, and get_secret
        # fails outright — the faultstring must name the CRL reference.
        self.cert_manager_mock().get_cert.side_effect = [
            'cert 1', Exception('unknow/bad cert')]
        self.cert_manager_mock().get_secret.side_effect = [Exception(
            'bad secret')]
        api_pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref, status=400)
        self.assertIn(crl_container_ref, api_pool['faultstring'])
def test_create_with_crl_only(self):
        """A CRL ref without a CA certificate ref is rejected with 400."""
        crl_ref = uuidutils.generate_uuid()
        err_pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            crl_container_ref=crl_ref, status=400)
        self.assertIn(
            'A CA certificate reference is required to specify a '
            'revocation list.', err_pool['faultstring'])
def test_negative_create_udp_case(self):
        """Invalid session persistence combinations are rejected.

        Covers: cookie names on UDP pools, cookie-based persistence types
        on UDP pools, extra keys for SOURCE_IP persistence on UDP pools,
        and UDP-only persistence fields on HTTP pools.
        """
        # Cookie names are rejected for UDP pools.
        sp = {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE,
              "cookie_name": 'test-cookie-name'}
        req_dict = {
            'listener_id': self.udp_listener_id,
            'protocol': constants.PROTOCOL_UDP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'session_persistence': sp}
        expect_error_msg = ("Validation failure: Cookie names are not "
                            "supported for %s pools.") % constants.PROTOCOL_UDP
        res = self.post(self.POOLS_PATH, self._build_body(req_dict),
                        status=400, expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # Cookie-based persistence types are rejected for UDP pools.
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        req_dict = {
            'listener_id': self.udp_listener_id,
            'protocol': constants.PROTOCOL_UDP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'session_persistence': None}
        for type in [constants.SESSION_PERSISTENCE_HTTP_COOKIE,
                     constants.SESSION_PERSISTENCE_APP_COOKIE]:
            expect_error_msg = ("Validation failure: Session persistence of "
                                "type %s is not supported for %s protocol "
                                "pools.") % (type, constants.PROTOCOL_UDP)
            sp.update({'type': type})
            req_dict['session_persistence'] = sp
            res = self.post(self.POOLS_PATH, self._build_body(req_dict),
                            status=400,
                            expect_errors=True)
            self.assertEqual(expect_error_msg, res.json['faultstring'])
            self.assert_correct_status(
                lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # SOURCE_IP persistence on UDP only accepts a fixed key set.
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0',
              "cookie_name": 'test-cookie-name'}
        req_dict = {
            'listener_id': self.udp_listener_id,
            'protocol': constants.PROTOCOL_UDP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'session_persistence': sp}
        expect_error_msg = (
            "Validation failure: session_persistence %s type for %s "
            "protocol only accepts: type, persistence_timeout, "
            "persistence_granularity.") % (
            constants.SESSION_PERSISTENCE_SOURCE_IP, constants.PROTOCOL_UDP)
        res = self.post(self.POOLS_PATH, self._build_body(req_dict),
                        status=400, expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # UDP-only persistence fields are rejected for HTTP pools.
        sps = [{"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
                "persistence_timeout": 3,
                "persistence_granularity": '255.255.255.0'},
               {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
                "persistence_timeout": 3,
                "persistence_granularity": '255.255.255.0'}]
        req_dict = {
            'listener_id': self.listener_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN}
        expect_error_msg = ("Validation failure: persistence_timeout and "
                            "persistence_granularity is only for %s protocol "
                            "pools.") % constants.PROTOCOL_UDP
        for s in sps:
            req_dict.update({'session_persistence': s})
            res = self.post(self.POOLS_PATH, self._build_body(req_dict),
                            status=400, expect_errors=True)
            self.assertEqual(expect_error_msg, res.json['faultstring'])
            self.assert_correct_status(
                lb_id=self.lb_id, listener_id=self.listener_id)
def test_bad_create(self):
        """A pool body missing required fields returns 400, no changes."""
        self.post(self.POOLS_PATH, self._build_body({'name': 'test1'}),
                  status=400)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id)
def test_create_with_listener_with_default_pool_id_set(self):
        """A listener that already has a default pool rejects a second one."""
        self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id)
        self.set_lb_status(self.lb_id)
        duplicate = {
            'loadbalancer_id': self.lb_id,
            'listener_id': self.listener_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'project_id': self.project_id}
        # A second default pool on the same listener is a conflict.
        self.post(self.POOLS_PATH, self._build_body(duplicate), status=409)
def test_create_bad_protocol(self):
        """An unknown protocol value fails validation with 400."""
        bad_pool = {
            'loadbalancer_id': self.lb_id,
            'protocol': 'STUPID_PROTOCOL',
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN}
        self.post(self.POOLS_PATH, self._build_body(bad_pool), status=400)
@mock.patch('octavia.api.drivers.utils.call_provider')
def test_create_with_bad_provider(self, mock_provider):
        """Provider driver errors during create surface as a 500."""
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        request = {
            'loadbalancer_id': self.lb_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'project_id': self.project_id}
        response = self.post(self.POOLS_PATH, self._build_body(request),
                             status=500)
        self.assertIn('Provider \'bad_driver\' reports error: broken',
                      response.json.get('faultstring'))
def test_create_over_quota(self):
        """Creation beyond the pool quota is rejected with 403."""
        self.start_quota_mock(data_models.Pool)
        request = {
            'loadbalancer_id': self.lb_id,
            'protocol': constants.PROTOCOL_HTTP,
            'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
            'project_id': self.project_id}
        self.post(self.POOLS_PATH, self._build_body(request), status=403)
def test_update(self):
        """PUT replaces name and tags and cycles provisioning statuses."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id, tags=['old_tag']).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        self.put(self.POOL_PATH.format(pool_id=pool.get('id')),
                 self._build_body({'name': 'new_name', 'tags': ['new_tag']}))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        updated = self.get(self.POOL_PATH.format(
            pool_id=pool.get('id'))).json.get(self.root_tag)
        self.assertEqual('new_name', updated.get('name'))
        self.assertEqual(['new_tag'], updated['tags'])
        self.assertIsNotNone(updated.get('created_at'))
        self.assertIsNotNone(updated.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=updated.get('id'))
def test_update_authorized(self):
        """An authorized member of the owning project can update a pool."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'name': 'new_name'}
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Non-admin member credentials for the pool's own project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                "oslo_context.context.RequestContext.to_policy_values",
                return_value=override_credentials):
                self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                         self._build_body(new_pool))
        # Restore the original auth strategy before asserting results.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual('new_name', response.get('name'))
        self.assertIsNotNone(response.get('created_at'))
        self.assertIsNotNone(response.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
def test_update_not_authorized(self):
        """A pool update from a foreign project is rejected with 403."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'name': 'new_name'}
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # Request context carries a random (non-owner) project id.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            api_pool = self.put(
                self.POOL_PATH.format(pool_id=api_pool.get('id')),
                self._build_body(new_pool), status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assertEqual(self.NOT_AUTHORIZED_BODY, api_pool.json)
        self.assert_correct_lb_status(self.lb_id, constants.ONLINE,
                                      constants.ACTIVE)
def test_update_get_session_persistence_from_db_if_no_request(self):
        """PUT without a persistence 'type' keeps the type stored in the DB.

        The update body omits the persistence type; afterwards the pool must
        still report the original SOURCE_IP type together with the updated
        timeout/granularity values.
        """
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        optionals = {"listener_id": self.udp_listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_UDP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.udp_lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        # Drop 'type' from the request; the API should fall back to the DB.
        ty = sess_p.pop('type')
        sess_p['persistence_timeout'] = 4
        sess_p['persistence_granularity'] = "255.255.0.0"
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        # Put the type back for the equality comparison below.
        sess_p['type'] = ty
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sess_p, response.get('session_persistence'))
        self.assert_correct_status(
            listener_id=self.udp_listener_id,
            pool_id=api_pool.get('id'),
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
def test_update_udp_case_source_ip(self):
        """Update SOURCE_IP persistence on a UDP pool, then bad variants.

        First verifies timeout/granularity can be updated, then checks the
        validation errors for cookie names, extra SOURCE_IP keys and
        cookie-based persistence types on a UDP pool.
        """
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        optionals = {"listener_id": self.udp_listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            None,
            constants.PROTOCOL_UDP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.udp_lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        sess_p['persistence_timeout'] = 4
        sess_p['persistence_granularity'] = "255.255.0.0"
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sess_p, response.get('session_persistence'))
        self.assert_correct_status(
            listener_id=self.udp_listener_id,
            pool_id=api_pool.get('id'),
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.udp_lb_id)
        self.set_object_status(self.pool_repo, api_pool.get('id'))
        # Cookie names are invalid for UDP pools.
        expect_error_msg = (
            "Validation failure: Cookie names are not supported for %s"
            " pools.") % constants.PROTOCOL_UDP
        sess_p['type'] = constants.SESSION_PERSISTENCE_HTTP_COOKIE
        sess_p['cookie_name'] = 'test-cookie-name'
        new_pool = {'session_persistence': sess_p}
        res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                       self._build_body(new_pool), status=400,
                       expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # SOURCE_IP persistence only accepts a fixed key set; new_pool still
        # references sess_p, so mutating it updates the request body.
        expect_error_msg = (
            "Validation failure: session_persistence %s type for %s protocol "
            "only accepts: type, persistence_timeout, "
            "persistence_granularity.") % (
            constants.SESSION_PERSISTENCE_SOURCE_IP, constants.PROTOCOL_UDP)
        sess_p['type'] = constants.SESSION_PERSISTENCE_SOURCE_IP
        sess_p['cookie_name'] = 'test-cookie-name'
        sess_p['persistence_timeout'] = 4
        sess_p['persistence_granularity'] = "255.255.0.0"
        res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                       self._build_body(new_pool), status=400,
                       expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        self.assert_correct_status(
            lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
        # Cookie-based persistence types are invalid for UDP pools.
        sess_p['cookie_name'] = None
        for ty in [constants.SESSION_PERSISTENCE_APP_COOKIE,
                   constants.SESSION_PERSISTENCE_HTTP_COOKIE]:
            expect_error_msg = ("Validation failure: Session persistence of "
                                "type %s is not supported for %s protocol "
                                "pools.") % (ty, constants.PROTOCOL_UDP)
            sess_p['type'] = ty
            res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                           self._build_body(new_pool), status=400,
                           expect_errors=True)
            self.assertEqual(expect_error_msg, res.json['faultstring'])
            self.assert_correct_status(
                lb_id=self.udp_lb_id, listener_id=self.udp_listener_id)
def test_update_with_tls_enabled_only(self):
        """Flipping tls_enabled via PUT is persisted on the pool."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # TLS starts disabled on the freshly created pool.
        self.assertFalse(pool['tls_enabled'])
        self.put(self.POOL_PATH.format(pool_id=pool.get('id')),
                 self._build_body({'tls_enabled': True}))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        updated = self.get(self.POOL_PATH.format(
            pool_id=pool.get('id'))).json.get(self.root_tag)
        self.assertTrue(updated.get('tls_enabled'))
        self.assertIsNotNone(updated.get('created_at'))
        self.assertIsNotNone(updated.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=updated.get('id'))
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_with_tls_enabled_only_on_pool_certs_exist(
        self, mock_cert_data):
        """Enabling TLS on a pool that already holds cert refs works.

        get_secret is primed with enough CA cert / CRL values for both the
        create and the update requests to pass certificate validation.
        """
        tls_container_ref = uuidutils.generate_uuid()
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        pool_cert = data_models.TLSContainer(certificate='pool cert')
        mock_cert_data.return_value = {'tls_cert': pool_cert,
                                       'sni_certs': [],
                                       'client_ca_cert': None}
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            tls_container_ref=tls_container_ref,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        self.assertFalse(api_pool['tls_enabled'])
        new_pool = {'tls_enabled': True}
        # Reset call tracking so only the update's cert activity remains.
        self.cert_manager_mock().get_cert.reset_mock()
        self.cert_manager_mock().get_secret.reset_mock()
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertTrue(response.get('tls_enabled'))
        self.assertIsNotNone(response.get('created_at'))
        self.assertIsNotNone(response.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
@mock.patch(
    'octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_with_tls_container_ref(self, mock_cert_data):
        """PUT can attach a client TLS container reference to a pool."""
        tls_container_ref = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'tls_container_ref': tls_container_ref}
        # Mocked cert parse result so no real certificate is loaded.
        pool_cert = data_models.TLSContainer(certificate='pool cert')
        mock_cert_data.return_value = {'tls_cert': pool_cert,
                                       'sni_certs': [],
                                       'client_ca_cert': None}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(tls_container_ref, response.get('tls_container_ref'))
        self.assertIsNotNone(response.get('created_at'))
        self.assertIsNotNone(response.get('updated_at'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
def test_bad_update(self):
        """A non-boolean 'enabled' value fails validation with 400."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        self.put(self.POOL_PATH.format(pool_id=pool.get('id')),
                 self._build_body({'enabled': 'one'}), status=400)
        # Nothing may change state after the failed update.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=pool.get('id'))
@mock.patch('octavia.api.drivers.utils.call_provider')
def test_update_with_bad_provider(self, mock_provider):
        """Provider driver errors during update surface as a 500."""
        pool = self.create_pool(
            self.lb_id, constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        response = self.put(self.POOL_PATH.format(pool_id=pool.get('id')),
                            self._build_body({'name': 'new_name'}),
                            status=500)
        self.assertIn('Provider \'bad_driver\' reports error: broken',
                      response.json.get('faultstring'))
def test_bad_update_non_udp_pool_with_udp_fields(self):
        """UDP-only session persistence fields on an HTTP pool return 400.

        persistence_timeout and persistence_granularity are only valid for
        UDP pools; sending them for an HTTP pool must fail validation and
        leave the pool's load balancer and listener in ACTIVE state.
        """
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "persistence_timeout": 3,
              "persistence_granularity": '255.255.255.0'}
        self.set_lb_status(self.lb_id)
        new_pool = {'session_persistence': sp}
        expect_error_msg = ("Validation failure: persistence_timeout and "
                            "persistence_granularity is only for %s "
                            "protocol pools.") % constants.PROTOCOL_UDP
        res = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                       self._build_body(new_pool), status=400,
                       expect_errors=True)
        self.assertEqual(expect_error_msg, res.json['faultstring'])
        # Fix: assert on the HTTP LB/listener this pool belongs to; the
        # original checked the unrelated UDP pair (copy-paste from the UDP
        # tests) and so never verified the affected objects stayed ACTIVE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id)
def test_update_with_bad_tls_container_ref(self):
        """An unresolvable TLS container ref fails the update with 400."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        tls_container_ref = uuidutils.generate_uuid()
        new_pool = {'tls_container_ref': tls_container_ref}
        # Make every cert-manager lookup fail for this reference.
        self.cert_manager_mock().get_cert.side_effect = [Exception(
            "bad cert")]
        self.cert_manager_mock().get_secret.side_effect = [Exception(
            "bad secret")]
        resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                        self._build_body(new_pool), status=400).json
        self.assertIn(tls_container_ref, resp['faultstring'])
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_with_ca_and_crl(self, mock_cert_data):
        """PUT can attach CA certificate and CRL references to a pool.

        get_secret is primed with enough CA cert / CRL values for the
        update request to pass certificate validation.
        """
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'ca_tls_container_ref': ca_tls_container_ref,
                    'crl_container_ref': crl_container_ref}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(ca_tls_container_ref,
                         response.get('ca_tls_container_ref'))
        self.assertEqual(crl_container_ref,
                         response.get('crl_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
def test_update_with_bad_ca_tls_container_ref(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
ca_tls_container_ref = uuidutils.generate_uuid()
new_pool = {'ca_tls_container_ref': ca_tls_container_ref}
self.cert_manager_mock().get_cert.side_effect = [Exception(
"bad cert")]
self.cert_manager_mock().get_secret.side_effect = [Exception(
"bad secret")]
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(ca_tls_container_ref, resp['faultstring'])
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_with_crl(self, mock_cert_data):
        """Replacing the CRL ref on a pool that already has CA+CRL succeeds."""
        ca_tls_container_ref = uuidutils.generate_uuid()
        crl_container_ref = uuidutils.generate_uuid()
        # Queue CA cert / CRL secrets for create-time and update-time
        # validations performed by this test.
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_container_ref,
            crl_container_ref=crl_container_ref).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_crl_container_ref = uuidutils.generate_uuid()
        new_pool = {'crl_container_ref': new_crl_container_ref}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        # Accepted updates leave LB, listener and pool in PENDING_UPDATE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(new_crl_container_ref,
                         response.get('crl_container_ref'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=response.get('id'))
def test_update_with_crl_only_negative_case(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
crl_container_ref = uuidutils.generate_uuid()
new_pool = {'crl_container_ref': crl_container_ref}
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(
'A CA reference is required to specify a certificate revocation '
'list.', resp['faultstring'])
def test_update_with_crl_only_none_ca(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
crl_container_ref = uuidutils.generate_uuid()
new_pool = {'ca_tls_container_ref': None,
'crl_container_ref': crl_container_ref}
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(
'A CA reference is required to specify a certificate revocation '
'list.', resp['faultstring'])
@mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
def test_update_with_unreachable_crl(self, mock_cert_data):
crl_container_ref = uuidutils.generate_uuid()
new_crl_container_ref = uuidutils.generate_uuid()
ca_tls_container_ref = uuidutils.generate_uuid()
self.cert_manager_mock().get_secret.side_effect = [
sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id,
ca_tls_container_ref=ca_tls_container_ref,
crl_container_ref=crl_container_ref).get(self.root_tag)
self.set_lb_status(self.lb_id)
new_crl_container_ref = uuidutils.generate_uuid()
new_pool = {'crl_container_ref': new_crl_container_ref}
self.cert_manager_mock().get_secret.side_effect = [
exceptions.CertificateRetrievalException(
ref=new_crl_container_ref)]
resp = self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400).json
self.assertIn(new_crl_container_ref, resp['faultstring'])
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_unset_ca_cert(self, mock_cert_data):
        """Unsetting the CA ref succeeds when no CRL is attached."""
        self.cert_manager_mock().get_secret.return_value = (
            sample_certs.X509_CA_CERT)
        ca_tls_uuid = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_uuid).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        # PUT a null CA ref to clear it.
        new_pool = {'ca_tls_container_ref': None}
        body = self._build_body(new_pool)
        listener_path = self.POOL_PATH.format(
            pool_id=api_pool['id'])
        api_pool = self.put(listener_path, body).json.get(self.root_tag)
        self.assertIsNone(api_pool.get('ca_tls_container_ref'))
        self.assertIsNone(api_pool.get('crl_container_ref'))
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_unset_ca_cert_with_crl(self, mock_cert_data):
        """Removing the CA ref while a CRL is still set must fail (400)."""
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_uuid = uuidutils.generate_uuid()
        crl_uuid = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_uuid,
            crl_container_ref=crl_uuid).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_pool = {'ca_tls_container_ref': None}
        body = self._build_body(new_pool)
        listener_path = self.POOL_PATH.format(
            pool_id=api_pool['id'])
        response = self.put(listener_path, body, status=400).json
        self.assertIn('A CA reference cannot be removed when a certificate '
                      'revocation list is present.', response['faultstring'])
    @mock.patch('octavia.common.tls_utils.cert_parser.load_certificates_data')
    def test_update_unset_crl(self, mock_cert_data):
        """Clearing only the CRL succeeds and keeps the CA reference."""
        self.cert_manager_mock().get_secret.side_effect = [
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL,
            sample_certs.X509_CA_CERT, sample_certs.X509_CA_CRL]
        ca_tls_uuid = uuidutils.generate_uuid()
        crl_uuid = uuidutils.generate_uuid()
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id,
            ca_tls_container_ref=ca_tls_uuid,
            crl_container_ref=crl_uuid).get(self.root_tag)
        self.set_lb_status(self.lb_id)
        new_pool = {'crl_container_ref': None}
        body = self._build_body(new_pool)
        listener_path = self.POOL_PATH.format(
            pool_id=api_pool['id'])
        update_pool = self.put(listener_path, body).json.get(self.root_tag)
        # CA ref must survive; only the CRL is cleared.
        self.assertEqual(api_pool.get('ca_tls_container_ref'),
                         update_pool.get('ca_tls_container_ref'))
        self.assertIsNone(update_pool.get('crl_container_ref'))
def test_delete(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
api_pool['provisioning_status'] = constants.ACTIVE
api_pool['operating_status'] = constants.ONLINE
api_pool.pop('updated_at')
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
response.pop('updated_at')
self.assertEqual(api_pool, response)
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'),
lb_prov_status=constants.PENDING_UPDATE,
listener_prov_status=constants.PENDING_UPDATE,
pool_prov_status=constants.PENDING_DELETE)
    def test_delete_with_bad_tls_ref(self):
        """Delete succeeds even if the stored TLS certificate ref is bogus."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
        tls_uuid = uuidutils.generate_uuid()
        # Corrupt the stored cert ref directly in the DB, bypassing the API.
        self.pool_repo.update(db_api.get_session(),
                              api_pool.get('id'),
                              tls_certificate_id=tls_uuid)
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_DELETE)
    def test_delete_authorize(self):
        """A member of the owning project may delete the pool (RBAC allow)."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
        # Temporarily switch auth on so RBAC enforcement is exercised.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               self.project_id):
            # Simulate a non-admin member of the pool's own project.
            override_credentials = {
                'service_user_id': None,
                'user_domain_id': None,
                'is_admin_project': True,
                'service_project_domain_id': None,
                'service_project_id': None,
                'roles': ['load-balancer_member'],
                'user_id': None,
                'is_admin': False,
                'service_user_domain_id': None,
                'project_domain_id': None,
                'service_roles': [],
                'project_id': self.project_id}
            with mock.patch(
                    "oslo_context.context.RequestContext.to_policy_values",
                    return_value=override_credentials):
                self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
        # Restore the original auth strategy for subsequent tests.
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_DELETE)
    def test_delete_not_authorize(self):
        """A different project must get 403 and leave all statuses ACTIVE."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        api_pool.pop('updated_at')
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        response.pop('updated_at')
        self.assertEqual(api_pool, response)
        # Temporarily switch auth on so RBAC enforcement is exercised.
        self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
        auth_strategy = self.conf.conf.api_settings.get('auth_strategy')
        self.conf.config(group='api_settings', auth_strategy=constants.TESTING)
        # A random, non-owning project id must be denied.
        with mock.patch.object(octavia.common.context.Context, 'project_id',
                               uuidutils.generate_uuid()):
            self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                        status=403)
        self.conf.config(group='api_settings', auth_strategy=auth_strategy)
        # Nothing was deleted, so everything stays ACTIVE.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.ACTIVE,
            listener_prov_status=constants.ACTIVE,
            pool_prov_status=constants.ACTIVE)
def test_bad_delete(self):
self.delete(self.POOL_PATH.format(
pool_id=uuidutils.generate_uuid()), status=404)
def test_delete_with_l7policy(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
self.create_l7policy(
self.listener_id,
constants.L7POLICY_ACTION_REDIRECT_TO_POOL,
redirect_pool_id=api_pool.get('id'))
self.set_lb_status(self.lb_id)
self.delete(self.POOL_PATH.format(
pool_id=api_pool.get('id')), status=409)
    @mock.patch('octavia.api.drivers.utils.call_provider')
    def test_delete_with_bad_provider(self, mock_provider):
        """Provider driver failures during delete surface as HTTP 500."""
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        api_pool['provisioning_status'] = constants.ACTIVE
        api_pool['operating_status'] = constants.ONLINE
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # Local copy has no updated_at yet; the API response does.
        self.assertIsNone(api_pool.pop('updated_at'))
        self.assertIsNotNone(response.pop('updated_at'))
        self.assertEqual(api_pool, response)
        mock_provider.side_effect = exceptions.ProviderDriverError(
            prov='bad_driver', user_msg='broken')
        self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                    status=500)
    def test_create_with_session_persistence(self):
        """Creating a pool with APP_COOKIE session persistence persists it."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        # Create leaves the pool PENDING_CREATE/OFFLINE until the LB settles.
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_CREATE,
            pool_op_status=constants.OFFLINE)
        self.set_lb_status(self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        sess_p = response.get('session_persistence')
        self.assertIsNotNone(sess_p)
        self.assertEqual(constants.SESSION_PERSISTENCE_APP_COOKIE,
                         sess_p.get('type'))
        self.assertEqual('test_cookie_name', sess_p.get('cookie_name'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'))
def test_create_with_bad_session_persistence(self):
sp = {"type": "persistence_type",
"cookie_name": "test_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
def test_create_with_bad_SP_type_HTTP_cookie(self):
sp = {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE,
"cookie_name": "test_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
def test_create_with_bad_SP_type_IP_cookie(self):
sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP,
"cookie_name": "test_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
def test_create_with_bad_SP_cookie_name(self):
sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
"cookie_name": "b@d_cookie_name"}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
def test_create_with_missing_cookie_name(self):
sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE}
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'session_persistence': sp}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=400)
    def test_add_session_persistence(self):
        """Adding session persistence via PUT stores it verbatim."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name",
              'persistence_granularity': None,
              'persistence_timeout': None}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            listener_id=self.listener_id).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        new_pool = {'session_persistence': sp}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # The API echoes back exactly the persistence dict that was sent.
        self.assertEqual(sp, response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
    def test_update_session_persistence(self):
        """Switching persistence APP_COOKIE -> SOURCE_IP is accepted."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # Mutate the fetched persistence: SOURCE_IP takes no cookie name.
        sess_p = response.get('session_persistence')
        sess_p['cookie_name'] = None
        sess_p['type'] = constants.SESSION_PERSISTENCE_SOURCE_IP
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sess_p, response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
    def test_update_preserve_session_persistence(self):
        """An unrelated update (name) leaves session persistence untouched."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name",
              'persistence_granularity': None,
              'persistence_timeout': None}
        optionals = {"listener_id": self.listener_id,
                     "name": "name", "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Change only the name; persistence must survive the update.
        new_pool = {'name': 'update_name'}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool))
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        self.assertEqual(sp, response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
def test_update_bad_session_persistence(self):
sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
"cookie_name": "test_cookie_name"}
optionals = {"listener_id": self.listener_id,
"session_persistence": sp}
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
**optionals).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
response = self.get(self.POOL_PATH.format(
pool_id=api_pool.get('id'))).json.get(self.root_tag)
sess_p = response.get('session_persistence')
sess_p['type'] = 'fake_type'
new_pool = {'session_persistence': sess_p}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=400)
    def test_update_with_bad_SP_type_HTTP_cookie(self):
        """Switching to HTTP_COOKIE with a cookie_name must be rejected."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # HTTP_COOKIE does not take a cookie_name, so this combination is bad.
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_HTTP_COOKIE
        sess_p['cookie_name'] = 'test_cookie_name'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_bad_SP_type_IP_cookie(self):
        """Switching to SOURCE_IP with a cookie_name must be rejected."""
        sp = {"type": constants.SESSION_PERSISTENCE_HTTP_COOKIE}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # SOURCE_IP does not take a cookie_name, so this combination is bad.
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_SOURCE_IP
        sess_p['cookie_name'] = 'test_cookie_name'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_bad_SP_cookie_name(self):
        """Switching to APP_COOKIE with an invalid cookie name is a 400."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # '@' is not a legal cookie-name character.
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_APP_COOKIE
        sess_p['cookie_name'] = 'b@d_cookie_name'
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
    def test_update_with_missing_SP_cookie_name(self):
        """Switching to APP_COOKIE without a cookie_name is a 400."""
        sp = {"type": constants.SESSION_PERSISTENCE_SOURCE_IP}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        response = self.get(self.POOL_PATH.format(
            pool_id=api_pool.get('id'))).json.get(self.root_tag)
        # APP_COOKIE requires a cookie_name; omit it to trigger the failure.
        sess_p = response.get('session_persistence')
        sess_p['type'] = constants.SESSION_PERSISTENCE_APP_COOKIE
        new_pool = {'session_persistence': sess_p}
        self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
                 self._build_body(new_pool), status=400)
def test_delete_with_session_persistence(self):
sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
"cookie_name": "test_cookie_name"}
optionals = {"listener_id": self.listener_id,
"session_persistence": sp}
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
**optionals).get(self.root_tag)
self.set_lb_status(lb_id=self.lb_id)
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')))
self.assert_correct_status(
lb_id=self.lb_id, listener_id=self.listener_id,
pool_id=api_pool.get('id'),
lb_prov_status=constants.PENDING_UPDATE,
listener_prov_status=constants.PENDING_UPDATE,
pool_prov_status=constants.PENDING_DELETE)
    def test_delete_session_persistence(self):
        """Setting session_persistence to None clears it from the pool."""
        sp = {"type": constants.SESSION_PERSISTENCE_APP_COOKIE,
              "cookie_name": "test_cookie_name"}
        optionals = {"listener_id": self.listener_id,
                     "session_persistence": sp}
        api_pool = self.create_pool(
            self.lb_id,
            constants.PROTOCOL_HTTP,
            constants.LB_ALGORITHM_ROUND_ROBIN,
            **optionals).get(self.root_tag)
        self.set_lb_status(lb_id=self.lb_id)
        # Explicit null removes the persistence configuration.
        new_sp = {"pool": {"session_persistence": None}}
        response = self.put(self.POOL_PATH.format(
            pool_id=api_pool.get('id')), new_sp).json.get(self.root_tag)
        self.assertIsNone(response.get('session_persistence'))
        self.assert_correct_status(
            lb_id=self.lb_id, listener_id=self.listener_id,
            pool_id=api_pool.get('id'),
            lb_prov_status=constants.PENDING_UPDATE,
            listener_prov_status=constants.PENDING_UPDATE,
            pool_prov_status=constants.PENDING_UPDATE)
def test_create_when_lb_pending_update(self):
self.put(self.LB_PATH.format(lb_id=self.lb_id),
{'loadbalancer': {'name': 'test_name_change'}})
lb_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'project_id': self.project_id}
self.post(self.POOLS_PATH, self._build_body(lb_pool), status=409)
def test_update_when_lb_pending_update(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
self.put(self.LB_PATH.format(lb_id=self.lb_id),
{'loadbalancer': {'name': 'test_name_change'}})
new_pool = {'admin_state_up': False}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=409)
def test_delete_when_lb_pending_update(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
self.put(self.LB_PATH.format(lb_id=self.lb_id),
{"loadbalancer": {'name': 'test_name_change'}})
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
status=409)
def test_create_when_lb_pending_delete(self):
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
params={'cascade': "true"})
new_pool = {
'loadbalancer_id': self.lb_id,
'listener_id': self.listener_id,
'protocol': constants.PROTOCOL_HTTP,
'lb_algorithm': constants.LB_ALGORITHM_ROUND_ROBIN,
'project_id': self.project_id}
self.post(self.POOLS_PATH, self._build_body(new_pool), status=409)
def test_update_when_lb_pending_delete(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
params={'cascade': "true"})
new_pool = {'admin_state_up': False}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=409)
def test_delete_when_lb_pending_delete(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id)
self.delete(self.LB_PATH.format(lb_id=self.lb_id),
params={'cascade': "true"})
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
status=409)
def test_update_already_deleted(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id, status=constants.DELETED)
new_pool = {'admin_state_up': False}
self.put(self.POOL_PATH.format(pool_id=api_pool.get('id')),
self._build_body(new_pool), status=404)
def test_delete_already_deleted(self):
api_pool = self.create_pool(
self.lb_id,
constants.PROTOCOL_HTTP,
constants.LB_ALGORITHM_ROUND_ROBIN,
listener_id=self.listener_id).get(self.root_tag)
self.set_lb_status(self.lb_id, status=constants.DELETED)
self.delete(self.POOL_PATH.format(pool_id=api_pool.get('id')),
status=404)
| true | true |
f72e023192b6d3b2da64456602e7078ad0cae9ff | 4,220 | py | Python | ansible/my_env/lib/python2.7/site-packages/ansible/plugins/action/aruba_config.py | otus-devops-2019-02/yyashkin_infra | 0cd0c003884155ac922e3e301305ac202de7028c | [
"MIT"
] | null | null | null | ansible/my_env/lib/python2.7/site-packages/ansible/plugins/action/aruba_config.py | otus-devops-2019-02/yyashkin_infra | 0cd0c003884155ac922e3e301305ac202de7028c | [
"MIT"
] | null | null | null | ansible/my_env/lib/python2.7/site-packages/ansible/plugins/action/aruba_config.py | otus-devops-2019-02/yyashkin_infra | 0cd0c003884155ac922e3e301305ac202de7028c | [
"MIT"
] | null | null | null | #
# (c) 2017, Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
import glob
from ansible.plugins.action.aruba import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.vars import merge_hash
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(_ActionModule):
    """Action plugin for aruba_config: renders 'src' templates on the
    controller and writes running-config backups next to the playbook/role.
    """

    def run(self, tmp=None, task_vars=None):
        """Render the template (if any), run the module, handle backups.

        Keys wrapped in double underscores (module-private results) are
        stripped from the returned dict.
        """
        if self._task.args.get('src'):
            try:
                self._handle_template()
            except ValueError as exc:
                return dict(failed=True, msg=to_text(exc))

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        if self._task.args.get('backup') and result.get('__backup__'):
            # User requested backup and no error occurred in module.
            # NOTE: If there is a parameter error, _backup key may not be in results.
            filepath = self._write_backup(task_vars['inventory_hostname'],
                                          result['__backup__'])

            result['backup_path'] = filepath

        # strip out any keys that have two leading and two trailing
        # underscore characters
        for key in list(result):
            if PRIVATE_KEYS_RE.match(key):
                del result[key]

        return result

    def _get_working_path(self):
        # The role path (when running inside a role) takes precedence over
        # the playbook base directory.
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _write_backup(self, host, contents):
        """Write *contents* to a timestamped per-host backup file.

        Any previous backup for the same host is removed first, so only
        the newest backup is kept.  Returns the new file's path.
        """
        backup_path = self._get_working_path() + '/backup'
        if not os.path.exists(backup_path):
            os.mkdir(backup_path)
        for fn in glob.glob('%s/%s_config.*' % (backup_path, host)):
            os.remove(fn)
        tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
        filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
        # Use a context manager so the handle is always closed (the
        # original leaked it via open(...).write(...)).
        with open(filename, 'w') as f:
            f.write(contents)
        return filename

    def _handle_template(self):
        """Resolve, load and render the task's 'src' template in place.

        Raises ValueError when the resolved source path does not exist.
        """
        src = self._task.args.get('src')
        working_path = self._get_working_path()

        # BUGFIX: urlsplit was called on the literal string 'src' instead
        # of the variable, so URL-style sources were never detected.
        if os.path.isabs(src) or urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)

        if not os.path.exists(source):
            raise ValueError('path specified in src not found')

        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            return dict(failed=True, msg='unable to load src file')

        # Create a template search path in the following order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
            # BUGFIX: the attribute name was misspelled as "_block:" (with a
            # trailing colon), so the dependency-chain role paths were never
            # added to the template search path.
            if hasattr(self._task, "_block"):
                dep_chain = self._task._block.get_dep_chain()
                if dep_chain is not None:
                    for role in dep_chain:
                        searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))
        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
| 37.017544 | 85 | 0.641706 |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
import glob
from ansible.plugins.action.aruba import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.vars import merge_hash
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(_ActionModule):
    """Action plugin for the aruba config module.

    Renders a local Jinja2 template given via the ``src`` argument before
    handing off to the base aruba action, and optionally writes the device
    configuration backup returned by the module to ``backup/<host>_config.*``.
    """

    def run(self, tmp=None, task_vars=None):
        """Render ``src`` (if given), run the base module, then post-process
        the backup file and strip private ``__key__`` entries from the result."""
        if self._task.args.get('src'):
            try:
                self._handle_template()
            except ValueError as exc:
                return dict(failed=True, msg=to_text(exc))

        result = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        if self._task.args.get('backup') and result.get('__backup__'):
            # Backup requested and module produced one; write it to disk.
            filepath = self._write_backup(task_vars['inventory_hostname'],
                                          result['__backup__'])
            result['backup_path'] = filepath

        # Strip keys with double leading/trailing underscores (internal only).
        for key in list(result):
            if PRIVATE_KEYS_RE.match(key):
                del result[key]
        return result

    def _get_working_path(self):
        """Return the role path when running inside a role, else the playbook basedir."""
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _write_backup(self, host, contents):
        """Write *contents* to ``backup/<host>_config.<timestamp>``, replacing
        any previous backup files for the same host, and return the file name."""
        backup_path = self._get_working_path() + '/backup'
        if not os.path.exists(backup_path):
            os.mkdir(backup_path)
        for fn in glob.glob('%s/%s_config.*' % (backup_path, host)):
            os.remove(fn)
        tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
        filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
        # BUGFIX: close the backup file deterministically instead of relying
        # on garbage collection of the anonymous file object.
        with open(filename, 'w') as f:
            f.write(contents)
        return filename

    def _handle_template(self):
        """Resolve ``self._task.args['src']`` to a template file, render it and
        store the rendered text back into the task args.

        :raises ValueError: when the source file cannot be found or read
            (run() converts this into a failed result).
        """
        src = self._task.args.get('src')
        working_path = self._get_working_path()

        # BUGFIX: test the actual src value; the original passed the literal
        # string 'src' to urlsplit(), so the scheme check never triggered.
        if os.path.isabs(src) or urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)

        if not os.path.exists(source):
            raise ValueError('path specified in src not found')

        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            # BUGFIX: run() ignores this method's return value, so the old
            # "return dict(failed=True, ...)" silently swallowed the error;
            # raising ValueError is the only path that reaches the user.
            raise ValueError('unable to load src file')

        # Template search path order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
            # BUGFIX: hasattr() target had a stray colon ("_block:"), which
            # always evaluated False and disabled the dependent-role lookup.
            if hasattr(self._task, "_block"):
                dep_chain = self._task._block.get_dep_chain()
                if dep_chain is not None:
                    for role in dep_chain:
                        searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))
        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
| true | true |
f72e036e35ca0c5e912a14526012705e1ff4a85a | 5,239 | py | Python | darling_ansible/python_venv/lib/python3.7/site-packages/oci/core/models/emulated_volume_attachment.py | revnav/sandbox | f9c8422233d093b76821686b6c249417502cf61d | [
"Apache-2.0"
] | null | null | null | darling_ansible/python_venv/lib/python3.7/site-packages/oci/core/models/emulated_volume_attachment.py | revnav/sandbox | f9c8422233d093b76821686b6c249417502cf61d | [
"Apache-2.0"
] | null | null | null | darling_ansible/python_venv/lib/python3.7/site-packages/oci/core/models/emulated_volume_attachment.py | revnav/sandbox | f9c8422233d093b76821686b6c249417502cf61d | [
"Apache-2.0"
] | 1 | 2020-06-25T03:12:58.000Z | 2020-06-25T03:12:58.000Z | # coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .volume_attachment import VolumeAttachment
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class EmulatedVolumeAttachment(VolumeAttachment):
    """
    An Emulated volume attachment.

    The ``attachment_type`` discriminator of this subclass is always
    ``emulated``.
    """

    # One row per model attribute: (python name, swagger type, JSON wire key).
    # swagger_types and attribute_map are derived from this table in __init__.
    _FIELDS = (
        ('attachment_type', 'str', 'attachmentType'),
        ('availability_domain', 'str', 'availabilityDomain'),
        ('compartment_id', 'str', 'compartmentId'),
        ('device', 'str', 'device'),
        ('display_name', 'str', 'displayName'),
        ('id', 'str', 'id'),
        ('instance_id', 'str', 'instanceId'),
        ('is_read_only', 'bool', 'isReadOnly'),
        ('is_shareable', 'bool', 'isShareable'),
        ('lifecycle_state', 'str', 'lifecycleState'),
        ('time_created', 'datetime', 'timeCreated'),
        ('volume_id', 'str', 'volumeId'),
        ('is_pv_encryption_in_transit_enabled', 'bool', 'isPvEncryptionInTransitEnabled'),
    )

    def __init__(self, **kwargs):
        """
        Initializes a new EmulatedVolumeAttachment object with values from
        keyword arguments. Supported keywords are the keys of
        ``swagger_types``; allowed values for ``lifecycle_state`` are
        "ATTACHING", "ATTACHED", "DETACHING", "DETACHED". The
        ``attachment_type`` attribute defaults to ``emulated`` and should
        not be changed.
        """
        self.swagger_types = {name: swagger for name, swagger, _ in self._FIELDS}
        self.attribute_map = {name: wire for name, _, wire in self._FIELDS}

        # Backing attributes start unset; @init_model_state_from_kwargs fills
        # them from the keyword arguments.
        for name, _, _ in self._FIELDS:
            setattr(self, '_' + name, None)

        # Discriminator: this subclass always represents an emulated attachment.
        self._attachment_type = 'emulated'

    def __repr__(self):
        """Flat, human-readable rendering of the model."""
        return formatted_flat_dict(self)

    def __eq__(self, other):
        """Models compare equal when all of their attributes are equal."""
        return other is not None and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| 39.097015 | 245 | 0.655087 |
from .volume_attachment import VolumeAttachment
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class EmulatedVolumeAttachment(VolumeAttachment):
    """An Emulated volume attachment.

    The ``attachment_type`` discriminator of this subclass is always
    ``emulated``.
    """

    def __init__(self, **kwargs):
        """Initialize a new EmulatedVolumeAttachment.

        Supported keyword arguments are the keys of ``swagger_types`` below;
        the ``@init_model_state_from_kwargs`` decorator copies them onto the
        matching ``_``-prefixed backing attributes.
        """
        # Attribute name -> swagger/python type, used by the SDK's
        # (de)serialization machinery.
        self.swagger_types = {
            'attachment_type': 'str',
            'availability_domain': 'str',
            'compartment_id': 'str',
            'device': 'str',
            'display_name': 'str',
            'id': 'str',
            'instance_id': 'str',
            'is_read_only': 'bool',
            'is_shareable': 'bool',
            'lifecycle_state': 'str',
            'time_created': 'datetime',
            'volume_id': 'str',
            'is_pv_encryption_in_transit_enabled': 'bool'
        }

        # Attribute name -> JSON wire-format key.
        self.attribute_map = {
            'attachment_type': 'attachmentType',
            'availability_domain': 'availabilityDomain',
            'compartment_id': 'compartmentId',
            'device': 'device',
            'display_name': 'displayName',
            'id': 'id',
            'instance_id': 'instanceId',
            'is_read_only': 'isReadOnly',
            'is_shareable': 'isShareable',
            'lifecycle_state': 'lifecycleState',
            'time_created': 'timeCreated',
            'volume_id': 'volumeId',
            'is_pv_encryption_in_transit_enabled': 'isPvEncryptionInTransitEnabled'
        }

        self._attachment_type = None
        self._availability_domain = None
        self._compartment_id = None
        self._device = None
        self._display_name = None
        self._id = None
        self._instance_id = None
        self._is_read_only = None
        self._is_shareable = None
        self._lifecycle_state = None
        self._time_created = None
        self._volume_id = None
        self._is_pv_encryption_in_transit_enabled = None

        # Discriminator value: this subclass always represents an emulated
        # attachment.
        self._attachment_type = 'emulated'

    def __repr__(self):
        """Return a flat, human-readable rendering of the model."""
        return formatted_flat_dict(self)

    def __eq__(self, other):
        """Models compare equal when all of their attributes are equal."""
        if other is None:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| true | true |
f72e043a4792b259057750807d91f81eb3c3b3e9 | 11,339 | py | Python | src/main.py | Twente-Mining/tezos-reward-distributor | 8df0745fdb44cbd765084303882545202d2427f3 | [
"MIT"
] | null | null | null | src/main.py | Twente-Mining/tezos-reward-distributor | 8df0745fdb44cbd765084303882545202d2427f3 | [
"MIT"
] | null | null | null | src/main.py | Twente-Mining/tezos-reward-distributor | 8df0745fdb44cbd765084303882545202d2427f3 | [
"MIT"
] | null | null | null | import argparse
import argparse
import json
import os
import queue
import sys
import time
from Constants import RunMode
from NetworkConfiguration import network_config_map
from calc.service_fee_calculator import ServiceFeeCalculator
from cli.wallet_client_manager import WalletClientManager
from config.config_parser import ConfigParser
from config.yaml_baking_conf_parser import BakingYamlConfParser
from config.yaml_conf_parser import YamlConfParser
from log_config import main_logger
from model.baking_conf import BakingConf
from pay.payment_consumer import PaymentConsumer
from pay.payment_producer import PaymentProducer
from util.client_utils import get_client_path
from util.dir_utils import get_payment_root, \
get_calculations_root, get_successful_payments_dir, get_failed_payments_dir
from util.process_life_cycle import ProcessLifeCycle
# Visual separator used to structure log output.
LINER = "--------------------------------------------"
NB_CONSUMERS = 1  # number of PaymentConsumer threads started by main() (0 in -Dn mode)
BUF_SIZE = 50  # capacity of the producer -> consumer payment queue
payments_queue = queue.Queue(BUF_SIZE)  # hands payment batches from producer to consumers
logger = main_logger
life_cycle = ProcessLifeCycle()  # start()/is_running()/stop() coordinate thread lifetime
def main(args):
    """Start the reward distributor: load configuration, spawn the payment
    producer/consumer threads and block until shutdown or Ctrl-C.

    :param args: parsed command line namespace (see the argparse setup in
                 the ``__main__`` section of this module)
    """
    logger.info("Arguments Configuration = {}".format(json.dumps(args.__dict__, indent=1)))
    # 1- find where configuration is
    config_dir = os.path.expanduser(args.config_dir)
    # create configuration directory if it is not present
    # so that user can easily put his configuration there
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)
    # 2- Load master configuration file if it is present
    master_config_file_path = os.path.join(config_dir, "master.yaml")
    master_cfg = {}
    if os.path.isfile(master_config_file_path):
        logger.info("Loading master configuration file {}".format(master_config_file_path))
        master_parser = YamlConfParser(ConfigParser.load_file(master_config_file_path))
        master_cfg = master_parser.parse()
    else:
        logger.info("master configuration file not present.")
    managers = None
    contracts_by_alias = None
    addresses_by_pkh = None
    if 'managers' in master_cfg:
        managers = master_cfg['managers']
    if 'contracts_by_alias' in master_cfg:
        contracts_by_alias = master_cfg['contracts_by_alias']
    if 'addresses_by_pkh' in master_cfg:
        addresses_by_pkh = master_cfg['addresses_by_pkh']
    # 3-
    # 4- get client path
    network_config = network_config_map[args.network]
    client_path = get_client_path([x.strip() for x in args.executable_dirs.split(',')],
                                  args.docker, network_config,
                                  args.verbose)
    logger.debug("Tezos client path is {}".format(client_path))
    # 5- load baking configuration file
    config_file_path = get_baking_configuration_file(config_dir)
    logger.info("Loading baking configuration file {}".format(config_file_path))
    wllt_clnt_mngr = WalletClientManager(client_path, contracts_by_alias, addresses_by_pkh, managers,
                                         verbose=args.verbose)
    parser = BakingYamlConfParser(ConfigParser.load_file(config_file_path), wllt_clnt_mngr, network_config)
    parser.parse()
    parser.validate()
    parser.process()
    cfg_dict = parser.get_conf_obj()
    # dictionary to BakingConf object, for a bit of type safety
    cfg = BakingConf(cfg_dict, master_cfg)
    logger.info("Baking Configuration {}".format(cfg))
    baking_address = cfg.get_baking_address()
    payment_address = cfg.get_payment_address()
    logger.info(LINER)
    logger.info("BAKING ADDRESS is {}".format(baking_address))
    logger.info("PAYMENT ADDRESS is {}".format(payment_address))
    logger.info(LINER)
    # 6- is it a reports run
    dry_run = args.dry_run_no_payments or args.dry_run
    if args.dry_run_no_payments:
        # no consumers are created in a no-payments dry run
        global NB_CONSUMERS
        NB_CONSUMERS = 0
    # 7- get reporting directories
    reports_dir = os.path.expanduser(args.reports_dir)
    # if in reports run mode, do not create consumers
    # create reports in reports directory
    if dry_run:
        reports_dir = os.path.expanduser("./reports")
    reports_dir = os.path.join(reports_dir, baking_address)
    payments_root = get_payment_root(reports_dir, create=True)
    calculations_root = get_calculations_root(reports_dir, create=True)
    get_successful_payments_dir(payments_root, create=True)
    get_failed_payments_dir(payments_root, create=True)
    # 8- start the life cycle
    life_cycle.start(not dry_run)
    # 9- service fee calculator
    srvc_fee_calc = ServiceFeeCalculator(cfg.get_full_supporters_set(), cfg.get_specials_map(), cfg.get_service_fee())
    if args.initial_cycle is None:
        recent = get_latest_report_file(payments_root)
        # if payment logs exists set initial cycle to following cycle
        # if payment logs does not exists, set initial cycle to 0, so that payment starts from last released rewards
        args.initial_cycle = 0 if recent is None else int(recent) + 1
        logger.info("initial_cycle set to {}".format(args.initial_cycle))
    p = PaymentProducer(name='producer', initial_payment_cycle=args.initial_cycle, network_config=network_config,
                        payments_dir=payments_root, calculations_dir=calculations_root, run_mode=RunMode(args.run_mode),
                        service_fee_calc=srvc_fee_calc, release_override=args.release_override,
                        payment_offset=args.payment_offset, baking_cfg=cfg, life_cycle=life_cycle,
                        payments_queue=payments_queue, dry_run=dry_run, verbose=args.verbose)
    p.start()
    for i in range(NB_CONSUMERS):
        c = PaymentConsumer(name='consumer' + str(i), payments_dir=payments_root, key_name=payment_address,
                            client_path=client_path, payments_queue=payments_queue, node_addr=args.node_addr,
                            wllt_clnt_mngr=wllt_clnt_mngr, verbose=args.verbose, dry_run=dry_run,
                            delegator_pays_xfer_fee=cfg.get_delegator_pays_xfer_fee())
        time.sleep(1)
        c.start()
    logger.info("Application start completed")
    logger.info(LINER)
    try:
        # main thread idles until the life cycle ends or the user hits Ctrl-C
        while life_cycle.is_running(): time.sleep(10)
    except KeyboardInterrupt:
        logger.info("Interrupted.")
        life_cycle.stop()
def get_baking_configuration_file(config_dir):
    """Locate the single baking configuration file inside *config_dir*.

    A baking configuration is any ``*.yaml`` file whose name does not start
    with ``master``. Exactly one such file must exist.

    :param config_dir: directory to search (not recursive)
    :returns: full path of the configuration file
    :raises Exception: when zero or more than one candidate is found
    """
    matches = [name for name in os.listdir(config_dir)
               if name.endswith(".yaml") and not name.startswith("master")]
    if len(matches) > 1:
        raise Exception(
            "Application only supports one baking configuration file. Found at least 2 {}, {}".format(
                matches[0], matches[1]))
    if not matches:
        raise Exception(
            "Unable to find any '.yaml' configuration files inside configuration directory({})".format(config_dir))
    return os.path.join(config_dir, matches[0])
def get_latest_report_file(payments_root):
    """Return the highest cycle number (as a string) found among successful
    payment report files, or None when there are no reports yet.

    Report files are named ``<cycle>.<ext>``; the extension is ignored.
    """
    success_dir = get_successful_payments_dir(payments_root)
    if not success_dir:
        return None
    stems = [os.path.splitext(entry)[0] for entry in os.listdir(success_dir)]
    if not stems:
        return None
    # Compare numerically so that e.g. '10' ranks above '9'.
    return max(stems, key=int)
class ReleaseOverrideAction(argparse.Action):
    """argparse action validating the release-override value.

    Accepts any integer >= -11 and stores it on the namespace under
    ``release_override``.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        if values < -11:
            # parser.error() prints the message and exits with status 2.
            parser.error("Valid range for release-override({0}) is [-11,) ".format(option_string))
        setattr(namespace, "release_override", values)
if __name__ == '__main__':
    # Refuse to run under Python 2.
    if sys.version_info[0] < 3:
        raise Exception("Must be using Python 3")
    # Command line interface definition; the resulting namespace is handed to main().
    parser = argparse.ArgumentParser()
    parser.add_argument("-N", "--network", help="network name", choices=['ZERONET', 'ALPHANET', 'MAINNET'],
                        default='MAINNET')
    parser.add_argument("-r", "--reports_dir", help="Directory to create reports", default='~/pymnt/reports')
    parser.add_argument("-f", "--config_dir", help="Directory to find baking configurations", default='~/pymnt/cfg')
    parser.add_argument("-A", "--node_addr", help="Node host:port pair", default='127.0.0.1:8732')
    parser.add_argument("-D", "--dry_run",
                        help="Run without injecting payments. Suitable for testing. Does not require locking.",
                        action="store_true")
    parser.add_argument("-Dn", "--dry_run_no_payments",
                        help="Run without doing any payments. Suitable for testing. Does not require locking.",
                        action="store_true")
    parser.add_argument("-E", "--executable_dirs",
                        help="Comma separated list of directories to search for client executable. Prefer single "
                        "location when setting client directory. If -d is set, point to location where tezos docker "
                        "script (e.g. mainnet.sh for mainnet) is found. Default value is given for minimum configuration effort.",
                        default='~/,~/tezos')
    parser.add_argument("-d", "--docker",
                        help="Docker installation flag. When set, docker script location should be set in -E",
                        action="store_true")
    parser.add_argument("-V", "--verbose",
                        help="Low level details.",
                        action="store_true")
    parser.add_argument("-M", "--run_mode",
                        help="Waiting decision after making pending payments. 1: default option. Run forever. "
                             "2: Run all pending payments and exit. 3: Run for one cycle and exit. "
                             "Suitable to use with -C option.",
                        default=1, choices=[1, 2, 3], type=int)
    parser.add_argument("-R", "--release_override",
                        help="Override NB_FREEZE_CYCLE value. last released payment cycle will be "
                        "(current_cycle-(NB_FREEZE_CYCLE+1)-release_override). Suitable for future payments. "
                        "For future payments give negative values. Valid range is [-11,)",
                        default=0, type=int, action=ReleaseOverrideAction)
    parser.add_argument("-O", "--payment_offset",
                        help="Number of blocks to wait after a cycle starts before starting payments. "
                        "This can be useful because cycle beginnings may be bussy.",
                        default=0, type=int)
    parser.add_argument("-C", "--initial_cycle",
                        help="First cycle to start payment. For last released rewards, set to 0. Non-positive values "
                        "are interpreted as : current cycle - abs(initial_cycle) - (NB_FREEZE_CYCLE+1). "
                        "If not set application will continue from last payment made or last reward released.",
                        type=int)
    args = parser.parse_args()
    logger.info("Tezos Reward Distributor is Starting")
    logger.info(LINER)
    logger.info("Copyright Hüseyin ABANOZ 2019")
    logger.info("huseyinabanox@gmail.com")
    logger.info("Please leave copyright information")
    logger.info(LINER)
    if args.dry_run:
        logger.info("DRY RUN MODE")
        logger.info(LINER)
    main(args)
| 44.466667 | 135 | 0.664873 | import argparse
import argparse
import json
import os
import queue
import sys
import time
from Constants import RunMode
from NetworkConfiguration import network_config_map
from calc.service_fee_calculator import ServiceFeeCalculator
from cli.wallet_client_manager import WalletClientManager
from config.config_parser import ConfigParser
from config.yaml_baking_conf_parser import BakingYamlConfParser
from config.yaml_conf_parser import YamlConfParser
from log_config import main_logger
from model.baking_conf import BakingConf
from pay.payment_consumer import PaymentConsumer
from pay.payment_producer import PaymentProducer
from util.client_utils import get_client_path
from util.dir_utils import get_payment_root, \
get_calculations_root, get_successful_payments_dir, get_failed_payments_dir
from util.process_life_cycle import ProcessLifeCycle
# Visual separator used to structure log output.
LINER = "--------------------------------------------"
NB_CONSUMERS = 1  # number of PaymentConsumer threads started by main() (0 in -Dn mode)
BUF_SIZE = 50  # capacity of the producer -> consumer payment queue
payments_queue = queue.Queue(BUF_SIZE)  # hands payment batches from producer to consumers
logger = main_logger
life_cycle = ProcessLifeCycle()  # start()/is_running()/stop() coordinate thread lifetime
def main(args):
    """Start the reward distributor: load configuration, spawn the payment
    producer/consumer threads and block until shutdown or Ctrl-C.

    :param args: parsed command line namespace (see the argparse setup in
                 the ``__main__`` section of this module)
    """
    logger.info("Arguments Configuration = {}".format(json.dumps(args.__dict__, indent=1)))
    # Resolve (and create if needed) the configuration directory.
    config_dir = os.path.expanduser(args.config_dir)
    if config_dir and not os.path.exists(config_dir):
        os.makedirs(config_dir)
    # Load the optional master.yaml overrides.
    master_config_file_path = os.path.join(config_dir, "master.yaml")
    master_cfg = {}
    if os.path.isfile(master_config_file_path):
        logger.info("Loading master configuration file {}".format(master_config_file_path))
        master_parser = YamlConfParser(ConfigParser.load_file(master_config_file_path))
        master_cfg = master_parser.parse()
    else:
        logger.info("master configuration file not present.")
    managers = None
    contracts_by_alias = None
    addresses_by_pkh = None
    if 'managers' in master_cfg:
        managers = master_cfg['managers']
    if 'contracts_by_alias' in master_cfg:
        contracts_by_alias = master_cfg['contracts_by_alias']
    if 'addresses_by_pkh' in master_cfg:
        addresses_by_pkh = master_cfg['addresses_by_pkh']
    # Locate the tezos client executable for the selected network.
    network_config = network_config_map[args.network]
    client_path = get_client_path([x.strip() for x in args.executable_dirs.split(',')],
                                  args.docker, network_config,
                                  args.verbose)
    logger.debug("Tezos client path is {}".format(client_path))
    # Parse, validate and process the baking configuration file.
    config_file_path = get_baking_configuration_file(config_dir)
    logger.info("Loading baking configuration file {}".format(config_file_path))
    wllt_clnt_mngr = WalletClientManager(client_path, contracts_by_alias, addresses_by_pkh, managers,
                                         verbose=args.verbose)
    parser = BakingYamlConfParser(ConfigParser.load_file(config_file_path), wllt_clnt_mngr, network_config)
    parser.parse()
    parser.validate()
    parser.process()
    cfg_dict = parser.get_conf_obj()
    cfg = BakingConf(cfg_dict, master_cfg)
    logger.info("Baking Configuration {}".format(cfg))
    baking_address = cfg.get_baking_address()
    payment_address = cfg.get_payment_address()
    logger.info(LINER)
    logger.info("BAKING ADDRESS is {}".format(baking_address))
    logger.info("PAYMENT ADDRESS is {}".format(payment_address))
    logger.info(LINER)
    # Dry runs write reports locally; -Dn additionally disables consumers.
    dry_run = args.dry_run_no_payments or args.dry_run
    if args.dry_run_no_payments:
        global NB_CONSUMERS
        NB_CONSUMERS = 0
    reports_dir = os.path.expanduser(args.reports_dir)
    if dry_run:
        reports_dir = os.path.expanduser("./reports")
    reports_dir = os.path.join(reports_dir, baking_address)
    payments_root = get_payment_root(reports_dir, create=True)
    calculations_root = get_calculations_root(reports_dir, create=True)
    get_successful_payments_dir(payments_root, create=True)
    get_failed_payments_dir(payments_root, create=True)
    life_cycle.start(not dry_run)
    srvc_fee_calc = ServiceFeeCalculator(cfg.get_full_supporters_set(), cfg.get_specials_map(), cfg.get_service_fee())
    if args.initial_cycle is None:
        # Resume after the last successfully paid cycle, or from scratch (0).
        recent = get_latest_report_file(payments_root)
        args.initial_cycle = 0 if recent is None else int(recent) + 1
        logger.info("initial_cycle set to {}".format(args.initial_cycle))
    p = PaymentProducer(name='producer', initial_payment_cycle=args.initial_cycle, network_config=network_config,
                        payments_dir=payments_root, calculations_dir=calculations_root, run_mode=RunMode(args.run_mode),
                        service_fee_calc=srvc_fee_calc, release_override=args.release_override,
                        payment_offset=args.payment_offset, baking_cfg=cfg, life_cycle=life_cycle,
                        payments_queue=payments_queue, dry_run=dry_run, verbose=args.verbose)
    p.start()
    for i in range(NB_CONSUMERS):
        c = PaymentConsumer(name='consumer' + str(i), payments_dir=payments_root, key_name=payment_address,
                            client_path=client_path, payments_queue=payments_queue, node_addr=args.node_addr,
                            wllt_clnt_mngr=wllt_clnt_mngr, verbose=args.verbose, dry_run=dry_run,
                            delegator_pays_xfer_fee=cfg.get_delegator_pays_xfer_fee())
        time.sleep(1)
        c.start()
    logger.info("Application start completed")
    logger.info(LINER)
    try:
        # Main thread idles until the life cycle ends or the user hits Ctrl-C.
        while life_cycle.is_running(): time.sleep(10)
    except KeyboardInterrupt:
        logger.info("Interrupted.")
        life_cycle.stop()
def get_baking_configuration_file(config_dir):
    """Locate the single baking configuration file inside *config_dir*.

    A baking configuration is any ``*.yaml`` file whose name does not start
    with ``master``. Exactly one such file must exist.

    :param config_dir: directory to search (not recursive)
    :returns: full path of the configuration file
    :raises Exception: when zero or more than one candidate is found
    """
    matches = [name for name in os.listdir(config_dir)
               if name.endswith(".yaml") and not name.startswith("master")]
    if len(matches) > 1:
        raise Exception(
            "Application only supports one baking configuration file. Found at least 2 {}, {}".format(
                matches[0], matches[1]))
    if not matches:
        raise Exception(
            "Unable to find any '.yaml' configuration files inside configuration directory({})".format(config_dir))
    return os.path.join(config_dir, matches[0])
def get_latest_report_file(payments_root):
    """Return the highest cycle number (as a string) found among successful
    payment report files, or None when there are no reports yet.

    Report files are named ``<cycle>.<ext>``; the extension is ignored.
    """
    success_dir = get_successful_payments_dir(payments_root)
    if not success_dir:
        return None
    stems = [os.path.splitext(entry)[0] for entry in os.listdir(success_dir)]
    if not stems:
        return None
    # Compare numerically so that e.g. '10' ranks above '9'.
    return max(stems, key=int)
class ReleaseOverrideAction(argparse.Action):
    """argparse action validating the release-override value.

    Accepts any integer >= -11 and stores it on the namespace under
    ``release_override``.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        if values < -11:
            # parser.error() prints the message and exits with status 2.
            parser.error("Valid range for release-override({0}) is [-11,) ".format(option_string))
        setattr(namespace, "release_override", values)
if __name__ == '__main__':
    # Refuse to run under Python 2.
    if sys.version_info[0] < 3:
        raise Exception("Must be using Python 3")
    # Command line interface definition; the resulting namespace is handed to main().
    parser = argparse.ArgumentParser()
    parser.add_argument("-N", "--network", help="network name", choices=['ZERONET', 'ALPHANET', 'MAINNET'],
                        default='MAINNET')
    parser.add_argument("-r", "--reports_dir", help="Directory to create reports", default='~/pymnt/reports')
    parser.add_argument("-f", "--config_dir", help="Directory to find baking configurations", default='~/pymnt/cfg')
    parser.add_argument("-A", "--node_addr", help="Node host:port pair", default='127.0.0.1:8732')
    parser.add_argument("-D", "--dry_run",
                        help="Run without injecting payments. Suitable for testing. Does not require locking.",
                        action="store_true")
    parser.add_argument("-Dn", "--dry_run_no_payments",
                        help="Run without doing any payments. Suitable for testing. Does not require locking.",
                        action="store_true")
    parser.add_argument("-E", "--executable_dirs",
                        help="Comma separated list of directories to search for client executable. Prefer single "
                        "location when setting client directory. If -d is set, point to location where tezos docker "
                        "script (e.g. mainnet.sh for mainnet) is found. Default value is given for minimum configuration effort.",
                        default='~/,~/tezos')
    parser.add_argument("-d", "--docker",
                        help="Docker installation flag. When set, docker script location should be set in -E",
                        action="store_true")
    parser.add_argument("-V", "--verbose",
                        help="Low level details.",
                        action="store_true")
    parser.add_argument("-M", "--run_mode",
                        help="Waiting decision after making pending payments. 1: default option. Run forever. "
                             "2: Run all pending payments and exit. 3: Run for one cycle and exit. "
                             "Suitable to use with -C option.",
                        default=1, choices=[1, 2, 3], type=int)
    parser.add_argument("-R", "--release_override",
                        help="Override NB_FREEZE_CYCLE value. last released payment cycle will be "
                        "(current_cycle-(NB_FREEZE_CYCLE+1)-release_override). Suitable for future payments. "
                        "For future payments give negative values. Valid range is [-11,)",
                        default=0, type=int, action=ReleaseOverrideAction)
    parser.add_argument("-O", "--payment_offset",
                        help="Number of blocks to wait after a cycle starts before starting payments. "
                        "This can be useful because cycle beginnings may be bussy.",
                        default=0, type=int)
    parser.add_argument("-C", "--initial_cycle",
                        help="First cycle to start payment. For last released rewards, set to 0. Non-positive values "
                        "are interpreted as : current cycle - abs(initial_cycle) - (NB_FREEZE_CYCLE+1). "
                        "If not set application will continue from last payment made or last reward released.",
                        type=int)
    args = parser.parse_args()
    logger.info("Tezos Reward Distributor is Starting")
    logger.info(LINER)
    logger.info("Copyright Hüseyin ABANOZ 2019")
    logger.info("huseyinabanox@gmail.com")
    logger.info("Please leave copyright information")
    logger.info(LINER)
    if args.dry_run:
        logger.info("DRY RUN MODE")
        logger.info(LINER)
    main(args)
| true | true |
f72e0617b476e1b47821fa731ad0d7e57218049c | 355 | py | Python | Lambda Code/S3/deleteobject.py | blitz-cmd/Boto3 | 203b75e3d8aa8c90340bd55d598b6d1b4f473dcd | [
"Apache-2.0"
] | null | null | null | Lambda Code/S3/deleteobject.py | blitz-cmd/Boto3 | 203b75e3d8aa8c90340bd55d598b6d1b4f473dcd | [
"Apache-2.0"
] | null | null | null | Lambda Code/S3/deleteobject.py | blitz-cmd/Boto3 | 203b75e3d8aa8c90340bd55d598b6d1b4f473dcd | [
"Apache-2.0"
] | null | null | null | # Boto3 code to delete an object
import json
import boto3
from botocore.exceptions import ClientError
# Module-level client is reused across warm Lambda invocations.
s3 = boto3.client('s3')


def lambda_handler(event, context):
    """AWS Lambda entry point: delete the object ``iam.png`` from the bucket.

    :param event: Lambda invocation event (unused)
    :param context: Lambda runtime context (unused)
    :returns: the S3 ``DeleteObject`` API response dict
    :raises botocore.exceptions.ClientError: when the delete call fails
    """
    bucket_name = 'whizlabs-53210'
    try:
        return s3.delete_object(Bucket=bucket_name, Key='iam.png')
    except ClientError as error:
        raise error
| 22.1875 | 67 | 0.695775 |
import json
import boto3
from botocore.exceptions import ClientError
s3 = boto3.client('s3')
def lambda_handler(event, context):
try:
bucketname = 'whizlabs-53210'
result = s3.delete_object(Bucket=bucketname, Key='iam.png')
return result
except ClientError as error:
raise error
| true | true |
f72e061fca1870ee40655aff01a7eae1cec80788 | 1,373 | py | Python | terraform-serverless/function/main.py | suganyaprasad27/serverless-expeditions | ac7ef62b028a8304641360c5804c549224fba14f | [
"Apache-2.0"
] | 105 | 2020-09-24T22:30:19.000Z | 2022-03-26T13:23:46.000Z | terraform-serverless/function/main.py | brunocrt/serverless-expeditions | dd2df444c671c5b66e258db35f842a77b93de105 | [
"Apache-2.0"
] | 4 | 2021-01-22T03:15:19.000Z | 2022-02-15T02:46:47.000Z | terraform-serverless/function/main.py | brunocrt/serverless-expeditions | dd2df444c671c5b66e258db35f842a77b93de105 | [
"Apache-2.0"
] | 83 | 2020-11-08T13:11:56.000Z | 2022-03-29T09:10:06.000Z | import io
from flask import jsonify
from google.cloud import storage, vision
from PIL import Image
vision_client = vision.ImageAnnotatorClient()  # shared Vision API client, reused across invocations
storage_client = storage.Client()  # shared Cloud Storage client
def detect_cat(request):
    """HTTP Cloud Function: label a GCS image and report whether it shows a cat.

    Query parameters:
        bucket:   name of the GCS bucket holding the image
        resource: object name of the image inside the bucket

    Returns a JSON payload with the image's dimensions/format, the Vision API
    labels, and an ``is_cat`` flag, or a plain 400 response when a required
    parameter is missing.

    Testing data: {"bucket": "glasnt-terraform-3476-test", "resource": "loan-7AIDE8PrvA0-unsplash.jpg"}
    """
    bucket = request.args.get("bucket", None)
    resource = request.args.get("resource", None)

    if not bucket:
        return "Invalid invocation: require bucket", 400
    if not resource:
        return "Invalid invocation: require resource", 400

    uri = f"gs://{bucket}/{resource}"

    # Download the raw bytes once; PIL reads dimensions from the in-memory copy.
    raw = storage_client.bucket(bucket).get_blob(resource).download_as_bytes()
    image = Image.open(io.BytesIO(raw))

    # The Vision API fetches the object straight from GCS via its URI.
    vision_image = vision.Image()
    vision_image.source.image_uri = uri
    annotations = vision_client.label_detection(image=vision_image).label_annotations
    labels = [annotation.description for annotation in annotations]

    data = {
        "image_details": {
            "height": image.height,
            "width": image.width,
            "format": image.format,
        },
        "labels": labels,
        # Flag as cat when the Vision labels include "Cat".
        "is_cat": "Cat" in labels,
    }
    return jsonify(data)
| 24.087719 | 103 | 0.663511 | import io
from flask import jsonify
from google.cloud import storage, vision
from PIL import Image
vision_client = vision.ImageAnnotatorClient()
storage_client = storage.Client()
def detect_cat(request):
    """Return JSON describing a GCS image, including whether it shows a cat.

    Expects ``bucket`` and ``resource`` query parameters identifying the
    image object; responds with a 400 tuple when either is missing.
    """
    params = request.args
    bucket = params.get("bucket", None)
    resource = params.get("resource", None)
    if not bucket:
        return "Invalid invocation: require bucket", 400
    if not resource:
        return "Invalid invocation: require resource", 400
    uri = f"gs://{bucket}/{resource}"
    # Pull the raw bytes down once so PIL can inspect the image locally.
    raw = storage_client.bucket(bucket).get_blob(resource).download_as_bytes()
    picture = Image.open(io.BytesIO(raw))
    # The Vision API reads the image straight from GCS via its URI.
    vision_image = vision.Image()
    vision_image.source.image_uri = uri
    annotations = vision_client.label_detection(image=vision_image).label_annotations
    label_names = [annotation.description for annotation in annotations]
    data = {}
    data["image_details"] = {
        "height": picture.height,
        "width": picture.width,
        "format": picture.format,
    }
    data["labels"] = label_names
    data["is_cat"] = "Cat" in label_names
    return jsonify(data)
| true | true |
f72e0741eac102e74f861708728c23954dd58a8f | 958 | py | Python | capstone/utils/aec.py | davidrobles/mlnd-capstone-code | 19ca88aaa137665af147da9bbd0e510829a14cf1 | [
"MIT"
] | 2 | 2017-04-13T18:31:39.000Z | 2017-05-06T05:14:12.000Z | capstone/utils/aec.py | davidrobles/mlnd-capstone-code | 19ca88aaa137665af147da9bbd0e510829a14cf1 | [
"MIT"
] | null | null | null | capstone/utils/aec.py | davidrobles/mlnd-capstone-code | 19ca88aaa137665af147da9bbd0e510829a14cf1 | [
"MIT"
] | null | null | null | from __future__ import print_function
def str_aec(text, color):
    """Return *text* wrapped by the ANSI escape code for *color*.

    Args:
        text: the text to colorize.
        color: case-insensitive color name, e.g. ``'red'`` or ``'bold_green'``.

    Returns:
        *text* surrounded by the ``\\033[a;bm`` prefix and the ``\\033[0m`` reset.

    Raises:
        ValueError: if *color* is not a recognized color name.
    """
    AEC_COLORS = {
        'black': (0, 30),
        'red': (0, 31),
        'green': (0, 32),
        'yellow': (0, 33),
        'blue': (0, 34),
        'purple': (0, 35),
        'cyan': (0, 36),
        'light_gray': (0, 37),
        # Bug fix: dark gray is "bright black" (1, 30); the previous value
        # (0, 30) made 'dark_gray' indistinguishable from 'black'.
        'dark_gray': (1, 30),
        'bold_red': (1, 31),
        'bold_green': (1, 32),
        'bold_yellow': (1, 33),
        'bold_blue': (1, 34),
        'bold_purple': (1, 35),
        'bold_cyan': (1, 36),
        'white': (1, 37)
    }
    color = color.lower()
    if color not in AEC_COLORS:
        # ValueError is more precise than a bare Exception and stays
        # backward compatible for callers catching Exception.
        raise ValueError(u"AEC color '{0}' does not exist".format(color))
    a, b = AEC_COLORS[color]
    return u'\033[{0};{1}m{2}\033[0m'.format(a, b, text)
def print_aec(text, color, end='\n'):
    """Prints text wrapped by the given ansi color code."""
    colored = str_aec(text, color)
    print(colored, end=end)
| 28.176471 | 72 | 0.512526 | from __future__ import print_function
def str_aec(text, color):
    """Wrap *text* in the ANSI escape sequence for the named *color*.

    *color* is matched case-insensitively; an unknown name raises Exception.
    """
    palette = {
        'black': (0, 30), 'red': (0, 31), 'green': (0, 32),
        'yellow': (0, 33), 'blue': (0, 34), 'purple': (0, 35),
        'cyan': (0, 36), 'light_gray': (0, 37), 'dark_gray': (0, 30),
        'bold_red': (1, 31), 'bold_green': (1, 32), 'bold_yellow': (1, 33),
        'bold_blue': (1, 34), 'bold_purple': (1, 35), 'bold_cyan': (1, 36),
        'white': (1, 37),
    }
    key = color.lower()
    codes = palette.get(key)
    if codes is None:
        raise Exception(u"AEC color '{0}' does not exist".format(key))
    return u'\033[{0};{1}m{2}\033[0m'.format(codes[0], codes[1], text)
def print_aec(text, color, end='\n'):
    """Print *text* colorized with the named ANSI color."""
    rendered = str_aec(text, color)
    print(rendered, end=end)
| true | true |
f72e079660362cbc5b40ddbde0fa07599e13b0cb | 4,529 | py | Python | eggs/bx_python-0.7.1_7b95ff194725-py2.7-linux-i686-ucs4.egg/EGG-INFO/scripts/maf_tile.py | bopopescu/phyG | 023f505b705ab953f502cbc55e90612047867583 | [
"CC-BY-3.0"
] | null | null | null | eggs/bx_python-0.7.1_7b95ff194725-py2.7-linux-i686-ucs4.egg/EGG-INFO/scripts/maf_tile.py | bopopescu/phyG | 023f505b705ab953f502cbc55e90612047867583 | [
"CC-BY-3.0"
] | null | null | null | eggs/bx_python-0.7.1_7b95ff194725-py2.7-linux-i686-ucs4.egg/EGG-INFO/scripts/maf_tile.py | bopopescu/phyG | 023f505b705ab953f502cbc55e90612047867583 | [
"CC-BY-3.0"
] | 1 | 2020-07-25T21:03:18.000Z | 2020-07-25T21:03:18.000Z | #!/afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.7
"""
'Tile' the blocks of a maf file over each of a set of intervals. The
highest scoring block that covers any part of a region will be used, and
pieces not covered by any block filled with "-" or optionally "*". The list
of species to tile is specified by `tree` (either a tree or just a comma
separated list). The `seq_db` is a lookup table mapping chromosome names
to nib file for filling in the reference species. Maf files must be indexed.
NOTE: See maf_tile_2.py for a more sophisticated version of this program, I
think this one will be eliminated in the future.
usage: %prog tree maf_files...
-m, --missingData: Inserts wildcards for missing block rows instead of '-'
"""
import psyco_full
from bx.cookbook import doc_optparse
import bx.align.maf
import bx.align as align
from bx import misc
import bx.seq.nib
import os
import string
import sys
tree_tx = string.maketrans( "(),", " " )
def main():
    """Parse arguments, then tile each stdin interval and write MAF output."""
    options, args = doc_optparse.parse( __doc__ )
    try:
        # args: <species tree or comma list> <seq db file> <indexed maf files...>
        sources = args[0].translate( tree_tx ).split()
        seq_db = load_seq_db( args[1] )
        index = bx.align.maf.MultiIndexed( args[2:] )
        out = bx.align.maf.Writer( sys.stdout )
        missing_data = bool(options.missingData)
    except:
        # NOTE(review): bare except -- presumably doc_optparse.exception()
        # prints usage and exits on any argument error; confirm before
        # narrowing the clause.
        doc_optparse.exception()
    # Each input line is "<src> <start> <end>" (extra columns are ignored).
    for line in sys.stdin:
        ref_src, start, end = line.split()[0:3]
        do_interval( sources, index, out, ref_src, int( start ), int( end ), seq_db, missing_data )
    out.close()
def load_seq_db( fname ):
    """Read a comma-separated sequence db file into a dict.

    Each line has at least five comma-separated fields; fields 1 and 2
    (species and chromosome) joined with '.' form the key, and field 4
    (stripped) is the value -- per the module docstring, the path to the
    nib file for that chromosome.
    """
    db = {}
    # Use a context manager so the file handle is closed deterministically
    # (the previous code relied on garbage collection to close it).
    with open( fname ) as fh:
        for line in fh:
            fields = line.split(',')
            src = fields[1] + "." + fields[2]
            seq = fields[4]
            db[src] = seq.strip()
    return db
def do_interval( sources, index, out, ref_src, start, end, seq_db, missing_data ):
    """Tile the best-scoring alignment blocks over [start, end) of `ref_src`
    and write one stitched alignment to `out`.

    Columns covered by no block are filled from the reference nib file
    (row 0) and with '-' (or '*' when `missing_data`) for other species.
    """
    # The first species in `sources` must be the reference species.
    assert sources[0].split('.')[0] == ref_src.split('.')[0], "%s != %s" % ( sources[0].split('.')[0], ref_src.split('.')[0] )
    base_len = end - start
    blocks = index.get( ref_src, start, end )
    # From low to high score
    blocks.sort( lambda a, b: cmp( a.score, b.score ) )
    # Per reference column, the index of the winning block covering it;
    # later (higher-scoring) blocks overwrite earlier ones.  -1 = uncovered.
    mask = [ -1 ] * base_len
    # print len( blocks )
    # print blocks[0]
    ref_src_size = None
    for i, block in enumerate( blocks ):
        ref = block.get_component_by_src_start( ref_src )
        ref_src_size = ref.src_size
        assert ref.strand == "+"
        # Clip the block's reference extent to the requested interval.
        slice_start = max( start, ref.start )
        slice_end = min( end, ref.end )
        for j in range( slice_start, slice_end ):
            mask[j-start] = i
    #print >>sys.stderr, mask
    # One list of text pieces per requested species.
    tiled = []
    for i in range( len( sources ) ): tiled.append( [] )
    # NOTE(review): this loop rebinds the `index` parameter (the maf index);
    # safe only because `index` is not used again after this point.
    for ss, ee, index in intervals_from_mask( mask ):
        if index < 0:
            # Uncovered run: reference text comes straight from the nib
            # file; other rows get wildcards or gaps.
            # NOTE(review): the NibFile's file handle is never explicitly
            # closed here.
            tiled[0].append( bx.seq.nib.NibFile( open( seq_db[ ref_src ] ) ).get( start+ss, ee-ss ) )
            for row in tiled[1:]:
                if missing_data:
                    row.append( "*" * ( ee - ss ) )
                else:
                    row.append( "-" * ( ee - ss ) )
        else:
            # Covered run: slice the winning block to this run, restrict it
            # to the requested species and drop all-gap columns.
            slice_start = start + ss
            slice_end = start + ee
            block = blocks[index]
            ref = block.get_component_by_src_start( ref_src )
            sliced = block.slice_by_component( ref, slice_start, slice_end )
            sliced = sliced.limit_to_species( sources )
            sliced.remove_all_gap_columns()
            for i, src in enumerate( sources ):
                comp = sliced.get_component_by_src_start( src )
                if comp:
                    tiled[i].append( comp.text )
                else:
                    if missing_data: tiled[i].append( "*" * sliced.text_size )
                    else: tiled[i].append( "-" * sliced.text_size )
    # Assemble the stitched alignment.  Non-reference rows get ".fake"
    # coordinates because their true positions are unknown after tiling.
    a = align.Alignment()
    for i, name in enumerate( sources ):
        text = "".join( tiled[i] )
        size = len( text ) - text.count( "-" )
        if i == 0:
            if ref_src_size is None: ref_src_size = bx.seq.nib.NibFile( open( seq_db[ ref_src ] ) ).length
            c = align.Component( ref_src, start, end-start, "+", ref_src_size, text )
        else:
            c = align.Component( name + ".fake", 0, size, "?", size, text )
        a.add_component( c )
    out.write( a )
def intervals_from_mask( mask ):
    """Yield (start, end, value) runs of identical consecutive values.

    `end` is exclusive.  Generalized to yield nothing for an empty mask;
    the previous code raised IndexError on `mask[0]`.
    """
    if not mask:
        return
    start = 0
    last = mask[0]
    for i in range( 1, len( mask ) ):
        if mask[i] != last:
            yield start, i, last
            start = i
            last = mask[i]
    # Emit the final (or only) run.
    yield start, len(mask), last
main()
| 32.582734 | 126 | 0.578715 |
import psyco_full
from bx.cookbook import doc_optparse
import bx.align.maf
import bx.align as align
from bx import misc
import bx.seq.nib
import os
import string
import sys
tree_tx = string.maketrans( "(),", " " )
def main():
    """Parse arguments, then tile each stdin interval and write MAF output."""
    options, args = doc_optparse.parse( __doc__ )
    try:
        # args: <species tree or comma list> <seq db file> <indexed maf files...>
        sources = args[0].translate( tree_tx ).split()
        seq_db = load_seq_db( args[1] )
        index = bx.align.maf.MultiIndexed( args[2:] )
        out = bx.align.maf.Writer( sys.stdout )
        missing_data = bool(options.missingData)
    except:
        # NOTE(review): bare except -- presumably prints usage and exits;
        # confirm before narrowing.
        doc_optparse.exception()
    # Each input line is "<src> <start> <end>" (extra columns are ignored).
    for line in sys.stdin:
        ref_src, start, end = line.split()[0:3]
        do_interval( sources, index, out, ref_src, int( start ), int( end ), seq_db, missing_data )
    out.close()
def load_seq_db( fname ):
    """Read a comma-separated sequence db file into a dict.

    Fields 1 and 2 of each line (species, chromosome) joined with '.'
    form the key; field 4 (stripped) is the value.
    """
    db = {}
    # Use a context manager so the file handle is closed deterministically
    # (the previous code relied on garbage collection to close it).
    with open( fname ) as fh:
        for line in fh:
            fields = line.split(',')
            src = fields[1] + "." + fields[2]
            seq = fields[4]
            db[src] = seq.strip()
    return db
def do_interval( sources, index, out, ref_src, start, end, seq_db, missing_data ):
    """Tile the best-scoring alignment blocks over [start, end) of `ref_src`
    and write one stitched alignment to `out`.

    Columns covered by no block are filled from the reference nib file
    (row 0) and with '-' (or '*' when `missing_data`) for other species.
    """
    # The first species in `sources` must be the reference species.
    assert sources[0].split('.')[0] == ref_src.split('.')[0], "%s != %s" % ( sources[0].split('.')[0], ref_src.split('.')[0] )
    base_len = end - start
    blocks = index.get( ref_src, start, end )
    # Sort low to high score so later (better) blocks overwrite earlier ones
    # in the coverage mask below.
    blocks.sort( lambda a, b: cmp( a.score, b.score ) )
    # Per reference column: index of the winning block, or -1 if uncovered.
    mask = [ -1 ] * base_len
    ref_src_size = None
    for i, block in enumerate( blocks ):
        ref = block.get_component_by_src_start( ref_src )
        ref_src_size = ref.src_size
        assert ref.strand == "+"
        # Clip the block's reference extent to the requested interval.
        slice_start = max( start, ref.start )
        slice_end = min( end, ref.end )
        for j in range( slice_start, slice_end ):
            mask[j-start] = i
    # One list of text pieces per requested species.
    tiled = []
    for i in range( len( sources ) ): tiled.append( [] )
    # NOTE(review): this loop rebinds the `index` parameter (the maf index);
    # safe only because `index` is not used again after this point.
    for ss, ee, index in intervals_from_mask( mask ):
        if index < 0:
            # Uncovered run: reference text from the nib file, wildcards or
            # gaps for the other rows.
            tiled[0].append( bx.seq.nib.NibFile( open( seq_db[ ref_src ] ) ).get( start+ss, ee-ss ) )
            for row in tiled[1:]:
                if missing_data:
                    row.append( "*" * ( ee - ss ) )
                else:
                    row.append( "-" * ( ee - ss ) )
        else:
            # Covered run: slice the winning block to this run, restrict it
            # to the requested species and drop all-gap columns.
            slice_start = start + ss
            slice_end = start + ee
            block = blocks[index]
            ref = block.get_component_by_src_start( ref_src )
            sliced = block.slice_by_component( ref, slice_start, slice_end )
            sliced = sliced.limit_to_species( sources )
            sliced.remove_all_gap_columns()
            for i, src in enumerate( sources ):
                comp = sliced.get_component_by_src_start( src )
                if comp:
                    tiled[i].append( comp.text )
                else:
                    if missing_data: tiled[i].append( "*" * sliced.text_size )
                    else: tiled[i].append( "-" * sliced.text_size )
    # Assemble the stitched alignment.  Non-reference rows get ".fake"
    # coordinates because their true positions are unknown after tiling.
    a = align.Alignment()
    for i, name in enumerate( sources ):
        text = "".join( tiled[i] )
        size = len( text ) - text.count( "-" )
        if i == 0:
            if ref_src_size is None: ref_src_size = bx.seq.nib.NibFile( open( seq_db[ ref_src ] ) ).length
            c = align.Component( ref_src, start, end-start, "+", ref_src_size, text )
        else:
            c = align.Component( name + ".fake", 0, size, "?", size, text )
        a.add_component( c )
    out.write( a )
def intervals_from_mask( mask ):
    """Yield (start, end, value) runs of identical consecutive values.

    `end` is exclusive.  Generalized to yield nothing for an empty mask;
    the previous code raised IndexError on `mask[0]`.
    """
    if not mask:
        return
    start = 0
    last = mask[0]
    for i in range( 1, len( mask ) ):
        if mask[i] != last:
            yield start, i, last
            start = i
            last = mask[i]
    # Emit the final (or only) run.
    yield start, len(mask), last
main()
| true | true |
f72e0809d6a8ad8c59211d0b190cb4962950f2e6 | 2,420 | py | Python | tests/app/branson/inputs/cubanova.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 14 | 2017-04-27T20:36:18.000Z | 2020-06-29T07:10:39.000Z | tests/app/branson/inputs/cubanova.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 13 | 2017-05-02T15:19:40.000Z | 2021-05-13T16:59:23.000Z | tests/app/branson/inputs/cubanova.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 19 | 2017-04-08T15:03:37.000Z | 2021-07-16T09:42:07.000Z | # This is a script to write an XML geometry for our "cubanova" calculation
import sys
# NOTE(review): numpy and sys are imported but unused in this script.
import numpy
from BlockIt import block
from BlockIt import branson_run_param
from BlockIt import generate_input
# Generate the branson run parameters (stop time, initial dt, photon count
# and RNG seed).
run_param = branson_run_param(t_stop=1.0e-2, dt_start=1.0e-3, photons=5000, seed=14706)
# Build a symmetric "cubanova": nested cubes of void, circumstellar material
# (CSM), a shocked CSM shell and the cubanova core.  All lengths below are
# divided by scale_size; units are not stated here -- TODO confirm against
# BlockIt.
scale_size = 10**16
scale_cv = 1.0e12
smear_shock = 1.1
domain_size = 4.0*10**16/scale_size
CSM_size = 2.0*10**16/scale_size
# The shock shell is slightly larger than the core and smeared outward.
CSM_shock_size = 2.0*10**15.831/scale_size*smear_shock
cubanova_size = 2.0*10**15.829/scale_size
block_list = []
# Build up materials: each material is a dict of id, density, heat capacity
# (cv), opacity coefficients (opacA..opacS) and initial temperatures.
void = {}
void["id"] = 1
void["density"] = 1e-6
void["cv"] = 1e16
void["opacA"] = 1e-6
void["opacB"] = 0.0
void["opacC"] = 0.0
void["opacS"] = 0.0
void["initial_T_e"] = 8.6e-4
void["initial_T_r"] = 8.6e-4
# NOTE(review): the final positional argument of block() (1, 4, 10, 8 below)
# presumably controls mesh resolution -- confirm against BlockIt.
unit_cube = block([domain_size]*3, [0.0]*3, void, 1)
block_list.append(unit_cube)
CSM_material= {}
CSM_material["id"] = 2
CSM_material["density"] = 1.0e-14
CSM_material["cv"] = 1.0e-3*scale_cv
CSM_material["opacA"] = 1.0e-4*CSM_material["density"]*scale_size
CSM_material["opacB"] = 0.0
CSM_material["opacC"] = 0.0
CSM_material["opacS"] = 0.0
CSM_material["initial_T_e"] = 8.6e-4
CSM_material["initial_T_r"] = 8.6e-4
CSM_dim=[CSM_size]*3
CSM = block(CSM_dim, [0.0]*3, CSM_material, 4)
block_list.append(CSM)
# Shocked CSM shell: denser and hotter; opacity reduced by the smear factor.
CSM_Shock_material= {}
CSM_Shock_material["id"] = 3
CSM_Shock_material["density"] = 1.0e-12
CSM_Shock_material["cv"] = 1.0e-3*scale_cv
CSM_Shock_material["opacA"] = 0.3*CSM_Shock_material["density"]*scale_size/smear_shock
CSM_Shock_material["opacB"] = 0.0
CSM_Shock_material["opacC"] = 0.0
CSM_Shock_material["opacS"] = 0.0
CSM_Shock_material["initial_T_e"] = 8.6e-2
CSM_Shock_material["initial_T_r"] = 8.6e-2
CSM_Shock_dim=[CSM_shock_size]*3
CSM_Shock = block(CSM_Shock_dim, [0.0]*3, CSM_Shock_material, 10)
block_list.append(CSM_Shock)
# Cubanova core: same density/cv as the CSM but with the shock's opacity law.
cubanova_material= {}
cubanova_material["id"] = 4
cubanova_material["density"] = 1.0e-14
cubanova_material["cv"] = 1.0e-3*scale_cv
cubanova_material["opacA"] = 0.3*cubanova_material["density"]*scale_size
cubanova_material["opacB"] = 0.0
cubanova_material["opacC"] = 0.0
cubanova_material["opacS"] = 0.0
cubanova_material["initial_T_e"] = 8.6e-4
cubanova_material["initial_T_r"] = 8.6e-4
cubanova_dim = [cubanova_size]*3
cubanova = block(cubanova_dim, [0.0]*3, cubanova_material, 8)
block_list.append(cubanova)
# Emit the XML input deck for the assembled geometry and run parameters.
generate_input(block_list, run_param)
| 27.191011 | 87 | 0.739669 |
# Script: writes an XML geometry ("cubanova") -- nested cubes of void,
# circumstellar material (CSM), a shocked CSM shell and a core.  Each
# material dict holds id, density, heat capacity (cv), opacity
# coefficients and initial temperatures.  All lengths are divided by
# scale_size; units are not stated here -- TODO confirm against BlockIt.
import sys
# NOTE(review): numpy and sys are imported but unused in this script.
import numpy
from BlockIt import block
from BlockIt import branson_run_param
from BlockIt import generate_input
run_param = branson_run_param(t_stop=1.0e-2, dt_start=1.0e-3, photons=5000, seed=14706)
scale_size = 10**16
scale_cv = 1.0e12
smear_shock = 1.1
domain_size = 4.0*10**16/scale_size
CSM_size = 2.0*10**16/scale_size
CSM_shock_size = 2.0*10**15.831/scale_size*smear_shock
cubanova_size = 2.0*10**15.829/scale_size
block_list = []
void = {}
void["id"] = 1
void["density"] = 1e-6
void["cv"] = 1e16
void["opacA"] = 1e-6
void["opacB"] = 0.0
void["opacC"] = 0.0
void["opacS"] = 0.0
void["initial_T_e"] = 8.6e-4
void["initial_T_r"] = 8.6e-4
# NOTE(review): the final positional argument of block() (1, 4, 10, 8 below)
# presumably controls mesh resolution -- confirm against BlockIt.
unit_cube = block([domain_size]*3, [0.0]*3, void, 1)
block_list.append(unit_cube)
CSM_material= {}
CSM_material["id"] = 2
CSM_material["density"] = 1.0e-14
CSM_material["cv"] = 1.0e-3*scale_cv
CSM_material["opacA"] = 1.0e-4*CSM_material["density"]*scale_size
CSM_material["opacB"] = 0.0
CSM_material["opacC"] = 0.0
CSM_material["opacS"] = 0.0
CSM_material["initial_T_e"] = 8.6e-4
CSM_material["initial_T_r"] = 8.6e-4
CSM_dim=[CSM_size]*3
CSM = block(CSM_dim, [0.0]*3, CSM_material, 4)
block_list.append(CSM)
CSM_Shock_material= {}
CSM_Shock_material["id"] = 3
CSM_Shock_material["density"] = 1.0e-12
CSM_Shock_material["cv"] = 1.0e-3*scale_cv
CSM_Shock_material["opacA"] = 0.3*CSM_Shock_material["density"]*scale_size/smear_shock
CSM_Shock_material["opacB"] = 0.0
CSM_Shock_material["opacC"] = 0.0
CSM_Shock_material["opacS"] = 0.0
CSM_Shock_material["initial_T_e"] = 8.6e-2
CSM_Shock_material["initial_T_r"] = 8.6e-2
CSM_Shock_dim=[CSM_shock_size]*3
CSM_Shock = block(CSM_Shock_dim, [0.0]*3, CSM_Shock_material, 10)
block_list.append(CSM_Shock)
cubanova_material= {}
cubanova_material["id"] = 4
cubanova_material["density"] = 1.0e-14
cubanova_material["cv"] = 1.0e-3*scale_cv
cubanova_material["opacA"] = 0.3*cubanova_material["density"]*scale_size
cubanova_material["opacB"] = 0.0
cubanova_material["opacC"] = 0.0
cubanova_material["opacS"] = 0.0
cubanova_material["initial_T_e"] = 8.6e-4
cubanova_material["initial_T_r"] = 8.6e-4
cubanova_dim = [cubanova_size]*3
cubanova = block(cubanova_dim, [0.0]*3, cubanova_material, 8)
block_list.append(cubanova)
# Emit the XML input deck for the assembled geometry and run parameters.
generate_input(block_list, run_param)
| true | true |
f72e097f6480ed778a05ab11672341a70aa64a42 | 2,847 | py | Python | examples/python/chemical_balance_sat.py | AlohaChina/or-tools | 1ece0518104db435593a1a21882801ab6ada3e15 | [
"Apache-2.0"
] | 8,273 | 2015-02-24T22:10:50.000Z | 2022-03-31T21:19:27.000Z | examples/python/chemical_balance_sat.py | AlohaChina/or-tools | 1ece0518104db435593a1a21882801ab6ada3e15 | [
"Apache-2.0"
] | 2,530 | 2015-03-05T04:27:21.000Z | 2022-03-31T06:13:02.000Z | examples/python/chemical_balance_sat.py | AlohaChina/or-tools | 1ece0518104db435593a1a21882801ab6ada3e15 | [
"Apache-2.0"
] | 2,057 | 2015-03-04T15:02:02.000Z | 2022-03-30T02:29:27.000Z | # Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We are trying to group items in equal sized groups.
# Each item has a color and a value. We want the sum of values of each group to
# be as close to the average as possible.
# Furthermore, if one color is an a group, at least k items with this color must
# be in that group.
from ortools.sat.python import cp_model
import math
# Data: per-product maximum quantities, and per-set coefficients (one
# column per product, after the set's name).
max_quantities = [["N_Total", 1944], ["P2O5", 1166.4], ["K2O", 1822.5],
                  ["CaO", 1458], ["MgO", 486], ["Fe", 9.7], ["B", 2.4]]

chemical_set = [["A", 0, 0, 510, 540, 0, 0, 0], ["B", 110, 0, 0, 0, 160, 0, 0],
                ["C", 61, 149, 384, 0, 30, 1,
                 0.2], ["D", 148, 70, 245, 0, 15, 1,
                        0.2], ["E", 160, 158, 161, 0, 10, 1, 0.2]]

num_products = len(max_quantities)
all_products = range(num_products)

num_sets = len(chemical_set)
all_sets = range(num_sets)

# Model
model = cp_model.CpModel()

# Integer set variables are scaled by 1000 and coefficients by 10, so the
# product-quantity constraints below are expressed in 1/10000 units.
# (NOTE(review): the original comment said "by 100", which did not match
# the code.)
max_set = [
    int(
        math.ceil(
            min(max_quantities[q][1] * 1000 / chemical_set[s][q + 1]
                for q in all_products if chemical_set[s][q + 1] != 0)))
    for s in all_sets
]

set_vars = [model.NewIntVar(0, max_set[s], "set_%i" % s) for s in all_sets]

# epsilon is the largest shortfall from the per-product maxima; it is
# minimized so each product total gets as close to its maximum as possible.
epsilon = model.NewIntVar(0, 10000000, "epsilon")

for p in all_products:
    model.Add(
        sum(int(chemical_set[s][p + 1] * 10) * set_vars[s]
            for s in all_sets) <= int(max_quantities[p][1] * 10000))
    model.Add(
        sum(int(chemical_set[s][p + 1] * 10) * set_vars[s]
            for s in all_sets) >= int(max_quantities[p][1] * 10000) - epsilon)

model.Minimize(epsilon)

# Creates a solver and solves.
solver = cp_model.CpSolver()
status = solver.Solve(model)

print("Status = %s" % solver.StatusName(status))

# The objective value of the solution (undo the 1/10000 scaling).
print("Optimal objective value = %f" % (solver.ObjectiveValue() / 10000.0))

# Report each set's chosen quantity (undo the x1000 scaling).
for s in all_sets:
    print(
        "  %s = %f" % (chemical_set[s][0], solver.Value(set_vars[s]) / 1000.0),
        end=" ")
print()
# Report each product's achieved total against its maximum.
for p in all_products:
    name = max_quantities[p][0]
    max_quantity = max_quantities[p][1]
    quantity = sum(
        solver.Value(set_vars[s]) / 1000.0 * chemical_set[s][p + 1]
        for s in all_sets)
    print("%s: %f out of %f" % (name, quantity, max_quantity))
| 33.104651 | 80 | 0.632596 |
# CP-SAT model: choose per-set quantities so each product's total gets as
# close as possible to its maximum without exceeding it; `epsilon` is the
# largest shortfall and is minimized.  Set variables are scaled by 1000
# and coefficients by 10, so constraints are in 1/10000 units.
from ortools.sat.python import cp_model
import math

max_quantities = [["N_Total", 1944], ["P2O5", 1166.4], ["K2O", 1822.5],
                  ["CaO", 1458], ["MgO", 486], ["Fe", 9.7], ["B", 2.4]]

chemical_set = [["A", 0, 0, 510, 540, 0, 0, 0], ["B", 110, 0, 0, 0, 160, 0, 0],
                ["C", 61, 149, 384, 0, 30, 1,
                 0.2], ["D", 148, 70, 245, 0, 15, 1,
                        0.2], ["E", 160, 158, 161, 0, 10, 1, 0.2]]

num_products = len(max_quantities)
all_products = range(num_products)

num_sets = len(chemical_set)
all_sets = range(num_sets)

model = cp_model.CpModel()

# Upper bound for each set variable: the tightest product limit it hits.
max_set = [
    int(
        math.ceil(
            min(max_quantities[q][1] * 1000 / chemical_set[s][q + 1]
                for q in all_products if chemical_set[s][q + 1] != 0)))
    for s in all_sets
]

set_vars = [model.NewIntVar(0, max_set[s], "set_%i" % s) for s in all_sets]

epsilon = model.NewIntVar(0, 10000000, "epsilon")

for p in all_products:
    model.Add(
        sum(int(chemical_set[s][p + 1] * 10) * set_vars[s]
            for s in all_sets) <= int(max_quantities[p][1] * 10000))
    model.Add(
        sum(int(chemical_set[s][p + 1] * 10) * set_vars[s]
            for s in all_sets) >= int(max_quantities[p][1] * 10000) - epsilon)

model.Minimize(epsilon)

solver = cp_model.CpSolver()
status = solver.Solve(model)

print("Status = %s" % solver.StatusName(status))

# Undo the 1/10000 scaling for reporting.
print("Optimal objective value = %f" % (solver.ObjectiveValue() / 10000.0))

for s in all_sets:
    print(
        "  %s = %f" % (chemical_set[s][0], solver.Value(set_vars[s]) / 1000.0),
        end=" ")
print()
for p in all_products:
    name = max_quantities[p][0]
    max_quantity = max_quantities[p][1]
    quantity = sum(
        solver.Value(set_vars[s]) / 1000.0 * chemical_set[s][p + 1]
        for s in all_sets)
    print("%s: %f out of %f" % (name, quantity, max_quantity))
| true | true |
f72e0a3f831f9e9c61a2e9d77828ffb12d8428b1 | 20,450 | py | Python | tensorflow/contrib/training/python/training/training.py | tianyapiaozi/tensorflow | fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a | [
"Apache-2.0"
] | 71 | 2017-05-25T16:02:15.000Z | 2021-06-09T16:08:08.000Z | tensorflow/contrib/training/python/training/training.py | shrikunjsarda/tensorflow | 7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae | [
"Apache-2.0"
] | 133 | 2017-04-26T16:49:49.000Z | 2019-10-15T11:39:26.000Z | tensorflow/contrib/training/python/training/training.py | shrikunjsarda/tensorflow | 7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae | [
"Apache-2.0"
] | 31 | 2018-09-11T02:17:17.000Z | 2021-12-15T10:33:35.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains various routines and helper functions for training models.
This script contains various functions for training models. These include
manipulating gradients, creating a `train_op` (an operation that computes the
loss and applies the gradients) and a training loop function. The training loop
allows the user to pass in the `train_op` and runs the optimization according
to user-specified arguments.
************************************
* A simple working training script *
************************************
# Load data and create the model:
images, labels = LoadData(...)
predictions = MyModel(images)
# Define the loss:
tf.contrib.losses.log_loss(predictions, labels)
total_loss = tf.contrib.losses.get_total_loss()
# Define the optimizer:
optimizer = tf.train.MomentumOptimizer(FLAGS.learning_rate, FLAGS.momentum)
# Create the train_op
train_op = tf.contrib.training.create_train_op(total_loss, optimizer)
# Run training.
tf.contrib.training.train(train_op, my_log_dir)
*************************
* Creating the train_op *
*************************
In order to use the `train` function, one needs a train_op: an `Operation` that
(a) computes the loss, (b) applies the gradients to update the weights and
(c) returns the value of the loss. tf.contrib.training.create_train_op creates
such an `Operation`. This function also provides the ability to manipulate
the gradients using a few arguments:
# Create the train_op and clip the gradient norms:
train_op = tf.contrib.training.create_train_op(
total_loss,
optimizer,
transform_grads_fn=clip_gradient_norms_fn(3))
# Create the train_op and scale the gradients by providing a map from variable
# name (or variable) to a scaling coefficient:
def transform_grads_fn(grads):
gradient_multipliers = {
'conv0/weights': 1.2,
'fc8/weights': 3.4,
}
return tf.contrib.training.multiply_gradients(
grads, gradient_multipliers)
train_op = tf.contrib.training.create_train_op(
total_loss,
optimizer,
transform_grads_fn=transform_grads_fn)
****************************************************************
* Performing additional (non-gradient) updates during training *
****************************************************************
Many networks utilize modules, like BatchNorm, that require performing a series
of non-gradient updates during training. tf.contrib.training.create_train_op
allows a user to pass in a list of update_ops to call along with the gradient
updates.
train_op = tf.contrib.training.create_train_op(
total_loss, optimizer, update_ops)
By default, tf.contrib.training.create_train_op includes all update ops that are
part of the `tf.GraphKeys.UPDATE_OPS` collection. Additionally, the
tf.contrib.layers.batch_norm function adds the moving mean and moving variance
updates to this collection. Consequently, users who want to use
tf.contrib.layers.batch_norm will not need to take any additional steps in order
to have the moving mean and moving variance updates be computed.
However, users with additional, specialized updates can either override the
default update ops or simply add additional update ops to the
`tf.GraphKeys.UPDATE_OPS` collection:
# Force `create_train_op` to NOT use ANY update_ops:
train_op = tf.contrib.training.create_train_op(
total_loss,
optimizer,
update_ops=[])
# Use an alternative set of update ops:
train_op = tf.contrib.training.create_train_op(
total_loss,
optimizer,
update_ops=my_other_update_ops)
# Use a set of update ops in addition to the default updates:
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, my_update0)
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, my_update1)
train_op = tf.contrib.training.create_train_op(
total_loss,
optimizer)
# Which is the same as:
train_op = tf.contrib.training.create_train_op(
total_loss,
optimizer,
update_ops=tf.get_collection(tf.GraphKeys.UPDATE_OPS))
******************************************
* Initializing a model from a checkpoint *
******************************************
It is common to want to 'warm-start' a model from a pre-trained checkpoint.
One can use a tf.Scaffold and an initializing function to do so.
...
# Create the train_op
train_op = tf.contrib.training.create_train_op(total_loss, optimizer)
# Create the initial assignment op
checkpoint_path = '/path/to/old_model_checkpoint'
variables_to_restore = tf.contrib.framework.get_model_variables()
init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
checkpoint_path, variables_to_restore)
# Run training.
scaffold = tf.Scaffold(init_fn=init_fn)
tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
***************************************************************************
* Initializing a model from a checkpoint whose variable names don't match *
***************************************************************************
At times, a user may want to initialize a new model with values from a
checkpoint whose variable names do not match those of the current model. In this
case, one needs to create a mapping from the checkpoint variable names to the
current model variables. This requires only a small modification of the code
above:
...
# Creates a model with two variables, var0 and var1
predictions = MyModel(images)
...
# Create the train_op
train_op = tf.contrib.training.create_train_op(total_loss, optimizer)
checkpoint_path = '/path/to/old_model_checkpoint'
# Create the mapping:
variables_to_restore = {
'name_var_0_in_checkpoint':
tf.contrib.framework.get_unique_variable('var0'),
'name_var_1_in_checkpoint':
tf.contrib.framework.get_unique_variable('var1')
}
init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
checkpoint_path, variables_to_restore)
scaffold = tf.Scaffold(init_fn=init_fn)
# Run training.
tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
*************************************************
* Fine-Tuning Part of a model from a checkpoint *
*************************************************
Rather than initializing all of the weights of a given model, we sometimes
only want to restore some of the weights from a checkpoint. To do this, one
need only filter those variables to initialize as follows:
...
# Create the train_op
train_op = tf.contrib.training.create_train_op(total_loss, optimizer)
checkpoint_path = '/path/to/old_model_checkpoint'
# Specify the variables to restore via a list of inclusion or exclusion
# patterns:
variables_to_restore = tf.contrib.framework.get_variables_to_restore(
      include=["conv"], exclude=["fc8", "fc9"])
# or
variables_to_restore = tf.contrib.framework.get_variables_to_restore(
exclude=["conv"])
init_fn = tf.contrib.framework.assign_from_checkpoint_fn(
checkpoint_path, variables_to_restore)
scaffold = tf.Scaffold(init_fn=init_fn)
# Run training.
tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
******************************************************
* Initializing model variables from values in memory *
******************************************************
One may want to initialize the weights of a model from values coming from an
arbitrary source (a text document, matlab file, etc). While this is technically
feasible using assign operations, this strategy results in the values of your
weights being stored in the graph. For large models, this becomes prohibitively
large. However, it's possible to perform this initial assignment without having
to store the values of the initial model in the graph itself by using
placeholders and a feed dictionary:
...
# Create the train_op
train_op = tf.contrib.training.create_train_op(total_loss, optimizer)
# Create the mapping from variable names to values:
var0_initial_value = ReadFromDisk(...)
var1_initial_value = ReadFromDisk(...)
var_names_to_values = {
'var0': var0_initial_value,
'var1': var1_initial_value,
}
init_fn = tf.contrib.framework.assign_from_values_fn(var_names_to_values)
scaffold = tf.Scaffold(init_fn=init_fn)
# Run training.
tf.contrib.training.train(train_op, my_log_dir, scaffold=scaffold)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import monitored_session
from tensorflow.python.training import optimizer as tf_optimizer
from tensorflow.python.training import training_util
# TODO(nsilberman): move add_gradients_summaries, clip_gradient_norms and
# multiply_gradients into contrib/summaries and contrib/optimizers.py
__all__ = [
'add_gradients_summaries',
'clip_gradient_norms',
'clip_gradient_norms_fn',
'create_train_op',
'multiply_gradients',
'train',
]
def add_gradients_summaries(grads_and_vars):
  """Add a histogram and a global-norm summary for each gradient.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).

  Returns:
    The list of created summaries.
  """
  summaries = []
  for grad, var in grads_and_vars:
    if grad is None:
      logging.info('Var %s has no gradient', var.op.name)
      continue
    # IndexedSlices gradients are summarized via their dense values.
    grad_values = grad.values if isinstance(grad, ops.IndexedSlices) else grad
    summaries.append(
        summary.histogram(var.op.name + '_gradient', grad_values))
    summaries.append(
        summary.scalar(var.op.name + '_gradient_norm',
                       clip_ops.global_norm([grad_values])))
  return summaries
def clip_gradient_norms(gradients_to_variables, max_norm):
  """Clips the gradients by the given value.

  Args:
    gradients_to_variables: A list of gradient to variable pairs (tuples).
    max_norm: the maximum norm value.

  Returns:
    A list of clipped gradient to variable pairs.
  """
  result = []
  for gradient, variable in gradients_to_variables:
    if gradient is not None:
      if isinstance(gradient, ops.IndexedSlices):
        # Clip the dense values and rebuild the IndexedSlices wrapper.
        clipped_values = clip_ops.clip_by_norm(gradient.values, max_norm)
        gradient = ops.IndexedSlices(clipped_values, gradient.indices,
                                     gradient.dense_shape)
      else:
        gradient = clip_ops.clip_by_norm(gradient, max_norm)
    result.append((gradient, variable))
  return result
def clip_gradient_norms_fn(max_norm):
  """Returns a `transform_grads_fn` function for gradient clipping."""
  def _transform_grads_fn(gradients_to_variables):
    # Delegates to `clip_gradient_norms` with the captured `max_norm`.
    return clip_gradient_norms(gradients_to_variables, max_norm)
  return _transform_grads_fn
def multiply_gradients(grads_and_vars, gradient_multipliers):
  """Scale selected gradients by per-variable coefficients.

  Args:
    grads_and_vars: A list of gradient to variable pairs (tuples).
    gradient_multipliers: A map from either `Variables` or `Variable` op names
      to the coefficient by which the associated gradient should be scaled.

  Returns:
    The updated list of gradient to variable pairs.

  Raises:
    ValueError: If `grads_and_vars` is not a list or if `gradient_multipliers`
    is empty or None or if `gradient_multipliers` is not a dictionary.
  """
  if not isinstance(grads_and_vars, list):
    raise ValueError('`grads_and_vars` must be a list.')
  if not gradient_multipliers:
    raise ValueError('`gradient_multipliers` is empty.')
  if not isinstance(gradient_multipliers, dict):
    raise ValueError('`gradient_multipliers` must be a dict.')

  scaled_pairs = []
  for gradient, variable in grads_and_vars:
    # A variable may be registered under the object itself or under its
    # op name; pairs with no registered multiplier pass through unchanged.
    if variable in gradient_multipliers:
      multiplier_key = variable
    elif variable.op.name in gradient_multipliers:
      multiplier_key = variable.op.name
    else:
      scaled_pairs.append((gradient, variable))
      continue

    if gradient is None:
      raise ValueError('Requested multiple of `None` gradient.')

    multiplier = constant_op.constant(
        gradient_multipliers[multiplier_key], dtype=gradient.dtype)
    if isinstance(gradient, ops.IndexedSlices):
      scaled_values = gradient.values * multiplier
      gradient = ops.IndexedSlices(
          scaled_values, gradient.indices, gradient.dense_shape)
    else:
      gradient *= multiplier
    scaled_pairs.append((gradient, variable))
  return scaled_pairs
# Sentinel default for `global_step` meaning "look up or create the graph's
# global step"; distinct from the caller explicitly passing `None`.
_USE_GLOBAL_STEP = 0


def create_train_op(total_loss,
                    optimizer,
                    global_step=_USE_GLOBAL_STEP,
                    update_ops=None,
                    variables_to_train=None,
                    transform_grads_fn=None,
                    summarize_gradients=False,
                    gate_gradients=tf_optimizer.Optimizer.GATE_OP,
                    aggregation_method=None,
                    colocate_gradients_with_ops=False,
                    check_numerics=True):
  """Creates an `Operation` that evaluates the gradients and returns the loss.

  Args:
    total_loss: A `Tensor` representing the total loss.
    optimizer: A tf.Optimizer to use for computing the gradients.
    global_step: A `Tensor` representing the global step variable. If left as
      `_USE_GLOBAL_STEP`, then tf.contrib.framework.global_step() is used.
    update_ops: An optional list of updates to execute. If `update_ops` is
      `None`, then the update ops are set to the contents of the
      `tf.GraphKeys.UPDATE_OPS` collection. If `update_ops` is not `None`, but
      it doesn't contain all of the update ops in `tf.GraphKeys.UPDATE_OPS`,
      a warning will be displayed.
    variables_to_train: an optional list of variables to train. If None, it will
      default to all tf.trainable_variables().
    transform_grads_fn: A function which takes a single argument, a list of
      gradient to variable pairs (tuples), performs any requested gradient
      updates, such as gradient clipping or multipliers, and returns the updated
      list.
    summarize_gradients: Whether or not add summaries for each gradient.
    gate_gradients: How to gate the computation of gradients. See tf.Optimizer.
    aggregation_method: Specifies the method used to combine gradient terms.
      Valid values are defined in the class `AggregationMethod`.
    colocate_gradients_with_ops: Whether or not to try colocating the gradients
      with the ops that generated them.
    check_numerics: Whether or not we apply check_numerics.

  Returns:
    A `Tensor` that when evaluated, computes the gradients and returns the total
    loss value.
  """
  if global_step is _USE_GLOBAL_STEP:
    global_step = training_util.get_or_create_global_step()

  # Update ops use GraphKeys.UPDATE_OPS collection if update_ops is None.
  global_update_ops = set(ops.get_collection(ops.GraphKeys.UPDATE_OPS))
  if update_ops is None:
    update_ops = global_update_ops
  else:
    update_ops = set(update_ops)
  if not global_update_ops.issubset(update_ops):
    # Missing update ops (e.g. batch-norm moving averages) would silently
    # never run, so warn loudly.
    logging.warning('update_ops in create_train_op does not contain all the '
                    ' update_ops in GraphKeys.UPDATE_OPS')

  # Make sure update_ops are computed before total_loss.
  if update_ops:
    with ops.control_dependencies(update_ops):
      barrier = control_flow_ops.no_op(name='update_barrier')
    total_loss = control_flow_ops.with_dependencies([barrier], total_loss)

  if variables_to_train is None:
    # Default to tf.trainable_variables()
    variables_to_train = tf_variables.trainable_variables()
  else:
    # Make sure that variables_to_train are in tf.trainable_variables()
    for v in variables_to_train:
      assert v in tf_variables.trainable_variables()

  assert variables_to_train

  # Create the gradients. Note that apply_gradients adds the gradient
  # computation to the current graph.
  grads = optimizer.compute_gradients(
      total_loss,
      variables_to_train,
      gate_gradients=gate_gradients,
      aggregation_method=aggregation_method,
      colocate_gradients_with_ops=colocate_gradients_with_ops)

  if transform_grads_fn:
    grads = transform_grads_fn(grads)

  # Summarize gradients.
  if summarize_gradients:
    with ops.name_scope('summarize_grads'):
      add_gradients_summaries(grads)

  # Create gradient updates.
  grad_updates = optimizer.apply_gradients(grads, global_step=global_step)

  with ops.name_scope('train_op'):
    # Make sure total_loss is valid.
    if check_numerics:
      total_loss = array_ops.check_numerics(total_loss,
                                            'LossTensor is inf or nan')

    # Ensure the train_tensor computes grad_updates.
    train_op = control_flow_ops.with_dependencies([grad_updates], total_loss)

  # Add the operation used for training to the 'train_op' collection
  train_ops = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
  if train_op not in train_ops:
    train_ops.append(train_op)

  return train_op
def train(train_op,
          logdir,
          master='',
          is_chief=True,
          scaffold=None,
          hooks=None,
          chief_only_hooks=None,
          save_checkpoint_secs=600,
          save_summaries_steps=100,
          config=None,
          max_wait_secs=7200):
  """Runs the training loop.

  Args:
    train_op: A `Tensor` that, when executed, will apply the gradients and
      return the loss value.
    logdir: The directory where the graph and checkpoints are saved.
    master: The URL of the master.
    is_chief: Specifies whether or not the training is being run by the primary
      replica during replica training.
    scaffold: An tf.train.Scaffold instance.
    hooks: List of `tf.train.SessionRunHook` callbacks which are run inside the
      training loop.
    chief_only_hooks: List of `tf.train.SessionRunHook` instances which are run
      inside the training loop for the chief trainer only.
    save_checkpoint_secs: The frequency, in seconds, that a checkpoint is saved
      using a default checkpoint saver. If `save_checkpoint_secs` is set to
      `None`, then the default checkpoint saver isn't used.
    save_summaries_steps: The frequency, in number of global steps, that the
      summaries are written to disk using a default summary saver. If
      `save_summaries_steps` is set to `None`, then the default summary saver
      isn't used.
    config: An instance of `tf.ConfigProto`.
    max_wait_secs: Maximum time workers should wait for the session to
      become available. This should be kept relatively short to help detect
      incorrect code, but sometimes may need to be increased if the chief takes
      a while to start up.

  Returns:
    the value of the loss function after training.

  Raises:
    ValueError: if `logdir` is `None` and either `save_checkpoint_secs` or
      `save_summaries_steps` are not `None`.
  """
  if logdir is None and is_chief:
    # Without a logdir the default savers have nowhere to write, so both
    # must be explicitly disabled by the caller.
    if save_summaries_steps:
      raise ValueError(
          'logdir cannot be None when save_summaries_steps is not None')

    if save_checkpoint_secs:
      raise ValueError(
          'logdir cannot be None when save_checkpoint_secs is not None')

  with monitored_session.MonitoredTrainingSession(
      master=master,
      is_chief=is_chief,
      checkpoint_dir=logdir,
      scaffold=scaffold,
      hooks=hooks,
      chief_only_hooks=chief_only_hooks,
      save_checkpoint_secs=save_checkpoint_secs,
      save_summaries_steps=save_summaries_steps,
      config=config,
      max_wait_secs=max_wait_secs) as session:
    loss = None
    # `should_stop()` becomes True when a hook requests a stop or the
    # session hits an unrecoverable error; `loss` keeps the last run value.
    while not session.should_stop():
      loss = session.run(train_op)
  return loss
| 37.454212 | 80 | 0.712567 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import monitored_session
from tensorflow.python.training import optimizer as tf_optimizer
from tensorflow.python.training import training_util
__all__ = [
'add_gradients_summaries',
'clip_gradient_norms',
'clip_gradient_norms_fn',
'create_train_op',
'multiply_gradients',
'train',
]
def add_gradients_summaries(grads_and_vars):
summaries = []
for grad, var in grads_and_vars:
if grad is not None:
if isinstance(grad, ops.IndexedSlices):
grad_values = grad.values
else:
grad_values = grad
summaries.append(
summary.histogram(var.op.name + '_gradient', grad_values))
summaries.append(
summary.scalar(var.op.name + '_gradient_norm',
clip_ops.global_norm([grad_values])))
else:
logging.info('Var %s has no gradient', var.op.name)
return summaries
def clip_gradient_norms(gradients_to_variables, max_norm):
clipped_grads_and_vars = []
for grad, var in gradients_to_variables:
if grad is not None:
if isinstance(grad, ops.IndexedSlices):
tmp = clip_ops.clip_by_norm(grad.values, max_norm)
grad = ops.IndexedSlices(tmp, grad.indices, grad.dense_shape)
else:
grad = clip_ops.clip_by_norm(grad, max_norm)
clipped_grads_and_vars.append((grad, var))
return clipped_grads_and_vars
def clip_gradient_norms_fn(max_norm):
def clip_norms(gradients_to_variables):
return clip_gradient_norms(gradients_to_variables, max_norm)
return clip_norms
def multiply_gradients(grads_and_vars, gradient_multipliers):
if not isinstance(grads_and_vars, list):
raise ValueError('`grads_and_vars` must be a list.')
if not gradient_multipliers:
raise ValueError('`gradient_multipliers` is empty.')
if not isinstance(gradient_multipliers, dict):
raise ValueError('`gradient_multipliers` must be a dict.')
multiplied_grads_and_vars = []
for grad, var in grads_and_vars:
if var in gradient_multipliers or var.op.name in gradient_multipliers:
key = var if var in gradient_multipliers else var.op.name
if grad is None:
raise ValueError('Requested multiple of `None` gradient.')
if isinstance(grad, ops.IndexedSlices):
tmp = grad.values * constant_op.constant(
gradient_multipliers[key], dtype=grad.dtype)
grad = ops.IndexedSlices(tmp, grad.indices, grad.dense_shape)
else:
grad *= constant_op.constant(
gradient_multipliers[key], dtype=grad.dtype)
multiplied_grads_and_vars.append((grad, var))
return multiplied_grads_and_vars
_USE_GLOBAL_STEP = 0
def create_train_op(total_loss,
optimizer,
global_step=_USE_GLOBAL_STEP,
update_ops=None,
variables_to_train=None,
transform_grads_fn=None,
summarize_gradients=False,
gate_gradients=tf_optimizer.Optimizer.GATE_OP,
aggregation_method=None,
colocate_gradients_with_ops=False,
check_numerics=True):
if global_step is _USE_GLOBAL_STEP:
global_step = training_util.get_or_create_global_step()
global_update_ops = set(ops.get_collection(ops.GraphKeys.UPDATE_OPS))
if update_ops is None:
update_ops = global_update_ops
else:
update_ops = set(update_ops)
if not global_update_ops.issubset(update_ops):
logging.warning('update_ops in create_train_op does not contain all the '
' update_ops in GraphKeys.UPDATE_OPS')
if update_ops:
with ops.control_dependencies(update_ops):
barrier = control_flow_ops.no_op(name='update_barrier')
total_loss = control_flow_ops.with_dependencies([barrier], total_loss)
if variables_to_train is None:
variables_to_train = tf_variables.trainable_variables()
else:
for v in variables_to_train:
assert v in tf_variables.trainable_variables()
assert variables_to_train
grads = optimizer.compute_gradients(
total_loss,
variables_to_train,
gate_gradients=gate_gradients,
aggregation_method=aggregation_method,
colocate_gradients_with_ops=colocate_gradients_with_ops)
if transform_grads_fn:
grads = transform_grads_fn(grads)
if summarize_gradients:
with ops.name_scope('summarize_grads'):
add_gradients_summaries(grads)
grad_updates = optimizer.apply_gradients(grads, global_step=global_step)
with ops.name_scope('train_op'):
if check_numerics:
total_loss = array_ops.check_numerics(total_loss,
'LossTensor is inf or nan')
train_op = control_flow_ops.with_dependencies([grad_updates], total_loss)
train_ops = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
if train_op not in train_ops:
train_ops.append(train_op)
return train_op
def train(train_op,
logdir,
master='',
is_chief=True,
scaffold=None,
hooks=None,
chief_only_hooks=None,
save_checkpoint_secs=600,
save_summaries_steps=100,
config=None,
max_wait_secs=7200):
if logdir is None and is_chief:
if save_summaries_steps:
raise ValueError(
'logdir cannot be None when save_summaries_steps is not None')
if save_checkpoint_secs:
raise ValueError(
'logdir cannot be None when save_checkpoint_secs is not None')
with monitored_session.MonitoredTrainingSession(
master=master,
is_chief=is_chief,
checkpoint_dir=logdir,
scaffold=scaffold,
hooks=hooks,
chief_only_hooks=chief_only_hooks,
save_checkpoint_secs=save_checkpoint_secs,
save_summaries_steps=save_summaries_steps,
config=config,
max_wait_secs=max_wait_secs) as session:
loss = None
while not session.should_stop():
loss = session.run(train_op)
return loss
| true | true |
f72e0c8fafede2e5047571a94e61d1f1523072c6 | 5,446 | py | Python | cms/signals/__init__.py | intgr/django-cms | 92edf033ccc0938e41f3752935516572c3623695 | [
"BSD-3-Clause"
] | 1 | 2015-06-11T19:25:26.000Z | 2015-06-11T19:25:26.000Z | cms/signals/__init__.py | damianmoore/django-cms | 2d3e10a01e792ec7da5c1418811c1be5ac84e5e2 | [
"BSD-3-Clause"
] | 5 | 2021-03-19T15:39:27.000Z | 2021-09-08T02:47:21.000Z | cms/signals/__init__.py | Acidburn0zzz/django-cms | 5a105a1c75eeb4c8a4c1c34301d93855e6724407 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from cms.signals.apphook import debug_server_restart
from cms.signals.page import pre_save_page, post_save_page, pre_delete_page, post_delete_page, post_moved_page
from cms.signals.permissions import post_save_user, post_save_user_group, pre_save_user, pre_delete_user, pre_save_group, pre_delete_group, pre_save_pagepermission, pre_delete_pagepermission, pre_save_globalpagepermission, pre_delete_globalpagepermission
from cms.signals.placeholder import pre_delete_placeholder_ref, post_delete_placeholder_ref
from cms.signals.plugins import post_delete_plugins, pre_save_plugins, pre_delete_plugins
from cms.signals.reversion_signals import post_revision
from cms.signals.title import pre_save_title, post_save_title, pre_delete_title, post_delete_title
from cms.utils.conf import get_cms_setting
from django.db.models import signals
from django.dispatch import Signal
from cms.models import Page, Title, CMSPlugin, PagePermission, GlobalPagePermission, PageUser, PageUserGroup, PlaceholderReference
from django.conf import settings
from django.contrib.auth.models import User, Group
#################### Our own signals ###################

# fired after page location is changed - is moved from one node to other
page_moved = Signal(providing_args=["instance"])

# fired after page gets published - copied to public model - there may be more
# than one instances published before this signal gets called
post_publish = Signal(providing_args=["instance", "language"])
post_unpublish = Signal(providing_args=["instance", "language"])

# fired if a public page with an apphook is added or changed
urls_need_reloading = Signal(providing_args=[])

# In DEBUG, restart the dev server so newly added apphook URLs are picked up.
if settings.DEBUG:
    urls_need_reloading.connect(debug_server_restart)

######################### plugins #######################
signals.pre_delete.connect(pre_delete_plugins, sender=CMSPlugin, dispatch_uid='cms_pre_delete_plugin')
signals.post_delete.connect(post_delete_plugins, sender=CMSPlugin, dispatch_uid='cms_post_delete_plugin')
signals.pre_save.connect(pre_save_plugins, sender=CMSPlugin, dispatch_uid='cms_pre_save_plugin')

########################## page #########################
signals.pre_save.connect(pre_save_page, sender=Page, dispatch_uid='cms_pre_save_page')
signals.post_save.connect(post_save_page, sender=Page, dispatch_uid='cms_post_save_page')
signals.pre_delete.connect(pre_delete_page, sender=Page, dispatch_uid='cms_pre_delete_page')
signals.post_delete.connect(post_delete_page, sender=Page, dispatch_uid='cms_post_delete_page')
page_moved.connect(post_moved_page, sender=Page, dispatch_uid='cms_post_move_page')

######################### title #########################
# NOTE(review): these dispatch_uids reuse the 'cms_*_page' names from the page
# section above (looks like copy-paste). Presumably no connection is lost
# because the sender differs (Title vs Page), but the uids are misleading --
# confirm Django's dedup key includes the sender before renaming.
signals.pre_save.connect(pre_save_title, sender=Title, dispatch_uid='cms_pre_save_page')
signals.post_save.connect(post_save_title, sender=Title, dispatch_uid='cms_post_save_page')
signals.pre_delete.connect(pre_delete_title, sender=Title, dispatch_uid='cms_pre_delete_page')
signals.post_delete.connect(post_delete_title, sender=Title, dispatch_uid='cms_post_delete_page')

###################### placeholder #######################
signals.pre_delete.connect(pre_delete_placeholder_ref, sender=PlaceholderReference,
                           dispatch_uid='cms_pre_delete_placeholder_ref')
signals.post_delete.connect(post_delete_placeholder_ref, sender=PlaceholderReference,
                            dispatch_uid='cms_post_delete_placeholder_ref')

###################### permissions #######################
if get_cms_setting('PERMISSION'):
    # only if permissions are in use
    signals.pre_save.connect(pre_save_user, sender=User, dispatch_uid='cms_pre_save_user')
    signals.post_save.connect(post_save_user, sender=User, dispatch_uid='cms_post_save_user')
    signals.pre_delete.connect(pre_delete_user, sender=User, dispatch_uid='cms_pre_delete_user')
    signals.pre_save.connect(pre_save_user, sender=PageUser, dispatch_uid='cms_pre_save_pageuser')
    signals.pre_delete.connect(pre_delete_user, sender=PageUser, dispatch_uid='cms_pre_delete_pageuser')
    signals.pre_save.connect(pre_save_group, sender=Group, dispatch_uid='cms_pre_save_group')
    signals.post_save.connect(post_save_user_group, sender=Group, dispatch_uid='cms_post_save_group')
    # NOTE(review): 'cms_post_save_group' is reused here on a *pre_delete*
    # connection (likely a copy-paste uid). Each Signal object keeps its own
    # receiver list, so presumably nothing collides -- verify before relying
    # on these uids for disconnects.
    signals.pre_delete.connect(pre_delete_group, sender=Group, dispatch_uid='cms_post_save_group')
    signals.pre_save.connect(pre_save_group, sender=PageUserGroup, dispatch_uid='cms_pre_save_pageusergroup')
    signals.pre_delete.connect(pre_delete_group, sender=PageUserGroup, dispatch_uid='cms_pre_delete_pageusergroup')
    signals.pre_save.connect(pre_save_pagepermission, sender=PagePermission, dispatch_uid='cms_pre_save_pagepermission')
    signals.pre_delete.connect(pre_delete_pagepermission, sender=PagePermission,
                               dispatch_uid='cms_pre_delete_pagepermission')
    signals.pre_save.connect(pre_save_globalpagepermission, sender=GlobalPagePermission,
                             dispatch_uid='cms_pre_save_globalpagepermission')
    signals.pre_delete.connect(pre_delete_globalpagepermission, sender=GlobalPagePermission,
                               dispatch_uid='cms_pre_delete_globalpagepermission')

###################### reversion #########################
# Only wire up the revision hook when django-reversion is installed.
if 'reversion' in settings.INSTALLED_APPS:
    from reversion.models import post_revision_commit

    post_revision_commit.connect(post_revision, dispatch_uid='cms_post_revision')
| 57.326316 | 254 | 0.769739 |
from cms.signals.apphook import debug_server_restart
from cms.signals.page import pre_save_page, post_save_page, pre_delete_page, post_delete_page, post_moved_page
from cms.signals.permissions import post_save_user, post_save_user_group, pre_save_user, pre_delete_user, pre_save_group, pre_delete_group, pre_save_pagepermission, pre_delete_pagepermission, pre_save_globalpagepermission, pre_delete_globalpagepermission
from cms.signals.placeholder import pre_delete_placeholder_ref, post_delete_placeholder_ref
from cms.signals.plugins import post_delete_plugins, pre_save_plugins, pre_delete_plugins
from cms.signals.reversion_signals import post_revision
from cms.signals.title import pre_save_title, post_save_title, pre_delete_title, post_delete_title
from cms.utils.conf import get_cms_setting
from django.db.models import signals
from django.dispatch import Signal
from cms.models import Page, Title, CMSPlugin, PagePermission, GlobalPagePermission, PageUser, PageUserGroup, PlaceholderReference
from django.conf import settings
from django.contrib.auth.models import User, Group
| true | true |
f72e0d40354a45c2165e32efb5d977457a17c832 | 30,088 | py | Python | foreman/data_refinery_foreman/surveyor/array_express.py | cgreene/refinebio | fe75e42f2963d60c4307806cba11520754547190 | [
"BSD-3-Clause"
] | null | null | null | foreman/data_refinery_foreman/surveyor/array_express.py | cgreene/refinebio | fe75e42f2963d60c4307806cba11520754547190 | [
"BSD-3-Clause"
] | null | null | null | foreman/data_refinery_foreman/surveyor/array_express.py | cgreene/refinebio | fe75e42f2963d60c4307806cba11520754547190 | [
"BSD-3-Clause"
] | null | null | null | import requests
from django.utils.dateparse import parse_datetime
from typing import List, Dict
from data_refinery_common.job_lookup import ProcessorPipeline, Downloaders
from data_refinery_common.logging import get_and_configure_logger
from data_refinery_common.models import (
Experiment,
ExperimentAnnotation,
ExperimentOrganismAssociation,
ExperimentSampleAssociation,
Organism,
OriginalFile,
OriginalFileSampleAssociation,
Sample,
SampleAnnotation,
SurveyJobKeyValue,
)
from data_refinery_common.utils import (
get_normalized_platform,
get_readable_affymetrix_names,
get_supported_microarray_platforms,
)
from data_refinery_foreman.surveyor import harmony, utils
from data_refinery_foreman.surveyor.external_source import ExternalSourceSurveyor
logger = get_and_configure_logger(__name__)

# ArrayExpress REST API (v3) endpoint for experiment metadata.
EXPERIMENTS_URL = "https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/"
# Per-experiment sample listing; `format()` with the experiment accession code.
SAMPLES_URL = EXPERIMENTS_URL + "{}/samples"
# Placeholder platform value used until the downloader can inspect the CEL
# file to determine the actual array design.
UNKNOWN = "UNKNOWN"
class UnsupportedPlatformException(Exception):
    """Raised when an experiment was conducted on a platform we don't support."""
    pass
class ArrayExpressSurveyor(ExternalSourceSurveyor):
    def source_type(self):
        """Return the downloader type handling this source (see `Downloaders`)."""
        return Downloaders.ARRAY_EXPRESS.value
@staticmethod
def _get_last_update_date(parsed_json: Dict) -> str:
if "lastupdatedate" in parsed_json:
return parsed_json["lastupdatedate"]
else:
return parsed_json["releasedate"]
@classmethod
def _apply_metadata_to_experiment(cls, experiment_object: Experiment, parsed_json: Dict):
# We aren't sure these fields will be populated, or how many there will be.
# Try to join them all together, or set a sensible default.
experiment_descripton = ""
if "description" in parsed_json and len(parsed_json["description"]) > 0:
for description_item in parsed_json["description"]:
if "text" in description_item:
experiment_descripton = experiment_descripton + description_item["text"] + "\n"
if experiment_descripton == "":
experiment_descripton = "Description not available.\n"
experiment_object.source_database = "ARRAY_EXPRESS"
experiment_object.title = parsed_json["name"]
# This will need to be updated if we ever use Array
# Express to get other kinds of data.
experiment_object.technology = "MICROARRAY"
experiment_object.description = experiment_descripton
experiment_object.source_first_published = parse_datetime(parsed_json["releasedate"])
experiment_object.source_last_modified \
= parse_datetime(cls._get_last_update_date(parsed_json))
    def create_experiment_from_api(self, experiment_accession_code: str) -> (Experiment, Dict):
        """Given an experiment accession code, create an Experiment object.

        Also returns a dictionary of additional information about the
        platform discovered for the experiment.

        Will raise an UnsupportedPlatformException if this experiment was
        conducted using a platform which we don't support.

        See an example at: https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/E-MTAB-3050/sample
        """
        request_url = EXPERIMENTS_URL + experiment_accession_code
        experiment_request = utils.requests_retry_session().get(request_url, timeout=60)

        try:
            parsed_json = experiment_request.json()["experiments"]["experiment"][0]
        except KeyError:
            logger.error("Remote experiment has no Experiment data!",
                         experiment_accession_code=experiment_accession_code,
                         survey_job=self.survey_job.id)
            raise

        experiment = {}
        experiment["name"] = parsed_json["name"]
        experiment["experiment_accession_code"] = experiment_accession_code

        # This experiment has no platform at all, and is therefore useless.
        if 'arraydesign' not in parsed_json or len(parsed_json["arraydesign"]) == 0:
            logger.warn("Remote experiment has no arraydesign listed.",
                        experiment_accession_code=experiment_accession_code,
                        survey_job=self.survey_job.id)
            raise UnsupportedPlatformException
        # If there is more than one arraydesign listed in the experiment
        # then there is no other way to determine which array was used
        # for which sample other than looking at the header of the CEL
        # file. That obviously cannot happen until the CEL file has been
        # downloaded so we can just mark it as UNKNOWN and let the
        # downloader inspect the downloaded file to determine the
        # array then.
        elif len(parsed_json["arraydesign"]) != 1 or "accession" not in parsed_json["arraydesign"][0]:
            experiment["platform_accession_code"] = UNKNOWN
            experiment["platform_accession_name"] = UNKNOWN
            experiment["manufacturer"] = UNKNOWN
        else:
            external_accession = parsed_json["arraydesign"][0]["accession"]
            for platform in get_supported_microarray_platforms():
                if platform["external_accession"] == external_accession:
                    experiment["platform_accession_code"] = get_normalized_platform(platform["platform_accession"])

                    # Illumina appears in the accession codes for
                    # platforms manufactured by Illumina
                    if "ILLUMINA" in experiment["platform_accession_code"].upper():
                        experiment["manufacturer"] = "ILLUMINA"
                        experiment["platform_accession_name"] = platform["platform_accession"]
                    else:
                        # It's not Illumina, the only other supported Microarray platform is
                        # Affy. As our list of supported platforms grows this logic will
                        # need to get more sophisticated.
                        experiment["manufacturer"] = "AFFYMETRIX"
                        platform_mapping = get_readable_affymetrix_names()
                        experiment["platform_accession_name"] = platform_mapping[
                            platform["platform_accession"]]

            if "platform_accession_code" not in experiment:
                # We don't know what platform this accession corresponds to.
                experiment["platform_accession_code"] = external_accession
                experiment["platform_accession_name"] = UNKNOWN
                experiment["manufacturer"] = UNKNOWN

        experiment["release_date"] = parsed_json["releasedate"]
        experiment["last_update_date"] = self._get_last_update_date(parsed_json)

        # Create the experiment object
        try:
            experiment_object = Experiment.objects.get(accession_code=experiment_accession_code)
            logger.debug("Experiment already exists, skipping object creation.",
                         experiment_accession_code=experiment_accession_code,
                         survey_job=self.survey_job.id)
        except Experiment.DoesNotExist:
            # Everything below only runs for experiments we haven't seen before.
            experiment_object = Experiment()
            experiment_object.accession_code = experiment_accession_code
            experiment_object.source_url = request_url
            ArrayExpressSurveyor._apply_metadata_to_experiment(experiment_object, parsed_json)
            experiment_object.save()

            # Keep the raw API payload around as a non-CCDL annotation.
            json_xa = ExperimentAnnotation()
            json_xa.experiment = experiment_object
            json_xa.data = parsed_json
            json_xa.is_ccdl = False
            json_xa.save()

            # Fetch and parse the IDF/SDRF file for any other fields
            IDF_URL_TEMPLATE = "https://www.ebi.ac.uk/arrayexpress/files/{code}/{code}.idf.txt"
            idf_url = IDF_URL_TEMPLATE.format(code=experiment_accession_code)
            idf_text = utils.requests_retry_session().get(idf_url, timeout=60).text

            # IDF is tab-separated: one key per line, one or more values.
            lines = idf_text.split('\n')
            idf_dict = {}
            for line in lines:
                keyval = line.strip().split('\t')
                if len(keyval) == 2:
                    idf_dict[keyval[0]] = keyval[1]
                elif len(keyval) > 2:
                    # Multi-valued keys are kept as lists.
                    idf_dict[keyval[0]] = keyval[1:]

            idf_xa = ExperimentAnnotation()
            idf_xa.data = idf_dict
            idf_xa.experiment = experiment_object
            idf_xa.is_ccdl = False
            idf_xa.save()

            if 'Investigation Title' in idf_dict and isinstance(idf_dict['Investigation Title'], str):
                experiment_object.title = idf_dict['Investigation Title']
            if 'Person Affiliation' in idf_dict:
                # This is very rare, ex: E-MEXP-32
                if isinstance(idf_dict['Person Affiliation'], list):
                    unique_people = list(set(idf_dict['Person Affiliation']))
                    experiment_object.submitter_institution = ", ".join(unique_people)[:255]
                else:
                    experiment_object.submitter_institution = idf_dict['Person Affiliation']

            # Get protocol_description from "<experiment_url>/protocols"
            # instead of from idf_dict, because the former provides more
            # details.
            protocol_url = request_url + '/protocols'
            protocol_request = utils.requests_retry_session().get(protocol_url, timeout=60)
            try:
                experiment_object.protocol_description = protocol_request.json()['protocols']
            except KeyError:
                logger.warning("Remote experiment has no protocol data!",
                               experiment_accession_code=experiment_accession_code,
                               survey_job=self.survey_job.id)

            if 'Publication Title' in idf_dict:
                # This will happen for some superseries.
                # Ex: E-GEOD-29536
                # Assume most recent is "best:, store the rest in experiment annotation.
                if isinstance(idf_dict['Publication Title'], list):
                    experiment_object.publication_title = "; ".join(idf_dict['Publication Title'])
                else:
                    experiment_object.publication_title = idf_dict['Publication Title']
                experiment_object.has_publication = True
            if 'Publication DOI' in idf_dict:
                if isinstance(idf_dict['Publication DOI'], list):
                    experiment_object.publication_doi = ", ".join(idf_dict['Publication DOI'])
                else:
                    experiment_object.publication_doi = idf_dict['Publication DOI']
                experiment_object.has_publication = True
            if 'PubMed ID' in idf_dict:
                if isinstance(idf_dict['PubMed ID'], list):
                    experiment_object.pubmed_id = ", ".join(idf_dict['PubMed ID'])
                else:
                    experiment_object.pubmed_id = idf_dict['PubMed ID']
                experiment_object.has_publication = True

            # Scrape publication title and authorship from Pubmed
            if experiment_object.pubmed_id:
                pubmed_metadata = utils.get_title_and_authors_for_pubmed_id(experiment_object.pubmed_id)
                experiment_object.publication_title = pubmed_metadata[0]
                experiment_object.publication_authors = pubmed_metadata[1]

            experiment_object.save()

        platform_dict = {}
        for k in ('platform_accession_code', 'platform_accession_name', 'manufacturer'):
            platform_dict[k] = experiment[k]

        return experiment_object, platform_dict
def determine_sample_accession(self, experiment_accession: str, sample_source_name: str,
sample_assay_name: str, filename: str) -> str:
"""Determine what to use as the sample's accession code.
This is a complicated heuristic to determine the sample
accession because there isn't a field that consistently
contains it so we're trying to figure out a heuristic that
will work for all the data. This may need even further
refinements if we come across examples that break it.
However, what's going on is that we think either the `source`
or `assay` field will be the sample accession but it's not
always the same.
Ex: E-MEXP-669 has it in sample_assay_name.
Therefore we try a few different things to determine which it
is.
The experiment accession must be prefixed since accessions
are non-unique on AE, ex "Sample 1" is a valid assay name.
"""
# It SEEMS like the filename often contains part or all of the
# sample name so we first try to see if either field contains
# the filename with the extension stripped off:
if isinstance(filename, str):
stripped_filename = ".".join(filename.split(".")[:-1])
if stripped_filename != "":
if stripped_filename in sample_source_name:
return experiment_accession + "-" + sample_source_name
elif stripped_filename in sample_assay_name:
return experiment_accession + "-" + sample_assay_name
# Accessions don't have spaces in them, but sometimes these
# fields do so next we try to see if one has spaces and the
# other doesn't:
source_has_spaces = " " in sample_source_name
assay_has_spaces = " " in sample_assay_name
if assay_has_spaces and not source_has_spaces:
return experiment_accession + "-" + sample_source_name
elif source_has_spaces and not assay_has_spaces:
return experiment_accession + "-" + sample_assay_name
# We're out of options so return the longest one.
if len(sample_source_name) >= len(sample_assay_name):
return experiment_accession + "-" + sample_source_name
else:
return experiment_accession + "-" + sample_assay_name
@staticmethod
def extract_protocol_text(protocol_text):
"""Returns a string representation of protocol_text.
protocol_text may be a string or a list containing both
strings and dicts, like so (it's what the API returns
sometimes, see E-MEXP-2381 as an example):
[
"Microarrays were imaged using an Agilent microarray scanner in XDR (eXtended Dynamic Range function) mode and a scan resolution of 5 \u00b5m.",
{
"br": null
},
"(Parameters: Scanning hardware = DNA Microarray Scanner BA [Agilent Technologies], Scanning software = Feature Extraction Software [Agilent])"
]
"""
if not protocol_text:
return ''
elif type(protocol_text) == str:
return protocol_text.strip()
elif type(protocol_text) == list:
# These can be {"br": None}, so skip non string lines
return " ".join([line.strip() for line in protocol_text if type(line) == str])
else:
# Not sure what would get us here, but it's not worth raising an error over
return str(protocol_text)
@staticmethod
def update_sample_protocol_info(existing_protocols, experiment_protocol, protocol_url):
"""Compares experiment_protocol with a sample's
existing_protocols and updates the latter if the former includes
any new entry.
Returns a two-element tuple, the first is existing_protocols
(which may or may not have been updated) and the second is a
bool indicating whether exisiting_protocols has been updated.
Note that the ArrayExpress experiment-level protocol may include
multiple protocol entries.
"""
if not 'protocol' in experiment_protocol:
return (existing_protocols, False)
is_updated = False
# Compare each entry in experiment protocol with the existing
# protocols; if the entry is new, add it to exising_protocols.
for new_protocol in experiment_protocol['protocol']:
new_protocol_text = new_protocol.get('text', '')
new_protocol_text = ArrayExpressSurveyor.extract_protocol_text(new_protocol_text)
# Ignore experiment-level protocols whose accession or text
# field is unavailable or empty.
if (not new_protocol.get('accession', '').strip() or
not new_protocol_text):
continue
new_protocol_is_found = False
for existing_protocol in existing_protocols:
if (new_protocol.get('accession', '') == existing_protocol['Accession']
and new_protocol_text == existing_protocol['Text']
and new_protocol.get('type', '') == existing_protocol['Type']):
new_protocol_is_found = True
break
if not new_protocol_is_found:
existing_protocols.append({
'Accession': new_protocol['accession'],
'Text': new_protocol_text,
'Type': new_protocol.get('type', ''), # in case 'type' field is unavailable
'Reference': protocol_url
})
is_updated = True
return (existing_protocols, is_updated)
@staticmethod
def _apply_harmonized_metadata_to_sample(sample: Sample, harmonized_metadata: dict):
"""Applies the harmonized metadata to `sample`"""
for key, value in harmonized_metadata.items():
setattr(sample, key, value)
def create_samples_from_api(self,
experiment: Experiment,
platform_dict: Dict
) -> List[Sample]:
"""Generates a Sample item for each sample in an AE experiment.
There are many possible data situations for a sample:
- If the sample only has raw data available:
- If it is on a platform that we support:
Download this raw data and process it
- If it is not on a platform we support:
Don't download anything, don't process anything
- If the sample has both raw and derived data:
- If the raw data is on a platform we support:
Download the raw data and process it, abandon the derived data
- If the raw data is not on a platform we support
Download the derived data and no-op it, abandon the raw data
- If the sample only has derived data:
Download the derived data and no-op it.
See an example at: https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/E-MTAB-3050/samples
"""
created_samples = []
samples_endpoint = SAMPLES_URL.format(experiment.accession_code)
r = utils.requests_retry_session().get(samples_endpoint, timeout=60)
samples = r.json()["experiment"]["sample"]
# The SDRF is the complete metadata record on a sample/property basis.
# We run this through our harmonizer and then attach the properties
# to our created samples.
SDRF_URL_TEMPLATE = "https://www.ebi.ac.uk/arrayexpress/files/{code}/{code}.sdrf.txt"
sdrf_url = SDRF_URL_TEMPLATE.format(code=experiment.accession_code)
sdrf_samples = harmony.parse_sdrf(sdrf_url)
harmonized_samples = harmony.harmonize(sdrf_samples)
# An experiment can have many samples
for sample_data in samples:
# For some reason, this sample has no files associated with it.
if "file" not in sample_data or len(sample_data['file']) == 0:
continue
# Each sample is given an experimenatlly-unique title.
flat_sample = utils.flatten(sample_data)
title = harmony.extract_title(flat_sample)
# A sample may actually have many sub files.
# If there is raw data, take that.
# If not, take the derived.
has_raw = False
for sub_file in sample_data['file']:
# For ex: E-GEOD-15645
if isinstance(sub_file['comment'], list):
sub_file_mod = sub_file
sub_file_mod['comment'] = sub_file['comment'][0]
else:
sub_file_mod = sub_file
# Some have the 'data' field, but not the actual data
# Ex: E-GEOD-9656
if sub_file_mod['type'] == "data" and sub_file_mod['comment'].get('value', None) != None:
has_raw = True
# 'value' can be None, convert to an empty string to
# make it easier to use.
comment_value = sub_file_mod['comment'].get('value', '') or ''
if 'raw' in comment_value:
has_raw = True
skip_sample = False
for sub_file in sample_data['file']:
# Don't get the raw data if it's only a 1-color sample.
if 'Cy3' in str(sample_data) and 'Cy5' not in str(sample_data):
has_raw = False
# Skip derived data if we have it raw.
if has_raw and "derived data" in sub_file['type']:
continue
download_url = None
filename = sub_file["name"]
# sub_file["comment"] is only a list if there's
# more than one comment...
comments = sub_file["comment"]
if isinstance(comments, list):
# Could be: "Derived ArrayExpress Data Matrix FTP
# file" or: "ArrayExpress FTP file". If there is
# no comment with a name including "FTP file" then
# we don't know where to download it so we need to
# mark this job as an error. Therefore don't catch
# the potential exception where download_url
# doesn't get defined.
for comment in comments:
if "FTP file" in comment["name"]:
download_url = comment["value"]
break
else:
download_url = comments["value"]
if not download_url:
logger.error("Sample %s did not specify a download url, skipping.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id,
sub_file=sub_file)
skip_sample = True
continue
if not filename:
logger.error("Sample %s did not specify a filename, skipping.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id,
sub_file=sub_file)
skip_sample = True
continue
if skip_sample:
continue
# The accession code is not a simple matter to determine.
sample_source_name = sample_data["source"].get("name", "")
sample_assay_name = sample_data["assay"].get("name", "")
sample_accession_code = self.determine_sample_accession(
experiment.accession_code,
sample_source_name,
sample_assay_name,
filename)
# Figure out the Organism for this sample
organism_name = UNKNOWN
for characteristic in sample_data["characteristic"]:
if characteristic["category"].upper() == "ORGANISM":
organism_name = characteristic["value"].upper()
if organism_name == UNKNOWN:
logger.error("Sample %s did not specify the organism name.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id)
organism = None
continue
else:
organism = Organism.get_object_for_name(organism_name)
# Create the sample object
try:
# Associate it with the experiment, but since it
# already exists it already has original files
# associated with it and it's already been downloaded,
# so don't add it to created_samples.
sample_object = Sample.objects.get(accession_code=sample_accession_code)
# If input experiment includes new protocol information,
# update sample's protocol_info.
existing_protocols = sample_object.protocol_info
protocol_info, is_updated = self.update_sample_protocol_info(
existing_protocols,
experiment.protocol_description,
experiment.source_url + '/protocols'
)
if is_updated:
sample_object.protocol_info = protocol_info
sample_obejct.save()
logger.debug("Sample %s already exists, skipping object creation.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id)
except Sample.DoesNotExist:
sample_object = Sample()
# The basics
sample_object.source_database = "ARRAY_EXPRESS"
sample_object.title = title
sample_object.accession_code = sample_accession_code
sample_object.source_archive_url = samples_endpoint
sample_object.organism = organism
sample_object.platform_name = platform_dict["platform_accession_name"]
sample_object.platform_accession_code = platform_dict["platform_accession_code"]
sample_object.manufacturer = platform_dict["manufacturer"]
sample_object.technology = "MICROARRAY"
protocol_info, is_updated = self.update_sample_protocol_info(
existing_protocols=[],
experiment_protocol=experiment.protocol_description,
protocol_url=experiment.source_url + '/protocols'
)
# Do not check is_updated the first time because we must
# save a list so we can append to it later.
sample_object.protocol_info = protocol_info
sample_object.save()
# Directly assign the harmonized properties
harmonized_sample = harmonized_samples[title]
ArrayExpressSurveyor._apply_harmonized_metadata_to_sample(sample_object, harmonized_sample)
sample_annotation = SampleAnnotation()
sample_annotation.data = sample_data
sample_annotation.sample = sample_object
sample_annotation.is_ccdl = False
sample_annotation.save()
original_file = OriginalFile()
original_file.filename = filename
original_file.source_filename = filename
original_file.source_url = download_url
original_file.is_downloaded = False
original_file.is_archive = True
original_file.has_raw = has_raw
original_file.save()
original_file_sample_association = OriginalFileSampleAssociation()
original_file_sample_association.original_file = original_file
original_file_sample_association.sample = sample_object
original_file_sample_association.save()
created_samples.append(sample_object)
logger.debug("Created " + str(sample_object),
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id,
sample=sample_object.id)
# Create associations if they don't already exist
ExperimentSampleAssociation.objects.get_or_create(
experiment=experiment, sample=sample_object)
ExperimentOrganismAssociation.objects.get_or_create(
experiment=experiment, organism=organism)
return created_samples
def discover_experiment_and_samples(self) -> (Experiment, List[Sample]):
experiment_accession_code = (
SurveyJobKeyValue
.objects
.get(survey_job_id=self.survey_job.id,
key__exact="experiment_accession_code")
.value
)
logger.info("Surveying experiment with accession code: %s.",
experiment_accession_code,
survey_job=self.survey_job.id)
try:
experiment, platform_dict = self.create_experiment_from_api(experiment_accession_code)
except UnsupportedPlatformException as e:
logger.info("Experiment was not on a supported platform, skipping.",
experiment_accession_code=experiment_accession_code,
survey_job=self.survey_job.id)
return None, []
except:
logger.exception("Error occurred while surveying experiment!",
experiment_accession_code=experiment_accession_code)
return None, []
samples = self.create_samples_from_api(experiment, platform_dict)
return experiment, samples
| 46.866044 | 154 | 0.61031 | import requests
from django.utils.dateparse import parse_datetime
from typing import List, Dict
from data_refinery_common.job_lookup import ProcessorPipeline, Downloaders
from data_refinery_common.logging import get_and_configure_logger
from data_refinery_common.models import (
Experiment,
ExperimentAnnotation,
ExperimentOrganismAssociation,
ExperimentSampleAssociation,
Organism,
OriginalFile,
OriginalFileSampleAssociation,
Sample,
SampleAnnotation,
SurveyJobKeyValue,
)
from data_refinery_common.utils import (
get_normalized_platform,
get_readable_affymetrix_names,
get_supported_microarray_platforms,
)
from data_refinery_foreman.surveyor import harmony, utils
from data_refinery_foreman.surveyor.external_source import ExternalSourceSurveyor
logger = get_and_configure_logger(__name__)
EXPERIMENTS_URL = "https://www.ebi.ac.uk/arrayexpress/json/v3/experiments/"
SAMPLES_URL = EXPERIMENTS_URL + "{}/samples"
UNKNOWN = "UNKNOWN"
class UnsupportedPlatformException(Exception):
pass
class ArrayExpressSurveyor(ExternalSourceSurveyor):
def source_type(self):
return Downloaders.ARRAY_EXPRESS.value
@staticmethod
def _get_last_update_date(parsed_json: Dict) -> str:
if "lastupdatedate" in parsed_json:
return parsed_json["lastupdatedate"]
else:
return parsed_json["releasedate"]
@classmethod
def _apply_metadata_to_experiment(cls, experiment_object: Experiment, parsed_json: Dict):
# Try to join them all together, or set a sensible default.
experiment_descripton = ""
if "description" in parsed_json and len(parsed_json["description"]) > 0:
for description_item in parsed_json["description"]:
if "text" in description_item:
experiment_descripton = experiment_descripton + description_item["text"] + "\n"
if experiment_descripton == "":
experiment_descripton = "Description not available.\n"
experiment_object.source_database = "ARRAY_EXPRESS"
experiment_object.title = parsed_json["name"]
# This will need to be updated if we ever use Array
# Express to get other kinds of data.
experiment_object.technology = "MICROARRAY"
experiment_object.description = experiment_descripton
experiment_object.source_first_published = parse_datetime(parsed_json["releasedate"])
experiment_object.source_last_modified \
= parse_datetime(cls._get_last_update_date(parsed_json))
def create_experiment_from_api(self, experiment_accession_code: str) -> (Experiment, Dict):
request_url = EXPERIMENTS_URL + experiment_accession_code
experiment_request = utils.requests_retry_session().get(request_url, timeout=60)
try:
parsed_json = experiment_request.json()["experiments"]["experiment"][0]
except KeyError:
logger.error("Remote experiment has no Experiment data!",
experiment_accession_code=experiment_accession_code,
survey_job=self.survey_job.id)
raise
experiment = {}
experiment["name"] = parsed_json["name"]
experiment["experiment_accession_code"] = experiment_accession_code
# This experiment has no platform at all, and is therefore useless.
if 'arraydesign' not in parsed_json or len(parsed_json["arraydesign"]) == 0:
logger.warn("Remote experiment has no arraydesign listed.",
experiment_accession_code=experiment_accession_code,
survey_job=self.survey_job.id)
raise UnsupportedPlatformException
# If there is more than one arraydesign listed in the experiment
# then there is no other way to determine which array was used
# for which sample other than looking at the header of the CEL
# file. That obviously cannot happen until the CEL file has been
# downloaded so we can just mark it as UNKNOWN and let the
# downloader inspect the downloaded file to determine the
# array then.
elif len(parsed_json["arraydesign"]) != 1 or "accession" not in parsed_json["arraydesign"][0]:
experiment["platform_accession_code"] = UNKNOWN
experiment["platform_accession_name"] = UNKNOWN
experiment["manufacturer"] = UNKNOWN
else:
external_accession = parsed_json["arraydesign"][0]["accession"]
for platform in get_supported_microarray_platforms():
if platform["external_accession"] == external_accession:
experiment["platform_accession_code"] = get_normalized_platform(platform["platform_accession"])
# Illumina appears in the accession codes for
# platforms manufactured by Illumina
if "ILLUMINA" in experiment["platform_accession_code"].upper():
experiment["manufacturer"] = "ILLUMINA"
experiment["platform_accession_name"] = platform["platform_accession"]
else:
# It's not Illumina, the only other supported Microarray platform is
experiment["manufacturer"] = "AFFYMETRIX"
platform_mapping = get_readable_affymetrix_names()
experiment["platform_accession_name"] = platform_mapping[
platform["platform_accession"]]
if "platform_accession_code" not in experiment:
experiment["platform_accession_code"] = external_accession
experiment["platform_accession_name"] = UNKNOWN
experiment["manufacturer"] = UNKNOWN
experiment["release_date"] = parsed_json["releasedate"]
experiment["last_update_date"] = self._get_last_update_date(parsed_json)
# Create the experiment object
try:
experiment_object = Experiment.objects.get(accession_code=experiment_accession_code)
logger.debug("Experiment already exists, skipping object creation.",
experiment_accession_code=experiment_accession_code,
survey_job=self.survey_job.id)
except Experiment.DoesNotExist:
experiment_object = Experiment()
experiment_object.accession_code = experiment_accession_code
experiment_object.source_url = request_url
ArrayExpressSurveyor._apply_metadata_to_experiment(experiment_object, parsed_json)
experiment_object.save()
json_xa = ExperimentAnnotation()
json_xa.experiment = experiment_object
json_xa.data = parsed_json
json_xa.is_ccdl = False
json_xa.save()
# Fetch and parse the IDF/SDRF file for any other fields
IDF_URL_TEMPLATE = "https://www.ebi.ac.uk/arrayexpress/files/{code}/{code}.idf.txt"
idf_url = IDF_URL_TEMPLATE.format(code=experiment_accession_code)
idf_text = utils.requests_retry_session().get(idf_url, timeout=60).text
lines = idf_text.split('\n')
idf_dict = {}
for line in lines:
keyval = line.strip().split('\t')
if len(keyval) == 2:
idf_dict[keyval[0]] = keyval[1]
elif len(keyval) > 2:
idf_dict[keyval[0]] = keyval[1:]
idf_xa = ExperimentAnnotation()
idf_xa.data = idf_dict
idf_xa.experiment = experiment_object
idf_xa.is_ccdl = False
idf_xa.save()
if 'Investigation Title' in idf_dict and isinstance(idf_dict['Investigation Title'], str):
experiment_object.title = idf_dict['Investigation Title']
if 'Person Affiliation' in idf_dict:
# This is very rare, ex: E-MEXP-32
if isinstance(idf_dict['Person Affiliation'], list):
unique_people = list(set(idf_dict['Person Affiliation']))
experiment_object.submitter_institution = ", ".join(unique_people)[:255]
else:
experiment_object.submitter_institution = idf_dict['Person Affiliation']
# Get protocol_description from "<experiment_url>/protocols"
# instead of from idf_dict, because the former provides more
# details.
protocol_url = request_url + '/protocols'
protocol_request = utils.requests_retry_session().get(protocol_url, timeout=60)
try:
experiment_object.protocol_description = protocol_request.json()['protocols']
except KeyError:
logger.warning("Remote experiment has no protocol data!",
experiment_accession_code=experiment_accession_code,
survey_job=self.survey_job.id)
if 'Publication Title' in idf_dict:
# This will happen for some superseries.
# Ex: E-GEOD-29536
# Assume most recent is "best:, store the rest in experiment annotation.
if isinstance(idf_dict['Publication Title'], list):
experiment_object.publication_title = "; ".join(idf_dict['Publication Title'])
else:
experiment_object.publication_title = idf_dict['Publication Title']
experiment_object.has_publication = True
if 'Publication DOI' in idf_dict:
if isinstance(idf_dict['Publication DOI'], list):
experiment_object.publication_doi = ", ".join(idf_dict['Publication DOI'])
else:
experiment_object.publication_doi = idf_dict['Publication DOI']
experiment_object.has_publication = True
if 'PubMed ID' in idf_dict:
if isinstance(idf_dict['PubMed ID'], list):
experiment_object.pubmed_id = ", ".join(idf_dict['PubMed ID'])
else:
experiment_object.pubmed_id = idf_dict['PubMed ID']
experiment_object.has_publication = True
# Scrape publication title and authorship from Pubmed
if experiment_object.pubmed_id:
pubmed_metadata = utils.get_title_and_authors_for_pubmed_id(experiment_object.pubmed_id)
experiment_object.publication_title = pubmed_metadata[0]
experiment_object.publication_authors = pubmed_metadata[1]
experiment_object.save()
platform_dict = {}
for k in ('platform_accession_code', 'platform_accession_name', 'manufacturer'):
platform_dict[k] = experiment[k]
return experiment_object, platform_dict
def determine_sample_accession(self, experiment_accession: str, sample_source_name: str,
sample_assay_name: str, filename: str) -> str:
# It SEEMS like the filename often contains part or all of the
# sample name so we first try to see if either field contains
# the filename with the extension stripped off:
if isinstance(filename, str):
stripped_filename = ".".join(filename.split(".")[:-1])
if stripped_filename != "":
if stripped_filename in sample_source_name:
return experiment_accession + "-" + sample_source_name
elif stripped_filename in sample_assay_name:
return experiment_accession + "-" + sample_assay_name
# Accessions don't have spaces in them, but sometimes these
# fields do so next we try to see if one has spaces and the
# other doesn't:
source_has_spaces = " " in sample_source_name
assay_has_spaces = " " in sample_assay_name
if assay_has_spaces and not source_has_spaces:
return experiment_accession + "-" + sample_source_name
elif source_has_spaces and not assay_has_spaces:
return experiment_accession + "-" + sample_assay_name
# We're out of options so return the longest one.
if len(sample_source_name) >= len(sample_assay_name):
return experiment_accession + "-" + sample_source_name
else:
return experiment_accession + "-" + sample_assay_name
@staticmethod
def extract_protocol_text(protocol_text):
if not protocol_text:
return ''
elif type(protocol_text) == str:
return protocol_text.strip()
elif type(protocol_text) == list:
# These can be {"br": None}, so skip non string lines
return " ".join([line.strip() for line in protocol_text if type(line) == str])
else:
# Not sure what would get us here, but it's not worth raising an error over
return str(protocol_text)
@staticmethod
def update_sample_protocol_info(existing_protocols, experiment_protocol, protocol_url):
if not 'protocol' in experiment_protocol:
return (existing_protocols, False)
is_updated = False
# Compare each entry in experiment protocol with the existing
# protocols; if the entry is new, add it to exising_protocols.
for new_protocol in experiment_protocol['protocol']:
new_protocol_text = new_protocol.get('text', '')
new_protocol_text = ArrayExpressSurveyor.extract_protocol_text(new_protocol_text)
# Ignore experiment-level protocols whose accession or text
# field is unavailable or empty.
if (not new_protocol.get('accession', '').strip() or
not new_protocol_text):
continue
new_protocol_is_found = False
for existing_protocol in existing_protocols:
if (new_protocol.get('accession', '') == existing_protocol['Accession']
and new_protocol_text == existing_protocol['Text']
and new_protocol.get('type', '') == existing_protocol['Type']):
new_protocol_is_found = True
break
if not new_protocol_is_found:
existing_protocols.append({
'Accession': new_protocol['accession'],
'Text': new_protocol_text,
'Type': new_protocol.get('type', ''), # in case 'type' field is unavailable
'Reference': protocol_url
})
is_updated = True
return (existing_protocols, is_updated)
@staticmethod
def _apply_harmonized_metadata_to_sample(sample: Sample, harmonized_metadata: dict):
for key, value in harmonized_metadata.items():
setattr(sample, key, value)
def create_samples_from_api(self,
experiment: Experiment,
platform_dict: Dict
) -> List[Sample]:
created_samples = []
samples_endpoint = SAMPLES_URL.format(experiment.accession_code)
r = utils.requests_retry_session().get(samples_endpoint, timeout=60)
samples = r.json()["experiment"]["sample"]
# The SDRF is the complete metadata record on a sample/property basis.
# We run this through our harmonizer and then attach the properties
# to our created samples.
SDRF_URL_TEMPLATE = "https://www.ebi.ac.uk/arrayexpress/files/{code}/{code}.sdrf.txt"
sdrf_url = SDRF_URL_TEMPLATE.format(code=experiment.accession_code)
sdrf_samples = harmony.parse_sdrf(sdrf_url)
harmonized_samples = harmony.harmonize(sdrf_samples)
# An experiment can have many samples
for sample_data in samples:
# For some reason, this sample has no files associated with it.
if "file" not in sample_data or len(sample_data['file']) == 0:
continue
# Each sample is given an experimenatlly-unique title.
flat_sample = utils.flatten(sample_data)
title = harmony.extract_title(flat_sample)
# A sample may actually have many sub files.
# If there is raw data, take that.
# If not, take the derived.
has_raw = False
for sub_file in sample_data['file']:
# For ex: E-GEOD-15645
if isinstance(sub_file['comment'], list):
sub_file_mod = sub_file
sub_file_mod['comment'] = sub_file['comment'][0]
else:
sub_file_mod = sub_file
# Some have the 'data' field, but not the actual data
# Ex: E-GEOD-9656
if sub_file_mod['type'] == "data" and sub_file_mod['comment'].get('value', None) != None:
has_raw = True
# 'value' can be None, convert to an empty string to
# make it easier to use.
comment_value = sub_file_mod['comment'].get('value', '') or ''
if 'raw' in comment_value:
has_raw = True
skip_sample = False
for sub_file in sample_data['file']:
# Don't get the raw data if it's only a 1-color sample.
if 'Cy3' in str(sample_data) and 'Cy5' not in str(sample_data):
has_raw = False
# Skip derived data if we have it raw.
if has_raw and "derived data" in sub_file['type']:
continue
download_url = None
filename = sub_file["name"]
# sub_file["comment"] is only a list if there's
# more than one comment...
comments = sub_file["comment"]
if isinstance(comments, list):
# Could be: "Derived ArrayExpress Data Matrix FTP
# no comment with a name including "FTP file" then
# we don't know where to download it so we need to
# mark this job as an error. Therefore don't catch
# the potential exception where download_url
# doesn't get defined.
for comment in comments:
if "FTP file" in comment["name"]:
download_url = comment["value"]
break
else:
download_url = comments["value"]
if not download_url:
logger.error("Sample %s did not specify a download url, skipping.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id,
sub_file=sub_file)
skip_sample = True
continue
if not filename:
logger.error("Sample %s did not specify a filename, skipping.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id,
sub_file=sub_file)
skip_sample = True
continue
if skip_sample:
continue
# The accession code is not a simple matter to determine.
sample_source_name = sample_data["source"].get("name", "")
sample_assay_name = sample_data["assay"].get("name", "")
sample_accession_code = self.determine_sample_accession(
experiment.accession_code,
sample_source_name,
sample_assay_name,
filename)
# Figure out the Organism for this sample
organism_name = UNKNOWN
for characteristic in sample_data["characteristic"]:
if characteristic["category"].upper() == "ORGANISM":
organism_name = characteristic["value"].upper()
if organism_name == UNKNOWN:
logger.error("Sample %s did not specify the organism name.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id)
organism = None
continue
else:
organism = Organism.get_object_for_name(organism_name)
# Create the sample object
try:
# Associate it with the experiment, but since it
# already exists it already has original files
# associated with it and it's already been downloaded,
# so don't add it to created_samples.
sample_object = Sample.objects.get(accession_code=sample_accession_code)
# If input experiment includes new protocol information,
# update sample's protocol_info.
existing_protocols = sample_object.protocol_info
protocol_info, is_updated = self.update_sample_protocol_info(
existing_protocols,
experiment.protocol_description,
experiment.source_url + '/protocols'
)
if is_updated:
sample_object.protocol_info = protocol_info
sample_obejct.save()
logger.debug("Sample %s already exists, skipping object creation.",
sample_accession_code,
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id)
except Sample.DoesNotExist:
sample_object = Sample()
# The basics
sample_object.source_database = "ARRAY_EXPRESS"
sample_object.title = title
sample_object.accession_code = sample_accession_code
sample_object.source_archive_url = samples_endpoint
sample_object.organism = organism
sample_object.platform_name = platform_dict["platform_accession_name"]
sample_object.platform_accession_code = platform_dict["platform_accession_code"]
sample_object.manufacturer = platform_dict["manufacturer"]
sample_object.technology = "MICROARRAY"
protocol_info, is_updated = self.update_sample_protocol_info(
existing_protocols=[],
experiment_protocol=experiment.protocol_description,
protocol_url=experiment.source_url + '/protocols'
)
# Do not check is_updated the first time because we must
# save a list so we can append to it later.
sample_object.protocol_info = protocol_info
sample_object.save()
# Directly assign the harmonized properties
harmonized_sample = harmonized_samples[title]
ArrayExpressSurveyor._apply_harmonized_metadata_to_sample(sample_object, harmonized_sample)
sample_annotation = SampleAnnotation()
sample_annotation.data = sample_data
sample_annotation.sample = sample_object
sample_annotation.is_ccdl = False
sample_annotation.save()
original_file = OriginalFile()
original_file.filename = filename
original_file.source_filename = filename
original_file.source_url = download_url
original_file.is_downloaded = False
original_file.is_archive = True
original_file.has_raw = has_raw
original_file.save()
original_file_sample_association = OriginalFileSampleAssociation()
original_file_sample_association.original_file = original_file
original_file_sample_association.sample = sample_object
original_file_sample_association.save()
created_samples.append(sample_object)
logger.debug("Created " + str(sample_object),
experiment_accession_code=experiment.accession_code,
survey_job=self.survey_job.id,
sample=sample_object.id)
# Create associations if they don't already exist
ExperimentSampleAssociation.objects.get_or_create(
experiment=experiment, sample=sample_object)
ExperimentOrganismAssociation.objects.get_or_create(
experiment=experiment, organism=organism)
return created_samples
def discover_experiment_and_samples(self) -> (Experiment, List[Sample]):
experiment_accession_code = (
SurveyJobKeyValue
.objects
.get(survey_job_id=self.survey_job.id,
key__exact="experiment_accession_code")
.value
)
logger.info("Surveying experiment with accession code: %s.",
experiment_accession_code,
survey_job=self.survey_job.id)
try:
experiment, platform_dict = self.create_experiment_from_api(experiment_accession_code)
except UnsupportedPlatformException as e:
logger.info("Experiment was not on a supported platform, skipping.",
experiment_accession_code=experiment_accession_code,
survey_job=self.survey_job.id)
return None, []
except:
logger.exception("Error occurred while surveying experiment!",
experiment_accession_code=experiment_accession_code)
return None, []
samples = self.create_samples_from_api(experiment, platform_dict)
return experiment, samples
| true | true |
f72e0d86ece935f9ce0d74a04588a051fdc373b0 | 20,108 | py | Python | intersight/apis/ucsd_backup_info_api.py | sdnit-se/intersight-python | 551f7685c0f76bb8af60ec83ffb6f9672d49a4ae | [
"Apache-2.0"
] | 21 | 2018-03-29T14:20:35.000Z | 2021-10-13T05:11:41.000Z | intersight/apis/ucsd_backup_info_api.py | sdnit-se/intersight-python | 551f7685c0f76bb8af60ec83ffb6f9672d49a4ae | [
"Apache-2.0"
] | 14 | 2018-01-30T15:45:46.000Z | 2022-02-23T14:23:21.000Z | intersight/apis/ucsd_backup_info_api.py | sdnit-se/intersight-python | 551f7685c0f76bb8af60ec83ffb6f9672d49a4ae | [
"Apache-2.0"
] | 18 | 2018-01-03T15:09:56.000Z | 2021-07-16T02:21:54.000Z | # coding: utf-8
"""
Cisco Intersight OpenAPI specification.
The Cisco Intersight OpenAPI specification.
OpenAPI spec version: 1.0.9-1461
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UcsdBackupInfoApi(object):
    """CRUD operations for 'ucsd.BackupInfo' resources.

    NOTE: This class was auto generated by the swagger code generator
    program (Cisco Intersight OpenAPI spec 1.0.9-1461).  The HTTP plumbing
    that the generator duplicated into every endpoint has been factored
    into private helpers; every public method keeps its original name,
    signature, behavior, and error messages.
    """

    # Optional OData query options accepted by ucsd_backup_infos_get,
    # paired with the query-string key each one maps to.  Order matters:
    # it is preserved in the outgoing request.
    _GET_QUERY_OPTIONS = [
        ('count', '$count'),
        ('inlinecount', '$inlinecount'),
        ('top', '$top'),
        ('skip', '$skip'),
        ('filter', '$filter'),
        ('select', '$select'),
        ('orderby', '$orderby'),
        ('expand', '$expand'),
        ('apply', '$apply'),
        ('at', 'at'),
    ]

    def __init__(self, api_client=None):
        """Bind to `api_client` if provided, else the shared default client.

        A default ApiClient is created lazily and cached on the global
        Configuration so all generated API classes share one client.
        """
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def _collect_params(self, caller_locals, allowed, method_name):
        """Validate ``**kwargs`` captured in *caller_locals* and flatten them.

        Copies every entry of ``caller_locals['kwargs']`` into the dict
        itself (mirroring the generated ``params = locals()`` idiom) and
        raises TypeError -- with the same message the generated code used --
        for any keyword not in *allowed* or one of the common options.

        :param dict caller_locals: the caller's ``locals()`` (holds 'kwargs')
        :param list allowed: endpoint-specific parameter names
        :param str method_name: public method name used in error messages
        :return: the flattened parameter dict
        """
        all_params = list(allowed) + [
            'callback',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
        ]
        params = caller_locals
        # dict.items() replaces six.iteritems(): identical iteration on
        # both Python 2 and 3, and drops the helper-level six dependency.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val
        del params['kwargs']
        return params

    def _call(self, resource_path, http_method, path_params, query_params,
              response_type, params):
        """Issue the JSON request shared by every endpoint in this class.

        :param str resource_path: e.g. '/ucsd/BackupInfos/{Moid}'
        :param str http_method: 'GET' or 'DELETE'
        :param dict path_params: URL template substitutions
        :param list query_params: (name, value) pairs, order preserved
        :param response_type: deserialization target class name or None
        :param dict params: flattened params from _collect_params (carries
            callback / _return_http_data_only / _preload_content /
            _request_timeout through to the client)
        """
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }
        # No authentication settings are defined for these endpoints.
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=[],
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def ucsd_backup_infos_get(self, **kwargs):
        """Read a list of 'ucsd.BackupInfo' resources.

        Synchronous by default; pass a `callback` function to make the
        request asynchronous (the callback receives the response and the
        method returns the request thread).

        Optional keyword arguments (OData query options): count (bool),
        inlinecount (str), top (int), skip (int), filter (str),
        select (str), orderby (str), expand (str), apply (str), at (str).

        :return: UcsdBackupInfoList (or the request thread when async)
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.ucsd_backup_infos_get_with_http_info(**kwargs)
        data = self.ucsd_backup_infos_get_with_http_info(**kwargs)
        return data

    def ucsd_backup_infos_get_with_http_info(self, **kwargs):
        """Read a list of 'ucsd.BackupInfo' resources (with HTTP info).

        Accepts the same keyword arguments as
        :meth:`ucsd_backup_infos_get`.

        :return: UcsdBackupInfoList
        """
        option_names = [name for name, _ in self._GET_QUERY_OPTIONS]
        params = self._collect_params(locals(), option_names,
                                      'ucsd_backup_infos_get')
        query_params = [(query_key, params[name])
                        for name, query_key in self._GET_QUERY_OPTIONS
                        if name in params]
        return self._call('/ucsd/BackupInfos', 'GET', {}, query_params,
                          'UcsdBackupInfoList', params)

    def ucsd_backup_infos_moid_delete(self, moid, **kwargs):
        """Delete the 'ucsd.BackupInfo' resource identified by `moid`.

        Synchronous by default; pass a `callback` function for async use.

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :return: None (or the request thread when async)
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.ucsd_backup_infos_moid_delete_with_http_info(moid, **kwargs)
        data = self.ucsd_backup_infos_moid_delete_with_http_info(moid, **kwargs)
        return data

    def ucsd_backup_infos_moid_delete_with_http_info(self, moid, **kwargs):
        """Delete a 'ucsd.BackupInfo' resource (with HTTP info).

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :raises ValueError: if `moid` is missing or None
        :return: None
        """
        params = self._collect_params(locals(), ['moid'],
                                      'ucsd_backup_infos_moid_delete')
        if ('moid' not in params) or (params['moid'] is None):
            raise ValueError("Missing the required parameter `moid` "
                             "when calling `ucsd_backup_infos_moid_delete`")
        path_params = {'Moid': params['moid']}
        return self._call('/ucsd/BackupInfos/{Moid}', 'DELETE', path_params,
                          [], None, params)

    def ucsd_backup_infos_moid_get(self, moid, **kwargs):
        """Read the 'ucsd.BackupInfo' resource identified by `moid`.

        Synchronous by default; pass a `callback` function for async use.

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :return: UcsdBackupInfo (or the request thread when async)
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.ucsd_backup_infos_moid_get_with_http_info(moid, **kwargs)
        data = self.ucsd_backup_infos_moid_get_with_http_info(moid, **kwargs)
        return data

    def ucsd_backup_infos_moid_get_with_http_info(self, moid, **kwargs):
        """Read a 'ucsd.BackupInfo' resource (with HTTP info).

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :raises ValueError: if `moid` is missing or None
        :return: UcsdBackupInfo
        """
        params = self._collect_params(locals(), ['moid'],
                                      'ucsd_backup_infos_moid_get')
        if ('moid' not in params) or (params['moid'] is None):
            raise ValueError("Missing the required parameter `moid` "
                             "when calling `ucsd_backup_infos_moid_get`")
        path_params = {'Moid': params['moid']}
        return self._call('/ucsd/BackupInfos/{Moid}', 'GET', path_params,
                          [], 'UcsdBackupInfo', params)
| 51.958656 | 819 | 0.626368 |
from __future__ import absolute_import
import sys
import os
import re
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UcsdBackupInfoApi(object):
    """Auto-generated client for Cisco Intersight 'ucsd.BackupInfo'
    endpoints.

    Every public method is synchronous by default; passing a `callback`
    keyword makes the underlying request asynchronous, in which case the
    request thread is returned instead of the deserialized response.
    """

    def __init__(self, api_client=None):
        """Bind to *api_client*, or lazily create/reuse the shared one."""
        shared_config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not shared_config.api_client:
                shared_config.api_client = ApiClient()
            self.api_client = shared_config.api_client

    def ucsd_backup_infos_get(self, **kwargs):
        """List 'ucsd.BackupInfo' resources.

        See :meth:`ucsd_backup_infos_get_with_http_info` for the accepted
        OData query options.

        :return: UcsdBackupInfoList (or the request thread when async)
        """
        kwargs['_return_http_data_only'] = True
        # Sync and async paths both delegate to the same helper; when a
        # callback is supplied the helper hands back the request thread.
        return self.ucsd_backup_infos_get_with_http_info(**kwargs)

    def ucsd_backup_infos_get_with_http_info(self, **kwargs):
        """List 'ucsd.BackupInfo' resources (with HTTP info).

        Optional keywords (OData query options): count, inlinecount, top,
        skip, filter, select, orderby, expand, apply, at -- plus the common
        callback / _return_http_data_only / _preload_content /
        _request_timeout options.

        :return: UcsdBackupInfoList
        """
        params = locals()
        recognized = ['count', 'inlinecount', 'top', 'skip', 'filter',
                      'select', 'orderby', 'expand', 'apply', 'at',
                      'callback', '_return_http_data_only',
                      '_preload_content', '_request_timeout']
        for name, value in iteritems(params['kwargs']):
            if name not in recognized:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method ucsd_backup_infos_get" % name
                )
            params[name] = value
        del params['kwargs']

        # Map each supplied option onto its query-string key, preserving
        # the order the generated client used.
        option_to_query_key = [
            ('count', '$count'), ('inlinecount', '$inlinecount'),
            ('top', '$top'), ('skip', '$skip'), ('filter', '$filter'),
            ('select', '$select'), ('orderby', '$orderby'),
            ('expand', '$expand'), ('apply', '$apply'), ('at', 'at')]
        query_params = []
        for name, query_key in option_to_query_key:
            if name in params:
                query_params.append((query_key, params[name]))

        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }

        # No authentication settings apply to this endpoint.
        return self.api_client.call_api(
            '/ucsd/BackupInfos', 'GET',
            {},
            query_params,
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='UcsdBackupInfoList',
            auth_settings=[],
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def ucsd_backup_infos_moid_delete(self, moid, **kwargs):
        """Delete the 'ucsd.BackupInfo' identified by *moid*.

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :return: None (or the request thread when async)
        """
        kwargs['_return_http_data_only'] = True
        return self.ucsd_backup_infos_moid_delete_with_http_info(moid, **kwargs)

    def ucsd_backup_infos_moid_delete_with_http_info(self, moid, **kwargs):
        """Delete a 'ucsd.BackupInfo' resource (with HTTP info).

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :raises ValueError: if *moid* is missing or None
        :return: None
        """
        params = locals()
        recognized = ['moid', 'callback', '_return_http_data_only',
                      '_preload_content', '_request_timeout']
        for name, value in iteritems(params['kwargs']):
            if name not in recognized:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method ucsd_backup_infos_moid_delete" % name
                )
            params[name] = value
        del params['kwargs']
        if ('moid' not in params) or (params['moid'] is None):
            raise ValueError("Missing the required parameter `moid` when calling `ucsd_backup_infos_moid_delete`")

        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }

        return self.api_client.call_api(
            '/ucsd/BackupInfos/{Moid}', 'DELETE',
            {'Moid': params['moid']},
            [],
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type=None,
            auth_settings=[],
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def ucsd_backup_infos_moid_get(self, moid, **kwargs):
        """Read the 'ucsd.BackupInfo' identified by *moid*.

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :return: UcsdBackupInfo (or the request thread when async)
        """
        kwargs['_return_http_data_only'] = True
        return self.ucsd_backup_infos_moid_get_with_http_info(moid, **kwargs)

    def ucsd_backup_infos_moid_get_with_http_info(self, moid, **kwargs):
        """Read a 'ucsd.BackupInfo' resource (with HTTP info).

        :param str moid: The Moid of the ucsdBackupInfo instance. (required)
        :raises ValueError: if *moid* is missing or None
        :return: UcsdBackupInfo
        """
        params = locals()
        recognized = ['moid', 'callback', '_return_http_data_only',
                      '_preload_content', '_request_timeout']
        for name, value in iteritems(params['kwargs']):
            if name not in recognized:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method ucsd_backup_infos_moid_get" % name
                )
            params[name] = value
        del params['kwargs']
        if ('moid' not in params) or (params['moid'] is None):
            raise ValueError("Missing the required parameter `moid` when calling `ucsd_backup_infos_moid_get`")

        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }

        return self.api_client.call_api(
            '/ucsd/BackupInfos/{Moid}', 'GET',
            {'Moid': params['moid']},
            [],
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='UcsdBackupInfo',
            auth_settings=[],
            callback=params.get('callback'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})
| true | true |
f72e0dbdebf66295cccbcec383ab79663418be87 | 1,759 | py | Python | cirq/sim/__init__.py | muneerqu/Cirq | 729d993312467d8ea9127103f9e15ae2391e7d85 | [
"Apache-2.0"
] | null | null | null | cirq/sim/__init__.py | muneerqu/Cirq | 729d993312467d8ea9127103f9e15ae2391e7d85 | [
"Apache-2.0"
] | null | null | null | cirq/sim/__init__.py | muneerqu/Cirq | 729d993312467d8ea9127103f9e15ae2391e7d85 | [
"Apache-2.0"
] | 1 | 2018-10-25T19:36:50.000Z | 2018-10-25T19:36:50.000Z | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base simulation classes and generic simulators."""
from cirq.sim.density_matrix_utils import (
measure_density_matrix,
sample_density_matrix,
to_valid_density_matrix,
von_neumann_entropy,
)
from cirq.sim.density_matrix_simulator import (
DensityMatrixSimulator,
DensityMatrixSimulatorState,
DensityMatrixStepResult,
DensityMatrixTrialResult,
)
from cirq.sim.mux import (
final_wavefunction,
sample,
sample_sweep,
)
from cirq.sim.simulator import (
SimulatesAmplitudes,
SimulatesFinalState,
SimulatesIntermediateState,
SimulatesSamples,
SimulationTrialResult,
StepResult,
)
from cirq.sim.sparse_simulator import (
Simulator,
SparseSimulatorStep,
)
from cirq.sim.wave_function_simulator import (
SimulatesIntermediateWaveFunction,
WaveFunctionSimulatorState,
WaveFunctionStepResult,
WaveFunctionTrialResult,
)
from cirq.sim.wave_function import (
bloch_vector_from_state_vector,
density_matrix_from_state_vector,
dirac_notation,
measure_state_vector,
sample_state_vector,
StateVectorMixin,
to_valid_state_vector,
validate_normalized_state,
)
| 25.867647 | 74 | 0.768619 |
from cirq.sim.density_matrix_utils import (
measure_density_matrix,
sample_density_matrix,
to_valid_density_matrix,
von_neumann_entropy,
)
from cirq.sim.density_matrix_simulator import (
DensityMatrixSimulator,
DensityMatrixSimulatorState,
DensityMatrixStepResult,
DensityMatrixTrialResult,
)
from cirq.sim.mux import (
final_wavefunction,
sample,
sample_sweep,
)
from cirq.sim.simulator import (
SimulatesAmplitudes,
SimulatesFinalState,
SimulatesIntermediateState,
SimulatesSamples,
SimulationTrialResult,
StepResult,
)
from cirq.sim.sparse_simulator import (
Simulator,
SparseSimulatorStep,
)
from cirq.sim.wave_function_simulator import (
SimulatesIntermediateWaveFunction,
WaveFunctionSimulatorState,
WaveFunctionStepResult,
WaveFunctionTrialResult,
)
from cirq.sim.wave_function import (
bloch_vector_from_state_vector,
density_matrix_from_state_vector,
dirac_notation,
measure_state_vector,
sample_state_vector,
StateVectorMixin,
to_valid_state_vector,
validate_normalized_state,
)
| true | true |
f72e0f0acbb2c1dff5a35b68521abb7b1732324f | 649 | py | Python | projects/mars/model_classes/MassSimulator.py | ModelFlow/modelflow | c2b720b2da8bb17462baff5c00bbe942644474b0 | [
"MIT"
] | 6 | 2020-07-28T19:58:28.000Z | 2021-05-01T18:51:37.000Z | projects/mars/model_classes/MassSimulator.py | ModelFlow/modelflow | c2b720b2da8bb17462baff5c00bbe942644474b0 | [
"MIT"
] | 81 | 2020-07-30T07:08:10.000Z | 2021-07-28T02:17:43.000Z | projects/mars/model_classes/MassSimulator.py | ModelFlow/modelflow | c2b720b2da8bb17462baff5c00bbe942644474b0 | [
"MIT"
] | null | null | null | class MassSimulator:
# Human-readable component name shown in the ModelFlow UI.
name = "Mass Simulator"

# Static parameters exposed by this component.  Each record follows the
# project-wide schema: key / label / units / private / value / confidence /
# notes / source.  Values are placeholders (source "fake").
params = [
    {
        "key": "mass",
        "label": "",
        "units": "kg",
        "private": False,
        "value": 100000000,
        "confidence": 0,
        "notes": "",
        "source": "fake",
    },
    {
        "key": "volume",
        "label": "",
        "units": "m3",
        "private": False,
        "value": 100000000,
        "confidence": 0,
        "notes": "",
        "source": "fake",
    },
]

# This component tracks no per-step state.
states = []
@staticmethod
def run_step(states, params, utils):
    """Advance one simulation step.

    Intentionally a no-op: this component only contributes its static
    mass/volume parameters and has no per-step dynamics.
    """
    return None
| 22.37931 | 40 | 0.362096 | class MassSimulator:
# Display name for this ModelFlow component.
name = "Mass Simulator"

# Parameter records (project schema: key/label/units/private/value/
# confidence/notes/source).  Both entries carry placeholder magnitudes
# and a "fake" provenance marker.
params = [
    {
        "key": "mass",
        "label": "",
        "units": "kg",
        "private": False,
        "value": 100000000,
        "confidence": 0,
        "notes": "",
        "source": "fake",
    },
    {
        "key": "volume",
        "label": "",
        "units": "m3",
        "private": False,
        "value": 100000000,
        "confidence": 0,
        "notes": "",
        "source": "fake",
    },
]

# No internal state variables are declared.
states = []
@staticmethod
def run_step(states, params, utils):
    """Per-step hook required by the simulator framework; deliberately
    empty because this component has no dynamics."""
    pass
| true | true |
f72e0f2e9757031e6c309928d5fc69045f1fbec9 | 4,902 | py | Python | examples/get_fw_inventory.py | samerhaj/python-redfish-lenovo | ec37e01e56937bf1389731f84d5d70914f798788 | [
"Apache-2.0"
] | 56 | 2017-10-12T23:47:27.000Z | 2022-03-17T08:58:24.000Z | examples/get_fw_inventory.py | samerhaj/python-redfish-lenovo | ec37e01e56937bf1389731f84d5d70914f798788 | [
"Apache-2.0"
] | 38 | 2018-09-06T12:29:01.000Z | 2022-03-11T15:36:27.000Z | examples/get_fw_inventory.py | samerhaj/python-redfish-lenovo | ec37e01e56937bf1389731f84d5d70914f798788 | [
"Apache-2.0"
] | 34 | 2018-04-23T03:44:03.000Z | 2022-03-19T19:59:12.000Z | ###
#
# Lenovo Redfish examples - Get FW inventory
#
# Copyright Notice:
#
# Copyright 2018 Lenovo Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
###
import sys
import redfish
import json
import traceback
import lenovo_utils as utils
def get_fw_inventory(ip, login_account, login_password):
    """Get BMC firmware inventory via Redfish.

    :params ip: BMC IP address
    :type ip: string
    :params login_account: BMC user name
    :type login_account: string
    :params login_password: BMC user password
    :type login_password: string
    :returns: returns firmware inventory when succeeded or error message when failed
    """
    result = {}
    try:
        # Connect using the BMC address, account name, and password;
        # create a REDFISH object and log in to establish a session.
        login_host = "https://" + ip
        REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,
                                             password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)
        REDFISH_OBJ.login(auth=utils.g_AUTH)
    except Exception:
        # `except Exception` (was a bare `except:`) still covers connection
        # and auth failures but no longer swallows KeyboardInterrupt.
        traceback.print_exc()
        result = {'ret': False, 'msg': "Please check if the username, password, IP is correct."}
        return result

    fw_version = []
    # Get ServiceRoot resource to locate the UpdateService.
    response_base_url = REDFISH_OBJ.get('/redfish/v1', None)
    if response_base_url.status == 200:
        update_service_url = response_base_url.dict['UpdateService']['@odata.id']
    else:
        result = {'ret': False, 'msg': "response base url Error code %s" % response_base_url.status}
        REDFISH_OBJ.logout()
        return result

    response_update_service_url = REDFISH_OBJ.get(update_service_url, None)
    if response_update_service_url.status == 200:
        firmware_inventory_url = response_update_service_url.dict['FirmwareInventory']['@odata.id']
        response_firmware_url = REDFISH_OBJ.get(firmware_inventory_url, None)
        if response_firmware_url.status == 200:
            # Walk every member of the FirmwareInventory collection and
            # collect the interesting properties of each entry, keyed by
            # the last path segment of the member URL.
            for firmware_url in response_firmware_url.dict["Members"]:
                firmware_version_url = firmware_url['@odata.id']
                firmware_list = firmware_version_url.split("/")
                response_firmware_version = REDFISH_OBJ.get(firmware_version_url, None)
                if response_firmware_version.status == 200:
                    fw = {}
                    # `prop` (not `property`) so the builtin is not shadowed.
                    for prop in ['Version', 'SoftwareId', 'Description', 'Status']:
                        if prop in response_firmware_version.dict:
                            fw[prop] = response_firmware_version.dict[prop]
                    fw = {firmware_list[-1]: fw}
                    fw_version.append(fw)
                else:
                    result = {'ret': False,
                              'msg': "response firmware version Error code %s" % response_firmware_version.status}
                    REDFISH_OBJ.logout()
                    return result
        else:
            result = {'ret': False, 'msg': "response firmware url Error code %s" % response_firmware_url.status}
            REDFISH_OBJ.logout()
            return result
    else:
        result = {'ret': False, 'msg': "response update service_url Error code %s" % response_update_service_url.status}
        REDFISH_OBJ.logout()
        return result

    result['ret'] = True
    result['fw_version_detail'] = fw_version
    try:
        # Best-effort logout: a failed logout must not mask the result.
        REDFISH_OBJ.logout()
    except Exception:
        pass
    return result
def add_parameter():
    """Collect script parameters from config.ini and/or the command line."""
    argument_parser = utils.create_common_parameter_list()
    parsed_args = argument_parser.parse_args()
    return utils.parse_parameter(parsed_args)
if __name__ == '__main__':
    # Merge config.ini defaults with any command-line overrides.
    connection = add_parameter()

    # Query the firmware inventory with the supplied credentials and
    # report the outcome: JSON on stdout on success, message on stderr
    # otherwise.
    outcome = get_fw_inventory(connection['ip'], connection["user"], connection["passwd"])
    if outcome['ret'] is True:
        del outcome['ret']
        sys.stdout.write(json.dumps(outcome['fw_version_detail'], sort_keys=True, indent=2) + '\n')
    else:
        sys.stderr.write(outcome['msg'] + '\n')
| 37.707692 | 122 | 0.655039 |
import sys
import redfish
import json
import traceback
import lenovo_utils as utils
def get_fw_inventory(ip, login_account, login_password):
result = {}
try:
login_host = "https://" + ip
REDFISH_OBJ = redfish.redfish_client(base_url=login_host, username=login_account, timeout=utils.g_timeout,
password=login_password, default_prefix='/redfish/v1', cafile=utils.g_CAFILE)
REDFISH_OBJ.login(auth=utils.g_AUTH)
except:
traceback.print_exc()
result = {'ret': False, 'msg': "Please check if the username, password, IP is correct."}
return result
fw_version = []
response_base_url = REDFISH_OBJ.get('/redfish/v1', None)
if response_base_url.status == 200:
update_service_url = response_base_url.dict['UpdateService']['@odata.id']
else:
result = {'ret': False, 'msg': "response base url Error code %s" % response_base_url.status}
REDFISH_OBJ.logout()
return result
response_update_service_url = REDFISH_OBJ.get(update_service_url, None)
if response_update_service_url.status == 200:
firmware_inventory_url = response_update_service_url.dict['FirmwareInventory']['@odata.id']
response_firmware_url = REDFISH_OBJ.get(firmware_inventory_url, None)
if response_firmware_url.status == 200:
for firmware_url in response_firmware_url.dict["Members"]:
firmware_version_url = firmware_url['@odata.id']
firmware_list = firmware_version_url.split("/")
response_firmware_version = REDFISH_OBJ.get(firmware_version_url, None)
if response_firmware_version.status == 200:
fw = {}
for property in ['Version', 'SoftwareId', 'Description', 'Status']:
if property in response_firmware_version.dict:
fw[property] = response_firmware_version.dict[property]
fw = {firmware_list[-1]: fw}
fw_version.append(fw)
else:
result = {'ret': False,
'msg': "response firmware version Error code %s" % response_firmware_version.status}
REDFISH_OBJ.logout()
return result
else:
result = {'ret': False, 'msg': "response firmware url Error code %s" % response_firmware_url.status}
REDFISH_OBJ.logout()
return result
else:
result = {'ret': False, 'msg': "response update service_url Error code %s" % response_update_service_url.status}
REDFISH_OBJ.logout()
return result
result['ret'] = True
result['fw_version_detail'] = fw_version
try:
REDFISH_OBJ.logout()
except:
pass
return result
def add_parameter():
argget = utils.create_common_parameter_list()
args = argget.parse_args()
parameter_info = utils.parse_parameter(args)
return parameter_info
if __name__ == '__main__':
parameter_info = add_parameter()
ip = parameter_info['ip']
login_account = parameter_info["user"]
login_password = parameter_info["passwd"]
result = get_fw_inventory(ip, login_account, login_password)
if result['ret'] is True:
del result['ret']
sys.stdout.write(json.dumps(result['fw_version_detail'], sort_keys=True, indent=2) + '\n')
else:
sys.stderr.write(result['msg'] + '\n')
| true | true |
f72e1011955a719906527c88ace33c87f218c56f | 5,731 | py | Python | docs/conf.py | kponder/astrorapid | 91053af8049724a07d1f55baf4e1f60fc36b1101 | [
"MIT"
] | 12 | 2019-01-15T19:40:11.000Z | 2020-12-17T11:36:18.000Z | docs/conf.py | kponder/astrorapid | 91053af8049724a07d1f55baf4e1f60fc36b1101 | [
"MIT"
] | 9 | 2019-07-19T15:29:19.000Z | 2022-03-12T00:59:37.000Z | docs/conf.py | kponder/astrorapid | 91053af8049724a07d1f55baf4e1f60fc36b1101 | [
"MIT"
] | 11 | 2019-02-19T20:35:08.000Z | 2021-07-16T05:56:28.000Z | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('./..'))
sys.path.insert(0, os.path.abspath('./../..'))
sys.path.insert(0, 'astrorapid')
sys.path.insert(0, 'astrorapid/read_from_database')
sys.path.insert(0, './../astrorapid')
sys.path.insert(0, './../astrorapid/read_from_database')
# -- Project information -----------------------------------------------------
project = 'astrorapid'
copyright = '2019, Daniel Muthukrishna'
author = 'Daniel Muthukrishna'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.autosummary'
]
autoclass_content = 'both'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'Python'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'astrorapiddoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'astrorapid.tex', 'astrorapid Documentation',
'Daniel Muthukrishna', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'astrorapid', 'astrorapid Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'astrorapid', 'astrorapid Documentation',
author, 'astrorapid', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
| 30.005236 | 79 | 0.652242 |
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('./..'))
sys.path.insert(0, os.path.abspath('./../..'))
sys.path.insert(0, 'astrorapid')
sys.path.insert(0, 'astrorapid/read_from_database')
sys.path.insert(0, './../astrorapid')
sys.path.insert(0, './../astrorapid/read_from_database')
project = 'astrorapid'
copyright = '2019, Daniel Muthukrishna'
author = 'Daniel Muthukrishna'
version = ''
release = ''
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.autosummary'
]
autoclass_content = 'both'
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
language = 'Python'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'astrorapiddoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'astrorapid.tex', 'astrorapid Documentation',
'Daniel Muthukrishna', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'astrorapid', 'astrorapid Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'astrorapid', 'astrorapid Documentation',
author, 'astrorapid', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
| true | true |
f72e104473c66eaa3c157c663f9da03d3d092d44 | 8,099 | py | Python | script/cam_prm_reader.py | YOODS/RoVI | dda4adeb65922ea89f6ba44a9410d3eb4f5e1ef9 | [
"BSD-3-Clause"
] | 13 | 2018-04-05T10:12:53.000Z | 2022-02-02T02:36:25.000Z | script/cam_prm_reader.py | YOODS/RoVI | dda4adeb65922ea89f6ba44a9410d3eb4f5e1ef9 | [
"BSD-3-Clause"
] | 73 | 2018-03-01T06:32:30.000Z | 2021-07-02T05:58:43.000Z | script/cam_prm_reader.py | YOODS/RoVI | dda4adeb65922ea89f6ba44a9410d3eb4f5e1ef9 | [
"BSD-3-Clause"
] | 16 | 2019-03-04T14:36:58.000Z | 2021-07-08T09:35:49.000Z | #!/usr/bin/env python3
# coding: utf-8
import xml.etree.ElementTree as ET
import subprocess
import sys
import re
import rospy
from pprint import pprint
from collections import defaultdict
NODE_NM = 'genpc'
LOG_HEADER = '<cam_prm_reader> '
print (LOG_HEADER + "start")
if len(sys.argv) != 3:
# print('Usage:calib_loader.py <camera_id> <camera_res>')
print (LOG_HEADER + "error: wrong arguments.")
sys.exit(-1)
camera_id = sys.argv[1] # カメラid
camera_res = sys.argv[2] # カメラ解像度
print (LOG_HEADER + "arg[1] camera_id=" + camera_id)
print (LOG_HEADER + "arg[2] camera_res=" + camera_res)
camera_res = camera_res.upper()
cam_xml = ""
print(LOG_HEADER + "camera xml read start.")
try:
cmds = ["arv-tool-0.6"]
if len(camera_id) > 0:
cmds.append("-n")
cmds.append(camera_id)
cmds.append("genicam")
print(LOG_HEADER + "cmds=" + ' '.join(cmds))
cam_xml = subprocess.check_output(cmds)
print(LOG_HEADER + "camera xml read finished.")
except subprocess.CalledProcessError:
print(LOG_HEADER + 'error: arravis call failed.')
sys.exit(-1)
if len(cam_xml) == 0:
print(LOG_HEADER + "error: xml data read failed.")
sys.exit(-1)
cam_xml_str = cam_xml.decode()
pattern = "YOODS Co,LTD.-YCAM3D-III-.*-->\n(.*)"
result = re.match(pattern, cam_xml_str, re.MULTILINE | re.DOTALL)
if result:
print(LOG_HEADER + "xml comment area found. removed.")
cam_xml_str = result.group(1)
result = re.match("(.*</RegisterDescription>)(.*)",cam_xml_str,re.MULTILINE| re.DOTALL)
if result:
cam_xml_str = result.group(1)
print(LOG_HEADER + "xml parse start")
# tree = ET.parse("genicam.txt")
tree = ET.fromstring(cam_xml_str)
GEN_NS = '{http://www.genicam.org/GenApi/Version_1_0}'
ycam_serial_no = ""
ycam_major_ver = ""
ycam_minor_ver = ""
ycam_img_width = 0
ycam_img_height = 0
mat_K_rows = 0
mat_K_cols = 0
mat_R_rows = 0
mat_R_cols = 0
mat_T_rows = 0
mat_T_cols = 0
nested_dict = lambda: defaultdict(nested_dict)
cameras = nested_dict()
# ()で取りたい文字を
regex_cam_item = re.compile('^YCam_([\w]+)_cam([\w]+)_([\w]+)$')
regex_res_item = re.compile('^YCam_([\w]+)_([\w]+)$')
regex_calib_mat = re.compile('^([a-zA-Z]+)([0-9]+)$')
for node in tree.iter():
name = node.attrib.get('Name')
# Ycam_Major_VersionだけYcamで始まるので「Ycam_」も含める
if not (name and name.startswith(('YCam_', 'Ycam_'))):
continue
val = None
for item in node.iter(GEN_NS + 'Value'):
# print("value=" + item.text)
val = item.text
if 'YCam_Serial_No' == name:
for wkItem in node.iter(GEN_NS + 'Description'):
ycam_serial_no = wkItem.text
elif 'Ycam_Major_Version' == name: ycam_major_ver = int(val)
elif 'YCam_Minor_Version' == name: ycam_minor_ver = int(val)
elif 'YCam_Minor_Version' == name: ycam_minor_ver = int(val)
elif 'YCam_Minor_Version' == name: ycam_minor_ver = int(val)
elif 'YCam_K_Rows' == name: mat_K_rows = int(val)
elif 'YCam_K_Cols' == name: mat_K_cols = int(val)
elif 'YCam_R_Rows' == name: mat_R_rows = int(val)
elif 'YCam_R_Cols' == name: mat_R_cols = int(val)
elif 'YCam_T_Rows' == name: mat_T_rows = int(val)
elif 'YCam_T_Cols' == name: mat_T_cols = int(val)
else:
match_result_ycam = regex_cam_item.match(name)
if match_result_ycam: # YCam_<Resolution>_cam<CameraNo>_<Key>
ycam_res = match_result_ycam.group(1)
ycam_no = match_result_ycam.group(2)
ycam_key = match_result_ycam.group(3)
if 'NDistortion' == ycam_key:
cameras[ycam_res][ycam_no]['mat_D_rows'] = 1
cameras[ycam_res][ycam_no]['mat_D_cols'] = int(val)
else: # 行列
match_result_cam_mat = regex_calib_mat.match(ycam_key)
if match_result_ycam:
mat_nm = match_result_cam_mat.group(1)
mat_no = match_result_cam_mat.group(2)
cameras[ycam_res][ycam_no][mat_nm][mat_no] = float(val)
else: # YCam_<Resolution>_<Key>
match_result_res = regex_res_item.match(name)
if match_result_res:
ycam_res = match_result_res.group(1)
ycam_key = match_result_res.group(2)
if 'Width' == ycam_key: cameras[ycam_res][ycam_key] = int(val)
elif 'Height' == ycam_key: cameras[ycam_res][ycam_key] = int(val)
else: # 行列
# print("res=" + ycam_res + " key=" + ycam_key)
match_result_cam_mat = regex_calib_mat.match(ycam_key)
if match_result_cam_mat:
mat_nm = match_result_cam_mat.group(1)
mat_no = match_result_cam_mat.group(2)
cameras[ycam_res][mat_nm][mat_no] = float(val)
print(LOG_HEADER + "xml parse finished.")
print(LOG_HEADER + "ros param regist start.")
rospy.set_param('/rovi/camera/serial_no', ycam_serial_no)
rospy.set_param('/rovi/camera/major_version', ycam_major_ver)
rospy.set_param('/rovi/camera/minor_version', ycam_minor_ver)
ycam_params = cameras[camera_res]
if not ycam_params:
print (LOG_HEADER + "error: camera not found. res=" + camera_res)
sys.exit(-1)
# print(dict(ycam_params))
ycam_width = ycam_params['Width']
ycam_height = ycam_params['Height']
ycam_params_l = ycam_params['0']
if not ycam_params_l:
print (LOG_HEADER + "error: left camera not found. res=" + camera_res)
sys.exit(-1)
def to_mat_array(mat_map, rows, cols):
keys = sorted(mat_map)
values = []
for key in keys:
values.append(mat_map[key])
return values
rospy.set_param('/rovi/left/' + NODE_NM + '/Width', ycam_width)
rospy.set_param('/rovi/left/' + NODE_NM + '/Height', ycam_height)
rospy.set_param('/rovi/left/' + NODE_NM + '/K_Rows', mat_K_rows)
rospy.set_param('/rovi/left/' + NODE_NM + '/K_Cols', mat_K_cols)
rospy.set_param('/rovi/left/' + NODE_NM + '/K', to_mat_array(ycam_params_l['K'], mat_K_rows, mat_K_cols))
rospy.set_param('/rovi/left/' + NODE_NM + '/R_Rows', mat_R_rows)
rospy.set_param('/rovi/left/' + NODE_NM + '/R_Cols', mat_R_cols)
rospy.set_param('/rovi/left/' + NODE_NM + '/R', [1., 0., 0., 0., 1., 0., 0., 0., 1.])
rospy.set_param('/rovi/left/' + NODE_NM + '/T_Rows', mat_T_rows)
rospy.set_param('/rovi/left/' + NODE_NM + '/T_Cols', mat_T_cols)
rospy.set_param('/rovi/left/' + NODE_NM + '/T', [0., 0., 0.])
rospy.set_param('/rovi/left/' + NODE_NM + '/D_Rows', ycam_params_l['mat_D_rows'])
rospy.set_param('/rovi/left/' + NODE_NM + '/D_Cols', ycam_params_l['mat_D_cols'])
rospy.set_param('/rovi/left/' + NODE_NM + '/D', to_mat_array(ycam_params_l['D'], ycam_params_l['mat_D_rows'], ycam_params_l['mat_D_cols']))
ycam_params_r = ycam_params['1']
if not ycam_params_r:
print (LOG_HEADER + "error: right camera not found. res=" + camera_res)
sys.exit(-1)
rospy.set_param('/rovi/right/' + NODE_NM + '/Width', ycam_width)
rospy.set_param('/rovi/right/' + NODE_NM + '/Height', ycam_height)
rospy.set_param('/rovi/right/' + NODE_NM + '/K_Rows', mat_K_rows)
rospy.set_param('/rovi/right/' + NODE_NM + '/K_Cols', mat_K_cols)
rospy.set_param('/rovi/right/' + NODE_NM + '/K', to_mat_array(ycam_params_r['K'], mat_K_rows, mat_K_cols))
rospy.set_param('/rovi/right/' + NODE_NM + '/R_Rows', mat_R_rows)
rospy.set_param('/rovi/right/' + NODE_NM + '/R_Cols', mat_R_cols)
rospy.set_param('/rovi/right/' + NODE_NM + '/R', to_mat_array(ycam_params['R'], mat_R_rows, mat_R_cols))
rospy.set_param('/rovi/right/' + NODE_NM + '/T_Rows', mat_T_rows)
rospy.set_param('/rovi/right/' + NODE_NM + '/T_Cols', mat_T_cols)
rospy.set_param('/rovi/right/' + NODE_NM + '/T', to_mat_array(ycam_params['T'], mat_T_rows, mat_T_cols))
rospy.set_param('/rovi/right/' + NODE_NM + '/D_Rows', ycam_params_r['mat_D_rows'])
rospy.set_param('/rovi/right/' + NODE_NM + '/D_Cols', ycam_params_r['mat_D_cols'])
rospy.set_param('/rovi/right/' + NODE_NM + '/D', to_mat_array(ycam_params_r['D'], ycam_params_r['mat_D_rows'], ycam_params_r['mat_D_cols']))
print(LOG_HEADER + "ros param regist finished.")
print(LOG_HEADER + "finished")
| 37.322581 | 140 | 0.658106 |
import xml.etree.ElementTree as ET
import subprocess
import sys
import re
import rospy
from pprint import pprint
from collections import defaultdict
NODE_NM = 'genpc'
LOG_HEADER = '<cam_prm_reader> '
print (LOG_HEADER + "start")
if len(sys.argv) != 3:
print (LOG_HEADER + "error: wrong arguments.")
sys.exit(-1)
camera_id = sys.argv[1]
camera_res = sys.argv[2]
print (LOG_HEADER + "arg[1] camera_id=" + camera_id)
print (LOG_HEADER + "arg[2] camera_res=" + camera_res)
camera_res = camera_res.upper()
cam_xml = ""
print(LOG_HEADER + "camera xml read start.")
try:
cmds = ["arv-tool-0.6"]
if len(camera_id) > 0:
cmds.append("-n")
cmds.append(camera_id)
cmds.append("genicam")
print(LOG_HEADER + "cmds=" + ' '.join(cmds))
cam_xml = subprocess.check_output(cmds)
print(LOG_HEADER + "camera xml read finished.")
except subprocess.CalledProcessError:
print(LOG_HEADER + 'error: arravis call failed.')
sys.exit(-1)
if len(cam_xml) == 0:
print(LOG_HEADER + "error: xml data read failed.")
sys.exit(-1)
cam_xml_str = cam_xml.decode()
pattern = "YOODS Co,LTD.-YCAM3D-III-.*-->\n(.*)"
result = re.match(pattern, cam_xml_str, re.MULTILINE | re.DOTALL)
if result:
print(LOG_HEADER + "xml comment area found. removed.")
cam_xml_str = result.group(1)
result = re.match("(.*</RegisterDescription>)(.*)",cam_xml_str,re.MULTILINE| re.DOTALL)
if result:
cam_xml_str = result.group(1)
print(LOG_HEADER + "xml parse start")
tree = ET.fromstring(cam_xml_str)
GEN_NS = '{http://www.genicam.org/GenApi/Version_1_0}'
ycam_serial_no = ""
ycam_major_ver = ""
ycam_minor_ver = ""
ycam_img_width = 0
ycam_img_height = 0
mat_K_rows = 0
mat_K_cols = 0
mat_R_rows = 0
mat_R_cols = 0
mat_T_rows = 0
mat_T_cols = 0
nested_dict = lambda: defaultdict(nested_dict)
cameras = nested_dict()
regex_cam_item = re.compile('^YCam_([\w]+)_cam([\w]+)_([\w]+)$')
regex_res_item = re.compile('^YCam_([\w]+)_([\w]+)$')
regex_calib_mat = re.compile('^([a-zA-Z]+)([0-9]+)$')
for node in tree.iter():
name = node.attrib.get('Name')
if not (name and name.startswith(('YCam_', 'Ycam_'))):
continue
val = None
for item in node.iter(GEN_NS + 'Value'):
val = item.text
if 'YCam_Serial_No' == name:
for wkItem in node.iter(GEN_NS + 'Description'):
ycam_serial_no = wkItem.text
elif 'Ycam_Major_Version' == name: ycam_major_ver = int(val)
elif 'YCam_Minor_Version' == name: ycam_minor_ver = int(val)
elif 'YCam_Minor_Version' == name: ycam_minor_ver = int(val)
elif 'YCam_Minor_Version' == name: ycam_minor_ver = int(val)
elif 'YCam_K_Rows' == name: mat_K_rows = int(val)
elif 'YCam_K_Cols' == name: mat_K_cols = int(val)
elif 'YCam_R_Rows' == name: mat_R_rows = int(val)
elif 'YCam_R_Cols' == name: mat_R_cols = int(val)
elif 'YCam_T_Rows' == name: mat_T_rows = int(val)
elif 'YCam_T_Cols' == name: mat_T_cols = int(val)
else:
match_result_ycam = regex_cam_item.match(name)
if match_result_ycam:
ycam_res = match_result_ycam.group(1)
ycam_no = match_result_ycam.group(2)
ycam_key = match_result_ycam.group(3)
if 'NDistortion' == ycam_key:
cameras[ycam_res][ycam_no]['mat_D_rows'] = 1
cameras[ycam_res][ycam_no]['mat_D_cols'] = int(val)
else:
match_result_cam_mat = regex_calib_mat.match(ycam_key)
if match_result_ycam:
mat_nm = match_result_cam_mat.group(1)
mat_no = match_result_cam_mat.group(2)
cameras[ycam_res][ycam_no][mat_nm][mat_no] = float(val)
else:
match_result_res = regex_res_item.match(name)
if match_result_res:
ycam_res = match_result_res.group(1)
ycam_key = match_result_res.group(2)
if 'Width' == ycam_key: cameras[ycam_res][ycam_key] = int(val)
elif 'Height' == ycam_key: cameras[ycam_res][ycam_key] = int(val)
else:
match_result_cam_mat = regex_calib_mat.match(ycam_key)
if match_result_cam_mat:
mat_nm = match_result_cam_mat.group(1)
mat_no = match_result_cam_mat.group(2)
cameras[ycam_res][mat_nm][mat_no] = float(val)
print(LOG_HEADER + "xml parse finished.")
print(LOG_HEADER + "ros param regist start.")
rospy.set_param('/rovi/camera/serial_no', ycam_serial_no)
rospy.set_param('/rovi/camera/major_version', ycam_major_ver)
rospy.set_param('/rovi/camera/minor_version', ycam_minor_ver)
ycam_params = cameras[camera_res]
if not ycam_params:
print (LOG_HEADER + "error: camera not found. res=" + camera_res)
sys.exit(-1)
ycam_width = ycam_params['Width']
ycam_height = ycam_params['Height']
ycam_params_l = ycam_params['0']
if not ycam_params_l:
print (LOG_HEADER + "error: left camera not found. res=" + camera_res)
sys.exit(-1)
def to_mat_array(mat_map, rows, cols):
keys = sorted(mat_map)
values = []
for key in keys:
values.append(mat_map[key])
return values
rospy.set_param('/rovi/left/' + NODE_NM + '/Width', ycam_width)
rospy.set_param('/rovi/left/' + NODE_NM + '/Height', ycam_height)
rospy.set_param('/rovi/left/' + NODE_NM + '/K_Rows', mat_K_rows)
rospy.set_param('/rovi/left/' + NODE_NM + '/K_Cols', mat_K_cols)
rospy.set_param('/rovi/left/' + NODE_NM + '/K', to_mat_array(ycam_params_l['K'], mat_K_rows, mat_K_cols))
rospy.set_param('/rovi/left/' + NODE_NM + '/R_Rows', mat_R_rows)
rospy.set_param('/rovi/left/' + NODE_NM + '/R_Cols', mat_R_cols)
rospy.set_param('/rovi/left/' + NODE_NM + '/R', [1., 0., 0., 0., 1., 0., 0., 0., 1.])
rospy.set_param('/rovi/left/' + NODE_NM + '/T_Rows', mat_T_rows)
rospy.set_param('/rovi/left/' + NODE_NM + '/T_Cols', mat_T_cols)
rospy.set_param('/rovi/left/' + NODE_NM + '/T', [0., 0., 0.])
rospy.set_param('/rovi/left/' + NODE_NM + '/D_Rows', ycam_params_l['mat_D_rows'])
rospy.set_param('/rovi/left/' + NODE_NM + '/D_Cols', ycam_params_l['mat_D_cols'])
rospy.set_param('/rovi/left/' + NODE_NM + '/D', to_mat_array(ycam_params_l['D'], ycam_params_l['mat_D_rows'], ycam_params_l['mat_D_cols']))
ycam_params_r = ycam_params['1']
if not ycam_params_r:
print (LOG_HEADER + "error: right camera not found. res=" + camera_res)
sys.exit(-1)
rospy.set_param('/rovi/right/' + NODE_NM + '/Width', ycam_width)
rospy.set_param('/rovi/right/' + NODE_NM + '/Height', ycam_height)
rospy.set_param('/rovi/right/' + NODE_NM + '/K_Rows', mat_K_rows)
rospy.set_param('/rovi/right/' + NODE_NM + '/K_Cols', mat_K_cols)
rospy.set_param('/rovi/right/' + NODE_NM + '/K', to_mat_array(ycam_params_r['K'], mat_K_rows, mat_K_cols))
rospy.set_param('/rovi/right/' + NODE_NM + '/R_Rows', mat_R_rows)
rospy.set_param('/rovi/right/' + NODE_NM + '/R_Cols', mat_R_cols)
rospy.set_param('/rovi/right/' + NODE_NM + '/R', to_mat_array(ycam_params['R'], mat_R_rows, mat_R_cols))
rospy.set_param('/rovi/right/' + NODE_NM + '/T_Rows', mat_T_rows)
rospy.set_param('/rovi/right/' + NODE_NM + '/T_Cols', mat_T_cols)
rospy.set_param('/rovi/right/' + NODE_NM + '/T', to_mat_array(ycam_params['T'], mat_T_rows, mat_T_cols))
rospy.set_param('/rovi/right/' + NODE_NM + '/D_Rows', ycam_params_r['mat_D_rows'])
rospy.set_param('/rovi/right/' + NODE_NM + '/D_Cols', ycam_params_r['mat_D_cols'])
rospy.set_param('/rovi/right/' + NODE_NM + '/D', to_mat_array(ycam_params_r['D'], ycam_params_r['mat_D_rows'], ycam_params_r['mat_D_cols']))
print(LOG_HEADER + "ros param regist finished.")
print(LOG_HEADER + "finished")
| true | true |
f72e10a6490e0565103cce7abbf5c779b8784918 | 13,082 | py | Python | deepblink/cli/_argparse.py | BioinfoTongLI/deepBlink | aa819b71f380507f9fcfa0664ab0f5a8eca4b209 | [
"MIT"
] | 13 | 2020-07-02T13:15:55.000Z | 2022-01-17T13:16:27.000Z | deepblink/cli/_argparse.py | BioinfoTongLI/deepBlink | aa819b71f380507f9fcfa0664ab0f5a8eca4b209 | [
"MIT"
] | 71 | 2020-06-29T08:43:20.000Z | 2022-03-29T12:03:09.000Z | deepblink/cli/_argparse.py | BioinfoTongLI/deepBlink | aa819b71f380507f9fcfa0664ab0f5a8eca4b209 | [
"MIT"
] | 5 | 2020-09-18T12:32:12.000Z | 2021-11-03T07:33:43.000Z | """CLI argument parsing."""
import argparse
# from ..io import EXTENSIONS
from ._parseutil import Color
from ._parseutil import CustomFormatter
from ._parseutil import FileFolderType
from ._parseutil import FileType
from ._parseutil import FolderType
from ._parseutil import ProbabilityType
from ._parseutil import ShapeType
from ._parseutil import _add_utils
# Raw-image filetypes accepted by the CLI.
# NOTE(review): duplicates the tuple in ..io (see commented-out import
# above) — keep the two definitions in sync. TODO confirm why the import
# was replaced by a hard-coded copy.
EXTENSIONS = ("tif", "jpeg", "jpg", "png")
def _parse_args_check(
    subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
    """Register the "check" subcommand used to inspect an image's shape.

    Args:
        subparsers: Action object returned by ``add_subparsers`` on the
            top-level parser; the new subcommand is attached to it.
        parent_parser: Parser whose arguments are inherited by the
            subcommand (shared utility flags).
    """
    parser = subparsers.add_parser(
        "check",
        parents=[parent_parser],
        formatter_class=CustomFormatter,
        add_help=False,
        description=(
            f"\U0001F537 {Color.title}Checking submodule{Color.end} \U0001F537\n\n"
            "Check the arrangement of your image's axis also known as image shape. "
        ),
        help="\U0001F537 Determine your input image's shape.",
    )
    group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
    group1.add_argument(
        "INPUT",
        type=FileType(EXTENSIONS),
        help=(
            "Input image. "
            "Path to the image file to be checked. "
            # Fixed grammar ("The path be relative") and dropped the
            # copy-pasted sentence claiming directory input is accepted:
            # the argument type is FileType, i.e. a single file only
            # (unlike "predict", which uses FileFolderType).
            "The path can be relative (e.g. ../dir) or absolute (e.g. /Users/myname/). "
            "Note that only the specified filetypes will be processed. "
            f"[required] [filetypes: {', '.join(EXTENSIONS)}]"
        ),
    )
    _add_utils(parser)
def _parse_args_config(
    subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser
):
    """Register the "config" subcommand on *subparsers*.

    The subcommand creates a yaml configuration file that can later be
    edited to adjust training parameters.

    Args:
        subparsers: Action object returned by ``add_subparsers`` on the
            top-level parser.
        parent_parser: Parser whose arguments are inherited by the
            subcommand (shared utility flags).
    """
    description = (
        f"\U0001F528 {Color.title}Configuration submodule{Color.end} \U0001F528\n\n"
        "Prepare a configuration file used to adjust parameters during training. "
    )
    config_parser = subparsers.add_parser(
        "config",
        parents=[parent_parser],
        add_help=False,
        formatter_class=CustomFormatter,
        description=description,
        help="\U0001F528 Create a configuration file for training.",
    )
    optional = config_parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
    name_help = (
        "Custom configuration name. "
        'The file extension "yaml" will be added automatically to the given name. '
        '[default: "config"]'
    )
    optional.add_argument("-n", "--name", type=str, default="config", help=name_help)
    raw_help = (
        "Save configuration file without description of values. "
        "Shorter but not descriptive."
    )
    optional.add_argument("-r", "--raw", action="store_true", help=raw_help)
    _add_utils(config_parser)
def _parse_args_create(
    subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
    """Register the "create" subcommand used to build a dataset from raw files.

    Args:
        subparsers: Action object returned by ``add_subparsers`` on the
            top-level parser.
        parent_parser: Parser whose arguments are inherited by the
            subcommand (shared utility flags).
    """
    parser = subparsers.add_parser(
        "create",
        parents=[parent_parser],
        formatter_class=CustomFormatter,
        add_help=False,
        description=(
            f"\U0001F4BE {Color.title}Creation submodule{Color.end} \U0001F4BE\n\n"
            "Create a custom dataset with raw files and corresponding labels. "
            "Relies on labeling output from FIJI that was saved with the provided macro "
            "or the standard TrackMate coordinate output. "
            'Both are described here "https://github.com/BBQuercus/deepBlink/wiki/Datasets".'
        ),
        help="\U0001F4BE Create a new dataset from raw files.",
    )
    group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
    group1.add_argument(
        "-i",
        "--input",
        required=True,
        type=FolderType(),
        help=(
            "Path to the directory containing raw images. "
            "Note that only the specified filetypes will be processed. "
            f"[required] [filetypes: {', '.join(EXTENSIONS)}]"
        ),
    )
    group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
    group2.add_argument(
        "-l",
        "--labels",
        type=FolderType(),
        help=(
            "Path to the directory containing labels in csv format. "
            "The default path accounts for using the FIJI macro described on the wiki. "
            "[default: --INPUT/labels/]"
        ),
    )
    group2.add_argument(
        "-n",
        "--name",
        default="dataset",
        type=str,
        help=(
            "Custom dataset name. "
            'The file extension "npz" will be added automatically. '
            '[default: "dataset"]'
        ),
    )
    group2.add_argument(
        "-s",
        "--size",
        default=512,
        type=int,
        help=(
            "Image crop size. "
            "If given, crops all images into the specified size. "
            # Bug fix: the first fragment previously ended without a
            # trailing space, so implicit string concatenation rendered
            # "...covered.deepBlink requires..." in the help output.
            "Will crop non-overlapping and ignore areas that did not get covered. "
            "deepBlink requires square images powers of 2, such as 256, 512... "
            "[default: 512]"
        ),
    )
    group2.add_argument(
        "-m",
        "--minspots",
        default=1,
        type=int,
        help=(
            "Minimum number of spots per crop. "
            "Ignores fields of view generated with fewer than minspots number of spots. "
            "[default: 1]"
        ),
    )
    group2.add_argument(
        "-vs",
        "--validsplit",
        default=0.2,
        type=float,
        help=(
            "Validation split. "
            "Split percentage (scaled between 0 - 1) of validation vs. train set. "
            "Note the validation split is done after splitting test and trainval. "
            "[default: 0.2]"
        ),
    )
    group2.add_argument(
        "-ts",
        "--testsplit",
        default=0.2,
        type=float,
        help=(
            "Testing split. "
            "Split percentage (scaled between 0 - 1) of test vs. trainval set. "
            "[default: 0.2]"
        ),
    )
    _add_utils(parser)
def _parse_args_download(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
"""Subparser for downloading."""
parser = subparsers.add_parser(
"download",
parents=[parent_parser],
formatter_class=CustomFormatter,
add_help=False,
description=(
f"\U0001F4E5 {Color.title}Downloading submodule{Color.end} \U0001F4E5\n\n"
"Download pre-trained models from our online figshare repository to predict. "
),
help="\U0001F4E5 Download pre-trained models for use.",
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-i",
"--input",
type=str,
default=None,
help=(
"Input name. "
"Name of the model to be downloaded. "
'Note that only the models listed in "deepblink download --list" will be processed. '
"[default: None]"
),
)
group2.add_argument(
"-l",
"--list",
action="store_true",
help=("List available models. " "Name of the model to be downloaded. "),
)
group2.add_argument(
"-a",
"--all",
action="store_true",
help=(
"Download all available models. "
"If passed, all models will be downloaded. "
),
)
_add_utils(parser)
def _parse_args_predict(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
"""Subparser for prediction."""
parser = subparsers.add_parser(
"predict",
parents=[parent_parser],
add_help=False,
formatter_class=CustomFormatter,
description=(
f"\U0001F914 {Color.title}Prediction submodule{Color.end} \U0001F914\n\n"
"Use a pre-trained model to predict blob coordinates on new data. "
"In addition to the required model and input file or folder, "
"several optional features are accessible as described below."
),
help="\U0001F914 Predict on data with a pre-trained model.",
)
group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
group1.add_argument(
"-i",
"--input",
required=True,
type=FileFolderType(EXTENSIONS),
help=(
"Image files to predict on. "
"Input can either be given as path to a directory containing files or as a single file. "
"The path be relative (e.g. ../dir) or absolute (e.g. /Users/myname/). "
"Fileglobs are currently not available. "
"Note that only the specified filetypes will be processed. "
f"[required] [filetypes: {', '.join(EXTENSIONS)}]"
),
)
group1.add_argument(
"-m",
"--model",
required=True,
type=FileType(["h5"]),
help=(
"DeepBlink model. "
'Model has to be of file type ".h5". '
'The path can be relative or absolute as described in "--input". '
'Model can either be trained on custom data using "deepblink train" or using a pre-trained '
'model available through the GitHub wiki on "https://github.com/BBQuercus/deepBlink/wiki". '
"[required]"
),
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-o",
"--output",
type=FolderType(),
help=(
"Output folder path. "
"Path to the directory into which all output files are saved. "
"Output files will automatically take the same name as their corresponding image. "
"[default: input location]"
),
)
group2.add_argument(
"-r",
"--radius",
type=int,
default=None,
help=(
"Intensity radius. "
"If given, will calculate the integrated intensity in the specified radius around each coordinate. "
"If the radius is set to zero if only the central pixels intensity should be calculated. "
'The intensity is added as additional column to the output file called "i". '
"[default: None]"
),
)
group2.add_argument(
"-s",
"--shape",
type=ShapeType(),
default=None,
help=(
"Image shape. "
"Used to assess the arrangement of input image axes otherwise known as shape. "
"If not given, uses a basic prediction based on common defaults. "
'Must be in the format "(x,y,z,t,c,3)" using the specified characters. '
'If unsure, use "deepblink check" to determine your images shape '
"and more detailed information. "
"[default: None]"
),
)
group2.add_argument(
"-p",
"--probability",
type=ProbabilityType(),
default=None,
help=(
"Prediction probability. "
"By default, the model's output probability map is rounded. "
"I.e. probabilities above 0.5 are included in the final output. "
"Setting this flag will first change this rounding behaviour to the "
"number provided (0.0 - 1.0) and secondly, add a probability / p "
"column in the output csv file. "
"[default: None]"
),
)
_add_utils(parser)
def _parse_args_train(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
"""Subparser for training."""
parser = subparsers.add_parser(
"train",
parents=[parent_parser],
formatter_class=CustomFormatter,
add_help=False,
description=(
f"\U0001F686 {Color.title}Training submodule{Color.end} \U0001F686\n\n"
'Train a custom model using a custom dataset created in "deepblink create" '
"or using a published dataset."
),
help="\U0001F686 Train a freshly baked model on a dataset.",
)
group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
group1.add_argument(
"-c",
"--config",
type=FileType(["yaml"]),
required=True,
help=(
"Configuration file. "
'Path to the config.yaml created using "deepblink config". '
"The path be relative (e.g. ../dir) or absolute (e.g. /Users/myname/). "
"Please see the training information on the wiki to configure the file to your requirements. "
"[required]"
),
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-g",
"--gpu",
type=int,
default=None,
help=(
"GPU index. "
"Value passed CUDA_VISIBLE_DEVICES if a GPU is used for training. "
"[default: None]"
),
)
_add_utils(parser)
| 34.156658 | 112 | 0.582098 |
import argparse
from ._parseutil import Color
from ._parseutil import CustomFormatter
from ._parseutil import FileFolderType
from ._parseutil import FileType
from ._parseutil import FolderType
from ._parseutil import ProbabilityType
from ._parseutil import ShapeType
from ._parseutil import _add_utils
EXTENSIONS = ("tif", "jpeg", "jpg", "png")
def _parse_args_check(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
parser = subparsers.add_parser(
"check",
parents=[parent_parser],
formatter_class=CustomFormatter,
add_help=False,
description=(
f"\U0001F537 {Color.title}Checking submodule{Color.end} \U0001F537\n\n"
"Check the arrangement of your image's axis also known as image shape. "
),
help="\U0001F537 Determine your input image's shape.",
)
group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
group1.add_argument(
"INPUT",
type=FileType(EXTENSIONS),
help=(
"Input image. "
"Path to the image file to be checked. "
"The path be relative (e.g. ../dir) or absolute (e.g. /Users/myname/). "
"Input can either be given as path to a directory containing files or as a single file. "
"Note that only the specified filetypes will be processed. "
f"[required] [filetypes: {', '.join(EXTENSIONS)}]"
),
)
_add_utils(parser)
def _parse_args_config(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser
):
parser = subparsers.add_parser(
"config",
parents=[parent_parser],
add_help=False,
formatter_class=CustomFormatter,
description=(
f"\U0001F528 {Color.title}Configuration submodule{Color.end} \U0001F528\n\n"
"Prepare a configuration file used to adjust parameters during training. "
),
help="\U0001F528 Create a configuration file for training.",
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-n",
"--name",
type=str,
default="config",
help=(
"Custom configuration name. "
'The file extension "yaml" will be added automatically to the given name. '
'[default: "config"]'
),
)
group2.add_argument(
"-r",
"--raw",
action="store_true",
help=(
"Save configuration file without description of values. "
"Shorter but not descriptive."
),
)
_add_utils(parser)
def _parse_args_create(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
parser = subparsers.add_parser(
"create",
parents=[parent_parser],
formatter_class=CustomFormatter,
add_help=False,
description=(
f"\U0001F4BE {Color.title}Creation submodule{Color.end} \U0001F4BE\n\n"
"Create a custom dataset with raw files and corresponding labels. "
"Relies on labeling output from FIJI that was saved with the provided macro "
"or the standard TrackMate coordinate output. "
'Both are described here "https://github.com/BBQuercus/deepBlink/wiki/Datasets".'
),
help="\U0001F4BE Create a new dataset from raw files.",
)
group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
group1.add_argument(
"-i",
"--input",
required=True,
type=FolderType(),
help=(
"Path to the directory containing raw images. "
"Note that only the specified filetypes will be processed. "
f"[required] [filetypes: {', '.join(EXTENSIONS)}]"
),
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-l",
"--labels",
type=FolderType(),
help=(
"Path to the directory containing labels in csv format. "
"The default path accounts for using the FIJI macro described on the wiki. "
"[default: --INPUT/labels/]"
),
)
group2.add_argument(
"-n",
"--name",
default="dataset",
type=str,
help=(
"Custom dataset name. "
'The file extension "npz" will be added automatically. '
'[default: "dataset"]'
),
)
group2.add_argument(
"-s",
"--size",
default=512,
type=int,
help=(
"Image crop size. "
"If given, crops all images into the specified size. "
"Will crop non-overlapping and ignore areas that did not get covered."
"deepBlink requires square images powers of 2, such as 256, 512... "
"[default: 512]"
),
)
group2.add_argument(
"-m",
"--minspots",
default=1,
type=int,
help=(
"Minimum number of spots per crop. "
"Ignores fields of view generated with fewer than minspots number of spots. "
"[default: 1]"
),
)
group2.add_argument(
"-vs",
"--validsplit",
default=0.2,
type=float,
help=(
"Validation split. "
"Split percentage (scaled between 0 - 1) of validation vs. train set. "
"Note the validation split is done after splitting test and trainval. "
"[default: 0.2]"
),
)
group2.add_argument(
"-ts",
"--testsplit",
default=0.2,
type=float,
help=(
"Testing split. "
"Split percentage (scaled between 0 - 1) of test vs. trainval set. "
"[default: 0.2]"
),
)
_add_utils(parser)
def _parse_args_download(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
parser = subparsers.add_parser(
"download",
parents=[parent_parser],
formatter_class=CustomFormatter,
add_help=False,
description=(
f"\U0001F4E5 {Color.title}Downloading submodule{Color.end} \U0001F4E5\n\n"
"Download pre-trained models from our online figshare repository to predict. "
),
help="\U0001F4E5 Download pre-trained models for use.",
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-i",
"--input",
type=str,
default=None,
help=(
"Input name. "
"Name of the model to be downloaded. "
'Note that only the models listed in "deepblink download --list" will be processed. '
"[default: None]"
),
)
group2.add_argument(
"-l",
"--list",
action="store_true",
help=("List available models. " "Name of the model to be downloaded. "),
)
group2.add_argument(
"-a",
"--all",
action="store_true",
help=(
"Download all available models. "
"If passed, all models will be downloaded. "
),
)
_add_utils(parser)
def _parse_args_predict(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
parser = subparsers.add_parser(
"predict",
parents=[parent_parser],
add_help=False,
formatter_class=CustomFormatter,
description=(
f"\U0001F914 {Color.title}Prediction submodule{Color.end} \U0001F914\n\n"
"Use a pre-trained model to predict blob coordinates on new data. "
"In addition to the required model and input file or folder, "
"several optional features are accessible as described below."
),
help="\U0001F914 Predict on data with a pre-trained model.",
)
group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
group1.add_argument(
"-i",
"--input",
required=True,
type=FileFolderType(EXTENSIONS),
help=(
"Image files to predict on. "
"Input can either be given as path to a directory containing files or as a single file. "
"The path be relative (e.g. ../dir) or absolute (e.g. /Users/myname/). "
"Fileglobs are currently not available. "
"Note that only the specified filetypes will be processed. "
f"[required] [filetypes: {', '.join(EXTENSIONS)}]"
),
)
group1.add_argument(
"-m",
"--model",
required=True,
type=FileType(["h5"]),
help=(
"DeepBlink model. "
'Model has to be of file type ".h5". '
'The path can be relative or absolute as described in "--input". '
'Model can either be trained on custom data using "deepblink train" or using a pre-trained '
'model available through the GitHub wiki on "https://github.com/BBQuercus/deepBlink/wiki". '
"[required]"
),
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-o",
"--output",
type=FolderType(),
help=(
"Output folder path. "
"Path to the directory into which all output files are saved. "
"Output files will automatically take the same name as their corresponding image. "
"[default: input location]"
),
)
group2.add_argument(
"-r",
"--radius",
type=int,
default=None,
help=(
"Intensity radius. "
"If given, will calculate the integrated intensity in the specified radius around each coordinate. "
"If the radius is set to zero if only the central pixels intensity should be calculated. "
'The intensity is added as additional column to the output file called "i". '
"[default: None]"
),
)
group2.add_argument(
"-s",
"--shape",
type=ShapeType(),
default=None,
help=(
"Image shape. "
"Used to assess the arrangement of input image axes otherwise known as shape. "
"If not given, uses a basic prediction based on common defaults. "
'Must be in the format "(x,y,z,t,c,3)" using the specified characters. '
'If unsure, use "deepblink check" to determine your images shape '
"and more detailed information. "
"[default: None]"
),
)
group2.add_argument(
"-p",
"--probability",
type=ProbabilityType(),
default=None,
help=(
"Prediction probability. "
"By default, the model's output probability map is rounded. "
"I.e. probabilities above 0.5 are included in the final output. "
"Setting this flag will first change this rounding behaviour to the "
"number provided (0.0 - 1.0) and secondly, add a probability / p "
"column in the output csv file. "
"[default: None]"
),
)
_add_utils(parser)
def _parse_args_train(
subparsers: argparse._SubParsersAction, parent_parser: argparse.ArgumentParser,
):
parser = subparsers.add_parser(
"train",
parents=[parent_parser],
formatter_class=CustomFormatter,
add_help=False,
description=(
f"\U0001F686 {Color.title}Training submodule{Color.end} \U0001F686\n\n"
'Train a custom model using a custom dataset created in "deepblink create" '
"or using a published dataset."
),
help="\U0001F686 Train a freshly baked model on a dataset.",
)
group1 = parser.add_argument_group(f"{Color.required}Required{Color.end}")
group1.add_argument(
"-c",
"--config",
type=FileType(["yaml"]),
required=True,
help=(
"Configuration file. "
'Path to the config.yaml created using "deepblink config". '
"The path be relative (e.g. ../dir) or absolute (e.g. /Users/myname/). "
"Please see the training information on the wiki to configure the file to your requirements. "
"[required]"
),
)
group2 = parser.add_argument_group(f"{Color.optional}Optional{Color.end}")
group2.add_argument(
"-g",
"--gpu",
type=int,
default=None,
help=(
"GPU index. "
"Value passed CUDA_VISIBLE_DEVICES if a GPU is used for training. "
"[default: None]"
),
)
_add_utils(parser)
| true | true |
f72e11901a3d68eb071a6c31d0d0dbd54fa0edb8 | 14,868 | py | Python | selfdrive/car/hyundai/interface.py | Superkingggg/OpenPilotGenesis | fca25d04c17f80eefa91dfb5f719f12c087395f0 | [
"MIT"
] | null | null | null | selfdrive/car/hyundai/interface.py | Superkingggg/OpenPilotGenesis | fca25d04c17f80eefa91dfb5f719f12c087395f0 | [
"MIT"
] | null | null | null | selfdrive/car/hyundai/interface.py | Superkingggg/OpenPilotGenesis | fca25d04c17f80eefa91dfb5f719f12c087395f0 | [
"MIT"
] | 2 | 2020-08-06T23:37:07.000Z | 2021-04-03T12:18:10.000Z | #!/usr/bin/env python3
from cereal import car
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.drive_helpers import EventTypes as ET, create_event
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.car.hyundai.carstate import CarState, get_can_parser, get_can2_parser, get_camera_parser
from selfdrive.car.hyundai.values import Ecu, ECU_FINGERPRINT, CAR, FINGERPRINTS
from selfdrive.car import STD_CARGO_KG, scale_rot_inertia, scale_tire_stiffness, is_ecu_disconnected, gen_empty_fingerprint
from selfdrive.car.interfaces import CarInterfaceBase
GearShifter = car.CarState.GearShifter
ButtonType = car.CarState.ButtonEvent.Type
class CarInterface(CarInterfaceBase):
def __init__(self, CP, CarController):
self.CP = CP
self.VM = VehicleModel(CP)
self.frame = 0
self.gas_pressed_prev = False
self.brake_pressed_prev = False
self.cruise_enabled_prev = False
self.low_speed_alert = False
self.vEgo_prev = False
# *** init the major players ***
self.CS = CarState(CP)
self.cp = get_can_parser(CP)
self.cp2 = get_can2_parser(CP)
self.cp_cam = get_camera_parser(CP)
self.CC = None
if CarController is not None:
self.CC = CarController(self.cp.dbc_name, CP.carFingerprint)
@staticmethod
def compute_gb(accel, speed):
return float(accel) / 3.0
@staticmethod
def get_params(candidate, fingerprint=gen_empty_fingerprint(), has_relay=False, car_fw=[]):
ret = car.CarParams.new_message()
ret.carName = "hyundai"
ret.carFingerprint = candidate
ret.isPandaBlack = has_relay
ret.safetyModel = car.CarParams.SafetyModel.hyundai
ret.enableCruise = True # stock acc
ret.steerActuatorDelay = 0.4 # Default delay 0.15
ret.steerRateCost = 0.5 #default is .45 but .5 is good. Value is active in kegman file
ret.steerLimitTimer = 0.1 #default is 0.8. 0.1 is good
tire_stiffness_factor = 0.7
ret.minEnableSpeed = -1. # enable is done by stock ACC, so ignore this
if candidate in [CAR.SANTA_FE, CAR.SANTA_FE_1]:
ret.lateralTuning.pid.kf = 0.00005
ret.mass = 3982. * CV.LB_TO_KG + STD_CARGO_KG
ret.wheelbase = 2.766
# Values from optimizer
ret.steerRatio = 16.55 # 13.8 is spec end-to-end
tire_stiffness_factor = 0.82
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[9., 22.], [9., 22.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.2, 0.35], [0.05, 0.09]]
ret.minSteerSpeed = 0.
elif candidate == CAR.KIA_SORENTO:
ret.lateralTuning.pid.kf = 0.00005
ret.mass = 1985. + STD_CARGO_KG
ret.wheelbase = 2.78
ret.steerRatio = 14.4 * 1.1 # 10% higher at the center seems reasonable
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
ret.minSteerSpeed = 0.
elif candidate in [CAR.ELANTRA, CAR.ELANTRA_GT_I30]:
ret.lateralTuning.pid.kf = 0.00005
ret.mass = 1275. + STD_CARGO_KG
ret.wheelbase = 2.7
ret.steerRatio = 13.73 #Spec
tire_stiffness_factor = 0.685
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.10], [0.02]]
ret.minSteerSpeed = 32 * CV.MPH_TO_MS
ret.minEnableSpeed = 32 * CV.MPH_TO_MS
elif candidate == CAR.GENESIS:
ret.lateralTuning.init('indi')
ret.lateralTuning.indi.innerLoopGain = 3.1 #stock is 3.0 but 4.0 seems good
ret.lateralTuning.indi.outerLoopGain = 2.1 #stock is 2.0. Trying out 2.5
ret.lateralTuning.indi.timeConstant = 1.4 #Stock is 1.5. 1.3 is good
ret.lateralTuning.indi.actuatorEffectiveness = 1.3 #Stock is 1.0 1.4 is good
ret.mass = 2140. + STD_CARGO_KG
# ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
# ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.15], [0.06]]
# ret.lateralTuning.pid.kf = 0.00005
ret.wheelbase = 3.01
ret.steerRatio = 15 #active value is in Kegman file
#ret.minSteerSpeed = 57 * CV.KPH_TO_MS
#ret.minEnableSpeed = 15 * CV.KPH_TO_MS
elif candidate in [CAR.GENESIS_G90, CAR.GENESIS_G80]:
ret.mass = 2200
ret.wheelbase = 3.15
ret.steerRatio = 12.069
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.16], [0.01]]
elif candidate in [CAR.KIA_OPTIMA, CAR.KIA_OPTIMA_H]:
ret.lateralTuning.pid.kf = 0.00005
ret.mass = 3558. * CV.LB_TO_KG
ret.wheelbase = 2.80
ret.steerRatio = 13.75
tire_stiffness_factor = 0.5
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
elif candidate == CAR.KIA_STINGER:
ret.lateralTuning.pid.kf = 0.00005
ret.mass = 1825. + STD_CARGO_KG
ret.wheelbase = 2.78
ret.steerRatio = 14.4 * 1.15 # 15% higher at the center seems reasonable
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
ret.minSteerSpeed = 0.
elif candidate == CAR.KONA:
ret.lateralTuning.pid.kf = 0.00006
ret.mass = 1275. + STD_CARGO_KG
ret.wheelbase = 2.7
ret.steerRatio = 13.73 #Spec
tire_stiffness_factor = 0.385
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
elif candidate == CAR.IONIQ:
ret.lateralTuning.pid.kf = 0.00006
ret.mass = 1275. + STD_CARGO_KG
ret.wheelbase = 2.7
ret.steerRatio = 13.73 #Spec
tire_stiffness_factor = 0.385
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
elif candidate == CAR.IONIQ_EV_LTD:
ret.lateralTuning.pid.kf = 0.00006
ret.mass = 1490. + STD_CARGO_KG #weight per hyundai site https://www.hyundaiusa.com/ioniq-electric/specifications.aspx
ret.wheelbase = 2.7
ret.steerRatio = 13.73 #Spec
tire_stiffness_factor = 0.385
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
ret.minSteerSpeed = 32 * CV.MPH_TO_MS
elif candidate == CAR.KIA_FORTE:
ret.lateralTuning.pid.kf = 0.00005
ret.mass = 3558. * CV.LB_TO_KG
ret.wheelbase = 2.80
ret.steerRatio = 13.75
tire_stiffness_factor = 0.5
ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
ret.longitudinalTuning.kpBP = [0., 5., 35.]
ret.longitudinalTuning.kpV = [1.2, 0.8, 0.5]
ret.longitudinalTuning.kiBP = [0., 35.]
ret.longitudinalTuning.kiV = [0.18, 0.12]
ret.longitudinalTuning.deadzoneBP = [0.]
ret.longitudinalTuning.deadzoneV = [0.]
ret.centerToFront = ret.wheelbase * 0.4
# TODO: get actual value, for now starting with reasonable value for
# civic and scaling by mass and wheelbase
ret.rotationalInertia = scale_rot_inertia(ret.mass, ret.wheelbase)
# TODO: start from empirically derived lateral slip stiffness for the civic and scale by
# mass and CG position, so all cars will have approximately similar dyn behaviors
ret.tireStiffnessFront, ret.tireStiffnessRear = scale_tire_stiffness(ret.mass, ret.wheelbase, ret.centerToFront,
tire_stiffness_factor=tire_stiffness_factor)
# no rear steering, at least on the listed cars above
ret.steerRatioRear = 0.
ret.steerControlType = car.CarParams.SteerControlType.torque
# steer, gas, brake limitations VS speed
ret.steerMaxBP = [0.]
ret.steerMaxV = [1.0]
ret.gasMaxBP = [0.]
ret.gasMaxV = [0.5]
ret.brakeMaxBP = [0., 20.]
ret.brakeMaxV = [1., 0.8]
ret.enableCamera = is_ecu_disconnected(fingerprint[0], FINGERPRINTS, ECU_FINGERPRINT, candidate, Ecu.fwdCamera) or has_relay
ret.openpilotLongitudinalControl = False
ret.stoppingControl = True
ret.startAccel = 0.0
# ignore CAN2 address if L-CAN on the same BUS
ret.mdpsBus = 1 if 593 in fingerprint[1] and 1296 not in fingerprint[1] else 0
ret.sasBus = 1 if 688 in fingerprint[1] and 1296 not in fingerprint[1] else 0
ret.sccBus = 0 if 1056 in fingerprint[0] else 1 if 1056 in fingerprint[1] and 1296 not in fingerprint[1] \
else 2 if 1056 in fingerprint[2] else -1
ret.autoLcaEnabled = 1
return ret
# returns a car.CarState
def update(self, c, can_strings):
# ******************* do can recv *******************
self.cp.update_strings(can_strings)
self.cp2.update_strings(can_strings)
self.cp_cam.update_strings(can_strings)
self.CS.update(self.cp, self.cp2, self.cp_cam)
# create message
ret = car.CarState.new_message()
ret.canValid = self.cp.can_valid and self.cp_cam.can_valid
# speeds
ret.vEgo = self.CS.v_ego
ret.vEgoRaw = self.CS.v_ego_raw
ret.aEgo = self.CS.a_ego
ret.yawRate = self.CS.yaw_rate
ret.standstill = self.CS.standstill
ret.wheelSpeeds.fl = self.CS.v_wheel_fl
ret.wheelSpeeds.fr = self.CS.v_wheel_fr
ret.wheelSpeeds.rl = self.CS.v_wheel_rl
ret.wheelSpeeds.rr = self.CS.v_wheel_rr
# gear shifter
ret.gearShifter = self.CS.gear_shifter
# gas pedal
ret.gas = self.CS.car_gas
ret.gasPressed = self.CS.pedal_gas > 1e-3 # tolerance to avoid false press reading
# brake pedal
ret.brake = self.CS.user_brake
ret.brakePressed = self.CS.brake_pressed != 0
ret.brakeLights = self.CS.brake_lights
# steering wheel
ret.steeringAngle = self.CS.angle_steers
ret.steeringRate = self.CS.angle_steers_rate # it's unsigned
ret.steeringTorque = self.CS.steer_torque_driver
ret.steeringPressed = self.CS.steer_override
# cruise state
# most HKG cars has no long control, it is safer and easier to engage by main on
ret.cruiseState.enabled = (self.CS.pcm_acc_status != 0) if self.CC.longcontrol else bool(self.CS.main_on)
if self.CS.pcm_acc_status != 0:
ret.cruiseState.speed = self.CS.cruise_set_speed
else:
ret.cruiseState.speed = 0
ret.cruiseState.available = bool(self.CS.main_on)
ret.cruiseState.standstill = False
ret.lcaLeft = self.CS.lca_left != 0
ret.lcaRight = self.CS.lca_right != 0
# TODO: button presses
buttonEvents = []
if self.CS.left_blinker_flash != self.CS.prev_left_blinker_flash:
be = car.CarState.ButtonEvent.new_message()
be.type = ButtonType.leftBlinker
be.pressed = self.CS.left_blinker_flash != 0
buttonEvents.append(be)
if self.CS.right_blinker_flash != self.CS.prev_right_blinker_flash:
be = car.CarState.ButtonEvent.new_message()
be.type = ButtonType.rightBlinker
be.pressed = self.CS.right_blinker_flash != 0
buttonEvents.append(be)
ret.buttonEvents = buttonEvents
ret.leftBlinker = bool(self.CS.left_blinker_flash)
ret.rightBlinker = bool(self.CS.right_blinker_flash)
ret.doorOpen = not self.CS.door_all_closed
ret.seatbeltUnlatched = not self.CS.seatbelt
# low speed steer alert hysteresis logic (only for cars with steer cut off above 10 m/s)
if ret.vEgo < self.CP.minSteerSpeed and self.CP.minSteerSpeed > 10.:
self.low_speed_alert = True
if ret.vEgo > self.CP.minSteerSpeed:
self.low_speed_alert = False
# turning indicator alert hysteresis logic
self.turning_indicator_alert = True if (self.CS.left_blinker_flash or self.CS.right_blinker_flash) and self.CS.v_ego < 17.5 else False
# LKAS button alert logic
self.lkas_button_alert = True if not self.CC.lkas_button else False
events = []
# if not ret.gearShifter == GearShifter.drive:
# events.append(create_event('wrongGear', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if ret.doorOpen:
events.append(create_event('doorOpen', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
# if ret.seatbeltUnlatched:
# events.append(create_event('seatbeltNotLatched', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if self.CS.esp_disabled:
events.append(create_event('espDisabled', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
if not self.CS.main_on:
events.append(create_event('wrongCarMode', [ET.NO_ENTRY, ET.USER_DISABLE]))
if ret.gearShifter == GearShifter.reverse:
events.append(create_event('reverseGear', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE]))
if self.CS.steer_error:
events.append(create_event('steerTempUnavailable', [ET.NO_ENTRY, ET.WARNING]))
if ret.cruiseState.enabled and not self.cruise_enabled_prev:
events.append(create_event('pcmEnable', [ET.ENABLE]))
elif not ret.cruiseState.enabled:
events.append(create_event('pcmDisable', [ET.USER_DISABLE]))
# disable on pedals rising edge or when brake is pressed and speed isn't zero
if ((ret.gasPressed and not self.gas_pressed_prev) or \
(ret.brakePressed and (not self.brake_pressed_prev or ret.vEgoRaw > 0.1))) and self.CC.longcontrol:
events.append(create_event('pedalPressed', [ET.NO_ENTRY, ET.USER_DISABLE]))
if ret.gasPressed and self.CC.longcontrol:
events.append(create_event('pedalPressed', [ET.PRE_ENABLE]))
if self.low_speed_alert and not self.CS.mdps_bus :
events.append(create_event('belowSteerSpeed', [ET.WARNING]))
if self.turning_indicator_alert:
events.append(create_event('turningIndicatorOn', [ET.WARNING]))
if self.lkas_button_alert:
events.append(create_event('lkasButtonOff', [ET.WARNING]))
if ret.rightBlinker and ret.lcaRight and self.CS.v_ego > (40 * CV.MPH_TO_MS):
events.append(create_event('rightLCAbsm', [ET.WARNING]))
if ret.leftBlinker and ret.lcaLeft and self.CS.v_ego > (40 * CV.MPH_TO_MS):
events.append(create_event('leftLCAbsm', [ET.WARNING]))
ret.events = events
self.gas_pressed_prev = ret.gasPressed
self.brake_pressed_prev = ret.brakePressed
self.cruise_enabled_prev = ret.cruiseState.enabled
self.vEgo_prev = ret.vEgo
return ret.as_reader()
def apply(self, c):
can_sends = self.CC.update(c.enabled, self.CS, self.frame, c.actuators,
c.cruiseControl.cancel, c.hudControl.visualAlert, c.hudControl.leftLaneVisible,
c.hudControl.rightLaneVisible, c.hudControl.leftLaneDepart, c.hudControl.rightLaneDepart)
self.frame += 1
return can_sends
| 42.358974 | 138 | 0.68375 |
from cereal import car
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.drive_helpers import EventTypes as ET, create_event
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.car.hyundai.carstate import CarState, get_can_parser, get_can2_parser, get_camera_parser
from selfdrive.car.hyundai.values import Ecu, ECU_FINGERPRINT, CAR, FINGERPRINTS
from selfdrive.car import STD_CARGO_KG, scale_rot_inertia, scale_tire_stiffness, is_ecu_disconnected, gen_empty_fingerprint
from selfdrive.car.interfaces import CarInterfaceBase
GearShifter = car.CarState.GearShifter
ButtonType = car.CarState.ButtonEvent.Type
class CarInterface(CarInterfaceBase):
  """openpilot car interface for Hyundai/Kia/Genesis (HKG) vehicles.

  Responsibilities visible in this class:
    - `get_params`: builds the static CarParams message (per-model tuning).
    - `update`: parses CAN strings into a generic car.CarState message and
      derives driver-facing events.
    - `apply`: converts a CarControl message into CAN sends via CarController.
  """

  def __init__(self, CP, CarController):
    # CP: CarParams for the fingerprinted vehicle; CarController: class (not
    # instance) used to build the controller, or None (e.g. for read-only use).
    self.CP = CP
    self.VM = VehicleModel(CP)
    self.frame = 0
    # Previous-frame values used for rising-edge detection in update().
    self.gas_pressed_prev = False
    self.brake_pressed_prev = False
    self.cruise_enabled_prev = False
    self.low_speed_alert = False
    # NOTE(review): initialized to False but later assigned a float speed in
    # update(); probably intended to be 0. — harmless, but worth confirming.
    self.vEgo_prev = False
    self.CS = CarState(CP)
    # Three parsers: main bus, secondary bus, and camera bus.
    self.cp = get_can_parser(CP)
    self.cp2 = get_can2_parser(CP)
    self.cp_cam = get_camera_parser(CP)
    self.CC = None
    if CarController is not None:
      self.CC = CarController(self.cp.dbc_name, CP.carFingerprint)

  @staticmethod
  def compute_gb(accel, speed):
    # Map an acceleration request to a normalized gas/brake value.
    # The divisor 3.0 is an empirical scaling constant; speed is unused here.
    return float(accel) / 3.0

  @staticmethod
  def get_params(candidate, fingerprint=gen_empty_fingerprint(), has_relay=False, car_fw=[]):
    """Build the static CarParams message for the fingerprinted model.

    candidate: CAR.* identifier; fingerprint: dict of CAN message IDs per bus;
    has_relay: whether running behind a black panda relay; car_fw: unused here.
    """
    ret = car.CarParams.new_message()
    ret.carName = "hyundai"
    ret.carFingerprint = candidate
    ret.isPandaBlack = has_relay
    ret.safetyModel = car.CarParams.SafetyModel.hyundai
    ret.enableCruise = True
    # Common steering tuning defaults; overridden per model below.
    ret.steerActuatorDelay = 0.4
    ret.steerRateCost = 0.5
    ret.steerLimitTimer = 0.1
    tire_stiffness_factor = 0.7
    ret.minEnableSpeed = -1.
    # Per-model physical parameters (mass, wheelbase, steer ratio) and
    # lateral controller tuning. Values are hand-tuned constants.
    if candidate in [CAR.SANTA_FE, CAR.SANTA_FE_1]:
      ret.lateralTuning.pid.kf = 0.00005
      ret.mass = 3982. * CV.LB_TO_KG + STD_CARGO_KG
      ret.wheelbase = 2.766
      ret.steerRatio = 16.55
      tire_stiffness_factor = 0.82
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[9., 22.], [9., 22.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.2, 0.35], [0.05, 0.09]]
      ret.minSteerSpeed = 0.
    elif candidate == CAR.KIA_SORENTO:
      ret.lateralTuning.pid.kf = 0.00005
      ret.mass = 1985. + STD_CARGO_KG
      ret.wheelbase = 2.78
      ret.steerRatio = 14.4 * 1.1
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
      ret.minSteerSpeed = 0.
    elif candidate in [CAR.ELANTRA, CAR.ELANTRA_GT_I30]:
      ret.lateralTuning.pid.kf = 0.00005
      ret.mass = 1275. + STD_CARGO_KG
      ret.wheelbase = 2.7
      ret.steerRatio = 13.73
      tire_stiffness_factor = 0.685
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.10], [0.02]]
      # Steering is cut off below ~32 mph on these models, so engagement is
      # also gated at the same speed.
      ret.minSteerSpeed = 32 * CV.MPH_TO_MS
      ret.minEnableSpeed = 32 * CV.MPH_TO_MS
    elif candidate == CAR.GENESIS:
      # Genesis uses the INDI lateral controller instead of PID.
      ret.lateralTuning.init('indi')
      ret.lateralTuning.indi.innerLoopGain = 3.1
      ret.lateralTuning.indi.outerLoopGain = 2.1
      ret.lateralTuning.indi.timeConstant = 1.4
      ret.lateralTuning.indi.actuatorEffectiveness = 1.3
      ret.mass = 2140. + STD_CARGO_KG
      ret.wheelbase = 3.01
      ret.steerRatio = 15
    elif candidate in [CAR.GENESIS_G90, CAR.GENESIS_G80]:
      # NOTE(review): mass here omits STD_CARGO_KG unlike the other branches —
      # confirm whether that is intentional.
      ret.mass = 2200
      ret.wheelbase = 3.15
      ret.steerRatio = 12.069
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.16], [0.01]]
    elif candidate in [CAR.KIA_OPTIMA, CAR.KIA_OPTIMA_H]:
      ret.lateralTuning.pid.kf = 0.00005
      ret.mass = 3558. * CV.LB_TO_KG
      ret.wheelbase = 2.80
      ret.steerRatio = 13.75
      tire_stiffness_factor = 0.5
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
    elif candidate == CAR.KIA_STINGER:
      ret.lateralTuning.pid.kf = 0.00005
      ret.mass = 1825. + STD_CARGO_KG
      ret.wheelbase = 2.78
      ret.steerRatio = 14.4 * 1.15
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
      ret.minSteerSpeed = 0.
    elif candidate == CAR.KONA:
      ret.lateralTuning.pid.kf = 0.00006
      ret.mass = 1275. + STD_CARGO_KG
      ret.wheelbase = 2.7
      ret.steerRatio = 13.73
      tire_stiffness_factor = 0.385
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
    elif candidate == CAR.IONIQ:
      ret.lateralTuning.pid.kf = 0.00006
      ret.mass = 1275. + STD_CARGO_KG
      ret.wheelbase = 2.7
      ret.steerRatio = 13.73
      tire_stiffness_factor = 0.385
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
    elif candidate == CAR.IONIQ_EV_LTD:
      ret.lateralTuning.pid.kf = 0.00006
      ret.mass = 1490. + STD_CARGO_KG
      ret.wheelbase = 2.7
      ret.steerRatio = 13.73
      tire_stiffness_factor = 0.385
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
      ret.minSteerSpeed = 32 * CV.MPH_TO_MS
    elif candidate == CAR.KIA_FORTE:
      ret.lateralTuning.pid.kf = 0.00005
      ret.mass = 3558. * CV.LB_TO_KG
      ret.wheelbase = 2.80
      ret.steerRatio = 13.75
      tire_stiffness_factor = 0.5
      ret.lateralTuning.pid.kiBP, ret.lateralTuning.pid.kpBP = [[0.], [0.]]
      ret.lateralTuning.pid.kpV, ret.lateralTuning.pid.kiV = [[0.25], [0.05]]
    # Longitudinal tuning shared by all models (openpilot long control is
    # disabled below, so these are only used when that changes).
    ret.longitudinalTuning.kpBP = [0., 5., 35.]
    ret.longitudinalTuning.kpV = [1.2, 0.8, 0.5]
    ret.longitudinalTuning.kiBP = [0., 35.]
    ret.longitudinalTuning.kiV = [0.18, 0.12]
    ret.longitudinalTuning.deadzoneBP = [0.]
    ret.longitudinalTuning.deadzoneV = [0.]
    # Derived vehicle-model quantities from the per-model physical parameters.
    ret.centerToFront = ret.wheelbase * 0.4
    ret.rotationalInertia = scale_rot_inertia(ret.mass, ret.wheelbase)
    ret.tireStiffnessFront, ret.tireStiffnessRear = scale_tire_stiffness(ret.mass, ret.wheelbase, ret.centerToFront,
                                                                         tire_stiffness_factor=tire_stiffness_factor)
    ret.steerRatioRear = 0.
    ret.steerControlType = car.CarParams.SteerControlType.torque
    ret.steerMaxBP = [0.]
    ret.steerMaxV = [1.0]
    ret.gasMaxBP = [0.]
    ret.gasMaxV = [0.5]
    ret.brakeMaxBP = [0., 20.]
    ret.brakeMaxV = [1., 0.8]
    # Camera is considered "ours" when its expected messages are absent from
    # the fingerprint (ECU disconnected) or a relay panda is in use.
    ret.enableCamera = is_ecu_disconnected(fingerprint[0], FINGERPRINTS, ECU_FINGERPRINT, candidate, Ecu.fwdCamera) or has_relay
    ret.openpilotLongitudinalControl = False
    ret.stoppingControl = True
    ret.startAccel = 0.0
    # Locate steering (MDPS), angle sensor (SAS) and cruise (SCC) ECUs by
    # probing for specific message IDs on each bus.
    # NOTE(review): 593/688/1056/1296 are raw CAN IDs whose message names are
    # defined in the DBC — presumably MDPS/SAS/SCC frames; verify against the
    # Hyundai DBC before relying on this comment.
    ret.mdpsBus = 1 if 593 in fingerprint[1] and 1296 not in fingerprint[1] else 0
    ret.sasBus = 1 if 688 in fingerprint[1] and 1296 not in fingerprint[1] else 0
    ret.sccBus = 0 if 1056 in fingerprint[0] else 1 if 1056 in fingerprint[1] and 1296 not in fingerprint[1] \
      else 2 if 1056 in fingerprint[2] else -1
    ret.autoLcaEnabled = 1
    return ret

  def update(self, c, can_strings):
    """Parse incoming CAN strings and return the current car state.

    c: CarControl from the previous cycle (unused directly here);
    can_strings: raw serialized CAN data for all parsers.
    Returns a read-only car.CarState capnp message.
    """
    # Feed all three parsers, then refresh the consolidated CarState.
    self.cp.update_strings(can_strings)
    self.cp2.update_strings(can_strings)
    self.cp_cam.update_strings(can_strings)
    self.CS.update(self.cp, self.cp2, self.cp_cam)
    ret = car.CarState.new_message()
    ret.canValid = self.cp.can_valid and self.cp_cam.can_valid
    # Speeds / dynamics.
    ret.vEgo = self.CS.v_ego
    ret.vEgoRaw = self.CS.v_ego_raw
    ret.aEgo = self.CS.a_ego
    ret.yawRate = self.CS.yaw_rate
    ret.standstill = self.CS.standstill
    ret.wheelSpeeds.fl = self.CS.v_wheel_fl
    ret.wheelSpeeds.fr = self.CS.v_wheel_fr
    ret.wheelSpeeds.rl = self.CS.v_wheel_rl
    ret.wheelSpeeds.rr = self.CS.v_wheel_rr
    # Gear / pedals.
    ret.gearShifter = self.CS.gear_shifter
    ret.gas = self.CS.car_gas
    ret.gasPressed = self.CS.pedal_gas > 1e-3
    ret.brake = self.CS.user_brake
    ret.brakePressed = self.CS.brake_pressed != 0
    ret.brakeLights = self.CS.brake_lights
    # Steering.
    ret.steeringAngle = self.CS.angle_steers
    ret.steeringRate = self.CS.angle_steers_rate
    ret.steeringTorque = self.CS.steer_torque_driver
    ret.steeringPressed = self.CS.steer_override
    # cruise state
    # Most HKG cars have no longitudinal control; without it, it is safer and
    # easier to consider cruise engaged whenever the main switch is on.
    ret.cruiseState.enabled = (self.CS.pcm_acc_status != 0) if self.CC.longcontrol else bool(self.CS.main_on)
    if self.CS.pcm_acc_status != 0:
      ret.cruiseState.speed = self.CS.cruise_set_speed
    else:
      ret.cruiseState.speed = 0
    ret.cruiseState.available = bool(self.CS.main_on)
    ret.cruiseState.standstill = False
    # Blind-spot (lane change assist) indications per side.
    ret.lcaLeft = self.CS.lca_left != 0
    ret.lcaRight = self.CS.lca_right != 0
    # TODO: button presses
    # Emit button events on blinker state transitions only.
    buttonEvents = []
    if self.CS.left_blinker_flash != self.CS.prev_left_blinker_flash:
      be = car.CarState.ButtonEvent.new_message()
      be.type = ButtonType.leftBlinker
      be.pressed = self.CS.left_blinker_flash != 0
      buttonEvents.append(be)
    if self.CS.right_blinker_flash != self.CS.prev_right_blinker_flash:
      be = car.CarState.ButtonEvent.new_message()
      be.type = ButtonType.rightBlinker
      be.pressed = self.CS.right_blinker_flash != 0
      buttonEvents.append(be)
    ret.buttonEvents = buttonEvents
    ret.leftBlinker = bool(self.CS.left_blinker_flash)
    ret.rightBlinker = bool(self.CS.right_blinker_flash)
    ret.doorOpen = not self.CS.door_all_closed
    ret.seatbeltUnlatched = not self.CS.seatbelt
    # low speed steer alert hysteresis logic (only for cars with steer cut off above 10 m/s)
    if ret.vEgo < self.CP.minSteerSpeed and self.CP.minSteerSpeed > 10.:
      self.low_speed_alert = True
    if ret.vEgo > self.CP.minSteerSpeed:
      self.low_speed_alert = False
    # Turning-indicator alert: warn while a blinker is flashing below 17.5 m/s.
    self.turning_indicator_alert = True if (self.CS.left_blinker_flash or self.CS.right_blinker_flash) and self.CS.v_ego < 17.5 else False
    # LKAS button alert: warn whenever the LKAS button is off.
    self.lkas_button_alert = True if not self.CC.lkas_button else False
    # Build the event list. Gear/seatbelt checks are deliberately left
    # commented out below.
    events = []
    # if not ret.gearShifter == GearShifter.drive:
    #   events.append(create_event('wrongGear', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
    if ret.doorOpen:
      events.append(create_event('doorOpen', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
    # if ret.seatbeltUnlatched:
    #   events.append(create_event('seatbeltNotLatched', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
    if self.CS.esp_disabled:
      events.append(create_event('espDisabled', [ET.NO_ENTRY, ET.SOFT_DISABLE]))
    if not self.CS.main_on:
      events.append(create_event('wrongCarMode', [ET.NO_ENTRY, ET.USER_DISABLE]))
    if ret.gearShifter == GearShifter.reverse:
      events.append(create_event('reverseGear', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE]))
    if self.CS.steer_error:
      events.append(create_event('steerTempUnavailable', [ET.NO_ENTRY, ET.WARNING]))
    # PCM engage/disengage edges.
    if ret.cruiseState.enabled and not self.cruise_enabled_prev:
      events.append(create_event('pcmEnable', [ET.ENABLE]))
    elif not ret.cruiseState.enabled:
      events.append(create_event('pcmDisable', [ET.USER_DISABLE]))
    # disable on pedals rising edge or when brake is pressed and speed isn't zero
    if ((ret.gasPressed and not self.gas_pressed_prev) or \
       (ret.brakePressed and (not self.brake_pressed_prev or ret.vEgoRaw > 0.1))) and self.CC.longcontrol:
      events.append(create_event('pedalPressed', [ET.NO_ENTRY, ET.USER_DISABLE]))
    if ret.gasPressed and self.CC.longcontrol:
      events.append(create_event('pedalPressed', [ET.PRE_ENABLE]))
    # Low-speed steer warning only when MDPS is on the main bus (mdps_bus == 0).
    if self.low_speed_alert and not self.CS.mdps_bus :
      events.append(create_event('belowSteerSpeed', [ET.WARNING]))
    if self.turning_indicator_alert:
      events.append(create_event('turningIndicatorOn', [ET.WARNING]))
    if self.lkas_button_alert:
      events.append(create_event('lkasButtonOff', [ET.WARNING]))
    # Blind-spot warnings only above 40 mph with the blinker on.
    if ret.rightBlinker and ret.lcaRight and self.CS.v_ego > (40 * CV.MPH_TO_MS):
      events.append(create_event('rightLCAbsm', [ET.WARNING]))
    if ret.leftBlinker and ret.lcaLeft and self.CS.v_ego > (40 * CV.MPH_TO_MS):
      events.append(create_event('leftLCAbsm', [ET.WARNING]))
    ret.events = events
    # Remember current values for next-frame edge detection.
    self.gas_pressed_prev = ret.gasPressed
    self.brake_pressed_prev = ret.brakePressed
    self.cruise_enabled_prev = ret.cruiseState.enabled
    self.vEgo_prev = ret.vEgo
    return ret.as_reader()

  def apply(self, c):
    """Convert CarControl message `c` into CAN sends for this frame."""
    can_sends = self.CC.update(c.enabled, self.CS, self.frame, c.actuators,
                               c.cruiseControl.cancel, c.hudControl.visualAlert, c.hudControl.leftLaneVisible,
                               c.hudControl.rightLaneVisible, c.hudControl.leftLaneDepart, c.hudControl.rightLaneDepart)
    self.frame += 1
    return can_sends
| true | true |
f72e11fc27845101d13fc8a17eac7938a6ce7171 | 383,723 | py | Python | rpc_pb2.py | Kixunil/ln_reject_mobile_wallets | 5ef02632217461bdbd7143e7d367d1c8d332f4f1 | [
"MITNFA"
] | null | null | null | rpc_pb2.py | Kixunil/ln_reject_mobile_wallets | 5ef02632217461bdbd7143e7d367d1c8d332f4f1 | [
"MITNFA"
] | null | null | null | rpc_pb2.py | Kixunil/ln_reject_mobile_wallets | 5ef02632217461bdbd7143e7d367d1c8d332f4f1 | [
"MITNFA"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: rpc.proto
# Protobuf-generated boilerplate (rpc_pb2.py) — normally not hand-edited.
import sys
# Py2/Py3 shim: on Python 2 bytes literals pass through unchanged; on
# Python 3 they are latin-1 encoded so the serialized descriptor bytes
# below stay byte-identical.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database used to register the generated message types.
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='rpc.proto',
package='lnrpc',
syntax='proto3',
serialized_options=_b('Z%github.com/lightningnetwork/lnd/lnrpc'),
serialized_pb=_b('\n\trpc.proto\x12\x05lnrpc\x1a\x1cgoogle/api/annotations.proto\"A\n\x0eGenSeedRequest\x12\x19\n\x11\x61\x65zeed_passphrase\x18\x01 \x01(\x0c\x12\x14\n\x0cseed_entropy\x18\x02 \x01(\x0c\"H\n\x0fGenSeedResponse\x12\x1c\n\x14\x63ipher_seed_mnemonic\x18\x01 \x03(\t\x12\x17\n\x0f\x65nciphered_seed\x18\x02 \x01(\x0c\"\xb2\x01\n\x11InitWalletRequest\x12\x17\n\x0fwallet_password\x18\x01 \x01(\x0c\x12\x1c\n\x14\x63ipher_seed_mnemonic\x18\x02 \x03(\t\x12\x19\n\x11\x61\x65zeed_passphrase\x18\x03 \x01(\x0c\x12\x17\n\x0frecovery_window\x18\x04 \x01(\x05\x12\x32\n\x0f\x63hannel_backups\x18\x05 \x01(\x0b\x32\x19.lnrpc.ChanBackupSnapshot\"\x14\n\x12InitWalletResponse\"{\n\x13UnlockWalletRequest\x12\x17\n\x0fwallet_password\x18\x01 \x01(\x0c\x12\x17\n\x0frecovery_window\x18\x02 \x01(\x05\x12\x32\n\x0f\x63hannel_backups\x18\x03 \x01(\x0b\x32\x19.lnrpc.ChanBackupSnapshot\"\x16\n\x14UnlockWalletResponse\"G\n\x15\x43hangePasswordRequest\x12\x18\n\x10\x63urrent_password\x18\x01 \x01(\x0c\x12\x14\n\x0cnew_password\x18\x02 \x01(\x0c\"\x18\n\x16\x43hangePasswordResponse\"\xe1\x01\n\x04Utxo\x12.\n\x04type\x18\x01 \x01(\x0e\x32\x12.lnrpc.AddressTypeR\x0c\x61\x64\x64ress_type\x12\x18\n\x07\x61\x64\x64ress\x18\x02 \x01(\tR\x07\x61\x64\x64ress\x12\x1e\n\namount_sat\x18\x03 \x01(\x03R\namount_sat\x12\x1c\n\tpk_script\x18\x04 \x01(\tR\tpk_script\x12+\n\x08outpoint\x18\x05 \x01(\x0b\x32\x0f.lnrpc.OutPointR\x08outpoint\x12$\n\rconfirmations\x18\x06 \x01(\x03R\rconfirmations\"\xb9\x02\n\x0bTransaction\x12\x18\n\x07tx_hash\x18\x01 \x01(\tR\x07tx_hash\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12,\n\x11num_confirmations\x18\x03 \x01(\x05R\x11num_confirmations\x12\x1e\n\nblock_hash\x18\x04 \x01(\tR\nblock_hash\x12\"\n\x0c\x62lock_height\x18\x05 \x01(\x05R\x0c\x62lock_height\x12\x1e\n\ntime_stamp\x18\x06 \x01(\x03R\ntime_stamp\x12\x1e\n\ntotal_fees\x18\x07 \x01(\x03R\ntotal_fees\x12&\n\x0e\x64\x65st_addresses\x18\x08 
\x03(\tR\x0e\x64\x65st_addresses\x12\x1e\n\nraw_tx_hex\x18\t \x01(\tR\nraw_tx_hex\"\x18\n\x16GetTransactionsRequest\"L\n\x12TransactionDetails\x12\x36\n\x0ctransactions\x18\x01 \x03(\x0b\x32\x12.lnrpc.TransactionR\x0ctransactions\"M\n\x08\x46\x65\x65Limit\x12\x0f\n\x05\x66ixed\x18\x01 \x01(\x03H\x00\x12\x14\n\nfixed_msat\x18\x03 \x01(\x03H\x00\x12\x11\n\x07percent\x18\x02 \x01(\x03H\x00\x42\x07\n\x05limit\"\xab\x03\n\x0bSendRequest\x12\x0c\n\x04\x64\x65st\x18\x01 \x01(\x0c\x12\x17\n\x0b\x64\x65st_string\x18\x02 \x01(\tB\x02\x18\x01\x12\x0b\n\x03\x61mt\x18\x03 \x01(\x03\x12\x10\n\x08\x61mt_msat\x18\x0c \x01(\x03\x12\x14\n\x0cpayment_hash\x18\x04 \x01(\x0c\x12\x1f\n\x13payment_hash_string\x18\x05 \x01(\tB\x02\x18\x01\x12\x17\n\x0fpayment_request\x18\x06 \x01(\t\x12\x18\n\x10\x66inal_cltv_delta\x18\x07 \x01(\x05\x12\"\n\tfee_limit\x18\x08 \x01(\x0b\x32\x0f.lnrpc.FeeLimit\x12\x1c\n\x10outgoing_chan_id\x18\t \x01(\x04\x42\x02\x30\x01\x12\x17\n\x0flast_hop_pubkey\x18\r \x01(\x0c\x12\x12\n\ncltv_limit\x18\n \x01(\r\x12\x31\n\x08\x64\x65st_tlv\x18\x0b \x03(\x0b\x32\x1f.lnrpc.SendRequest.DestTlvEntry\x12\x1a\n\x12\x61llow_self_payment\x18\x0e \x01(\x08\x1a.\n\x0c\x44\x65stTlvEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\xb8\x01\n\x0cSendResponse\x12$\n\rpayment_error\x18\x01 \x01(\tR\rpayment_error\x12*\n\x10payment_preimage\x18\x02 \x01(\x0cR\x10payment_preimage\x12\x32\n\rpayment_route\x18\x03 \x01(\x0b\x32\x0c.lnrpc.RouteR\rpayment_route\x12\"\n\x0cpayment_hash\x18\x04 \x01(\x0cR\x0cpayment_hash\"n\n\x12SendToRouteRequest\x12\x14\n\x0cpayment_hash\x18\x01 \x01(\x0c\x12\x1f\n\x13payment_hash_string\x18\x02 \x01(\tB\x02\x18\x01\x12\x1b\n\x05route\x18\x04 \x01(\x0b\x32\x0c.lnrpc.RouteJ\x04\x08\x03\x10\x04\"\xb5\x02\n\x14\x43hannelAcceptRequest\x12\x13\n\x0bnode_pubkey\x18\x01 \x01(\x0c\x12\x12\n\nchain_hash\x18\x02 \x01(\x0c\x12\x17\n\x0fpending_chan_id\x18\x03 \x01(\x0c\x12\x13\n\x0b\x66unding_amt\x18\x04 
\x01(\x04\x12\x10\n\x08push_amt\x18\x05 \x01(\x04\x12\x12\n\ndust_limit\x18\x06 \x01(\x04\x12\x1b\n\x13max_value_in_flight\x18\x07 \x01(\x04\x12\x17\n\x0f\x63hannel_reserve\x18\x08 \x01(\x04\x12\x10\n\x08min_htlc\x18\t \x01(\x04\x12\x12\n\nfee_per_kw\x18\n \x01(\x04\x12\x11\n\tcsv_delay\x18\x0b \x01(\r\x12\x1a\n\x12max_accepted_htlcs\x18\x0c \x01(\r\x12\x15\n\rchannel_flags\x18\r \x01(\r\"@\n\x15\x43hannelAcceptResponse\x12\x0e\n\x06\x61\x63\x63\x65pt\x18\x01 \x01(\x08\x12\x17\n\x0fpending_chan_id\x18\x02 \x01(\x0c\"\xa2\x01\n\x0c\x43hannelPoint\x12\x30\n\x12\x66unding_txid_bytes\x18\x01 \x01(\x0cH\x00R\x12\x66unding_txid_bytes\x12,\n\x10\x66unding_txid_str\x18\x02 \x01(\tH\x00R\x10\x66unding_txid_str\x12\"\n\x0coutput_index\x18\x03 \x01(\rR\x0coutput_indexB\x0e\n\x0c\x66unding_txid\"j\n\x08OutPoint\x12\x1e\n\ntxid_bytes\x18\x01 \x01(\x0cR\ntxid_bytes\x12\x1a\n\x08txid_str\x18\x02 \x01(\tR\x08txid_str\x12\"\n\x0coutput_index\x18\x03 \x01(\rR\x0coutput_index\">\n\x10LightningAddress\x12\x16\n\x06pubkey\x18\x01 \x01(\tR\x06pubkey\x12\x12\n\x04host\x18\x02 \x01(\tR\x04host\"\xa1\x01\n\x12\x45stimateFeeRequest\x12\x41\n\x0c\x41\x64\x64rToAmount\x18\x01 \x03(\x0b\x32+.lnrpc.EstimateFeeRequest.AddrToAmountEntry\x12\x13\n\x0btarget_conf\x18\x02 \x01(\x05\x1a\x33\n\x11\x41\x64\x64rToAmountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\"c\n\x13\x45stimateFeeResponse\x12\x18\n\x07\x66\x65\x65_sat\x18\x01 \x01(\x03R\x07\x66\x65\x65_sat\x12\x32\n\x14\x66\x65\x65rate_sat_per_byte\x18\x02 \x01(\x03R\x14\x66\x65\x65rate_sat_per_byte\"\xb1\x01\n\x0fSendManyRequest\x12>\n\x0c\x41\x64\x64rToAmount\x18\x01 \x03(\x0b\x32(.lnrpc.SendManyRequest.AddrToAmountEntry\x12\x13\n\x0btarget_conf\x18\x03 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x05 \x01(\x03\x1a\x33\n\x11\x41\x64\x64rToAmountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\"&\n\x10SendManyResponse\x12\x12\n\x04txid\x18\x01 
\x01(\tR\x04txid\"m\n\x10SendCoinsRequest\x12\x0c\n\x04\x61\x64\x64r\x18\x01 \x01(\t\x12\x0e\n\x06\x61mount\x18\x02 \x01(\x03\x12\x13\n\x0btarget_conf\x18\x03 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x05 \x01(\x03\x12\x10\n\x08send_all\x18\x06 \x01(\x08\"\'\n\x11SendCoinsResponse\x12\x12\n\x04txid\x18\x01 \x01(\tR\x04txid\":\n\x12ListUnspentRequest\x12\x11\n\tmin_confs\x18\x01 \x01(\x05\x12\x11\n\tmax_confs\x18\x02 \x01(\x05\"8\n\x13ListUnspentResponse\x12!\n\x05utxos\x18\x01 \x03(\x0b\x32\x0b.lnrpc.UtxoR\x05utxos\"5\n\x11NewAddressRequest\x12 \n\x04type\x18\x01 \x01(\x0e\x32\x12.lnrpc.AddressType\".\n\x12NewAddressResponse\x12\x18\n\x07\x61\x64\x64ress\x18\x01 \x01(\tR\x07\x61\x64\x64ress\"&\n\x12SignMessageRequest\x12\x10\n\x03msg\x18\x01 \x01(\x0cR\x03msg\"3\n\x13SignMessageResponse\x12\x1c\n\tsignature\x18\x01 \x01(\tR\tsignature\"F\n\x14VerifyMessageRequest\x12\x10\n\x03msg\x18\x01 \x01(\x0cR\x03msg\x12\x1c\n\tsignature\x18\x02 \x01(\tR\tsignature\"E\n\x15VerifyMessageResponse\x12\x14\n\x05valid\x18\x01 \x01(\x08R\x05valid\x12\x16\n\x06pubkey\x18\x02 \x01(\tR\x06pubkey\"I\n\x12\x43onnectPeerRequest\x12%\n\x04\x61\x64\x64r\x18\x01 \x01(\x0b\x32\x17.lnrpc.LightningAddress\x12\x0c\n\x04perm\x18\x02 \x01(\x08\"\x15\n\x13\x43onnectPeerResponse\"1\n\x15\x44isconnectPeerRequest\x12\x18\n\x07pub_key\x18\x01 \x01(\tR\x07pub_key\"\x18\n\x16\x44isconnectPeerResponse\"\x86\x01\n\x04HTLC\x12\x1a\n\x08incoming\x18\x01 \x01(\x08R\x08incoming\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12\x1c\n\thash_lock\x18\x03 \x01(\x0cR\thash_lock\x12,\n\x11\x65xpiration_height\x18\x04 \x01(\rR\x11\x65xpiration_height\"\xa2\x07\n\x07\x43hannel\x12\x16\n\x06\x61\x63tive\x18\x01 \x01(\x08R\x06\x61\x63tive\x12$\n\rremote_pubkey\x18\x02 \x01(\tR\rremote_pubkey\x12$\n\rchannel_point\x18\x03 \x01(\tR\rchannel_point\x12\x1c\n\x07\x63han_id\x18\x04 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12\x1a\n\x08\x63\x61pacity\x18\x05 \x01(\x03R\x08\x63\x61pacity\x12$\n\rlocal_balance\x18\x06 
\x01(\x03R\rlocal_balance\x12&\n\x0eremote_balance\x18\x07 \x01(\x03R\x0eremote_balance\x12\x1e\n\ncommit_fee\x18\x08 \x01(\x03R\ncommit_fee\x12$\n\rcommit_weight\x18\t \x01(\x03R\rcommit_weight\x12\x1e\n\nfee_per_kw\x18\n \x01(\x03R\nfee_per_kw\x12,\n\x11unsettled_balance\x18\x0b \x01(\x03R\x11unsettled_balance\x12\x30\n\x13total_satoshis_sent\x18\x0c \x01(\x03R\x13total_satoshis_sent\x12\x38\n\x17total_satoshis_received\x18\r \x01(\x03R\x17total_satoshis_received\x12 \n\x0bnum_updates\x18\x0e \x01(\x04R\x0bnum_updates\x12\x31\n\rpending_htlcs\x18\x0f \x03(\x0b\x32\x0b.lnrpc.HTLCR\rpending_htlcs\x12\x1c\n\tcsv_delay\x18\x10 \x01(\rR\tcsv_delay\x12\x18\n\x07private\x18\x11 \x01(\x08R\x07private\x12\x1c\n\tinitiator\x18\x12 \x01(\x08R\tinitiator\x12,\n\x11\x63han_status_flags\x18\x13 \x01(\tR\x11\x63han_status_flags\x12\x36\n\x16local_chan_reserve_sat\x18\x14 \x01(\x03R\x16local_chan_reserve_sat\x12\x38\n\x17remote_chan_reserve_sat\x18\x15 \x01(\x03R\x17remote_chan_reserve_sat\x12,\n\x11static_remote_key\x18\x16 \x01(\x08R\x11static_remote_key\x12\x1a\n\x08lifetime\x18\x17 \x01(\x03R\x08lifetime\x12\x16\n\x06uptime\x18\x18 \x01(\x03R\x06uptime\"l\n\x13ListChannelsRequest\x12\x13\n\x0b\x61\x63tive_only\x18\x01 \x01(\x08\x12\x15\n\rinactive_only\x18\x02 \x01(\x08\x12\x13\n\x0bpublic_only\x18\x03 \x01(\x08\x12\x14\n\x0cprivate_only\x18\x04 \x01(\x08\"B\n\x14ListChannelsResponse\x12*\n\x08\x63hannels\x18\x0b \x03(\x0b\x32\x0e.lnrpc.ChannelR\x08\x63hannels\"\xba\x04\n\x13\x43hannelCloseSummary\x12$\n\rchannel_point\x18\x01 \x01(\tR\rchannel_point\x12\x1c\n\x07\x63han_id\x18\x02 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12\x1e\n\nchain_hash\x18\x03 \x01(\tR\nchain_hash\x12(\n\x0f\x63losing_tx_hash\x18\x04 \x01(\tR\x0f\x63losing_tx_hash\x12$\n\rremote_pubkey\x18\x05 \x01(\tR\rremote_pubkey\x12\x1a\n\x08\x63\x61pacity\x18\x06 \x01(\x03R\x08\x63\x61pacity\x12\"\n\x0c\x63lose_height\x18\x07 \x01(\rR\x0c\x63lose_height\x12(\n\x0fsettled_balance\x18\x08 
\x01(\x03R\x0fsettled_balance\x12\x30\n\x13time_locked_balance\x18\t \x01(\x03R\x13time_locked_balance\x12\x46\n\nclose_type\x18\n \x01(\x0e\x32&.lnrpc.ChannelCloseSummary.ClosureTypeR\nclose_type\"\x8a\x01\n\x0b\x43losureType\x12\x15\n\x11\x43OOPERATIVE_CLOSE\x10\x00\x12\x15\n\x11LOCAL_FORCE_CLOSE\x10\x01\x12\x16\n\x12REMOTE_FORCE_CLOSE\x10\x02\x12\x10\n\x0c\x42REACH_CLOSE\x10\x03\x12\x14\n\x10\x46UNDING_CANCELED\x10\x04\x12\r\n\tABANDONED\x10\x05\"\x94\x01\n\x15\x43losedChannelsRequest\x12\x13\n\x0b\x63ooperative\x18\x01 \x01(\x08\x12\x13\n\x0blocal_force\x18\x02 \x01(\x08\x12\x14\n\x0cremote_force\x18\x03 \x01(\x08\x12\x0e\n\x06\x62reach\x18\x04 \x01(\x08\x12\x18\n\x10\x66unding_canceled\x18\x05 \x01(\x08\x12\x11\n\tabandoned\x18\x06 \x01(\x08\"P\n\x16\x43losedChannelsResponse\x12\x36\n\x08\x63hannels\x18\x01 \x03(\x0b\x32\x1a.lnrpc.ChannelCloseSummaryR\x08\x63hannels\"\xdf\x02\n\x04Peer\x12\x18\n\x07pub_key\x18\x01 \x01(\tR\x07pub_key\x12\x18\n\x07\x61\x64\x64ress\x18\x03 \x01(\tR\x07\x61\x64\x64ress\x12\x1e\n\nbytes_sent\x18\x04 \x01(\x04R\nbytes_sent\x12\x1e\n\nbytes_recv\x18\x05 \x01(\x04R\nbytes_recv\x12\x1a\n\x08sat_sent\x18\x06 \x01(\x03R\x08sat_sent\x12\x1a\n\x08sat_recv\x18\x07 \x01(\x03R\x08sat_recv\x12\x18\n\x07inbound\x18\x08 \x01(\x08R\x07inbound\x12\x1c\n\tping_time\x18\t \x01(\x03R\tping_time\x12\x32\n\tsync_type\x18\n \x01(\x0e\x32\x14.lnrpc.Peer.SyncTypeR\tsync_type\"?\n\x08SyncType\x12\x10\n\x0cUNKNOWN_SYNC\x10\x00\x12\x0f\n\x0b\x41\x43TIVE_SYNC\x10\x01\x12\x10\n\x0cPASSIVE_SYNC\x10\x02\"\x12\n\x10ListPeersRequest\"6\n\x11ListPeersResponse\x12!\n\x05peers\x18\x01 \x03(\x0b\x32\x0b.lnrpc.PeerR\x05peers\"\x10\n\x0eGetInfoRequest\"\xe7\x04\n\x0fGetInfoResponse\x12(\n\x0fidentity_pubkey\x18\x01 \x01(\tR\x0fidentity_pubkey\x12\x14\n\x05\x61lias\x18\x02 \x01(\tR\x05\x61lias\x12\x32\n\x14num_pending_channels\x18\x03 \x01(\rR\x14num_pending_channels\x12\x30\n\x13num_active_channels\x18\x04 \x01(\rR\x13num_active_channels\x12\x1c\n\tnum_peers\x18\x05 
\x01(\rR\tnum_peers\x12\"\n\x0c\x62lock_height\x18\x06 \x01(\rR\x0c\x62lock_height\x12\x1e\n\nblock_hash\x18\x08 \x01(\tR\nblock_hash\x12(\n\x0fsynced_to_chain\x18\t \x01(\x08R\x0fsynced_to_chain\x12\x1c\n\x07testnet\x18\n \x01(\x08\x42\x02\x18\x01R\x07testnet\x12\x12\n\x04uris\x18\x0c \x03(\tR\x04uris\x12\x34\n\x15\x62\x65st_header_timestamp\x18\r \x01(\x03R\x15\x62\x65st_header_timestamp\x12\x18\n\x07version\x18\x0e \x01(\tR\x07version\x12\x34\n\x15num_inactive_channels\x18\x0f \x01(\rR\x15num_inactive_channels\x12$\n\x06\x63hains\x18\x10 \x03(\x0b\x32\x0c.lnrpc.ChainR\x06\x63hains\x12\x14\n\x05\x63olor\x18\x11 \x01(\tR\x05\x63olor\x12(\n\x0fsynced_to_graph\x18\x12 \x01(\x08R\x0fsynced_to_graphJ\x04\x08\x0b\x10\x0c\"7\n\x05\x43hain\x12\x14\n\x05\x63hain\x18\x01 \x01(\tR\x05\x63hain\x12\x18\n\x07network\x18\x02 \x01(\tR\x07network\"U\n\x12\x43onfirmationUpdate\x12\x11\n\tblock_sha\x18\x01 \x01(\x0c\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x05\x12\x16\n\x0enum_confs_left\x18\x03 \x01(\r\"N\n\x11\x43hannelOpenUpdate\x12\x39\n\rchannel_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPointR\rchannel_point\"R\n\x12\x43hannelCloseUpdate\x12\"\n\x0c\x63losing_txid\x18\x01 \x01(\x0cR\x0c\x63losing_txid\x12\x18\n\x07success\x18\x02 \x01(\x08R\x07success\"{\n\x13\x43loseChannelRequest\x12*\n\rchannel_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\x12\r\n\x05\x66orce\x18\x02 \x01(\x08\x12\x13\n\x0btarget_conf\x18\x03 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x04 \x01(\x03\"\x98\x01\n\x11\x43loseStatusUpdate\x12<\n\rclose_pending\x18\x01 \x01(\x0b\x32\x14.lnrpc.PendingUpdateH\x00R\rclose_pending\x12;\n\nchan_close\x18\x03 \x01(\x0b\x32\x19.lnrpc.ChannelCloseUpdateH\x00R\nchan_closeB\x08\n\x06update\"G\n\rPendingUpdate\x12\x12\n\x04txid\x18\x01 \x01(\x0cR\x04txid\x12\"\n\x0coutput_index\x18\x02 \x01(\rR\x0coutput_index\"\x9d\x03\n\x12OpenChannelRequest\x12 \n\x0bnode_pubkey\x18\x02 \x01(\x0cR\x0bnode_pubkey\x12\x32\n\x12node_pubkey_string\x18\x03 
\x01(\tB\x02\x18\x01R\x12node_pubkey_string\x12\x32\n\x14local_funding_amount\x18\x04 \x01(\x03R\x14local_funding_amount\x12\x1a\n\x08push_sat\x18\x05 \x01(\x03R\x08push_sat\x12\x13\n\x0btarget_conf\x18\x06 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x07 \x01(\x03\x12\x18\n\x07private\x18\x08 \x01(\x08R\x07private\x12$\n\rmin_htlc_msat\x18\t \x01(\x03R\rmin_htlc_msat\x12*\n\x10remote_csv_delay\x18\n \x01(\rR\x10remote_csv_delay\x12\x1c\n\tmin_confs\x18\x0b \x01(\x05R\tmin_confs\x12,\n\x11spend_unconfirmed\x18\x0c \x01(\x08R\x11spend_unconfirmed\"\x92\x01\n\x10OpenStatusUpdate\x12:\n\x0c\x63han_pending\x18\x01 \x01(\x0b\x32\x14.lnrpc.PendingUpdateH\x00R\x0c\x63han_pending\x12\x38\n\tchan_open\x18\x03 \x01(\x0b\x32\x18.lnrpc.ChannelOpenUpdateH\x00R\tchan_openB\x08\n\x06update\"\xcf\x01\n\x0bPendingHTLC\x12\x1a\n\x08incoming\x18\x01 \x01(\x08R\x08incoming\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12\x1a\n\x08outpoint\x18\x03 \x01(\tR\x08outpoint\x12(\n\x0fmaturity_height\x18\x04 \x01(\rR\x0fmaturity_height\x12\x30\n\x13\x62locks_til_maturity\x18\x05 \x01(\x05R\x13\x62locks_til_maturity\x12\x14\n\x05stage\x18\x06 \x01(\rR\x05stage\"\x18\n\x16PendingChannelsRequest\"\x9c\r\n\x17PendingChannelsResponse\x12\x30\n\x13total_limbo_balance\x18\x01 \x01(\x03R\x13total_limbo_balance\x12g\n\x15pending_open_channels\x18\x02 \x03(\x0b\x32\x31.lnrpc.PendingChannelsResponse.PendingOpenChannelR\x15pending_open_channels\x12h\n\x18pending_closing_channels\x18\x03 \x03(\x0b\x32,.lnrpc.PendingChannelsResponse.ClosedChannelR\x18pending_closing_channels\x12y\n\x1epending_force_closing_channels\x18\x04 \x03(\x0b\x32\x31.lnrpc.PendingChannelsResponse.ForceClosedChannelR\x1epending_force_closing_channels\x12j\n\x16waiting_close_channels\x18\x05 \x03(\x0b\x32\x32.lnrpc.PendingChannelsResponse.WaitingCloseChannelR\x16waiting_close_channels\x1a\xbc\x02\n\x0ePendingChannel\x12(\n\x0fremote_node_pub\x18\x01 \x01(\tR\x0fremote_node_pub\x12$\n\rchannel_point\x18\x02 
\x01(\tR\rchannel_point\x12\x1a\n\x08\x63\x61pacity\x18\x03 \x01(\x03R\x08\x63\x61pacity\x12$\n\rlocal_balance\x18\x04 \x01(\x03R\rlocal_balance\x12&\n\x0eremote_balance\x18\x05 \x01(\x03R\x0eremote_balance\x12\x36\n\x16local_chan_reserve_sat\x18\x06 \x01(\x03R\x16local_chan_reserve_sat\x12\x38\n\x17remote_chan_reserve_sat\x18\x07 \x01(\x03R\x17remote_chan_reserve_sat\x1a\xf5\x01\n\x12PendingOpenChannel\x12G\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannelR\x07\x63hannel\x12\x30\n\x13\x63onfirmation_height\x18\x02 \x01(\rR\x13\x63onfirmation_height\x12\x1e\n\ncommit_fee\x18\x04 \x01(\x03R\ncommit_fee\x12$\n\rcommit_weight\x18\x05 \x01(\x03R\rcommit_weight\x12\x1e\n\nfee_per_kw\x18\x06 \x01(\x03R\nfee_per_kw\x1a{\n\x13WaitingCloseChannel\x12>\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannel\x12$\n\rlimbo_balance\x18\x02 \x01(\x03R\rlimbo_balance\x1as\n\rClosedChannel\x12>\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannel\x12\"\n\x0c\x63losing_txid\x18\x02 \x01(\tR\x0c\x63losing_txid\x1a\xeb\x02\n\x12\x46orceClosedChannel\x12G\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannelR\x07\x63hannel\x12\"\n\x0c\x63losing_txid\x18\x02 \x01(\tR\x0c\x63losing_txid\x12$\n\rlimbo_balance\x18\x03 \x01(\x03R\rlimbo_balance\x12(\n\x0fmaturity_height\x18\x04 \x01(\rR\x0fmaturity_height\x12\x30\n\x13\x62locks_til_maturity\x18\x05 \x01(\x05R\x13\x62locks_til_maturity\x12,\n\x11recovered_balance\x18\x06 \x01(\x03R\x11recovered_balance\x12\x38\n\rpending_htlcs\x18\x08 \x03(\x0b\x32\x12.lnrpc.PendingHTLCR\rpending_htlcs\"\x1a\n\x18\x43hannelEventSubscription\"\xb5\x03\n\x12\x43hannelEventUpdate\x12\x34\n\x0copen_channel\x18\x01 \x01(\x0b\x32\x0e.lnrpc.ChannelH\x00R\x0copen_channel\x12\x44\n\x0e\x63losed_channel\x18\x02 \x01(\x0b\x32\x1a.lnrpc.ChannelCloseSummaryH\x00R\x0e\x63losed_channel\x12=\n\x0e\x61\x63tive_channel\x18\x03 
\x01(\x0b\x32\x13.lnrpc.ChannelPointH\x00R\x0e\x61\x63tive_channel\x12\x41\n\x10inactive_channel\x18\x04 \x01(\x0b\x32\x13.lnrpc.ChannelPointH\x00R\x10inactive_channel\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32$.lnrpc.ChannelEventUpdate.UpdateTypeR\x04type\"\\\n\nUpdateType\x12\x10\n\x0cOPEN_CHANNEL\x10\x00\x12\x12\n\x0e\x43LOSED_CHANNEL\x10\x01\x12\x12\n\x0e\x41\x43TIVE_CHANNEL\x10\x02\x12\x14\n\x10INACTIVE_CHANNEL\x10\x03\x42\t\n\x07\x63hannel\"\x16\n\x14WalletBalanceRequest\"\x9d\x01\n\x15WalletBalanceResponse\x12$\n\rtotal_balance\x18\x01 \x01(\x03R\rtotal_balance\x12,\n\x11\x63onfirmed_balance\x18\x02 \x01(\x03R\x11\x63onfirmed_balance\x12\x30\n\x13unconfirmed_balance\x18\x03 \x01(\x03R\x13unconfirmed_balance\"\x17\n\x15\x43hannelBalanceRequest\"f\n\x16\x43hannelBalanceResponse\x12\x18\n\x07\x62\x61lance\x18\x01 \x01(\x03R\x07\x62\x61lance\x12\x32\n\x14pending_open_balance\x18\x02 \x01(\x03R\x14pending_open_balance\"\xbf\x02\n\x12QueryRoutesRequest\x12\x0f\n\x07pub_key\x18\x01 \x01(\t\x12\x0b\n\x03\x61mt\x18\x02 \x01(\x03\x12\x10\n\x08\x61mt_msat\x18\x0c \x01(\x03\x12\x18\n\x10\x66inal_cltv_delta\x18\x04 \x01(\x05\x12\"\n\tfee_limit\x18\x05 \x01(\x0b\x32\x0f.lnrpc.FeeLimit\x12\x15\n\rignored_nodes\x18\x06 \x03(\x0c\x12-\n\rignored_edges\x18\x07 \x03(\x0b\x32\x12.lnrpc.EdgeLocatorB\x02\x18\x01\x12\x16\n\x0esource_pub_key\x18\x08 \x01(\t\x12\x1b\n\x13use_mission_control\x18\t \x01(\x08\x12&\n\rignored_pairs\x18\n \x03(\x0b\x32\x0f.lnrpc.NodePair\x12\x12\n\ncltv_limit\x18\x0b \x01(\rJ\x04\x08\x03\x10\x04\"$\n\x08NodePair\x12\x0c\n\x04\x66rom\x18\x01 \x01(\x0c\x12\n\n\x02to\x18\x02 \x01(\x0c\"@\n\x0b\x45\x64geLocator\x12\x16\n\nchannel_id\x18\x01 \x01(\x04\x42\x02\x30\x01\x12\x19\n\x11\x64irection_reverse\x18\x02 \x01(\x08\"_\n\x13QueryRoutesResponse\x12$\n\x06routes\x18\x01 \x03(\x0b\x32\x0c.lnrpc.RouteR\x06routes\x12\"\n\x0csuccess_prob\x18\x02 \x01(\x01R\x0csuccess_prob\"\xdf\x02\n\x03Hop\x12\x1c\n\x07\x63han_id\x18\x01 
\x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12$\n\rchan_capacity\x18\x02 \x01(\x03R\rchan_capacity\x12*\n\x0e\x61mt_to_forward\x18\x03 \x01(\x03\x42\x02\x18\x01R\x0e\x61mt_to_forward\x12\x14\n\x03\x66\x65\x65\x18\x04 \x01(\x03\x42\x02\x18\x01R\x03\x66\x65\x65\x12\x16\n\x06\x65xpiry\x18\x05 \x01(\rR\x06\x65xpiry\x12\x30\n\x13\x61mt_to_forward_msat\x18\x06 \x01(\x03R\x13\x61mt_to_forward_msat\x12\x1a\n\x08\x66\x65\x65_msat\x18\x07 \x01(\x03R\x08\x66\x65\x65_msat\x12\x18\n\x07pub_key\x18\x08 \x01(\tR\x07pub_key\x12 \n\x0btlv_payload\x18\t \x01(\x08R\x0btlv_payload\x12\x30\n\nmpp_record\x18\n \x01(\x0b\x32\x10.lnrpc.MPPRecordR\nmpp_record\"W\n\tMPPRecord\x12\"\n\x0cpayment_addr\x18\x0b \x01(\x0cR\x0cpayment_addr\x12&\n\x0etotal_amt_msat\x18\n \x01(\x03R\x0etotal_amt_msat\"\xe9\x01\n\x05Route\x12(\n\x0ftotal_time_lock\x18\x01 \x01(\rR\x0ftotal_time_lock\x12\"\n\ntotal_fees\x18\x02 \x01(\x03\x42\x02\x18\x01R\ntotal_fees\x12 \n\ttotal_amt\x18\x03 \x01(\x03\x42\x02\x18\x01R\ttotal_amt\x12\x1e\n\x04hops\x18\x04 \x03(\x0b\x32\n.lnrpc.HopR\x04hops\x12(\n\x0ftotal_fees_msat\x18\x05 \x01(\x03R\x0ftotal_fees_msat\x12&\n\x0etotal_amt_msat\x18\x06 \x01(\x03R\x0etotal_amt_msat\"<\n\x0fNodeInfoRequest\x12\x0f\n\x07pub_key\x18\x01 \x01(\t\x12\x18\n\x10include_channels\x18\x02 \x01(\x08\"\xb0\x01\n\x08NodeInfo\x12(\n\x04node\x18\x01 \x01(\x0b\x32\x14.lnrpc.LightningNodeR\x04node\x12\"\n\x0cnum_channels\x18\x02 \x01(\rR\x0cnum_channels\x12&\n\x0etotal_capacity\x18\x03 \x01(\x03R\x0etotal_capacity\x12.\n\x08\x63hannels\x18\x04 \x03(\x0b\x32\x12.lnrpc.ChannelEdgeR\x08\x63hannels\"\xa9\x01\n\rLightningNode\x12 \n\x0blast_update\x18\x01 \x01(\rR\x0blast_update\x12\x18\n\x07pub_key\x18\x02 \x01(\tR\x07pub_key\x12\x14\n\x05\x61lias\x18\x03 \x01(\tR\x05\x61lias\x12\x30\n\taddresses\x18\x04 \x03(\x0b\x32\x12.lnrpc.NodeAddressR\taddresses\x12\x14\n\x05\x63olor\x18\x05 \x01(\tR\x05\x63olor\";\n\x0bNodeAddress\x12\x18\n\x07network\x18\x01 \x01(\tR\x07network\x12\x12\n\x04\x61\x64\x64r\x18\x02 
\x01(\tR\x04\x61\x64\x64r\"\x91\x02\n\rRoutingPolicy\x12(\n\x0ftime_lock_delta\x18\x01 \x01(\rR\x0ftime_lock_delta\x12\x1a\n\x08min_htlc\x18\x02 \x01(\x03R\x08min_htlc\x12$\n\rfee_base_msat\x18\x03 \x01(\x03R\rfee_base_msat\x12\x30\n\x13\x66\x65\x65_rate_milli_msat\x18\x04 \x01(\x03R\x13\x66\x65\x65_rate_milli_msat\x12\x1a\n\x08\x64isabled\x18\x05 \x01(\x08R\x08\x64isabled\x12$\n\rmax_htlc_msat\x18\x06 \x01(\x04R\rmax_htlc_msat\x12 \n\x0blast_update\x18\x07 \x01(\rR\x0blast_update\"\xc3\x02\n\x0b\x43hannelEdge\x12\"\n\nchannel_id\x18\x01 \x01(\x04\x42\x02\x30\x01R\nchannel_id\x12\x1e\n\nchan_point\x18\x02 \x01(\tR\nchan_point\x12$\n\x0blast_update\x18\x03 \x01(\rB\x02\x18\x01R\x0blast_update\x12\x1c\n\tnode1_pub\x18\x04 \x01(\tR\tnode1_pub\x12\x1c\n\tnode2_pub\x18\x05 \x01(\tR\tnode2_pub\x12\x1a\n\x08\x63\x61pacity\x18\x06 \x01(\x03R\x08\x63\x61pacity\x12\x38\n\x0cnode1_policy\x18\x07 \x01(\x0b\x32\x14.lnrpc.RoutingPolicyR\x0cnode1_policy\x12\x38\n\x0cnode2_policy\x18\x08 \x01(\x0b\x32\x14.lnrpc.RoutingPolicyR\x0cnode2_policy\"G\n\x13\x43hannelGraphRequest\x12\x30\n\x13include_unannounced\x18\x01 \x01(\x08R\x13include_unannounced\"d\n\x0c\x43hannelGraph\x12*\n\x05nodes\x18\x01 \x03(\x0b\x32\x14.lnrpc.LightningNodeR\x05nodes\x12(\n\x05\x65\x64ges\x18\x02 \x03(\x0b\x32\x12.lnrpc.ChannelEdgeR\x05\x65\x64ges\"&\n\x0f\x43hanInfoRequest\x12\x13\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01\"\x14\n\x12NetworkInfoRequest\"\xe9\x03\n\x0bNetworkInfo\x12&\n\x0egraph_diameter\x18\x01 \x01(\rR\x0egraph_diameter\x12&\n\x0e\x61vg_out_degree\x18\x02 \x01(\x01R\x0e\x61vg_out_degree\x12&\n\x0emax_out_degree\x18\x03 \x01(\rR\x0emax_out_degree\x12\x1c\n\tnum_nodes\x18\x04 \x01(\rR\tnum_nodes\x12\"\n\x0cnum_channels\x18\x05 \x01(\rR\x0cnum_channels\x12\x36\n\x16total_network_capacity\x18\x06 \x01(\x03R\x16total_network_capacity\x12*\n\x10\x61vg_channel_size\x18\x07 \x01(\x01R\x10\x61vg_channel_size\x12*\n\x10min_channel_size\x18\x08 
\x01(\x03R\x10min_channel_size\x12*\n\x10max_channel_size\x18\t \x01(\x03R\x10max_channel_size\x12\x38\n\x17median_channel_size_sat\x18\n \x01(\x03R\x17median_channel_size_sat\x12*\n\x10num_zombie_chans\x18\x0b \x01(\x04R\x10num_zombie_chans\"\r\n\x0bStopRequest\"\x0e\n\x0cStopResponse\"\x1b\n\x19GraphTopologySubscription\"\xa3\x01\n\x13GraphTopologyUpdate\x12\'\n\x0cnode_updates\x18\x01 \x03(\x0b\x32\x11.lnrpc.NodeUpdate\x12\x31\n\x0f\x63hannel_updates\x18\x02 \x03(\x0b\x32\x18.lnrpc.ChannelEdgeUpdate\x12\x30\n\x0c\x63losed_chans\x18\x03 \x03(\x0b\x32\x1a.lnrpc.ClosedChannelUpdate\"l\n\nNodeUpdate\x12\x11\n\taddresses\x18\x01 \x03(\t\x12\x14\n\x0cidentity_key\x18\x02 \x01(\t\x12\x17\n\x0fglobal_features\x18\x03 \x01(\x0c\x12\r\n\x05\x61lias\x18\x04 \x01(\t\x12\r\n\x05\x63olor\x18\x05 \x01(\t\"\xc4\x01\n\x11\x43hannelEdgeUpdate\x12\x13\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01\x12\'\n\nchan_point\x18\x02 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\x12\x10\n\x08\x63\x61pacity\x18\x03 \x01(\x03\x12,\n\x0erouting_policy\x18\x04 \x01(\x0b\x32\x14.lnrpc.RoutingPolicy\x12\x18\n\x10\x61\x64vertising_node\x18\x05 \x01(\t\x12\x17\n\x0f\x63onnecting_node\x18\x06 \x01(\t\"|\n\x13\x43losedChannelUpdate\x12\x13\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01\x12\x10\n\x08\x63\x61pacity\x18\x02 \x01(\x03\x12\x15\n\rclosed_height\x18\x03 \x01(\r\x12\'\n\nchan_point\x18\x04 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\"\xd7\x01\n\x07HopHint\x12\x18\n\x07node_id\x18\x01 \x01(\tR\x07node_id\x12\x1c\n\x07\x63han_id\x18\x02 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12$\n\rfee_base_msat\x18\x03 \x01(\rR\rfee_base_msat\x12@\n\x1b\x66\x65\x65_proportional_millionths\x18\x04 \x01(\rR\x1b\x66\x65\x65_proportional_millionths\x12,\n\x11\x63ltv_expiry_delta\x18\x05 \x01(\rR\x11\x63ltv_expiry_delta\"9\n\tRouteHint\x12,\n\thop_hints\x18\x01 \x03(\x0b\x32\x0e.lnrpc.HopHintR\thop_hints\"\xc7\x06\n\x07Invoice\x12\x12\n\x04memo\x18\x01 \x01(\tR\x04memo\x12\x1e\n\nr_preimage\x18\x03 
\x01(\x0cR\nr_preimage\x12\x16\n\x06r_hash\x18\x04 \x01(\x0cR\x06r_hash\x12\x14\n\x05value\x18\x05 \x01(\x03R\x05value\x12\x1e\n\nvalue_msat\x18\x17 \x01(\x03R\nvalue_msat\x12\x1c\n\x07settled\x18\x06 \x01(\x08\x42\x02\x18\x01R\x07settled\x12$\n\rcreation_date\x18\x07 \x01(\x03R\rcreation_date\x12 \n\x0bsettle_date\x18\x08 \x01(\x03R\x0bsettle_date\x12(\n\x0fpayment_request\x18\t \x01(\tR\x0fpayment_request\x12*\n\x10\x64\x65scription_hash\x18\n \x01(\x0cR\x10\x64\x65scription_hash\x12\x16\n\x06\x65xpiry\x18\x0b \x01(\x03R\x06\x65xpiry\x12$\n\rfallback_addr\x18\x0c \x01(\tR\rfallback_addr\x12 \n\x0b\x63ltv_expiry\x18\r \x01(\x04R\x0b\x63ltv_expiry\x12\x32\n\x0broute_hints\x18\x0e \x03(\x0b\x32\x10.lnrpc.RouteHintR\x0broute_hints\x12\x18\n\x07private\x18\x0f \x01(\x08R\x07private\x12\x1c\n\tadd_index\x18\x10 \x01(\x04R\tadd_index\x12\"\n\x0csettle_index\x18\x11 \x01(\x04R\x0csettle_index\x12\x1e\n\x08\x61mt_paid\x18\x12 \x01(\x03\x42\x02\x18\x01R\x08\x61mt_paid\x12\"\n\x0c\x61mt_paid_sat\x18\x13 \x01(\x03R\x0c\x61mt_paid_sat\x12$\n\ramt_paid_msat\x18\x14 \x01(\x03R\ramt_paid_msat\x12\x31\n\x05state\x18\x15 \x01(\x0e\x32\x1b.lnrpc.Invoice.InvoiceStateR\x05state\x12(\n\x05htlcs\x18\x16 \x03(\x0b\x32\x12.lnrpc.InvoiceHTLCR\x05htlcs\"A\n\x0cInvoiceState\x12\x08\n\x04OPEN\x10\x00\x12\x0b\n\x07SETTLED\x10\x01\x12\x0c\n\x08\x43\x41NCELED\x10\x02\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x03J\x04\x08\x02\x10\x03\"\xa8\x02\n\x0bInvoiceHTLC\x12\x1c\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12\x1e\n\nhtlc_index\x18\x02 \x01(\x04R\nhtlc_index\x12\x1a\n\x08\x61mt_msat\x18\x03 \x01(\x04R\x08\x61mt_msat\x12$\n\raccept_height\x18\x04 \x01(\x05R\raccept_height\x12 \n\x0b\x61\x63\x63\x65pt_time\x18\x05 \x01(\x03R\x0b\x61\x63\x63\x65pt_time\x12\"\n\x0cresolve_time\x18\x06 \x01(\x03R\x0cresolve_time\x12$\n\rexpiry_height\x18\x07 \x01(\x05R\rexpiry_height\x12-\n\x05state\x18\x08 
\x01(\x0e\x32\x17.lnrpc.InvoiceHTLCStateR\x05state\"t\n\x12\x41\x64\x64InvoiceResponse\x12\x16\n\x06r_hash\x18\x01 \x01(\x0cR\x06r_hash\x12(\n\x0fpayment_request\x18\x02 \x01(\tR\x0fpayment_request\x12\x1c\n\tadd_index\x18\x10 \x01(\x04R\tadd_index\"I\n\x0bPaymentHash\x12\"\n\nr_hash_str\x18\x01 \x01(\tB\x02\x18\x01R\nr_hash_str\x12\x16\n\x06r_hash\x18\x02 \x01(\x0cR\x06r_hash\"\xa4\x01\n\x12ListInvoiceRequest\x12\"\n\x0cpending_only\x18\x01 \x01(\x08R\x0cpending_only\x12\"\n\x0cindex_offset\x18\x04 \x01(\x04R\x0cindex_offset\x12*\n\x10num_max_invoices\x18\x05 \x01(\x04R\x10num_max_invoices\x12\x1a\n\x08reversed\x18\x06 \x01(\x08R\x08reversed\"\x9f\x01\n\x13ListInvoiceResponse\x12*\n\x08invoices\x18\x01 \x03(\x0b\x32\x0e.lnrpc.InvoiceR\x08invoices\x12,\n\x11last_index_offset\x18\x02 \x01(\x04R\x11last_index_offset\x12.\n\x12\x66irst_index_offset\x18\x03 \x01(\x04R\x12\x66irst_index_offset\"W\n\x13InvoiceSubscription\x12\x1c\n\tadd_index\x18\x01 \x01(\x04R\tadd_index\x12\"\n\x0csettle_index\x18\x02 \x01(\x04R\x0csettle_index\"\xbd\x04\n\x07Payment\x12\"\n\x0cpayment_hash\x18\x01 \x01(\tR\x0cpayment_hash\x12\x18\n\x05value\x18\x02 \x01(\x03\x42\x02\x18\x01R\x05value\x12(\n\rcreation_date\x18\x03 \x01(\x03\x42\x02\x18\x01R\rcreation_date\x12\x16\n\x04path\x18\x04 \x03(\tB\x02\x18\x01R\x04path\x12\x14\n\x03\x66\x65\x65\x18\x05 \x01(\x03\x42\x02\x18\x01R\x03\x66\x65\x65\x12*\n\x10payment_preimage\x18\x06 \x01(\tR\x10payment_preimage\x12\x1c\n\tvalue_sat\x18\x07 \x01(\x03R\tvalue_sat\x12\x1e\n\nvalue_msat\x18\x08 \x01(\x03R\nvalue_msat\x12(\n\x0fpayment_request\x18\t \x01(\tR\x0fpayment_request\x12\x34\n\x06status\x18\n \x01(\x0e\x32\x1c.lnrpc.Payment.PaymentStatusR\x06status\x12\x18\n\x07\x66\x65\x65_sat\x18\x0b \x01(\x03R\x07\x66\x65\x65_sat\x12\x1a\n\x08\x66\x65\x65_msat\x18\x0c \x01(\x03R\x08\x66\x65\x65_msat\x12*\n\x10\x63reation_time_ns\x18\r \x01(\x03R\x10\x63reation_time_ns\x12(\n\x05htlcs\x18\x0e 
\x03(\x0b\x32\x12.lnrpc.HTLCAttemptR\x05htlcs\"F\n\rPaymentStatus\x12\x0b\n\x07UNKNOWN\x10\x00\x12\r\n\tIN_FLIGHT\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\"\xf4\x01\n\x0bHTLCAttempt\x12\x35\n\x06status\x18\x01 \x01(\x0e\x32\x1d.lnrpc.HTLCAttempt.HTLCStatusR\x06status\x12\"\n\x05route\x18\x02 \x01(\x0b\x32\x0c.lnrpc.RouteR\x05route\x12(\n\x0f\x61ttempt_time_ns\x18\x03 \x01(\x03R\x0f\x61ttempt_time_ns\x12(\n\x0fresolve_time_ns\x18\x04 \x01(\x03R\x0fresolve_time_ns\"6\n\nHTLCStatus\x12\r\n\tIN_FLIGHT\x10\x00\x12\r\n\tSUCCEEDED\x10\x01\x12\n\n\x06\x46\x41ILED\x10\x02\"1\n\x13ListPaymentsRequest\x12\x1a\n\x12include_incomplete\x18\x01 \x01(\x08\"B\n\x14ListPaymentsResponse\x12*\n\x08payments\x18\x01 \x03(\x0b\x32\x0e.lnrpc.PaymentR\x08payments\"\x1a\n\x18\x44\x65leteAllPaymentsRequest\"\x1b\n\x19\x44\x65leteAllPaymentsResponse\"C\n\x15\x41\x62\x61ndonChannelRequest\x12*\n\rchannel_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\"\x18\n\x16\x41\x62\x61ndonChannelResponse\"5\n\x11\x44\x65\x62ugLevelRequest\x12\x0c\n\x04show\x18\x01 \x01(\x08\x12\x12\n\nlevel_spec\x18\x02 \x01(\t\"6\n\x12\x44\x65\x62ugLevelResponse\x12 \n\x0bsub_systems\x18\x01 \x01(\tR\x0bsub_systems\"\x1f\n\x0cPayReqString\x12\x0f\n\x07pay_req\x18\x01 \x01(\t\"\xf2\x02\n\x06PayReq\x12 \n\x0b\x64\x65stination\x18\x01 \x01(\tR\x0b\x64\x65stination\x12\"\n\x0cpayment_hash\x18\x02 \x01(\tR\x0cpayment_hash\x12\"\n\x0cnum_satoshis\x18\x03 \x01(\x03R\x0cnum_satoshis\x12\x1c\n\ttimestamp\x18\x04 \x01(\x03R\ttimestamp\x12\x16\n\x06\x65xpiry\x18\x05 \x01(\x03R\x06\x65xpiry\x12 \n\x0b\x64\x65scription\x18\x06 \x01(\tR\x0b\x64\x65scription\x12*\n\x10\x64\x65scription_hash\x18\x07 \x01(\tR\x10\x64\x65scription_hash\x12$\n\rfallback_addr\x18\x08 \x01(\tR\rfallback_addr\x12 \n\x0b\x63ltv_expiry\x18\t \x01(\x03R\x0b\x63ltv_expiry\x12\x32\n\x0broute_hints\x18\n 
\x03(\x0b\x32\x10.lnrpc.RouteHintR\x0broute_hints\"\x12\n\x10\x46\x65\x65ReportRequest\"\x99\x01\n\x10\x43hannelFeeReport\x12!\n\nchan_point\x18\x01 \x01(\tR\rchannel_point\x12$\n\rbase_fee_msat\x18\x02 \x01(\x03R\rbase_fee_msat\x12 \n\x0b\x66\x65\x65_per_mil\x18\x03 \x01(\x03R\x0b\x66\x65\x65_per_mil\x12\x1a\n\x08\x66\x65\x65_rate\x18\x04 \x01(\x01R\x08\x66\x65\x65_rate\"\xbc\x01\n\x11\x46\x65\x65ReportResponse\x12;\n\x0c\x63hannel_fees\x18\x01 \x03(\x0b\x32\x17.lnrpc.ChannelFeeReportR\x0c\x63hannel_fees\x12 \n\x0b\x64\x61y_fee_sum\x18\x02 \x01(\x04R\x0b\x64\x61y_fee_sum\x12\"\n\x0cweek_fee_sum\x18\x03 \x01(\x04R\x0cweek_fee_sum\x12$\n\rmonth_fee_sum\x18\x04 \x01(\x04R\rmonth_fee_sum\"\x81\x02\n\x13PolicyUpdateRequest\x12\x18\n\x06global\x18\x01 \x01(\x08H\x00R\x06global\x12\x35\n\nchan_point\x18\x02 \x01(\x0b\x32\x13.lnrpc.ChannelPointH\x00R\nchan_point\x12$\n\rbase_fee_msat\x18\x03 \x01(\x03R\rbase_fee_msat\x12\x1a\n\x08\x66\x65\x65_rate\x18\x04 \x01(\x01R\x08\x66\x65\x65_rate\x12(\n\x0ftime_lock_delta\x18\x05 \x01(\rR\x0ftime_lock_delta\x12$\n\rmax_htlc_msat\x18\x06 \x01(\x04R\rmax_htlc_msatB\x07\n\x05scope\"\x16\n\x14PolicyUpdateResponse\"\xa2\x01\n\x18\x46orwardingHistoryRequest\x12\x1e\n\nstart_time\x18\x01 \x01(\x04R\nstart_time\x12\x1a\n\x08\x65nd_time\x18\x02 \x01(\x04R\x08\x65nd_time\x12\"\n\x0cindex_offset\x18\x03 \x01(\rR\x0cindex_offset\x12&\n\x0enum_max_events\x18\x04 \x01(\rR\x0enum_max_events\"\x9f\x02\n\x0f\x46orwardingEvent\x12\x1c\n\ttimestamp\x18\x01 \x01(\x04R\ttimestamp\x12\"\n\nchan_id_in\x18\x02 \x01(\x04\x42\x02\x30\x01R\nchan_id_in\x12$\n\x0b\x63han_id_out\x18\x04 \x01(\x04\x42\x02\x30\x01R\x0b\x63han_id_out\x12\x16\n\x06\x61mt_in\x18\x05 \x01(\x04R\x06\x61mt_in\x12\x18\n\x07\x61mt_out\x18\x06 \x01(\x04R\x07\x61mt_out\x12\x10\n\x03\x66\x65\x65\x18\x07 \x01(\x04R\x03\x66\x65\x65\x12\x1a\n\x08\x66\x65\x65_msat\x18\x08 \x01(\x04R\x08\x66\x65\x65_msat\x12 \n\x0b\x61mt_in_msat\x18\t \x01(\x04R\x0b\x61mt_in_msat\x12\"\n\x0c\x61mt_out_msat\x18\n 
\x01(\x04R\x0c\x61mt_out_msat\"\x8f\x01\n\x19\x46orwardingHistoryResponse\x12\x44\n\x11\x66orwarding_events\x18\x01 \x03(\x0b\x32\x16.lnrpc.ForwardingEventR\x11\x66orwarding_events\x12,\n\x11last_offset_index\x18\x02 \x01(\rR\x11last_offset_index\"E\n\x1a\x45xportChannelBackupRequest\x12\'\n\nchan_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\"f\n\rChannelBackup\x12\x33\n\nchan_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPointR\nchan_point\x12 \n\x0b\x63han_backup\x18\x02 \x01(\x0cR\x0b\x63han_backup\"v\n\x0fMultiChanBackup\x12\x35\n\x0b\x63han_points\x18\x01 \x03(\x0b\x32\x13.lnrpc.ChannelPointR\x0b\x63han_points\x12,\n\x11multi_chan_backup\x18\x02 \x01(\x0cR\x11multi_chan_backup\"\x19\n\x17\x43hanBackupExportRequest\"\xa3\x01\n\x12\x43hanBackupSnapshot\x12G\n\x13single_chan_backups\x18\x01 \x01(\x0b\x32\x15.lnrpc.ChannelBackupsR\x13single_chan_backups\x12\x44\n\x11multi_chan_backup\x18\x02 \x01(\x0b\x32\x16.lnrpc.MultiChanBackupR\x11multi_chan_backup\"J\n\x0e\x43hannelBackups\x12\x38\n\x0c\x63han_backups\x18\x01 \x03(\x0b\x32\x14.lnrpc.ChannelBackupR\x0c\x63han_backups\"\x91\x01\n\x18RestoreChanBackupRequest\x12;\n\x0c\x63han_backups\x18\x01 \x01(\x0b\x32\x15.lnrpc.ChannelBackupsH\x00R\x0c\x63han_backups\x12.\n\x11multi_chan_backup\x18\x02 \x01(\x0cH\x00R\x11multi_chan_backupB\x08\n\x06\x62\x61\x63kup\"\x17\n\x15RestoreBackupResponse\"\x1b\n\x19\x43hannelBackupSubscription\"\x1a\n\x18VerifyChanBackupResponse\"D\n\x12MacaroonPermission\x12\x16\n\x06\x65ntity\x18\x01 \x01(\tR\x06\x65ntity\x12\x16\n\x06\x61\x63tion\x18\x02 \x01(\tR\x06\x61\x63tion\"R\n\x13\x42\x61keMacaroonRequest\x12;\n\x0bpermissions\x18\x01 \x03(\x0b\x32\x19.lnrpc.MacaroonPermissionR\x0bpermissions\"2\n\x14\x42\x61keMacaroonResponse\x12\x1a\n\x08macaroon\x18\x01 
\x01(\tR\x08macaroon*}\n\x0b\x41\x64\x64ressType\x12\x17\n\x13WITNESS_PUBKEY_HASH\x10\x00\x12\x16\n\x12NESTED_PUBKEY_HASH\x10\x01\x12\x1e\n\x1aUNUSED_WITNESS_PUBKEY_HASH\x10\x02\x12\x1d\n\x19UNUSED_NESTED_PUBKEY_HASH\x10\x03*;\n\x10InvoiceHTLCState\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x00\x12\x0b\n\x07SETTLED\x10\x01\x12\x0c\n\x08\x43\x41NCELED\x10\x02\x32\x91\x03\n\x0eWalletUnlocker\x12M\n\x07GenSeed\x12\x15.lnrpc.GenSeedRequest\x1a\x16.lnrpc.GenSeedResponse\"\x13\x82\xd3\xe4\x93\x02\r\x12\x0b/v1/genseed\x12\\\n\nInitWallet\x12\x18.lnrpc.InitWalletRequest\x1a\x19.lnrpc.InitWalletResponse\"\x19\x82\xd3\xe4\x93\x02\x13\"\x0e/v1/initwallet:\x01*\x12\x64\n\x0cUnlockWallet\x12\x1a.lnrpc.UnlockWalletRequest\x1a\x1b.lnrpc.UnlockWalletResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/unlockwallet:\x01*\x12l\n\x0e\x43hangePassword\x12\x1c.lnrpc.ChangePasswordRequest\x1a\x1d.lnrpc.ChangePasswordResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/changepassword:\x01*2\xca\'\n\tLightning\x12j\n\rWalletBalance\x12\x1b.lnrpc.WalletBalanceRequest\x1a\x1c.lnrpc.WalletBalanceResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/balance/blockchain\x12k\n\x0e\x43hannelBalance\x12\x1c.lnrpc.ChannelBalanceRequest\x1a\x1d.lnrpc.ChannelBalanceResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/balance/channels\x12\x65\n\x0fGetTransactions\x12\x1d.lnrpc.GetTransactionsRequest\x1a\x19.lnrpc.TransactionDetails\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/v1/transactions\x12\x62\n\x0b\x45stimateFee\x12\x19.lnrpc.EstimateFeeRequest\x1a\x1a.lnrpc.EstimateFeeResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/transactions/fee\x12[\n\tSendCoins\x12\x17.lnrpc.SendCoinsRequest\x1a\x18.lnrpc.SendCoinsResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/transactions:\x01*\x12W\n\x0bListUnspent\x12\x19.lnrpc.ListUnspentRequest\x1a\x1a.lnrpc.ListUnspentResponse\"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/v1/utxos\x12L\n\x15SubscribeTransactions\x12\x1d.lnrpc.GetTransactionsRequest\x1a\x12.lnrpc.Transaction0\x01\x12;\n\x08SendMany\x12
\x16.lnrpc.SendManyRequest\x1a\x17.lnrpc.SendManyResponse\x12Y\n\nNewAddress\x12\x18.lnrpc.NewAddressRequest\x1a\x19.lnrpc.NewAddressResponse\"\x16\x82\xd3\xe4\x93\x02\x10\x12\x0e/v1/newaddress\x12`\n\x0bSignMessage\x12\x19.lnrpc.SignMessageRequest\x1a\x1a.lnrpc.SignMessageResponse\"\x1a\x82\xd3\xe4\x93\x02\x14\"\x0f/v1/signmessage:\x01*\x12h\n\rVerifyMessage\x12\x1b.lnrpc.VerifyMessageRequest\x1a\x1c.lnrpc.VerifyMessageResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v1/verifymessage:\x01*\x12Z\n\x0b\x43onnectPeer\x12\x19.lnrpc.ConnectPeerRequest\x1a\x1a.lnrpc.ConnectPeerResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\"\t/v1/peers:\x01*\x12j\n\x0e\x44isconnectPeer\x12\x1c.lnrpc.DisconnectPeerRequest\x1a\x1d.lnrpc.DisconnectPeerResponse\"\x1b\x82\xd3\xe4\x93\x02\x15*\x13/v1/peers/{pub_key}\x12Q\n\tListPeers\x12\x17.lnrpc.ListPeersRequest\x1a\x18.lnrpc.ListPeersResponse\"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/v1/peers\x12M\n\x07GetInfo\x12\x15.lnrpc.GetInfoRequest\x1a\x16.lnrpc.GetInfoResponse\"\x13\x82\xd3\xe4\x93\x02\r\x12\x0b/v1/getinfo\x12n\n\x0fPendingChannels\x12\x1d.lnrpc.PendingChannelsRequest\x1a\x1e.lnrpc.PendingChannelsResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/channels/pending\x12]\n\x0cListChannels\x12\x1a.lnrpc.ListChannelsRequest\x1a\x1b.lnrpc.ListChannelsResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\x0c/v1/channels\x12V\n\x16SubscribeChannelEvents\x12\x1f.lnrpc.ChannelEventSubscription\x1a\x19.lnrpc.ChannelEventUpdate0\x01\x12j\n\x0e\x43losedChannels\x12\x1c.lnrpc.ClosedChannelsRequest\x1a\x1d.lnrpc.ClosedChannelsResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\x12\x13/v1/channels/closed\x12Z\n\x0fOpenChannelSync\x12\x19.lnrpc.OpenChannelRequest\x1a\x13.lnrpc.ChannelPoint\"\x17\x82\xd3\xe4\x93\x02\x11\"\x0c/v1/channels:\x01*\x12\x43\n\x0bOpenChannel\x12\x19.lnrpc.OpenChannelRequest\x1a\x17.lnrpc.OpenStatusUpdate0\x01\x12P\n\x0f\x43hannelAcceptor\x12\x1c.lnrpc.ChannelAcceptResponse\x1a\x1b.lnrpc.ChannelAcceptRequest(\x01\x30\x01\x12\x9a\x01\n\x0c\x43loseChannel\x12\x1a.lnrpc
.CloseChannelRequest\x1a\x18.lnrpc.CloseStatusUpdate\"R\x82\xd3\xe4\x93\x02L*J/v1/channels/{channel_point.funding_txid_str}/{channel_point.output_index}0\x01\x12\xa9\x01\n\x0e\x41\x62\x61ndonChannel\x12\x1c.lnrpc.AbandonChannelRequest\x1a\x1d.lnrpc.AbandonChannelResponse\"Z\x82\xd3\xe4\x93\x02T*R/v1/channels/abandon/{channel_point.funding_txid_str}/{channel_point.output_index}\x12:\n\x0bSendPayment\x12\x12.lnrpc.SendRequest\x1a\x13.lnrpc.SendResponse(\x01\x30\x01\x12`\n\x0fSendPaymentSync\x12\x12.lnrpc.SendRequest\x1a\x13.lnrpc.SendResponse\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/v1/channels/transactions:\x01*\x12\x41\n\x0bSendToRoute\x12\x19.lnrpc.SendToRouteRequest\x1a\x13.lnrpc.SendResponse(\x01\x30\x01\x12m\n\x0fSendToRouteSync\x12\x19.lnrpc.SendToRouteRequest\x1a\x13.lnrpc.SendResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/channels/transactions/route:\x01*\x12P\n\nAddInvoice\x12\x0e.lnrpc.Invoice\x1a\x19.lnrpc.AddInvoiceResponse\"\x17\x82\xd3\xe4\x93\x02\x11\"\x0c/v1/invoices:\x01*\x12[\n\x0cListInvoices\x12\x19.lnrpc.ListInvoiceRequest\x1a\x1a.lnrpc.ListInvoiceResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\x0c/v1/invoices\x12U\n\rLookupInvoice\x12\x12.lnrpc.PaymentHash\x1a\x0e.lnrpc.Invoice\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/invoice/{r_hash_str}\x12\x61\n\x11SubscribeInvoices\x12\x1a.lnrpc.InvoiceSubscription\x1a\x0e.lnrpc.Invoice\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/invoices/subscribe0\x01\x12P\n\x0c\x44\x65\x63odePayReq\x12\x13.lnrpc.PayReqString\x1a\r.lnrpc.PayReq\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/payreq/{pay_req}\x12]\n\x0cListPayments\x12\x1a.lnrpc.ListPaymentsRequest\x1a\x1b.lnrpc.ListPaymentsResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\x0c/v1/payments\x12l\n\x11\x44\x65leteAllPayments\x12\x1f.lnrpc.DeleteAllPaymentsRequest\x1a 
.lnrpc.DeleteAllPaymentsResponse\"\x14\x82\xd3\xe4\x93\x02\x0e*\x0c/v1/payments\x12S\n\rDescribeGraph\x12\x1a.lnrpc.ChannelGraphRequest\x1a\x13.lnrpc.ChannelGraph\"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/v1/graph\x12[\n\x0bGetChanInfo\x12\x16.lnrpc.ChanInfoRequest\x1a\x12.lnrpc.ChannelEdge\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/graph/edge/{chan_id}\x12X\n\x0bGetNodeInfo\x12\x16.lnrpc.NodeInfoRequest\x1a\x0f.lnrpc.NodeInfo\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/graph/node/{pub_key}\x12n\n\x0bQueryRoutes\x12\x19.lnrpc.QueryRoutesRequest\x1a\x1a.lnrpc.QueryRoutesResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v1/graph/routes/{pub_key}/{amt}\x12W\n\x0eGetNetworkInfo\x12\x19.lnrpc.NetworkInfoRequest\x1a\x12.lnrpc.NetworkInfo\"\x16\x82\xd3\xe4\x93\x02\x10\x12\x0e/v1/graph/info\x12\x35\n\nStopDaemon\x12\x12.lnrpc.StopRequest\x1a\x13.lnrpc.StopResponse\x12W\n\x15SubscribeChannelGraph\x12 .lnrpc.GraphTopologySubscription\x1a\x1a.lnrpc.GraphTopologyUpdate0\x01\x12\x41\n\nDebugLevel\x12\x18.lnrpc.DebugLevelRequest\x1a\x19.lnrpc.DebugLevelResponse\x12P\n\tFeeReport\x12\x17.lnrpc.FeeReportRequest\x1a\x18.lnrpc.FeeReportResponse\"\x10\x82\xd3\xe4\x93\x02\n\x12\x08/v1/fees\x12i\n\x13UpdateChannelPolicy\x12\x1a.lnrpc.PolicyUpdateRequest\x1a\x1b.lnrpc.PolicyUpdateResponse\"\x19\x82\xd3\xe4\x93\x02\x13\"\x0e/v1/chanpolicy:\x01*\x12m\n\x11\x46orwardingHistory\x12\x1f.lnrpc.ForwardingHistoryRequest\x1a 
.lnrpc.ForwardingHistoryResponse\"\x15\x82\xd3\xe4\x93\x02\x0f\"\n/v1/switch:\x01*\x12\xa3\x01\n\x13\x45xportChannelBackup\x12!.lnrpc.ExportChannelBackupRequest\x1a\x14.lnrpc.ChannelBackup\"S\x82\xd3\xe4\x93\x02M\x12K/v1/channels/backup/{chan_point.funding_txid_str}/{chan_point.output_index}\x12q\n\x17\x45xportAllChannelBackups\x12\x1e.lnrpc.ChanBackupExportRequest\x1a\x19.lnrpc.ChanBackupSnapshot\"\x1b\x82\xd3\xe4\x93\x02\x15\x12\x13/v1/channels/backup\x12u\n\x10VerifyChanBackup\x12\x19.lnrpc.ChanBackupSnapshot\x1a\x1f.lnrpc.VerifyChanBackupResponse\"%\x82\xd3\xe4\x93\x02\x1f\"\x1a/v1/channels/backup/verify:\x01*\x12~\n\x15RestoreChannelBackups\x12\x1f.lnrpc.RestoreChanBackupRequest\x1a\x1c.lnrpc.RestoreBackupResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/channels/backup/restore:\x01*\x12Z\n\x17SubscribeChannelBackups\x12 .lnrpc.ChannelBackupSubscription\x1a\x19.lnrpc.ChanBackupSnapshot\"\x00\x30\x01\x12`\n\x0c\x42\x61keMacaroon\x12\x1a.lnrpc.BakeMacaroonRequest\x1a\x1b.lnrpc.BakeMacaroonResponse\"\x17\x82\xd3\xe4\x93\x02\x11\"\x0c/v1/macaroon:\x01*B\'Z%github.com/lightningnetwork/lnd/lnrpcb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_ADDRESSTYPE = _descriptor.EnumDescriptor(
name='AddressType',
full_name='lnrpc.AddressType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='WITNESS_PUBKEY_HASH', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NESTED_PUBKEY_HASH', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNUSED_WITNESS_PUBKEY_HASH', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNUSED_NESTED_PUBKEY_HASH', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=20700,
serialized_end=20825,
)
_sym_db.RegisterEnumDescriptor(_ADDRESSTYPE)
AddressType = enum_type_wrapper.EnumTypeWrapper(_ADDRESSTYPE)
_INVOICEHTLCSTATE = _descriptor.EnumDescriptor(
name='InvoiceHTLCState',
full_name='lnrpc.InvoiceHTLCState',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ACCEPTED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SETTLED', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANCELED', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=20827,
serialized_end=20886,
)
_sym_db.RegisterEnumDescriptor(_INVOICEHTLCSTATE)
InvoiceHTLCState = enum_type_wrapper.EnumTypeWrapper(_INVOICEHTLCSTATE)
WITNESS_PUBKEY_HASH = 0
NESTED_PUBKEY_HASH = 1
UNUSED_WITNESS_PUBKEY_HASH = 2
UNUSED_NESTED_PUBKEY_HASH = 3
ACCEPTED = 0
SETTLED = 1
CANCELED = 2
_CHANNELCLOSESUMMARY_CLOSURETYPE = _descriptor.EnumDescriptor(
name='ClosureType',
full_name='lnrpc.ChannelCloseSummary.ClosureType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='COOPERATIVE_CLOSE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOCAL_FORCE_CLOSE', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REMOTE_FORCE_CLOSE', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BREACH_CLOSE', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FUNDING_CANCELED', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ABANDONED', index=5, number=5,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=5763,
serialized_end=5901,
)
_sym_db.RegisterEnumDescriptor(_CHANNELCLOSESUMMARY_CLOSURETYPE)
_PEER_SYNCTYPE = _descriptor.EnumDescriptor(
name='SyncType',
full_name='lnrpc.Peer.SyncType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_SYNC', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACTIVE_SYNC', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PASSIVE_SYNC', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=6425,
serialized_end=6488,
)
_sym_db.RegisterEnumDescriptor(_PEER_SYNCTYPE)
_CHANNELEVENTUPDATE_UPDATETYPE = _descriptor.EnumDescriptor(
name='UpdateType',
full_name='lnrpc.ChannelEventUpdate.UpdateType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='OPEN_CHANNEL', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CLOSED_CHANNEL', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACTIVE_CHANNEL', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INACTIVE_CHANNEL', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=10722,
serialized_end=10814,
)
_sym_db.RegisterEnumDescriptor(_CHANNELEVENTUPDATE_UPDATETYPE)
_INVOICE_INVOICESTATE = _descriptor.EnumDescriptor(
name='InvoiceState',
full_name='lnrpc.Invoice.InvoiceState',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='OPEN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SETTLED', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANCELED', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCEPTED', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=15854,
serialized_end=15919,
)
_sym_db.RegisterEnumDescriptor(_INVOICE_INVOICESTATE)
_PAYMENT_PAYMENTSTATUS = _descriptor.EnumDescriptor(
name='PaymentStatus',
full_name='lnrpc.Payment.PaymentStatus',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IN_FLIGHT', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUCCEEDED', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FAILED', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=17341,
serialized_end=17411,
)
_sym_db.RegisterEnumDescriptor(_PAYMENT_PAYMENTSTATUS)
_HTLCATTEMPT_HTLCSTATUS = _descriptor.EnumDescriptor(
name='HTLCStatus',
full_name='lnrpc.HTLCAttempt.HTLCStatus',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='IN_FLIGHT', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUCCEEDED', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FAILED', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=17604,
serialized_end=17658,
)
_sym_db.RegisterEnumDescriptor(_HTLCATTEMPT_HTLCSTATUS)
_GENSEEDREQUEST = _descriptor.Descriptor(
name='GenSeedRequest',
full_name='lnrpc.GenSeedRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='aezeed_passphrase', full_name='lnrpc.GenSeedRequest.aezeed_passphrase', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='seed_entropy', full_name='lnrpc.GenSeedRequest.seed_entropy', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=50,
serialized_end=115,
)
_GENSEEDRESPONSE = _descriptor.Descriptor(
name='GenSeedResponse',
full_name='lnrpc.GenSeedResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='cipher_seed_mnemonic', full_name='lnrpc.GenSeedResponse.cipher_seed_mnemonic', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='enciphered_seed', full_name='lnrpc.GenSeedResponse.enciphered_seed', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=117,
serialized_end=189,
)
_INITWALLETREQUEST = _descriptor.Descriptor(
name='InitWalletRequest',
full_name='lnrpc.InitWalletRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wallet_password', full_name='lnrpc.InitWalletRequest.wallet_password', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cipher_seed_mnemonic', full_name='lnrpc.InitWalletRequest.cipher_seed_mnemonic', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='aezeed_passphrase', full_name='lnrpc.InitWalletRequest.aezeed_passphrase', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='recovery_window', full_name='lnrpc.InitWalletRequest.recovery_window', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel_backups', full_name='lnrpc.InitWalletRequest.channel_backups', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=192,
serialized_end=370,
)
_INITWALLETRESPONSE = _descriptor.Descriptor(
name='InitWalletResponse',
full_name='lnrpc.InitWalletResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=372,
serialized_end=392,
)
_UNLOCKWALLETREQUEST = _descriptor.Descriptor(
name='UnlockWalletRequest',
full_name='lnrpc.UnlockWalletRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wallet_password', full_name='lnrpc.UnlockWalletRequest.wallet_password', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='recovery_window', full_name='lnrpc.UnlockWalletRequest.recovery_window', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel_backups', full_name='lnrpc.UnlockWalletRequest.channel_backups', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=394,
serialized_end=517,
)
_UNLOCKWALLETRESPONSE = _descriptor.Descriptor(
name='UnlockWalletResponse',
full_name='lnrpc.UnlockWalletResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=519,
serialized_end=541,
)
_CHANGEPASSWORDREQUEST = _descriptor.Descriptor(
name='ChangePasswordRequest',
full_name='lnrpc.ChangePasswordRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='current_password', full_name='lnrpc.ChangePasswordRequest.current_password', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='new_password', full_name='lnrpc.ChangePasswordRequest.new_password', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=543,
serialized_end=614,
)
_CHANGEPASSWORDRESPONSE = _descriptor.Descriptor(
name='ChangePasswordResponse',
full_name='lnrpc.ChangePasswordResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=616,
serialized_end=640,
)
_UTXO = _descriptor.Descriptor(
name='Utxo',
full_name='lnrpc.Utxo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='lnrpc.Utxo.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='address_type', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='address', full_name='lnrpc.Utxo.address', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='address', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount_sat', full_name='lnrpc.Utxo.amount_sat', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amount_sat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pk_script', full_name='lnrpc.Utxo.pk_script', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='pk_script', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='outpoint', full_name='lnrpc.Utxo.outpoint', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='outpoint', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='confirmations', full_name='lnrpc.Utxo.confirmations', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='confirmations', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=643,
serialized_end=868,
)
_TRANSACTION = _descriptor.Descriptor(
name='Transaction',
full_name='lnrpc.Transaction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tx_hash', full_name='lnrpc.Transaction.tx_hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='tx_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount', full_name='lnrpc.Transaction.amount', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amount', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_confirmations', full_name='lnrpc.Transaction.num_confirmations', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_confirmations', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='block_hash', full_name='lnrpc.Transaction.block_hash', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='block_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='block_height', full_name='lnrpc.Transaction.block_height', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='block_height', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time_stamp', full_name='lnrpc.Transaction.time_stamp', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='time_stamp', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total_fees', full_name='lnrpc.Transaction.total_fees', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='total_fees', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dest_addresses', full_name='lnrpc.Transaction.dest_addresses', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='dest_addresses', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='raw_tx_hex', full_name='lnrpc.Transaction.raw_tx_hex', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='raw_tx_hex', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=871,
serialized_end=1184,
)
_GETTRANSACTIONSREQUEST = _descriptor.Descriptor(
name='GetTransactionsRequest',
full_name='lnrpc.GetTransactionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1186,
serialized_end=1210,
)
# ---------------------------------------------------------------------------
# Machine-generated protobuf message descriptors for the `lnrpc` package
# (protoc output, rpc.proto -> rpc_pb2.py style).  DO NOT EDIT BY HAND:
# field numbers, type/cpp_type codes and the serialized_start/serialized_end
# offsets are emitted by the protocol buffer compiler and refer into the
# serialized file descriptor held by DESCRIPTOR; regenerate with protoc to
# change anything.  The comments below only annotate — the code is unchanged.
# Nested *Entry descriptors with serialized_options=_b('8\001') are the
# compiler's synthesized key/value entry messages for proto3 map fields.
# serialized_options=_b('\030\001') on a field marks it deprecated in the
# .proto source.
# ---------------------------------------------------------------------------

# lnrpc.TransactionDetails: one repeated message field `transactions`.
_TRANSACTIONDETAILS = _descriptor.Descriptor(
  name='TransactionDetails',
  full_name='lnrpc.TransactionDetails',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='transactions', full_name='lnrpc.TransactionDetails.transactions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='transactions', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1212,
  serialized_end=1288,
)


# lnrpc.FeeLimit: `fixed`, `fixed_msat` and `percent` int64 fields grouped
# under a single `limit` oneof (at most one may be set).
_FEELIMIT = _descriptor.Descriptor(
  name='FeeLimit',
  full_name='lnrpc.FeeLimit',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='fixed', full_name='lnrpc.FeeLimit.fixed', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fixed_msat', full_name='lnrpc.FeeLimit.fixed_msat', index=1,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='percent', full_name='lnrpc.FeeLimit.percent', index=2,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='limit', full_name='lnrpc.FeeLimit.limit',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=1290,
  serialized_end=1367,
)


# Synthesized map-entry type for SendRequest.dest_tlv (uint64 key -> bytes value).
_SENDREQUEST_DESTTLVENTRY = _descriptor.Descriptor(
  name='DestTlvEntry',
  full_name='lnrpc.SendRequest.DestTlvEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='lnrpc.SendRequest.DestTlvEntry.key', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.SendRequest.DestTlvEntry.value', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1751,
  serialized_end=1797,
)

# lnrpc.SendRequest: payment-send request.  `dest_string` and
# `payment_hash_string` carry the deprecated option ('\030\001'); the
# DestTlvEntry nested type above backs the `dest_tlv` map field.
_SENDREQUEST = _descriptor.Descriptor(
  name='SendRequest',
  full_name='lnrpc.SendRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='dest', full_name='lnrpc.SendRequest.dest', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='dest_string', full_name='lnrpc.SendRequest.dest_string', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt', full_name='lnrpc.SendRequest.amt', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_msat', full_name='lnrpc.SendRequest.amt_msat', index=3,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_hash', full_name='lnrpc.SendRequest.payment_hash', index=4,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_hash_string', full_name='lnrpc.SendRequest.payment_hash_string', index=5,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_request', full_name='lnrpc.SendRequest.payment_request', index=6,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='final_cltv_delta', full_name='lnrpc.SendRequest.final_cltv_delta', index=7,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_limit', full_name='lnrpc.SendRequest.fee_limit', index=8,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='outgoing_chan_id', full_name='lnrpc.SendRequest.outgoing_chan_id', index=9,
      number=9, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_hop_pubkey', full_name='lnrpc.SendRequest.last_hop_pubkey', index=10,
      number=13, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_limit', full_name='lnrpc.SendRequest.cltv_limit', index=11,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='dest_tlv', full_name='lnrpc.SendRequest.dest_tlv', index=12,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='allow_self_payment', full_name='lnrpc.SendRequest.allow_self_payment', index=13,
      number=14, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_SENDREQUEST_DESTTLVENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1370,
  serialized_end=1797,
)


# lnrpc.SendResponse: result of a payment send (error string, preimage,
# route taken, payment hash).
_SENDRESPONSE = _descriptor.Descriptor(
  name='SendResponse',
  full_name='lnrpc.SendResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='payment_error', full_name='lnrpc.SendResponse.payment_error', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_error', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_preimage', full_name='lnrpc.SendResponse.payment_preimage', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_preimage', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_route', full_name='lnrpc.SendResponse.payment_route', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_route', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_hash', full_name='lnrpc.SendResponse.payment_hash', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_hash', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1800,
  serialized_end=1984,
)


# lnrpc.SendToRouteRequest: pay along a caller-supplied route;
# `payment_hash_string` is deprecated ('\030\001').
_SENDTOROUTEREQUEST = _descriptor.Descriptor(
  name='SendToRouteRequest',
  full_name='lnrpc.SendToRouteRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='payment_hash', full_name='lnrpc.SendToRouteRequest.payment_hash', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_hash_string', full_name='lnrpc.SendToRouteRequest.payment_hash_string', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='route', full_name='lnrpc.SendToRouteRequest.route', index=2,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1986,
  serialized_end=2096,
)


# lnrpc.ChannelAcceptRequest: parameters of an inbound channel-open offer
# presented to a channel acceptor (funding/push amounts, limits, flags).
_CHANNELACCEPTREQUEST = _descriptor.Descriptor(
  name='ChannelAcceptRequest',
  full_name='lnrpc.ChannelAcceptRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_pubkey', full_name='lnrpc.ChannelAcceptRequest.node_pubkey', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chain_hash', full_name='lnrpc.ChannelAcceptRequest.chain_hash', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_chan_id', full_name='lnrpc.ChannelAcceptRequest.pending_chan_id', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='funding_amt', full_name='lnrpc.ChannelAcceptRequest.funding_amt', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='push_amt', full_name='lnrpc.ChannelAcceptRequest.push_amt', index=4,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='dust_limit', full_name='lnrpc.ChannelAcceptRequest.dust_limit', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_value_in_flight', full_name='lnrpc.ChannelAcceptRequest.max_value_in_flight', index=6,
      number=7, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_reserve', full_name='lnrpc.ChannelAcceptRequest.channel_reserve', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_htlc', full_name='lnrpc.ChannelAcceptRequest.min_htlc', index=8,
      number=9, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_per_kw', full_name='lnrpc.ChannelAcceptRequest.fee_per_kw', index=9,
      number=10, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='csv_delay', full_name='lnrpc.ChannelAcceptRequest.csv_delay', index=10,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_accepted_htlcs', full_name='lnrpc.ChannelAcceptRequest.max_accepted_htlcs', index=11,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_flags', full_name='lnrpc.ChannelAcceptRequest.channel_flags', index=12,
      number=13, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2099,
  serialized_end=2408,
)


# lnrpc.ChannelAcceptResponse: acceptor's verdict (`accept` bool) keyed by
# `pending_chan_id`.
_CHANNELACCEPTRESPONSE = _descriptor.Descriptor(
  name='ChannelAcceptResponse',
  full_name='lnrpc.ChannelAcceptResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='accept', full_name='lnrpc.ChannelAcceptResponse.accept', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_chan_id', full_name='lnrpc.ChannelAcceptResponse.pending_chan_id', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2410,
  serialized_end=2474,
)


# lnrpc.ChannelPoint: funding outpoint; txid given as bytes OR string via
# the `funding_txid` oneof, plus `output_index`.
_CHANNELPOINT = _descriptor.Descriptor(
  name='ChannelPoint',
  full_name='lnrpc.ChannelPoint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='funding_txid_bytes', full_name='lnrpc.ChannelPoint.funding_txid_bytes', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='funding_txid_bytes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='funding_txid_str', full_name='lnrpc.ChannelPoint.funding_txid_str', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='funding_txid_str', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='output_index', full_name='lnrpc.ChannelPoint.output_index', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='output_index', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='funding_txid', full_name='lnrpc.ChannelPoint.funding_txid',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=2477,
  serialized_end=2639,
)


# lnrpc.OutPoint: generic transaction outpoint (txid bytes + string + index;
# no oneof here, unlike ChannelPoint).
_OUTPOINT = _descriptor.Descriptor(
  name='OutPoint',
  full_name='lnrpc.OutPoint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='txid_bytes', full_name='lnrpc.OutPoint.txid_bytes', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='txid_bytes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='txid_str', full_name='lnrpc.OutPoint.txid_str', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='txid_str', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='output_index', full_name='lnrpc.OutPoint.output_index', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='output_index', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2641,
  serialized_end=2747,
)


# lnrpc.LightningAddress: node `pubkey` plus network `host` string.
_LIGHTNINGADDRESS = _descriptor.Descriptor(
  name='LightningAddress',
  full_name='lnrpc.LightningAddress',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pubkey', full_name='lnrpc.LightningAddress.pubkey', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pubkey', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='host', full_name='lnrpc.LightningAddress.host', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='host', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2749,
  serialized_end=2811,
)


# Synthesized map-entry type for EstimateFeeRequest.AddrToAmount
# (string key -> int64 value).
_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY = _descriptor.Descriptor(
  name='AddrToAmountEntry',
  full_name='lnrpc.EstimateFeeRequest.AddrToAmountEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='lnrpc.EstimateFeeRequest.AddrToAmountEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.EstimateFeeRequest.AddrToAmountEntry.value', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2924,
  serialized_end=2975,
)

# lnrpc.EstimateFeeRequest: address->amount map plus confirmation target.
_ESTIMATEFEEREQUEST = _descriptor.Descriptor(
  name='EstimateFeeRequest',
  full_name='lnrpc.EstimateFeeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='AddrToAmount', full_name='lnrpc.EstimateFeeRequest.AddrToAmount', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_conf', full_name='lnrpc.EstimateFeeRequest.target_conf', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2814,
  serialized_end=2975,
)


# lnrpc.EstimateFeeResponse: estimated fee in sats and feerate sat/byte.
_ESTIMATEFEERESPONSE = _descriptor.Descriptor(
  name='EstimateFeeResponse',
  full_name='lnrpc.EstimateFeeResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='fee_sat', full_name='lnrpc.EstimateFeeResponse.fee_sat', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='feerate_sat_per_byte', full_name='lnrpc.EstimateFeeResponse.feerate_sat_per_byte', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='feerate_sat_per_byte', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2977,
  serialized_end=3076,
)


# Synthesized map-entry type for SendManyRequest.AddrToAmount
# (string key -> int64 value).  Note it shares serialized_start/end
# offsets with the EstimateFeeRequest entry: identical entry shapes are
# deduplicated in the serialized descriptor data.
_SENDMANYREQUEST_ADDRTOAMOUNTENTRY = _descriptor.Descriptor(
  name='AddrToAmountEntry',
  full_name='lnrpc.SendManyRequest.AddrToAmountEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='lnrpc.SendManyRequest.AddrToAmountEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.SendManyRequest.AddrToAmountEntry.value', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2924,
  serialized_end=2975,
)

# lnrpc.SendManyRequest: address->amount map, confirmation target,
# sat/byte feerate.
_SENDMANYREQUEST = _descriptor.Descriptor(
  name='SendManyRequest',
  full_name='lnrpc.SendManyRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='AddrToAmount', full_name='lnrpc.SendManyRequest.AddrToAmount', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_conf', full_name='lnrpc.SendManyRequest.target_conf', index=1,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_per_byte', full_name='lnrpc.SendManyRequest.sat_per_byte', index=2,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_SENDMANYREQUEST_ADDRTOAMOUNTENTRY, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3079,
  serialized_end=3256,
)


# lnrpc.SendManyResponse: txid of the broadcast transaction.
_SENDMANYRESPONSE = _descriptor.Descriptor(
  name='SendManyResponse',
  full_name='lnrpc.SendManyResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='txid', full_name='lnrpc.SendManyResponse.txid', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='txid', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3258,
  serialized_end=3296,
)


# lnrpc.SendCoinsRequest: single-address on-chain send (addr, amount,
# target_conf, sat_per_byte, send_all flag).
_SENDCOINSREQUEST = _descriptor.Descriptor(
  name='SendCoinsRequest',
  full_name='lnrpc.SendCoinsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='addr', full_name='lnrpc.SendCoinsRequest.addr', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amount', full_name='lnrpc.SendCoinsRequest.amount', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_conf', full_name='lnrpc.SendCoinsRequest.target_conf', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_per_byte', full_name='lnrpc.SendCoinsRequest.sat_per_byte', index=3,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='send_all', full_name='lnrpc.SendCoinsRequest.send_all', index=4,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3298,
  serialized_end=3407,
)


# lnrpc.SendCoinsResponse: txid of the broadcast transaction.
_SENDCOINSRESPONSE = _descriptor.Descriptor(
  name='SendCoinsResponse',
  full_name='lnrpc.SendCoinsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='txid', full_name='lnrpc.SendCoinsResponse.txid', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='txid', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3409,
  serialized_end=3448,
)


# lnrpc.ListUnspentRequest: min/max confirmation filters for UTXO listing.
_LISTUNSPENTREQUEST = _descriptor.Descriptor(
  name='ListUnspentRequest',
  full_name='lnrpc.ListUnspentRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='min_confs', full_name='lnrpc.ListUnspentRequest.min_confs', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_confs', full_name='lnrpc.ListUnspentRequest.max_confs', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3450,
  serialized_end=3508,
)


# lnrpc.ListUnspentResponse: repeated `utxos` message field.
_LISTUNSPENTRESPONSE = _descriptor.Descriptor(
  name='ListUnspentResponse',
  full_name='lnrpc.ListUnspentResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='utxos', full_name='lnrpc.ListUnspentResponse.utxos', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='utxos', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3510,
  serialized_end=3566,
)


# lnrpc.NewAddressRequest: address `type` enum selector (enum_type resolved
# later during descriptor linking, hence None here).
_NEWADDRESSREQUEST = _descriptor.Descriptor(
  name='NewAddressRequest',
  full_name='lnrpc.NewAddressRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='type', full_name='lnrpc.NewAddressRequest.type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3568,
  serialized_end=3621,
)


# lnrpc.NewAddressResponse: the newly generated `address` string.
_NEWADDRESSRESPONSE = _descriptor.Descriptor(
  name='NewAddressResponse',
  full_name='lnrpc.NewAddressResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='address', full_name='lnrpc.NewAddressResponse.address', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='address', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3623,
  serialized_end=3669,
)


# lnrpc.SignMessageRequest: raw `msg` bytes to sign.
_SIGNMESSAGEREQUEST = _descriptor.Descriptor(
  name='SignMessageRequest',
  full_name='lnrpc.SignMessageRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='msg', full_name='lnrpc.SignMessageRequest.msg', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='msg', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3671,
  serialized_end=3709,
)


# lnrpc.SignMessageResponse: the resulting `signature` string.
_SIGNMESSAGERESPONSE = _descriptor.Descriptor(
  name='SignMessageResponse',
  full_name='lnrpc.SignMessageResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='signature', full_name='lnrpc.SignMessageResponse.signature', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='signature', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3711,
  serialized_end=3762,
)
# Generated protobuf descriptor (do not edit by hand; regenerate from .proto).
# Message descriptor: lnrpc.VerifyMessageRequest — bytes field 'msg' (1) and
# string field 'signature' (2).
_VERIFYMESSAGEREQUEST = _descriptor.Descriptor(
  name='VerifyMessageRequest',
  full_name='lnrpc.VerifyMessageRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='msg', full_name='lnrpc.VerifyMessageRequest.msg', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='msg', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='signature', full_name='lnrpc.VerifyMessageRequest.signature', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='signature', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3764,
  serialized_end=3834,
)
# Message descriptor: lnrpc.VerifyMessageResponse — bool field 'valid' (1) and
# string field 'pubkey' (2).
_VERIFYMESSAGERESPONSE = _descriptor.Descriptor(
  name='VerifyMessageResponse',
  full_name='lnrpc.VerifyMessageResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='valid', full_name='lnrpc.VerifyMessageResponse.valid', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='valid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pubkey', full_name='lnrpc.VerifyMessageResponse.pubkey', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pubkey', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3836,
  serialized_end=3905,
)
# Message descriptor: lnrpc.ConnectPeerRequest — message field 'addr' (1;
# proto type 11 = TYPE_MESSAGE, message_type linked later by protoc) and bool
# field 'perm' (2).
_CONNECTPEERREQUEST = _descriptor.Descriptor(
  name='ConnectPeerRequest',
  full_name='lnrpc.ConnectPeerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='addr', full_name='lnrpc.ConnectPeerRequest.addr', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='perm', full_name='lnrpc.ConnectPeerRequest.perm', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3907,
  serialized_end=3980,
)
# Generated protobuf descriptor (do not edit by hand; regenerate from .proto).
# Message descriptor: lnrpc.ConnectPeerResponse — empty message (no fields).
_CONNECTPEERRESPONSE = _descriptor.Descriptor(
  name='ConnectPeerResponse',
  full_name='lnrpc.ConnectPeerResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3982,
  serialized_end=4003,
)
# Message descriptor: lnrpc.DisconnectPeerRequest — one string field 'pub_key'.
_DISCONNECTPEERREQUEST = _descriptor.Descriptor(
  name='DisconnectPeerRequest',
  full_name='lnrpc.DisconnectPeerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.DisconnectPeerRequest.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4005,
  serialized_end=4054,
)
# Message descriptor: lnrpc.DisconnectPeerResponse — empty message (no fields).
_DISCONNECTPEERRESPONSE = _descriptor.Descriptor(
  name='DisconnectPeerResponse',
  full_name='lnrpc.DisconnectPeerResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4056,
  serialized_end=4080,
)
# Message descriptor: lnrpc.HTLC — fields: bool 'incoming' (1),
# int64 'amount' (2), bytes 'hash_lock' (3), uint32 'expiration_height' (4).
_HTLC = _descriptor.Descriptor(
  name='HTLC',
  full_name='lnrpc.HTLC',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='incoming', full_name='lnrpc.HTLC.incoming', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='incoming', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amount', full_name='lnrpc.HTLC.amount', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amount', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='hash_lock', full_name='lnrpc.HTLC.hash_lock', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='hash_lock', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiration_height', full_name='lnrpc.HTLC.expiration_height', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiration_height', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4083,
  serialized_end=4217,
)
# Generated protobuf descriptor (do not edit by hand; regenerate from .proto).
# Message descriptor: lnrpc.Channel — 24 fields (numbers 1-24) describing an
# open channel: identity (remote_pubkey, channel_point, chan_id), balances and
# fees, the repeated HTLC list 'pending_htlcs' (15), and status flags. The
# 'chan_id' field carries serialized_options=_b('0\001'), i.e. the jstype
# option emitted by protoc (uint64 rendered specially in JSON).
_CHANNEL = _descriptor.Descriptor(
  name='Channel',
  full_name='lnrpc.Channel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='active', full_name='lnrpc.Channel.active', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='active', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_pubkey', full_name='lnrpc.Channel.remote_pubkey', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_pubkey', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.Channel.channel_point', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.Channel.chan_id', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.Channel.capacity', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_balance', full_name='lnrpc.Channel.local_balance', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_balance', full_name='lnrpc.Channel.remote_balance', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commit_fee', full_name='lnrpc.Channel.commit_fee', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='commit_fee', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commit_weight', full_name='lnrpc.Channel.commit_weight', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='commit_weight', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_per_kw', full_name='lnrpc.Channel.fee_per_kw', index=9,
      number=10, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_per_kw', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='unsettled_balance', full_name='lnrpc.Channel.unsettled_balance', index=10,
      number=11, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='unsettled_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_satoshis_sent', full_name='lnrpc.Channel.total_satoshis_sent', index=11,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_satoshis_sent', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_satoshis_received', full_name='lnrpc.Channel.total_satoshis_received', index=12,
      number=13, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_satoshis_received', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_updates', full_name='lnrpc.Channel.num_updates', index=13,
      number=14, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_updates', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_htlcs', full_name='lnrpc.Channel.pending_htlcs', index=14,
      number=15, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_htlcs', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='csv_delay', full_name='lnrpc.Channel.csv_delay', index=15,
      number=16, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='csv_delay', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private', full_name='lnrpc.Channel.private', index=16,
      number=17, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='private', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='initiator', full_name='lnrpc.Channel.initiator', index=17,
      number=18, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='initiator', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_status_flags', full_name='lnrpc.Channel.chan_status_flags', index=18,
      number=19, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_status_flags', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_chan_reserve_sat', full_name='lnrpc.Channel.local_chan_reserve_sat', index=19,
      number=20, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_chan_reserve_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_chan_reserve_sat', full_name='lnrpc.Channel.remote_chan_reserve_sat', index=20,
      number=21, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_chan_reserve_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='static_remote_key', full_name='lnrpc.Channel.static_remote_key', index=21,
      number=22, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='static_remote_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lifetime', full_name='lnrpc.Channel.lifetime', index=22,
      number=23, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='lifetime', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='uptime', full_name='lnrpc.Channel.uptime', index=23,
      number=24, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='uptime', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4220,
  serialized_end=5150,
)
# Generated protobuf descriptor (do not edit by hand; regenerate from .proto).
# Message descriptor: lnrpc.ListChannelsRequest — four bool filter fields:
# active_only (1), inactive_only (2), public_only (3), private_only (4).
_LISTCHANNELSREQUEST = _descriptor.Descriptor(
  name='ListChannelsRequest',
  full_name='lnrpc.ListChannelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='active_only', full_name='lnrpc.ListChannelsRequest.active_only', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inactive_only', full_name='lnrpc.ListChannelsRequest.inactive_only', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='public_only', full_name='lnrpc.ListChannelsRequest.public_only', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private_only', full_name='lnrpc.ListChannelsRequest.private_only', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5152,
  serialized_end=5260,
)
# Message descriptor: lnrpc.ListChannelsResponse — one repeated message field
# 'channels' (note: field number 11, label=3 = LABEL_REPEATED).
_LISTCHANNELSRESPONSE = _descriptor.Descriptor(
  name='ListChannelsResponse',
  full_name='lnrpc.ListChannelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channels', full_name='lnrpc.ListChannelsResponse.channels', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5262,
  serialized_end=5328,
)
# Generated protobuf descriptor (do not edit by hand; regenerate from .proto).
# Message descriptor: lnrpc.ChannelCloseSummary — 10 fields (numbers 1-10)
# describing a closed channel, plus the nested ClosureType enum
# (_CHANNELCLOSESUMMARY_CLOSURETYPE, defined elsewhere in this file).
# 'chan_id' (2) carries the same _b('0\001') field options as Channel.chan_id.
_CHANNELCLOSESUMMARY = _descriptor.Descriptor(
  name='ChannelCloseSummary',
  full_name='lnrpc.ChannelCloseSummary',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.ChannelCloseSummary.channel_point', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ChannelCloseSummary.chan_id', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chain_hash', full_name='lnrpc.ChannelCloseSummary.chain_hash', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chain_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closing_tx_hash', full_name='lnrpc.ChannelCloseSummary.closing_tx_hash', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closing_tx_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_pubkey', full_name='lnrpc.ChannelCloseSummary.remote_pubkey', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_pubkey', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ChannelCloseSummary.capacity', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='close_height', full_name='lnrpc.ChannelCloseSummary.close_height', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='close_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settled_balance', full_name='lnrpc.ChannelCloseSummary.settled_balance', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settled_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='time_locked_balance', full_name='lnrpc.ChannelCloseSummary.time_locked_balance', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='time_locked_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='close_type', full_name='lnrpc.ChannelCloseSummary.close_type', index=9,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='close_type', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CHANNELCLOSESUMMARY_CLOSURETYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5331,
  serialized_end=5901,
)
# Message descriptor: lnrpc.ClosedChannelsRequest — six bool filter fields:
# cooperative (1), local_force (2), remote_force (3), breach (4),
# funding_canceled (5), abandoned (6).
_CLOSEDCHANNELSREQUEST = _descriptor.Descriptor(
  name='ClosedChannelsRequest',
  full_name='lnrpc.ClosedChannelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='cooperative', full_name='lnrpc.ClosedChannelsRequest.cooperative', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_force', full_name='lnrpc.ClosedChannelsRequest.local_force', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_force', full_name='lnrpc.ClosedChannelsRequest.remote_force', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='breach', full_name='lnrpc.ClosedChannelsRequest.breach', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='funding_canceled', full_name='lnrpc.ClosedChannelsRequest.funding_canceled', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='abandoned', full_name='lnrpc.ClosedChannelsRequest.abandoned', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5904,
  serialized_end=6052,
)
# Message descriptor: lnrpc.ClosedChannelsResponse — one repeated message
# field 'channels' (1).
_CLOSEDCHANNELSRESPONSE = _descriptor.Descriptor(
  name='ClosedChannelsResponse',
  full_name='lnrpc.ClosedChannelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channels', full_name='lnrpc.ClosedChannelsResponse.channels', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6054,
  serialized_end=6134,
)
# Generated protobuf descriptor (do not edit by hand; regenerate from .proto).
# Message descriptor: lnrpc.Peer — connection stats for a peer: pub_key (1),
# address (3; note field number 2 is unused here, presumably reserved in the
# .proto — confirm against the source schema), byte/satoshi counters (4-7),
# inbound flag (8), ping_time (9), and the nested SyncType enum field (10).
_PEER = _descriptor.Descriptor(
  name='Peer',
  full_name='lnrpc.Peer',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.Peer.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='address', full_name='lnrpc.Peer.address', index=1,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='address', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='bytes_sent', full_name='lnrpc.Peer.bytes_sent', index=2,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='bytes_sent', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='bytes_recv', full_name='lnrpc.Peer.bytes_recv', index=3,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='bytes_recv', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_sent', full_name='lnrpc.Peer.sat_sent', index=4,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sat_sent', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_recv', full_name='lnrpc.Peer.sat_recv', index=5,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sat_recv', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inbound', full_name='lnrpc.Peer.inbound', index=6,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='inbound', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ping_time', full_name='lnrpc.Peer.ping_time', index=7,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='ping_time', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sync_type', full_name='lnrpc.Peer.sync_type', index=8,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sync_type', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _PEER_SYNCTYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6137,
  serialized_end=6488,
)
# Message descriptor: lnrpc.ListPeersRequest — empty message (no fields).
_LISTPEERSREQUEST = _descriptor.Descriptor(
  name='ListPeersRequest',
  full_name='lnrpc.ListPeersRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6490,
  serialized_end=6508,
)
# Message descriptor: lnrpc.ListPeersResponse — one repeated message field
# 'peers' (1).
_LISTPEERSRESPONSE = _descriptor.Descriptor(
  name='ListPeersResponse',
  full_name='lnrpc.ListPeersResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='peers', full_name='lnrpc.ListPeersResponse.peers', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='peers', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6510,
  serialized_end=6564,
)
# Message descriptor: lnrpc.GetInfoRequest — empty message (no fields).
_GETINFOREQUEST = _descriptor.Descriptor(
  name='GetInfoRequest',
  full_name='lnrpc.GetInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6566,
  serialized_end=6582,
)
# Generated protobuf descriptor for lnrpc.GetInfoResponse (protoc output —
# do not hand-edit; regenerate from the .proto instead).
# Node-level info: identity pubkey, alias, channel/peer counts, chain-sync
# state, supported chains, and version metadata.
_GETINFORESPONSE = _descriptor.Descriptor(
  name='GetInfoResponse',
  full_name='lnrpc.GetInfoResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='identity_pubkey', full_name='lnrpc.GetInfoResponse.identity_pubkey', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='identity_pubkey', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='alias', full_name='lnrpc.GetInfoResponse.alias', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='alias', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_pending_channels', full_name='lnrpc.GetInfoResponse.num_pending_channels', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_pending_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_active_channels', full_name='lnrpc.GetInfoResponse.num_active_channels', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_active_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_peers', full_name='lnrpc.GetInfoResponse.num_peers', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_peers', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='lnrpc.GetInfoResponse.block_height', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='block_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='block_hash', full_name='lnrpc.GetInfoResponse.block_hash', index=6,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='block_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='synced_to_chain', full_name='lnrpc.GetInfoResponse.synced_to_chain', index=7,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='synced_to_chain', file=DESCRIPTOR),
    # serialized_options b'\x18\x01' encodes FieldOptions field 3
    # (`deprecated`) = true, i.e. `testnet` is marked deprecated in the .proto.
    _descriptor.FieldDescriptor(
      name='testnet', full_name='lnrpc.GetInfoResponse.testnet', index=8,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='testnet', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='uris', full_name='lnrpc.GetInfoResponse.uris', index=9,
      number=12, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='uris', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='best_header_timestamp', full_name='lnrpc.GetInfoResponse.best_header_timestamp', index=10,
      number=13, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='best_header_timestamp', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='version', full_name='lnrpc.GetInfoResponse.version', index=11,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='version', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_inactive_channels', full_name='lnrpc.GetInfoResponse.num_inactive_channels', index=12,
      number=15, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_inactive_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chains', full_name='lnrpc.GetInfoResponse.chains', index=13,
      number=16, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chains', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='color', full_name='lnrpc.GetInfoResponse.color', index=14,
      number=17, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='color', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='synced_to_graph', full_name='lnrpc.GetInfoResponse.synced_to_graph', index=15,
      number=18, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='synced_to_graph', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside the file's serialized descriptor blob.
  serialized_start=6585,
  serialized_end=7200,
)
# Generated protobuf descriptor for lnrpc.Chain (protoc output — do not
# hand-edit). Two string fields naming a chain and its network.
_CHAIN = _descriptor.Descriptor(
  name='Chain',
  full_name='lnrpc.Chain',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chain', full_name='lnrpc.Chain.chain', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chain', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='network', full_name='lnrpc.Chain.network', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='network', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7202,
  serialized_end=7257,
)
# Generated protobuf descriptor for lnrpc.ConfirmationUpdate (protoc output —
# do not hand-edit). Carries a block hash/height plus the remaining
# confirmation count. Note: these fields have no explicit json_name override.
_CONFIRMATIONUPDATE = _descriptor.Descriptor(
  name='ConfirmationUpdate',
  full_name='lnrpc.ConfirmationUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='block_sha', full_name='lnrpc.ConfirmationUpdate.block_sha', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='lnrpc.ConfirmationUpdate.block_height', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_confs_left', full_name='lnrpc.ConfirmationUpdate.num_confs_left', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7259,
  serialized_end=7344,
)
# Generated protobuf descriptor for lnrpc.ChannelOpenUpdate (protoc output —
# do not hand-edit). Single message-typed field referencing the opened
# channel's outpoint (message_type is linked up later by the generated code).
_CHANNELOPENUPDATE = _descriptor.Descriptor(
  name='ChannelOpenUpdate',
  full_name='lnrpc.ChannelOpenUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.ChannelOpenUpdate.channel_point', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7346,
  serialized_end=7424,
)
# Generated protobuf descriptor for lnrpc.ChannelCloseUpdate (protoc output —
# do not hand-edit). Closing transaction id (bytes) plus a success flag.
_CHANNELCLOSEUPDATE = _descriptor.Descriptor(
  name='ChannelCloseUpdate',
  full_name='lnrpc.ChannelCloseUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='closing_txid', full_name='lnrpc.ChannelCloseUpdate.closing_txid', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closing_txid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='success', full_name='lnrpc.ChannelCloseUpdate.success', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='success', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7426,
  serialized_end=7508,
)
# Generated protobuf descriptor for lnrpc.CloseChannelRequest (protoc output —
# do not hand-edit). Identifies the channel to close plus force/fee knobs
# (target_conf, sat_per_byte).
_CLOSECHANNELREQUEST = _descriptor.Descriptor(
  name='CloseChannelRequest',
  full_name='lnrpc.CloseChannelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.CloseChannelRequest.channel_point', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='force', full_name='lnrpc.CloseChannelRequest.force', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_conf', full_name='lnrpc.CloseChannelRequest.target_conf', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_per_byte', full_name='lnrpc.CloseChannelRequest.sat_per_byte', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7510,
  serialized_end=7633,
)
# Generated protobuf descriptor for lnrpc.CloseStatusUpdate (protoc output —
# do not hand-edit). The two message fields belong to the `update` oneof
# declared below, so at most one is set at a time.
_CLOSESTATUSUPDATE = _descriptor.Descriptor(
  name='CloseStatusUpdate',
  full_name='lnrpc.CloseStatusUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='close_pending', full_name='lnrpc.CloseStatusUpdate.close_pending', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='close_pending', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_close', full_name='lnrpc.CloseStatusUpdate.chan_close', index=1,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_close', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='update', full_name='lnrpc.CloseStatusUpdate.update',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=7636,
  serialized_end=7788,
)
# Generated protobuf descriptor for lnrpc.PendingUpdate (protoc output —
# do not hand-edit). A funding/closing transaction id plus output index.
_PENDINGUPDATE = _descriptor.Descriptor(
  name='PendingUpdate',
  full_name='lnrpc.PendingUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='txid', full_name='lnrpc.PendingUpdate.txid', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='txid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='output_index', full_name='lnrpc.PendingUpdate.output_index', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='output_index', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7790,
  serialized_end=7861,
)
# Generated protobuf descriptor for lnrpc.OpenChannelRequest (protoc output —
# do not hand-edit; regenerate from the .proto instead).
# Parameters for opening a channel: target node, funding/push amounts, fee
# controls, privacy and confirmation options. Field numbering starts at 2.
_OPENCHANNELREQUEST = _descriptor.Descriptor(
  name='OpenChannelRequest',
  full_name='lnrpc.OpenChannelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_pubkey', full_name='lnrpc.OpenChannelRequest.node_pubkey', index=0,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node_pubkey', file=DESCRIPTOR),
    # serialized_options b'\x18\x01' encodes FieldOptions field 3
    # (`deprecated`) = true — `node_pubkey_string` is deprecated in favor of
    # the raw-bytes `node_pubkey` field above.
    _descriptor.FieldDescriptor(
      name='node_pubkey_string', full_name='lnrpc.OpenChannelRequest.node_pubkey_string', index=1,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='node_pubkey_string', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_funding_amount', full_name='lnrpc.OpenChannelRequest.local_funding_amount', index=2,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_funding_amount', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='push_sat', full_name='lnrpc.OpenChannelRequest.push_sat', index=3,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='push_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_conf', full_name='lnrpc.OpenChannelRequest.target_conf', index=4,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_per_byte', full_name='lnrpc.OpenChannelRequest.sat_per_byte', index=5,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private', full_name='lnrpc.OpenChannelRequest.private', index=6,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='private', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_htlc_msat', full_name='lnrpc.OpenChannelRequest.min_htlc_msat', index=7,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='min_htlc_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_csv_delay', full_name='lnrpc.OpenChannelRequest.remote_csv_delay', index=8,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_csv_delay', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_confs', full_name='lnrpc.OpenChannelRequest.min_confs', index=9,
      number=11, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='min_confs', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='spend_unconfirmed', full_name='lnrpc.OpenChannelRequest.spend_unconfirmed', index=10,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='spend_unconfirmed', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7864,
  serialized_end=8277,
)
# Generated protobuf descriptor for lnrpc.OpenStatusUpdate (protoc output —
# do not hand-edit). The two message fields belong to the `update` oneof
# declared below, so at most one is set at a time.
_OPENSTATUSUPDATE = _descriptor.Descriptor(
  name='OpenStatusUpdate',
  full_name='lnrpc.OpenStatusUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_pending', full_name='lnrpc.OpenStatusUpdate.chan_pending', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_pending', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_open', full_name='lnrpc.OpenStatusUpdate.chan_open', index=1,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_open', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='update', full_name='lnrpc.OpenStatusUpdate.update',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=8280,
  serialized_end=8426,
)
# Generated protobuf descriptor for lnrpc.PendingHTLC (protoc output —
# do not hand-edit). Describes an in-flight HTLC: direction, amount,
# outpoint, and maturity/stage information.
_PENDINGHTLC = _descriptor.Descriptor(
  name='PendingHTLC',
  full_name='lnrpc.PendingHTLC',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='incoming', full_name='lnrpc.PendingHTLC.incoming', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='incoming', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amount', full_name='lnrpc.PendingHTLC.amount', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amount', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='outpoint', full_name='lnrpc.PendingHTLC.outpoint', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='outpoint', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='maturity_height', full_name='lnrpc.PendingHTLC.maturity_height', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='maturity_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='blocks_til_maturity', full_name='lnrpc.PendingHTLC.blocks_til_maturity', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='blocks_til_maturity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='stage', full_name='lnrpc.PendingHTLC.stage', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='stage', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8429,
  serialized_end=8636,
)
# Generated protobuf descriptor for lnrpc.PendingChannelsRequest (protoc
# output — do not hand-edit). Empty request message: no fields.
_PENDINGCHANNELSREQUEST = _descriptor.Descriptor(
  name='PendingChannelsRequest',
  full_name='lnrpc.PendingChannelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8638,
  serialized_end=8662,
)
# Generated protobuf descriptor for the nested message
# lnrpc.PendingChannelsResponse.PendingChannel (protoc output — do not
# hand-edit). Common channel data shared by the pending-channel variants:
# peer pubkey, channel point, capacity, and balance/reserve amounts.
_PENDINGCHANNELSRESPONSE_PENDINGCHANNEL = _descriptor.Descriptor(
  name='PendingChannel',
  full_name='lnrpc.PendingChannelsResponse.PendingChannel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='remote_node_pub', full_name='lnrpc.PendingChannelsResponse.PendingChannel.remote_node_pub', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_node_pub', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.PendingChannelsResponse.PendingChannel.channel_point', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.PendingChannelsResponse.PendingChannel.capacity', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_balance', full_name='lnrpc.PendingChannelsResponse.PendingChannel.local_balance', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_balance', full_name='lnrpc.PendingChannelsResponse.PendingChannel.remote_balance', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_chan_reserve_sat', full_name='lnrpc.PendingChannelsResponse.PendingChannel.local_chan_reserve_sat', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_chan_reserve_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_chan_reserve_sat', full_name='lnrpc.PendingChannelsResponse.PendingChannel.remote_chan_reserve_sat', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_chan_reserve_sat', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9185,
  serialized_end=9501,
)
# Generated protobuf descriptor for the nested message
# lnrpc.PendingChannelsResponse.PendingOpenChannel (protoc output — do not
# hand-edit). Wraps a PendingChannel with funding-confirmation and
# commitment-fee details.
_PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL = _descriptor.Descriptor(
  name='PendingOpenChannel',
  full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.channel', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='confirmation_height', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.confirmation_height', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='confirmation_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commit_fee', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.commit_fee', index=2,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='commit_fee', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commit_weight', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.commit_weight', index=3,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='commit_weight', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_per_kw', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.fee_per_kw', index=4,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_per_kw', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9504,
  serialized_end=9749,
)
# Generated protobuf descriptor for the nested message
# lnrpc.PendingChannelsResponse.WaitingCloseChannel (protoc output — do not
# hand-edit). A PendingChannel plus the balance currently in limbo.
_PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL = _descriptor.Descriptor(
  name='WaitingCloseChannel',
  full_name='lnrpc.PendingChannelsResponse.WaitingCloseChannel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel', full_name='lnrpc.PendingChannelsResponse.WaitingCloseChannel.channel', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='limbo_balance', full_name='lnrpc.PendingChannelsResponse.WaitingCloseChannel.limbo_balance', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='limbo_balance', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9751,
  serialized_end=9874,
)
# Generated protobuf descriptor for the nested message
# lnrpc.PendingChannelsResponse.ClosedChannel (protoc output — do not
# hand-edit). A PendingChannel plus the txid of its closing transaction.
_PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL = _descriptor.Descriptor(
  name='ClosedChannel',
  full_name='lnrpc.PendingChannelsResponse.ClosedChannel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel', full_name='lnrpc.PendingChannelsResponse.ClosedChannel.channel', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closing_txid', full_name='lnrpc.PendingChannelsResponse.ClosedChannel.closing_txid', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closing_txid', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9876,
  serialized_end=9991,
)
# Generated protobuf descriptor for the nested message
# lnrpc.PendingChannelsResponse.ForceClosedChannel (protoc output — do not
# hand-edit). A force-closed channel: closing txid, limbo/recovered balances,
# maturity info, and the list of still-pending HTLCs.
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL = _descriptor.Descriptor(
  name='ForceClosedChannel',
  full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.channel', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closing_txid', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.closing_txid', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closing_txid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='limbo_balance', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.limbo_balance', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='limbo_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='maturity_height', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.maturity_height', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='maturity_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='blocks_til_maturity', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.blocks_til_maturity', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='blocks_til_maturity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='recovered_balance', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.recovered_balance', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='recovered_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_htlcs', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.pending_htlcs', index=6,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_htlcs', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9994,
  serialized_end=10357,
)
# --- Generated protobuf descriptor (protoc output) ---
# NOTE(review): this file is machine-generated from a .proto schema; do not
# hand-edit these literals -- regenerate from the source .proto instead.
# Message descriptor for lnrpc.PendingChannelsResponse: a total limbo balance
# plus repeated lists of channels in each pending state (open, closing,
# force-closing, waiting-close), with the per-state shapes declared as the
# nested message types listed in nested_types below.
_PENDINGCHANNELSRESPONSE = _descriptor.Descriptor(
  name='PendingChannelsResponse',
  full_name='lnrpc.PendingChannelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total_limbo_balance', full_name='lnrpc.PendingChannelsResponse.total_limbo_balance', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_limbo_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_open_channels', full_name='lnrpc.PendingChannelsResponse.pending_open_channels', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_open_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_closing_channels', full_name='lnrpc.PendingChannelsResponse.pending_closing_channels', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_closing_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_force_closing_channels', full_name='lnrpc.PendingChannelsResponse.pending_force_closing_channels', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_force_closing_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='waiting_close_channels', full_name='lnrpc.PendingChannelsResponse.waiting_close_channels', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='waiting_close_channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  # Nested message descriptors defined earlier in this file; protobuf wires
  # up their containing_type when the file descriptor is assembled.
  nested_types=[_PENDINGCHANNELSRESPONSE_PENDINGCHANNEL, _PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL, _PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL, _PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL, _PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside the file's serialized FileDescriptorProto.
  serialized_start=8665,
  serialized_end=10357,
)
# Message descriptor for lnrpc.ChannelEventSubscription: an empty request
# message (no fields) used to open the channel-event stream.
_CHANNELEVENTSUBSCRIPTION = _descriptor.Descriptor(
  name='ChannelEventSubscription',
  full_name='lnrpc.ChannelEventSubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10359,
  serialized_end=10385,
)
# Message descriptor for lnrpc.ChannelEventUpdate: one of four channel
# payloads (open/closed/active/inactive) grouped under the 'channel' oneof,
# plus an enum-typed 'type' discriminator (enum descriptor referenced in
# enum_types below).
_CHANNELEVENTUPDATE = _descriptor.Descriptor(
  name='ChannelEventUpdate',
  full_name='lnrpc.ChannelEventUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='open_channel', full_name='lnrpc.ChannelEventUpdate.open_channel', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='open_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closed_channel', full_name='lnrpc.ChannelEventUpdate.closed_channel', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closed_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='active_channel', full_name='lnrpc.ChannelEventUpdate.active_channel', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='active_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inactive_channel', full_name='lnrpc.ChannelEventUpdate.inactive_channel', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='inactive_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='type', full_name='lnrpc.ChannelEventUpdate.type', index=4,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='type', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CHANNELEVENTUPDATE_UPDATETYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # The four channel payload fields above are placed into this oneof when
    # the file descriptor is assembled.
    _descriptor.OneofDescriptor(
      name='channel', full_name='lnrpc.ChannelEventUpdate.channel',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=10388,
  serialized_end=10825,
)
# Message descriptor for lnrpc.WalletBalanceRequest: empty request message.
_WALLETBALANCEREQUEST = _descriptor.Descriptor(
  name='WalletBalanceRequest',
  full_name='lnrpc.WalletBalanceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10827,
  serialized_end=10849,
)
# Message descriptor for lnrpc.WalletBalanceResponse: three int64 balance
# fields (total / confirmed / unconfirmed).
_WALLETBALANCERESPONSE = _descriptor.Descriptor(
  name='WalletBalanceResponse',
  full_name='lnrpc.WalletBalanceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total_balance', full_name='lnrpc.WalletBalanceResponse.total_balance', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='confirmed_balance', full_name='lnrpc.WalletBalanceResponse.confirmed_balance', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='confirmed_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='unconfirmed_balance', full_name='lnrpc.WalletBalanceResponse.unconfirmed_balance', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='unconfirmed_balance', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10852,
  serialized_end=11009,
)
# Message descriptor for lnrpc.ChannelBalanceRequest: empty request message.
_CHANNELBALANCEREQUEST = _descriptor.Descriptor(
  name='ChannelBalanceRequest',
  full_name='lnrpc.ChannelBalanceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11011,
  serialized_end=11034,
)
# Message descriptor for lnrpc.ChannelBalanceResponse: int64 'balance' and
# 'pending_open_balance' fields.
_CHANNELBALANCERESPONSE = _descriptor.Descriptor(
  name='ChannelBalanceResponse',
  full_name='lnrpc.ChannelBalanceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='balance', full_name='lnrpc.ChannelBalanceResponse.balance', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_open_balance', full_name='lnrpc.ChannelBalanceResponse.pending_open_balance', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_open_balance', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11036,
  serialized_end=11138,
)
# Message descriptor for lnrpc.QueryRoutesRequest: route-query parameters
# (destination pub_key, amount in sat or msat, fee/cltv limits, ignored
# nodes/edges/pairs, mission-control toggle).
_QUERYROUTESREQUEST = _descriptor.Descriptor(
  name='QueryRoutesRequest',
  full_name='lnrpc.QueryRoutesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.QueryRoutesRequest.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt', full_name='lnrpc.QueryRoutesRequest.amt', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_msat', full_name='lnrpc.QueryRoutesRequest.amt_msat', index=2,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='final_cltv_delta', full_name='lnrpc.QueryRoutesRequest.final_cltv_delta', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_limit', full_name='lnrpc.QueryRoutesRequest.fee_limit', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ignored_nodes', full_name='lnrpc.QueryRoutesRequest.ignored_nodes', index=5,
      number=6, type=12, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    # serialized_options b'\x18\x01' is serialized FieldOptions -- presumably
    # the `deprecated = true` option from the .proto; confirm against rpc.proto.
    _descriptor.FieldDescriptor(
      name='ignored_edges', full_name='lnrpc.QueryRoutesRequest.ignored_edges', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='source_pub_key', full_name='lnrpc.QueryRoutesRequest.source_pub_key', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='use_mission_control', full_name='lnrpc.QueryRoutesRequest.use_mission_control', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ignored_pairs', full_name='lnrpc.QueryRoutesRequest.ignored_pairs', index=9,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_limit', full_name='lnrpc.QueryRoutesRequest.cltv_limit', index=10,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11141,
  serialized_end=11460,
)
# Message descriptor for lnrpc.NodePair: a directed (from, to) pair of node
# pubkeys as bytes fields. Note 'from' is a Python keyword; the generated
# message class handles that via the descriptor machinery.
_NODEPAIR = _descriptor.Descriptor(
  name='NodePair',
  full_name='lnrpc.NodePair',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='from', full_name='lnrpc.NodePair.from', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='to', full_name='lnrpc.NodePair.to', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11462,
  serialized_end=11498,
)
# Message descriptor for lnrpc.EdgeLocator: identifies one direction of a
# channel edge by channel_id plus a direction_reverse flag.
_EDGELOCATOR = _descriptor.Descriptor(
  name='EdgeLocator',
  full_name='lnrpc.EdgeLocator',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    # serialized_options b'0\x01' is serialized FieldOptions -- presumably the
    # `jstype = JS_STRING` option (uint64 rendered as string in JS/JSON);
    # confirm against rpc.proto.
    _descriptor.FieldDescriptor(
      name='channel_id', full_name='lnrpc.EdgeLocator.channel_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='direction_reverse', full_name='lnrpc.EdgeLocator.direction_reverse', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11500,
  serialized_end=11564,
)
# Message descriptor for lnrpc.QueryRoutesResponse: repeated Route messages
# plus a double 'success_prob'.
_QUERYROUTESRESPONSE = _descriptor.Descriptor(
  name='QueryRoutesResponse',
  full_name='lnrpc.QueryRoutesResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='routes', full_name='lnrpc.QueryRoutesResponse.routes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='routes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='success_prob', full_name='lnrpc.QueryRoutesResponse.success_prob', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='success_prob', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11566,
  serialized_end=11661,
)
# Message descriptor for lnrpc.Hop: one hop of a payment route (channel id,
# amounts/fees in sat and msat, expiry, destination pub_key, TLV-payload flag,
# optional MPP record). Several sat-denominated fields carry serialized
# options b'\x18\x01' -- presumably `deprecated = true` in favor of the msat
# variants; confirm against rpc.proto.
_HOP = _descriptor.Descriptor(
  name='Hop',
  full_name='lnrpc.Hop',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.Hop.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_capacity', full_name='lnrpc.Hop.chan_capacity', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_to_forward', full_name='lnrpc.Hop.amt_to_forward', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='amt_to_forward', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee', full_name='lnrpc.Hop.fee', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='fee', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry', full_name='lnrpc.Hop.expiry', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_to_forward_msat', full_name='lnrpc.Hop.amt_to_forward_msat', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_to_forward_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_msat', full_name='lnrpc.Hop.fee_msat', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.Hop.pub_key', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='tlv_payload', full_name='lnrpc.Hop.tlv_payload', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='tlv_payload', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='mpp_record', full_name='lnrpc.Hop.mpp_record', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='mpp_record', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11664,
  serialized_end=12015,
)
# Message descriptor for lnrpc.MPPRecord (multi-path payment record):
# payment_addr bytes plus total_amt_msat. Note the field numbers (11, 10) do
# not follow declaration order -- that matches the .proto as written.
_MPPRECORD = _descriptor.Descriptor(
  name='MPPRecord',
  full_name='lnrpc.MPPRecord',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='payment_addr', full_name='lnrpc.MPPRecord.payment_addr', index=0,
      number=11, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_addr', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_amt_msat', full_name='lnrpc.MPPRecord.total_amt_msat', index=1,
      number=10, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_amt_msat', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12017,
  serialized_end=12104,
)
# Message descriptor for lnrpc.Route: total time lock, fee/amount totals in
# both sat and msat, and the repeated Hop list. The sat-denominated totals
# carry serialized options b'\x18\x01' -- presumably `deprecated = true` in
# favor of the msat fields; confirm against rpc.proto.
_ROUTE = _descriptor.Descriptor(
  name='Route',
  full_name='lnrpc.Route',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total_time_lock', full_name='lnrpc.Route.total_time_lock', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_time_lock', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_fees', full_name='lnrpc.Route.total_fees', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='total_fees', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_amt', full_name='lnrpc.Route.total_amt', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='total_amt', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='hops', full_name='lnrpc.Route.hops', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='hops', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_fees_msat', full_name='lnrpc.Route.total_fees_msat', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_fees_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_amt_msat', full_name='lnrpc.Route.total_amt_msat', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_amt_msat', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12107,
  serialized_end=12340,
)
# Message descriptor for lnrpc.NodeInfoRequest: target node pub_key and an
# include_channels bool.
_NODEINFOREQUEST = _descriptor.Descriptor(
  name='NodeInfoRequest',
  full_name='lnrpc.NodeInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.NodeInfoRequest.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='include_channels', full_name='lnrpc.NodeInfoRequest.include_channels', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12342,
  serialized_end=12402,
)
# Message descriptor for lnrpc.NodeInfo: the LightningNode message, channel
# count, total capacity, and the node's channel edges.
_NODEINFO = _descriptor.Descriptor(
  name='NodeInfo',
  full_name='lnrpc.NodeInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node', full_name='lnrpc.NodeInfo.node', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_channels', full_name='lnrpc.NodeInfo.num_channels', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_capacity', full_name='lnrpc.NodeInfo.total_capacity', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channels', full_name='lnrpc.NodeInfo.channels', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12405,
  serialized_end=12581,
)
# Message descriptor for lnrpc.LightningNode: graph-node metadata
# (last_update, pub_key, alias, repeated addresses, color).
_LIGHTNINGNODE = _descriptor.Descriptor(
  name='LightningNode',
  full_name='lnrpc.LightningNode',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='last_update', full_name='lnrpc.LightningNode.last_update', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='last_update', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.LightningNode.pub_key', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='alias', full_name='lnrpc.LightningNode.alias', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='alias', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='addresses', full_name='lnrpc.LightningNode.addresses', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='addresses', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='color', full_name='lnrpc.LightningNode.color', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='color', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12584,
  serialized_end=12753,
)
# Message descriptor for lnrpc.NodeAddress: a (network, addr) string pair.
_NODEADDRESS = _descriptor.Descriptor(
  name='NodeAddress',
  full_name='lnrpc.NodeAddress',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='network', full_name='lnrpc.NodeAddress.network', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='network', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='addr', full_name='lnrpc.NodeAddress.addr', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='addr', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12755,
  serialized_end=12814,
)
# Message descriptor for lnrpc.RoutingPolicy: one channel direction's
# forwarding policy (time-lock delta, htlc min/max, base and proportional
# fees, disabled flag, last_update timestamp).
_ROUTINGPOLICY = _descriptor.Descriptor(
  name='RoutingPolicy',
  full_name='lnrpc.RoutingPolicy',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='time_lock_delta', full_name='lnrpc.RoutingPolicy.time_lock_delta', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='time_lock_delta', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_htlc', full_name='lnrpc.RoutingPolicy.min_htlc', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='min_htlc', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_base_msat', full_name='lnrpc.RoutingPolicy.fee_base_msat', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_base_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_rate_milli_msat', full_name='lnrpc.RoutingPolicy.fee_rate_milli_msat', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_rate_milli_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='disabled', full_name='lnrpc.RoutingPolicy.disabled', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='disabled', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_htlc_msat', full_name='lnrpc.RoutingPolicy.max_htlc_msat', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_htlc_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_update', full_name='lnrpc.RoutingPolicy.last_update', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='last_update', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12817,
  serialized_end=13090,
)
# Descriptor for the lnrpc.ChannelEdge message: one channel in the graph,
# identified by chan_id/chan_point, linking node1_pub and node2_pub with a
# per-side RoutingPolicy. protoc-generated — regenerate from rpc.proto
# rather than editing by hand; serialized offsets must match the embedded
# FileDescriptorProto.
# NOTE(review): option bytes _b('0\001') appear to encode jstype=JS_STRING
# and _b('\030\001') deprecated=true (FieldOptions wire format) — confirm
# against the .proto before relying on this.
_CHANNELEDGE = _descriptor.Descriptor(
  name='ChannelEdge',
  full_name='lnrpc.ChannelEdge',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_id', full_name='lnrpc.ChannelEdge.channel_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='channel_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ChannelEdge.chan_point', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_update', full_name='lnrpc.ChannelEdge.last_update', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='last_update', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node1_pub', full_name='lnrpc.ChannelEdge.node1_pub', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node1_pub', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node2_pub', full_name='lnrpc.ChannelEdge.node2_pub', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node2_pub', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ChannelEdge.capacity', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node1_policy', full_name='lnrpc.ChannelEdge.node1_policy', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node1_policy', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node2_policy', full_name='lnrpc.ChannelEdge.node2_policy', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node2_policy', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13093,
  serialized_end=13416,
)
# Descriptor for lnrpc.ChannelGraphRequest: request for DescribeGraph, with a
# single bool flag to include unannounced channels. protoc-generated — do not
# hand-edit; regenerate from rpc.proto.
_CHANNELGRAPHREQUEST = _descriptor.Descriptor(
  name='ChannelGraphRequest',
  full_name='lnrpc.ChannelGraphRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='include_unannounced', full_name='lnrpc.ChannelGraphRequest.include_unannounced', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='include_unannounced', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13418,
  serialized_end=13489,
)
# Descriptor for lnrpc.ChannelGraph: the full graph as repeated node and edge
# messages (message_type links are resolved later by the generated code).
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_CHANNELGRAPH = _descriptor.Descriptor(
  name='ChannelGraph',
  full_name='lnrpc.ChannelGraph',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='nodes', full_name='lnrpc.ChannelGraph.nodes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='nodes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='edges', full_name='lnrpc.ChannelGraph.edges', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='edges', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13491,
  serialized_end=13591,
)
# Descriptor for lnrpc.ChanInfoRequest: looks up one channel by its uint64
# chan_id (option bytes _b('0\001') presumably encode jstype=JS_STRING — TODO
# confirm against the .proto). protoc-generated — do not hand-edit.
_CHANINFOREQUEST = _descriptor.Descriptor(
  name='ChanInfoRequest',
  full_name='lnrpc.ChanInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ChanInfoRequest.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13593,
  serialized_end=13631,
)
# Descriptor for lnrpc.NetworkInfoRequest: an empty request message.
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_NETWORKINFOREQUEST = _descriptor.Descriptor(
  name='NetworkInfoRequest',
  full_name='lnrpc.NetworkInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13633,
  serialized_end=13653,
)
# Descriptor for lnrpc.NetworkInfo: aggregate graph statistics (node/channel
# counts, degree, capacity min/avg/max/median, zombie channel count).
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_NETWORKINFO = _descriptor.Descriptor(
  name='NetworkInfo',
  full_name='lnrpc.NetworkInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='graph_diameter', full_name='lnrpc.NetworkInfo.graph_diameter', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='graph_diameter', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='avg_out_degree', full_name='lnrpc.NetworkInfo.avg_out_degree', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='avg_out_degree', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_out_degree', full_name='lnrpc.NetworkInfo.max_out_degree', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_out_degree', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_nodes', full_name='lnrpc.NetworkInfo.num_nodes', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_nodes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_channels', full_name='lnrpc.NetworkInfo.num_channels', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_network_capacity', full_name='lnrpc.NetworkInfo.total_network_capacity', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_network_capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='avg_channel_size', full_name='lnrpc.NetworkInfo.avg_channel_size', index=6,
      number=7, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='avg_channel_size', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_channel_size', full_name='lnrpc.NetworkInfo.min_channel_size', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='min_channel_size', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_channel_size', full_name='lnrpc.NetworkInfo.max_channel_size', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_channel_size', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='median_channel_size_sat', full_name='lnrpc.NetworkInfo.median_channel_size_sat', index=9,
      number=10, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='median_channel_size_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_zombie_chans', full_name='lnrpc.NetworkInfo.num_zombie_chans', index=10,
      number=11, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_zombie_chans', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13656,
  serialized_end=14145,
)
# Descriptor for lnrpc.StopRequest: an empty request message.
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_STOPREQUEST = _descriptor.Descriptor(
  name='StopRequest',
  full_name='lnrpc.StopRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14147,
  serialized_end=14160,
)
# Descriptor for lnrpc.StopResponse: an empty response message.
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_STOPRESPONSE = _descriptor.Descriptor(
  name='StopResponse',
  full_name='lnrpc.StopResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14162,
  serialized_end=14176,
)
# Descriptor for lnrpc.GraphTopologySubscription: an empty subscription
# request message. protoc-generated — do not hand-edit; regenerate from
# rpc.proto.
_GRAPHTOPOLOGYSUBSCRIPTION = _descriptor.Descriptor(
  name='GraphTopologySubscription',
  full_name='lnrpc.GraphTopologySubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14178,
  serialized_end=14205,
)
# Descriptor for lnrpc.GraphTopologyUpdate: a batch of node updates, channel
# updates and closed-channel notifications streamed to graph subscribers.
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_GRAPHTOPOLOGYUPDATE = _descriptor.Descriptor(
  name='GraphTopologyUpdate',
  full_name='lnrpc.GraphTopologyUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_updates', full_name='lnrpc.GraphTopologyUpdate.node_updates', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_updates', full_name='lnrpc.GraphTopologyUpdate.channel_updates', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closed_chans', full_name='lnrpc.GraphTopologyUpdate.closed_chans', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14208,
  serialized_end=14371,
)
# Descriptor for lnrpc.NodeUpdate: a node announcement carried in a topology
# update (addresses, identity key, feature bytes, alias, color).
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_NODEUPDATE = _descriptor.Descriptor(
  name='NodeUpdate',
  full_name='lnrpc.NodeUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='addresses', full_name='lnrpc.NodeUpdate.addresses', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='identity_key', full_name='lnrpc.NodeUpdate.identity_key', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='global_features', full_name='lnrpc.NodeUpdate.global_features', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='alias', full_name='lnrpc.NodeUpdate.alias', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='color', full_name='lnrpc.NodeUpdate.color', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14373,
  serialized_end=14481,
)
# Descriptor for lnrpc.ChannelEdgeUpdate: a channel announcement/policy change
# carried in a topology update, identifying the channel and the advertising
# and connecting nodes. protoc-generated — do not hand-edit; regenerate from
# rpc.proto.
_CHANNELEDGEUPDATE = _descriptor.Descriptor(
  name='ChannelEdgeUpdate',
  full_name='lnrpc.ChannelEdgeUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ChannelEdgeUpdate.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ChannelEdgeUpdate.chan_point', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ChannelEdgeUpdate.capacity', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='routing_policy', full_name='lnrpc.ChannelEdgeUpdate.routing_policy', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='advertising_node', full_name='lnrpc.ChannelEdgeUpdate.advertising_node', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='connecting_node', full_name='lnrpc.ChannelEdgeUpdate.connecting_node', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14484,
  serialized_end=14680,
)
# Descriptor for lnrpc.ClosedChannelUpdate: a channel-closure notification in
# a topology update (channel id, capacity, close height, outpoint).
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_CLOSEDCHANNELUPDATE = _descriptor.Descriptor(
  name='ClosedChannelUpdate',
  full_name='lnrpc.ClosedChannelUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ClosedChannelUpdate.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ClosedChannelUpdate.capacity', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closed_height', full_name='lnrpc.ClosedChannelUpdate.closed_height', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ClosedChannelUpdate.chan_point', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14682,
  serialized_end=14806,
)
# Descriptor for lnrpc.HopHint: one hop of a private route hint embedded in
# an invoice (node id, channel id, fee terms, CLTV delta).
# protoc-generated — do not hand-edit; regenerate from rpc.proto.
_HOPHINT = _descriptor.Descriptor(
  name='HopHint',
  full_name='lnrpc.HopHint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_id', full_name='lnrpc.HopHint.node_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.HopHint.chan_id', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_base_msat', full_name='lnrpc.HopHint.fee_base_msat', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_base_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_proportional_millionths', full_name='lnrpc.HopHint.fee_proportional_millionths', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_proportional_millionths', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_expiry_delta', full_name='lnrpc.HopHint.cltv_expiry_delta', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='cltv_expiry_delta', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14809,
  serialized_end=15024,
)
# Descriptor for lnrpc.RouteHint: an ordered list of HopHint messages forming
# one private route. protoc-generated — do not hand-edit; regenerate from
# rpc.proto.
_ROUTEHINT = _descriptor.Descriptor(
  name='RouteHint',
  full_name='lnrpc.RouteHint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='hop_hints', full_name='lnrpc.RouteHint.hop_hints', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='hop_hints', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=15026,
  serialized_end=15083,
)
# Descriptor for lnrpc.Invoice: the invoice message with payment hash/preimage,
# amounts (sat and msat variants), timestamps, route hints, state enum
# (_INVOICE_INVOICESTATE, defined elsewhere in this file) and per-HTLC detail.
# Note field numbers are non-contiguous with index order (e.g. value_msat is
# field number 23 at index 4). protoc-generated — do not hand-edit; regenerate
# from rpc.proto.
# NOTE(review): _b('\030\001') on 'settled' and 'amt_paid' appears to mark
# those fields deprecated (FieldOptions wire format) — confirm against the
# .proto.
_INVOICE = _descriptor.Descriptor(
  name='Invoice',
  full_name='lnrpc.Invoice',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='memo', full_name='lnrpc.Invoice.memo', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='memo', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='r_preimage', full_name='lnrpc.Invoice.r_preimage', index=1,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='r_preimage', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='r_hash', full_name='lnrpc.Invoice.r_hash', index=2,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='r_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.Invoice.value', index=3,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value_msat', full_name='lnrpc.Invoice.value_msat', index=4,
      number=23, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settled', full_name='lnrpc.Invoice.settled', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='settled', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='creation_date', full_name='lnrpc.Invoice.creation_date', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='creation_date', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settle_date', full_name='lnrpc.Invoice.settle_date', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settle_date', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_request', full_name='lnrpc.Invoice.payment_request', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_request', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='description_hash', full_name='lnrpc.Invoice.description_hash', index=9,
      number=10, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='description_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry', full_name='lnrpc.Invoice.expiry', index=10,
      number=11, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fallback_addr', full_name='lnrpc.Invoice.fallback_addr', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fallback_addr', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_expiry', full_name='lnrpc.Invoice.cltv_expiry', index=12,
      number=13, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='cltv_expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='route_hints', full_name='lnrpc.Invoice.route_hints', index=13,
      number=14, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='route_hints', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private', full_name='lnrpc.Invoice.private', index=14,
      number=15, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='private', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='add_index', full_name='lnrpc.Invoice.add_index', index=15,
      number=16, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='add_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settle_index', full_name='lnrpc.Invoice.settle_index', index=16,
      number=17, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settle_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_paid', full_name='lnrpc.Invoice.amt_paid', index=17,
      number=18, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='amt_paid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_paid_sat', full_name='lnrpc.Invoice.amt_paid_sat', index=18,
      number=19, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_paid_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_paid_msat', full_name='lnrpc.Invoice.amt_paid_msat', index=19,
      number=20, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_paid_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='lnrpc.Invoice.state', index=20,
      number=21, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='state', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='htlcs', full_name='lnrpc.Invoice.htlcs', index=21,
      number=22, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='htlcs', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _INVOICE_INVOICESTATE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=15086,
  serialized_end=15925,
)
# Descriptor for lnrpc.InvoiceHTLC: one HTLC paying into an invoice (channel
# id, HTLC index, msat amount, accept/resolve times, expiry height, state
# enum). protoc-generated — do not hand-edit; regenerate from rpc.proto.
_INVOICEHTLC = _descriptor.Descriptor(
  name='InvoiceHTLC',
  full_name='lnrpc.InvoiceHTLC',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.InvoiceHTLC.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='htlc_index', full_name='lnrpc.InvoiceHTLC.htlc_index', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='htlc_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_msat', full_name='lnrpc.InvoiceHTLC.amt_msat', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='accept_height', full_name='lnrpc.InvoiceHTLC.accept_height', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='accept_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='accept_time', full_name='lnrpc.InvoiceHTLC.accept_time', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='accept_time', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='resolve_time', full_name='lnrpc.InvoiceHTLC.resolve_time', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='resolve_time', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry_height', full_name='lnrpc.InvoiceHTLC.expiry_height', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='lnrpc.InvoiceHTLC.state', index=7,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='state', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=15928,
  serialized_end=16224,
)
_ADDINVOICERESPONSE = _descriptor.Descriptor(
name='AddInvoiceResponse',
full_name='lnrpc.AddInvoiceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='r_hash', full_name='lnrpc.AddInvoiceResponse.r_hash', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='r_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_request', full_name='lnrpc.AddInvoiceResponse.payment_request', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_request', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='add_index', full_name='lnrpc.AddInvoiceResponse.add_index', index=2,
number=16, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='add_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16226,
serialized_end=16342,
)
_PAYMENTHASH = _descriptor.Descriptor(
name='PaymentHash',
full_name='lnrpc.PaymentHash',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='r_hash_str', full_name='lnrpc.PaymentHash.r_hash_str', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='r_hash_str', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='r_hash', full_name='lnrpc.PaymentHash.r_hash', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='r_hash', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16344,
serialized_end=16417,
)
_LISTINVOICEREQUEST = _descriptor.Descriptor(
name='ListInvoiceRequest',
full_name='lnrpc.ListInvoiceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pending_only', full_name='lnrpc.ListInvoiceRequest.pending_only', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='pending_only', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index_offset', full_name='lnrpc.ListInvoiceRequest.index_offset', index=1,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='index_offset', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_max_invoices', full_name='lnrpc.ListInvoiceRequest.num_max_invoices', index=2,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_max_invoices', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reversed', full_name='lnrpc.ListInvoiceRequest.reversed', index=3,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='reversed', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16420,
serialized_end=16584,
)
_LISTINVOICERESPONSE = _descriptor.Descriptor(
name='ListInvoiceResponse',
full_name='lnrpc.ListInvoiceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='invoices', full_name='lnrpc.ListInvoiceResponse.invoices', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='invoices', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='last_index_offset', full_name='lnrpc.ListInvoiceResponse.last_index_offset', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='last_index_offset', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='first_index_offset', full_name='lnrpc.ListInvoiceResponse.first_index_offset', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='first_index_offset', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16587,
serialized_end=16746,
)
_INVOICESUBSCRIPTION = _descriptor.Descriptor(
name='InvoiceSubscription',
full_name='lnrpc.InvoiceSubscription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='add_index', full_name='lnrpc.InvoiceSubscription.add_index', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='add_index', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='settle_index', full_name='lnrpc.InvoiceSubscription.settle_index', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='settle_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16748,
serialized_end=16835,
)
_PAYMENT = _descriptor.Descriptor(
name='Payment',
full_name='lnrpc.Payment',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='payment_hash', full_name='lnrpc.Payment.payment_hash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='lnrpc.Payment.value', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='value', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creation_date', full_name='lnrpc.Payment.creation_date', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='creation_date', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='path', full_name='lnrpc.Payment.path', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='path', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee', full_name='lnrpc.Payment.fee', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='fee', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_preimage', full_name='lnrpc.Payment.payment_preimage', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_preimage', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_sat', full_name='lnrpc.Payment.value_sat', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='value_sat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value_msat', full_name='lnrpc.Payment.value_msat', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='value_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_request', full_name='lnrpc.Payment.payment_request', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_request', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='lnrpc.Payment.status', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='status', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_sat', full_name='lnrpc.Payment.fee_sat', index=10,
number=11, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_sat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_msat', full_name='lnrpc.Payment.fee_msat', index=11,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creation_time_ns', full_name='lnrpc.Payment.creation_time_ns', index=12,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='creation_time_ns', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='htlcs', full_name='lnrpc.Payment.htlcs', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='htlcs', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_PAYMENT_PAYMENTSTATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16838,
serialized_end=17411,
)
_HTLCATTEMPT = _descriptor.Descriptor(
name='HTLCAttempt',
full_name='lnrpc.HTLCAttempt',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='lnrpc.HTLCAttempt.status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='status', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='route', full_name='lnrpc.HTLCAttempt.route', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='route', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attempt_time_ns', full_name='lnrpc.HTLCAttempt.attempt_time_ns', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='attempt_time_ns', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='resolve_time_ns', full_name='lnrpc.HTLCAttempt.resolve_time_ns', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='resolve_time_ns', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_HTLCATTEMPT_HTLCSTATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17414,
serialized_end=17658,
)
_LISTPAYMENTSREQUEST = _descriptor.Descriptor(
name='ListPaymentsRequest',
full_name='lnrpc.ListPaymentsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='include_incomplete', full_name='lnrpc.ListPaymentsRequest.include_incomplete', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17660,
serialized_end=17709,
)
_LISTPAYMENTSRESPONSE = _descriptor.Descriptor(
name='ListPaymentsResponse',
full_name='lnrpc.ListPaymentsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='payments', full_name='lnrpc.ListPaymentsResponse.payments', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payments', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17711,
serialized_end=17777,
)
_DELETEALLPAYMENTSREQUEST = _descriptor.Descriptor(
name='DeleteAllPaymentsRequest',
full_name='lnrpc.DeleteAllPaymentsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17779,
serialized_end=17805,
)
_DELETEALLPAYMENTSRESPONSE = _descriptor.Descriptor(
name='DeleteAllPaymentsResponse',
full_name='lnrpc.DeleteAllPaymentsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17807,
serialized_end=17834,
)
_ABANDONCHANNELREQUEST = _descriptor.Descriptor(
name='AbandonChannelRequest',
full_name='lnrpc.AbandonChannelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel_point', full_name='lnrpc.AbandonChannelRequest.channel_point', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17836,
serialized_end=17903,
)
_ABANDONCHANNELRESPONSE = _descriptor.Descriptor(
name='AbandonChannelResponse',
full_name='lnrpc.AbandonChannelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17905,
serialized_end=17929,
)
_DEBUGLEVELREQUEST = _descriptor.Descriptor(
name='DebugLevelRequest',
full_name='lnrpc.DebugLevelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='show', full_name='lnrpc.DebugLevelRequest.show', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='level_spec', full_name='lnrpc.DebugLevelRequest.level_spec', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17931,
serialized_end=17984,
)
_DEBUGLEVELRESPONSE = _descriptor.Descriptor(
name='DebugLevelResponse',
full_name='lnrpc.DebugLevelResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='sub_systems', full_name='lnrpc.DebugLevelResponse.sub_systems', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='sub_systems', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17986,
serialized_end=18040,
)
_PAYREQSTRING = _descriptor.Descriptor(
name='PayReqString',
full_name='lnrpc.PayReqString',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pay_req', full_name='lnrpc.PayReqString.pay_req', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18042,
serialized_end=18073,
)
_PAYREQ = _descriptor.Descriptor(
name='PayReq',
full_name='lnrpc.PayReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='destination', full_name='lnrpc.PayReq.destination', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='destination', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_hash', full_name='lnrpc.PayReq.payment_hash', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_satoshis', full_name='lnrpc.PayReq.num_satoshis', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_satoshis', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='lnrpc.PayReq.timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='timestamp', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='expiry', full_name='lnrpc.PayReq.expiry', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='expiry', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description', full_name='lnrpc.PayReq.description', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='description', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description_hash', full_name='lnrpc.PayReq.description_hash', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='description_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fallback_addr', full_name='lnrpc.PayReq.fallback_addr', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fallback_addr', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cltv_expiry', full_name='lnrpc.PayReq.cltv_expiry', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='cltv_expiry', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='route_hints', full_name='lnrpc.PayReq.route_hints', index=9,
number=10, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='route_hints', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18076,
serialized_end=18446,
)
_FEEREPORTREQUEST = _descriptor.Descriptor(
name='FeeReportRequest',
full_name='lnrpc.FeeReportRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18448,
serialized_end=18466,
)
_CHANNELFEEREPORT = _descriptor.Descriptor(
name='ChannelFeeReport',
full_name='lnrpc.ChannelFeeReport',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_point', full_name='lnrpc.ChannelFeeReport.chan_point', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='base_fee_msat', full_name='lnrpc.ChannelFeeReport.base_fee_msat', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='base_fee_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_per_mil', full_name='lnrpc.ChannelFeeReport.fee_per_mil', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_per_mil', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_rate', full_name='lnrpc.ChannelFeeReport.fee_rate', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_rate', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18469,
serialized_end=18622,
)
_FEEREPORTRESPONSE = _descriptor.Descriptor(
name='FeeReportResponse',
full_name='lnrpc.FeeReportResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel_fees', full_name='lnrpc.FeeReportResponse.channel_fees', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='channel_fees', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='day_fee_sum', full_name='lnrpc.FeeReportResponse.day_fee_sum', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='day_fee_sum', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='week_fee_sum', full_name='lnrpc.FeeReportResponse.week_fee_sum', index=2,
number=3, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='week_fee_sum', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='month_fee_sum', full_name='lnrpc.FeeReportResponse.month_fee_sum', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='month_fee_sum', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18625,
serialized_end=18813,
)
_POLICYUPDATEREQUEST = _descriptor.Descriptor(
name='PolicyUpdateRequest',
full_name='lnrpc.PolicyUpdateRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='global', full_name='lnrpc.PolicyUpdateRequest.global', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='global', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_point', full_name='lnrpc.PolicyUpdateRequest.chan_point', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_point', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='base_fee_msat', full_name='lnrpc.PolicyUpdateRequest.base_fee_msat', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='base_fee_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_rate', full_name='lnrpc.PolicyUpdateRequest.fee_rate', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_rate', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time_lock_delta', full_name='lnrpc.PolicyUpdateRequest.time_lock_delta', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='time_lock_delta', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_htlc_msat', full_name='lnrpc.PolicyUpdateRequest.max_htlc_msat', index=5,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='max_htlc_msat', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='scope', full_name='lnrpc.PolicyUpdateRequest.scope',
index=0, containing_type=None, fields=[]),
],
serialized_start=18816,
serialized_end=19073,
)
_POLICYUPDATERESPONSE = _descriptor.Descriptor(
name='PolicyUpdateResponse',
full_name='lnrpc.PolicyUpdateResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19075,
serialized_end=19097,
)
_FORWARDINGHISTORYREQUEST = _descriptor.Descriptor(
name='ForwardingHistoryRequest',
full_name='lnrpc.ForwardingHistoryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='start_time', full_name='lnrpc.ForwardingHistoryRequest.start_time', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='start_time', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_time', full_name='lnrpc.ForwardingHistoryRequest.end_time', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='end_time', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index_offset', full_name='lnrpc.ForwardingHistoryRequest.index_offset', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='index_offset', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_max_events', full_name='lnrpc.ForwardingHistoryRequest.num_max_events', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_max_events', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19100,
serialized_end=19262,
)
_FORWARDINGEVENT = _descriptor.Descriptor(
name='ForwardingEvent',
full_name='lnrpc.ForwardingEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='lnrpc.ForwardingEvent.timestamp', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='timestamp', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_id_in', full_name='lnrpc.ForwardingEvent.chan_id_in', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('0\001'), json_name='chan_id_in', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_id_out', full_name='lnrpc.ForwardingEvent.chan_id_out', index=2,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('0\001'), json_name='chan_id_out', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_in', full_name='lnrpc.ForwardingEvent.amt_in', index=3,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_in', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_out', full_name='lnrpc.ForwardingEvent.amt_out', index=4,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_out', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee', full_name='lnrpc.ForwardingEvent.fee', index=5,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_msat', full_name='lnrpc.ForwardingEvent.fee_msat', index=6,
number=8, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_in_msat', full_name='lnrpc.ForwardingEvent.amt_in_msat', index=7,
number=9, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_in_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_out_msat', full_name='lnrpc.ForwardingEvent.amt_out_msat', index=8,
number=10, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_out_msat', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19265,
serialized_end=19552,
)
_FORWARDINGHISTORYRESPONSE = _descriptor.Descriptor(
name='ForwardingHistoryResponse',
full_name='lnrpc.ForwardingHistoryResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='forwarding_events', full_name='lnrpc.ForwardingHistoryResponse.forwarding_events', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='forwarding_events', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='last_offset_index', full_name='lnrpc.ForwardingHistoryResponse.last_offset_index', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='last_offset_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19555,
serialized_end=19698,
)
_EXPORTCHANNELBACKUPREQUEST = _descriptor.Descriptor(
name='ExportChannelBackupRequest',
full_name='lnrpc.ExportChannelBackupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_point', full_name='lnrpc.ExportChannelBackupRequest.chan_point', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19700,
serialized_end=19769,
)
_CHANNELBACKUP = _descriptor.Descriptor(
name='ChannelBackup',
full_name='lnrpc.ChannelBackup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_point', full_name='lnrpc.ChannelBackup.chan_point', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_point', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_backup', full_name='lnrpc.ChannelBackup.chan_backup', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_backup', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19771,
serialized_end=19873,
)
_MULTICHANBACKUP = _descriptor.Descriptor(
name='MultiChanBackup',
full_name='lnrpc.MultiChanBackup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_points', full_name='lnrpc.MultiChanBackup.chan_points', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_points', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='multi_chan_backup', full_name='lnrpc.MultiChanBackup.multi_chan_backup', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='multi_chan_backup', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19875,
serialized_end=19993,
)
_CHANBACKUPEXPORTREQUEST = _descriptor.Descriptor(
name='ChanBackupExportRequest',
full_name='lnrpc.ChanBackupExportRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19995,
serialized_end=20020,
)
_CHANBACKUPSNAPSHOT = _descriptor.Descriptor(
name='ChanBackupSnapshot',
full_name='lnrpc.ChanBackupSnapshot',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='single_chan_backups', full_name='lnrpc.ChanBackupSnapshot.single_chan_backups', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='single_chan_backups', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='multi_chan_backup', full_name='lnrpc.ChanBackupSnapshot.multi_chan_backup', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='multi_chan_backup', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20023,
serialized_end=20186,
)
_CHANNELBACKUPS = _descriptor.Descriptor(
name='ChannelBackups',
full_name='lnrpc.ChannelBackups',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_backups', full_name='lnrpc.ChannelBackups.chan_backups', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_backups', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20188,
serialized_end=20262,
)
_RESTORECHANBACKUPREQUEST = _descriptor.Descriptor(
name='RestoreChanBackupRequest',
full_name='lnrpc.RestoreChanBackupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_backups', full_name='lnrpc.RestoreChanBackupRequest.chan_backups', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_backups', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='multi_chan_backup', full_name='lnrpc.RestoreChanBackupRequest.multi_chan_backup', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='multi_chan_backup', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='backup', full_name='lnrpc.RestoreChanBackupRequest.backup',
index=0, containing_type=None, fields=[]),
],
serialized_start=20265,
serialized_end=20410,
)
_RESTOREBACKUPRESPONSE = _descriptor.Descriptor(
name='RestoreBackupResponse',
full_name='lnrpc.RestoreBackupResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20412,
serialized_end=20435,
)
_CHANNELBACKUPSUBSCRIPTION = _descriptor.Descriptor(
name='ChannelBackupSubscription',
full_name='lnrpc.ChannelBackupSubscription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20437,
serialized_end=20464,
)
_VERIFYCHANBACKUPRESPONSE = _descriptor.Descriptor(
name='VerifyChanBackupResponse',
full_name='lnrpc.VerifyChanBackupResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20466,
serialized_end=20492,
)
_MACAROONPERMISSION = _descriptor.Descriptor(
name='MacaroonPermission',
full_name='lnrpc.MacaroonPermission',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='entity', full_name='lnrpc.MacaroonPermission.entity', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='entity', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='lnrpc.MacaroonPermission.action', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='action', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20494,
serialized_end=20562,
)
_BAKEMACAROONREQUEST = _descriptor.Descriptor(
name='BakeMacaroonRequest',
full_name='lnrpc.BakeMacaroonRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='permissions', full_name='lnrpc.BakeMacaroonRequest.permissions', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='permissions', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20564,
serialized_end=20646,
)
_BAKEMACAROONRESPONSE = _descriptor.Descriptor(
name='BakeMacaroonResponse',
full_name='lnrpc.BakeMacaroonResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='macaroon', full_name='lnrpc.BakeMacaroonResponse.macaroon', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='macaroon', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20648,
serialized_end=20698,
)
_INITWALLETREQUEST.fields_by_name['channel_backups'].message_type = _CHANBACKUPSNAPSHOT
_UNLOCKWALLETREQUEST.fields_by_name['channel_backups'].message_type = _CHANBACKUPSNAPSHOT
_UTXO.fields_by_name['type'].enum_type = _ADDRESSTYPE
_UTXO.fields_by_name['outpoint'].message_type = _OUTPOINT
_TRANSACTIONDETAILS.fields_by_name['transactions'].message_type = _TRANSACTION
_FEELIMIT.oneofs_by_name['limit'].fields.append(
_FEELIMIT.fields_by_name['fixed'])
_FEELIMIT.fields_by_name['fixed'].containing_oneof = _FEELIMIT.oneofs_by_name['limit']
_FEELIMIT.oneofs_by_name['limit'].fields.append(
_FEELIMIT.fields_by_name['fixed_msat'])
_FEELIMIT.fields_by_name['fixed_msat'].containing_oneof = _FEELIMIT.oneofs_by_name['limit']
_FEELIMIT.oneofs_by_name['limit'].fields.append(
_FEELIMIT.fields_by_name['percent'])
_FEELIMIT.fields_by_name['percent'].containing_oneof = _FEELIMIT.oneofs_by_name['limit']
_SENDREQUEST_DESTTLVENTRY.containing_type = _SENDREQUEST
_SENDREQUEST.fields_by_name['fee_limit'].message_type = _FEELIMIT
_SENDREQUEST.fields_by_name['dest_tlv'].message_type = _SENDREQUEST_DESTTLVENTRY
_SENDRESPONSE.fields_by_name['payment_route'].message_type = _ROUTE
_SENDTOROUTEREQUEST.fields_by_name['route'].message_type = _ROUTE
_CHANNELPOINT.oneofs_by_name['funding_txid'].fields.append(
_CHANNELPOINT.fields_by_name['funding_txid_bytes'])
_CHANNELPOINT.fields_by_name['funding_txid_bytes'].containing_oneof = _CHANNELPOINT.oneofs_by_name['funding_txid']
_CHANNELPOINT.oneofs_by_name['funding_txid'].fields.append(
_CHANNELPOINT.fields_by_name['funding_txid_str'])
_CHANNELPOINT.fields_by_name['funding_txid_str'].containing_oneof = _CHANNELPOINT.oneofs_by_name['funding_txid']
_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY.containing_type = _ESTIMATEFEEREQUEST
_ESTIMATEFEEREQUEST.fields_by_name['AddrToAmount'].message_type = _ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY
_SENDMANYREQUEST_ADDRTOAMOUNTENTRY.containing_type = _SENDMANYREQUEST
_SENDMANYREQUEST.fields_by_name['AddrToAmount'].message_type = _SENDMANYREQUEST_ADDRTOAMOUNTENTRY
_LISTUNSPENTRESPONSE.fields_by_name['utxos'].message_type = _UTXO
_NEWADDRESSREQUEST.fields_by_name['type'].enum_type = _ADDRESSTYPE
_CONNECTPEERREQUEST.fields_by_name['addr'].message_type = _LIGHTNINGADDRESS
_CHANNEL.fields_by_name['pending_htlcs'].message_type = _HTLC
_LISTCHANNELSRESPONSE.fields_by_name['channels'].message_type = _CHANNEL
_CHANNELCLOSESUMMARY.fields_by_name['close_type'].enum_type = _CHANNELCLOSESUMMARY_CLOSURETYPE
_CHANNELCLOSESUMMARY_CLOSURETYPE.containing_type = _CHANNELCLOSESUMMARY
_CLOSEDCHANNELSRESPONSE.fields_by_name['channels'].message_type = _CHANNELCLOSESUMMARY
_PEER.fields_by_name['sync_type'].enum_type = _PEER_SYNCTYPE
_PEER_SYNCTYPE.containing_type = _PEER
_LISTPEERSRESPONSE.fields_by_name['peers'].message_type = _PEER
_GETINFORESPONSE.fields_by_name['chains'].message_type = _CHAIN
_CHANNELOPENUPDATE.fields_by_name['channel_point'].message_type = _CHANNELPOINT
_CLOSECHANNELREQUEST.fields_by_name['channel_point'].message_type = _CHANNELPOINT
_CLOSESTATUSUPDATE.fields_by_name['close_pending'].message_type = _PENDINGUPDATE
_CLOSESTATUSUPDATE.fields_by_name['chan_close'].message_type = _CHANNELCLOSEUPDATE
_CLOSESTATUSUPDATE.oneofs_by_name['update'].fields.append(
_CLOSESTATUSUPDATE.fields_by_name['close_pending'])
_CLOSESTATUSUPDATE.fields_by_name['close_pending'].containing_oneof = _CLOSESTATUSUPDATE.oneofs_by_name['update']
_CLOSESTATUSUPDATE.oneofs_by_name['update'].fields.append(
_CLOSESTATUSUPDATE.fields_by_name['chan_close'])
_CLOSESTATUSUPDATE.fields_by_name['chan_close'].containing_oneof = _CLOSESTATUSUPDATE.oneofs_by_name['update']
_OPENSTATUSUPDATE.fields_by_name['chan_pending'].message_type = _PENDINGUPDATE
_OPENSTATUSUPDATE.fields_by_name['chan_open'].message_type = _CHANNELOPENUPDATE
_OPENSTATUSUPDATE.oneofs_by_name['update'].fields.append(
_OPENSTATUSUPDATE.fields_by_name['chan_pending'])
_OPENSTATUSUPDATE.fields_by_name['chan_pending'].containing_oneof = _OPENSTATUSUPDATE.oneofs_by_name['update']
_OPENSTATUSUPDATE.oneofs_by_name['update'].fields.append(
_OPENSTATUSUPDATE.fields_by_name['chan_open'])
_OPENSTATUSUPDATE.fields_by_name['chan_open'].containing_oneof = _OPENSTATUSUPDATE.oneofs_by_name['update']
_PENDINGCHANNELSRESPONSE_PENDINGCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL.fields_by_name['pending_htlcs'].message_type = _PENDINGHTLC
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE.fields_by_name['pending_open_channels'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL
_PENDINGCHANNELSRESPONSE.fields_by_name['pending_closing_channels'].message_type = _PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL
_PENDINGCHANNELSRESPONSE.fields_by_name['pending_force_closing_channels'].message_type = _PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL
_PENDINGCHANNELSRESPONSE.fields_by_name['waiting_close_channels'].message_type = _PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL
_CHANNELEVENTUPDATE.fields_by_name['open_channel'].message_type = _CHANNEL
_CHANNELEVENTUPDATE.fields_by_name['closed_channel'].message_type = _CHANNELCLOSESUMMARY
_CHANNELEVENTUPDATE.fields_by_name['active_channel'].message_type = _CHANNELPOINT
_CHANNELEVENTUPDATE.fields_by_name['inactive_channel'].message_type = _CHANNELPOINT
_CHANNELEVENTUPDATE.fields_by_name['type'].enum_type = _CHANNELEVENTUPDATE_UPDATETYPE
_CHANNELEVENTUPDATE_UPDATETYPE.containing_type = _CHANNELEVENTUPDATE
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
_CHANNELEVENTUPDATE.fields_by_name['open_channel'])
_CHANNELEVENTUPDATE.fields_by_name['open_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
_CHANNELEVENTUPDATE.fields_by_name['closed_channel'])
_CHANNELEVENTUPDATE.fields_by_name['closed_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
_CHANNELEVENTUPDATE.fields_by_name['active_channel'])
_CHANNELEVENTUPDATE.fields_by_name['active_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
_CHANNELEVENTUPDATE.fields_by_name['inactive_channel'])
_CHANNELEVENTUPDATE.fields_by_name['inactive_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_QUERYROUTESREQUEST.fields_by_name['fee_limit'].message_type = _FEELIMIT
_QUERYROUTESREQUEST.fields_by_name['ignored_edges'].message_type = _EDGELOCATOR
_QUERYROUTESREQUEST.fields_by_name['ignored_pairs'].message_type = _NODEPAIR
_QUERYROUTESRESPONSE.fields_by_name['routes'].message_type = _ROUTE
_HOP.fields_by_name['mpp_record'].message_type = _MPPRECORD
_ROUTE.fields_by_name['hops'].message_type = _HOP
_NODEINFO.fields_by_name['node'].message_type = _LIGHTNINGNODE
_NODEINFO.fields_by_name['channels'].message_type = _CHANNELEDGE
_LIGHTNINGNODE.fields_by_name['addresses'].message_type = _NODEADDRESS
_CHANNELEDGE.fields_by_name['node1_policy'].message_type = _ROUTINGPOLICY
_CHANNELEDGE.fields_by_name['node2_policy'].message_type = _ROUTINGPOLICY
_CHANNELGRAPH.fields_by_name['nodes'].message_type = _LIGHTNINGNODE
_CHANNELGRAPH.fields_by_name['edges'].message_type = _CHANNELEDGE
_GRAPHTOPOLOGYUPDATE.fields_by_name['node_updates'].message_type = _NODEUPDATE
_GRAPHTOPOLOGYUPDATE.fields_by_name['channel_updates'].message_type = _CHANNELEDGEUPDATE
_GRAPHTOPOLOGYUPDATE.fields_by_name['closed_chans'].message_type = _CLOSEDCHANNELUPDATE
_CHANNELEDGEUPDATE.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_CHANNELEDGEUPDATE.fields_by_name['routing_policy'].message_type = _ROUTINGPOLICY
_CLOSEDCHANNELUPDATE.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_ROUTEHINT.fields_by_name['hop_hints'].message_type = _HOPHINT
_INVOICE.fields_by_name['route_hints'].message_type = _ROUTEHINT
_INVOICE.fields_by_name['state'].enum_type = _INVOICE_INVOICESTATE
_INVOICE.fields_by_name['htlcs'].message_type = _INVOICEHTLC
_INVOICE_INVOICESTATE.containing_type = _INVOICE
_INVOICEHTLC.fields_by_name['state'].enum_type = _INVOICEHTLCSTATE
_LISTINVOICERESPONSE.fields_by_name['invoices'].message_type = _INVOICE
_PAYMENT.fields_by_name['status'].enum_type = _PAYMENT_PAYMENTSTATUS
_PAYMENT.fields_by_name['htlcs'].message_type = _HTLCATTEMPT
_PAYMENT_PAYMENTSTATUS.containing_type = _PAYMENT
_HTLCATTEMPT.fields_by_name['status'].enum_type = _HTLCATTEMPT_HTLCSTATUS
_HTLCATTEMPT.fields_by_name['route'].message_type = _ROUTE
_HTLCATTEMPT_HTLCSTATUS.containing_type = _HTLCATTEMPT
_LISTPAYMENTSRESPONSE.fields_by_name['payments'].message_type = _PAYMENT
_ABANDONCHANNELREQUEST.fields_by_name['channel_point'].message_type = _CHANNELPOINT
_PAYREQ.fields_by_name['route_hints'].message_type = _ROUTEHINT
_FEEREPORTRESPONSE.fields_by_name['channel_fees'].message_type = _CHANNELFEEREPORT
_POLICYUPDATEREQUEST.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_POLICYUPDATEREQUEST.oneofs_by_name['scope'].fields.append(
_POLICYUPDATEREQUEST.fields_by_name['global'])
_POLICYUPDATEREQUEST.fields_by_name['global'].containing_oneof = _POLICYUPDATEREQUEST.oneofs_by_name['scope']
_POLICYUPDATEREQUEST.oneofs_by_name['scope'].fields.append(
_POLICYUPDATEREQUEST.fields_by_name['chan_point'])
_POLICYUPDATEREQUEST.fields_by_name['chan_point'].containing_oneof = _POLICYUPDATEREQUEST.oneofs_by_name['scope']
_FORWARDINGHISTORYRESPONSE.fields_by_name['forwarding_events'].message_type = _FORWARDINGEVENT
_EXPORTCHANNELBACKUPREQUEST.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_CHANNELBACKUP.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_MULTICHANBACKUP.fields_by_name['chan_points'].message_type = _CHANNELPOINT
_CHANBACKUPSNAPSHOT.fields_by_name['single_chan_backups'].message_type = _CHANNELBACKUPS
_CHANBACKUPSNAPSHOT.fields_by_name['multi_chan_backup'].message_type = _MULTICHANBACKUP
_CHANNELBACKUPS.fields_by_name['chan_backups'].message_type = _CHANNELBACKUP
_RESTORECHANBACKUPREQUEST.fields_by_name['chan_backups'].message_type = _CHANNELBACKUPS
_RESTORECHANBACKUPREQUEST.oneofs_by_name['backup'].fields.append(
_RESTORECHANBACKUPREQUEST.fields_by_name['chan_backups'])
_RESTORECHANBACKUPREQUEST.fields_by_name['chan_backups'].containing_oneof = _RESTORECHANBACKUPREQUEST.oneofs_by_name['backup']
_RESTORECHANBACKUPREQUEST.oneofs_by_name['backup'].fields.append(
_RESTORECHANBACKUPREQUEST.fields_by_name['multi_chan_backup'])
_RESTORECHANBACKUPREQUEST.fields_by_name['multi_chan_backup'].containing_oneof = _RESTORECHANBACKUPREQUEST.oneofs_by_name['backup']
_BAKEMACAROONREQUEST.fields_by_name['permissions'].message_type = _MACAROONPERMISSION
DESCRIPTOR.message_types_by_name['GenSeedRequest'] = _GENSEEDREQUEST
DESCRIPTOR.message_types_by_name['GenSeedResponse'] = _GENSEEDRESPONSE
DESCRIPTOR.message_types_by_name['InitWalletRequest'] = _INITWALLETREQUEST
DESCRIPTOR.message_types_by_name['InitWalletResponse'] = _INITWALLETRESPONSE
DESCRIPTOR.message_types_by_name['UnlockWalletRequest'] = _UNLOCKWALLETREQUEST
DESCRIPTOR.message_types_by_name['UnlockWalletResponse'] = _UNLOCKWALLETRESPONSE
DESCRIPTOR.message_types_by_name['ChangePasswordRequest'] = _CHANGEPASSWORDREQUEST
DESCRIPTOR.message_types_by_name['ChangePasswordResponse'] = _CHANGEPASSWORDRESPONSE
DESCRIPTOR.message_types_by_name['Utxo'] = _UTXO
DESCRIPTOR.message_types_by_name['Transaction'] = _TRANSACTION
DESCRIPTOR.message_types_by_name['GetTransactionsRequest'] = _GETTRANSACTIONSREQUEST
DESCRIPTOR.message_types_by_name['TransactionDetails'] = _TRANSACTIONDETAILS
DESCRIPTOR.message_types_by_name['FeeLimit'] = _FEELIMIT
DESCRIPTOR.message_types_by_name['SendRequest'] = _SENDREQUEST
DESCRIPTOR.message_types_by_name['SendResponse'] = _SENDRESPONSE
DESCRIPTOR.message_types_by_name['SendToRouteRequest'] = _SENDTOROUTEREQUEST
DESCRIPTOR.message_types_by_name['ChannelAcceptRequest'] = _CHANNELACCEPTREQUEST
DESCRIPTOR.message_types_by_name['ChannelAcceptResponse'] = _CHANNELACCEPTRESPONSE
DESCRIPTOR.message_types_by_name['ChannelPoint'] = _CHANNELPOINT
DESCRIPTOR.message_types_by_name['OutPoint'] = _OUTPOINT
DESCRIPTOR.message_types_by_name['LightningAddress'] = _LIGHTNINGADDRESS
DESCRIPTOR.message_types_by_name['EstimateFeeRequest'] = _ESTIMATEFEEREQUEST
DESCRIPTOR.message_types_by_name['EstimateFeeResponse'] = _ESTIMATEFEERESPONSE
DESCRIPTOR.message_types_by_name['SendManyRequest'] = _SENDMANYREQUEST
# --- Generated registration section (protoc output) -------------------------
# Rewritten as a data-driven loop: each top-level message/enum descriptor is
# published on the file DESCRIPTOR under its proto name.  The generated code
# names every descriptor variable '_<NAME.upper()>', so the object can be
# looked up in module globals instead of spelling out one assignment per type.
# NOTE(review): regenerating this file with protoc restores the canonical
# one-line-per-type form; runtime behavior is identical either way.
for _msg_name in (
    'SendManyResponse', 'SendCoinsRequest', 'SendCoinsResponse',
    'ListUnspentRequest', 'ListUnspentResponse', 'NewAddressRequest',
    'NewAddressResponse', 'SignMessageRequest', 'SignMessageResponse',
    'VerifyMessageRequest', 'VerifyMessageResponse', 'ConnectPeerRequest',
    'ConnectPeerResponse', 'DisconnectPeerRequest', 'DisconnectPeerResponse',
    'HTLC', 'Channel', 'ListChannelsRequest', 'ListChannelsResponse',
    'ChannelCloseSummary', 'ClosedChannelsRequest', 'ClosedChannelsResponse',
    'Peer', 'ListPeersRequest', 'ListPeersResponse', 'GetInfoRequest',
    'GetInfoResponse', 'Chain', 'ConfirmationUpdate', 'ChannelOpenUpdate',
    'ChannelCloseUpdate', 'CloseChannelRequest', 'CloseStatusUpdate',
    'PendingUpdate', 'OpenChannelRequest', 'OpenStatusUpdate', 'PendingHTLC',
    'PendingChannelsRequest', 'PendingChannelsResponse',
    'ChannelEventSubscription', 'ChannelEventUpdate', 'WalletBalanceRequest',
    'WalletBalanceResponse', 'ChannelBalanceRequest', 'ChannelBalanceResponse',
    'QueryRoutesRequest', 'NodePair', 'EdgeLocator', 'QueryRoutesResponse',
    'Hop', 'MPPRecord', 'Route', 'NodeInfoRequest', 'NodeInfo',
    'LightningNode', 'NodeAddress', 'RoutingPolicy', 'ChannelEdge',
    'ChannelGraphRequest', 'ChannelGraph', 'ChanInfoRequest',
    'NetworkInfoRequest', 'NetworkInfo', 'StopRequest', 'StopResponse',
    'GraphTopologySubscription', 'GraphTopologyUpdate', 'NodeUpdate',
    'ChannelEdgeUpdate', 'ClosedChannelUpdate', 'HopHint', 'RouteHint',
    'Invoice', 'InvoiceHTLC', 'AddInvoiceResponse', 'PaymentHash',
    'ListInvoiceRequest', 'ListInvoiceResponse', 'InvoiceSubscription',
    'Payment', 'HTLCAttempt', 'ListPaymentsRequest', 'ListPaymentsResponse',
    'DeleteAllPaymentsRequest', 'DeleteAllPaymentsResponse',
    'AbandonChannelRequest', 'AbandonChannelResponse', 'DebugLevelRequest',
    'DebugLevelResponse', 'PayReqString', 'PayReq', 'FeeReportRequest',
    'ChannelFeeReport', 'FeeReportResponse', 'PolicyUpdateRequest',
    'PolicyUpdateResponse', 'ForwardingHistoryRequest', 'ForwardingEvent',
    'ForwardingHistoryResponse', 'ExportChannelBackupRequest',
    'ChannelBackup', 'MultiChanBackup', 'ChanBackupExportRequest',
    'ChanBackupSnapshot', 'ChannelBackups', 'RestoreChanBackupRequest',
    'RestoreBackupResponse', 'ChannelBackupSubscription',
    'VerifyChanBackupResponse', 'MacaroonPermission', 'BakeMacaroonRequest',
    'BakeMacaroonResponse',
):
    # e.g. 'SendManyResponse' -> module global '_SENDMANYRESPONSE'
    DESCRIPTOR.message_types_by_name[_msg_name] = globals()['_' + _msg_name.upper()]
for _enum_name in ('AddressType', 'InvoiceHTLCState'):
    DESCRIPTOR.enum_types_by_name[_enum_name] = globals()['_' + _enum_name.upper()]
# Publish the fully-populated file descriptor through the default symbol
# database -- same call the canonical generated code makes at this point.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Generated message classes (protoc output) ------------------------------
# Rewritten as a data-driven loop over the message names.  For every proto
# message the generated code builds a concrete Python class via
# _reflection.GeneratedProtocolMessageType, binds it to a module-level name,
# and registers it with the default symbol database.  Descriptor variables
# follow the '_<NAME.upper()>' convention (nested types use
# '_<PARENT.upper()>_<CHILD.upper()>'), so they can be fetched from module
# globals instead of being spelled out one block per class.
# NOTE(review): regenerating this file with protoc restores the canonical
# one-class-per-block form; runtime behavior is identical either way.

# Messages that declare nested message types (map-entry helpers and the
# PendingChannelsResponse sub-messages), in their original declaration order.
_NESTED_MESSAGES = {
    'SendRequest': ('DestTlvEntry',),
    'EstimateFeeRequest': ('AddrToAmountEntry',),
    'SendManyRequest': ('AddrToAmountEntry',),
    'PendingChannelsResponse': (
        'PendingChannel', 'PendingOpenChannel', 'WaitingCloseChannel',
        'ClosedChannel', 'ForceClosedChannel',
    ),
}

for _msg_name in (
    'GenSeedRequest', 'GenSeedResponse', 'InitWalletRequest',
    'InitWalletResponse', 'UnlockWalletRequest', 'UnlockWalletResponse',
    'ChangePasswordRequest', 'ChangePasswordResponse', 'Utxo', 'Transaction',
    'GetTransactionsRequest', 'TransactionDetails', 'FeeLimit', 'SendRequest',
    'SendResponse', 'SendToRouteRequest', 'ChannelAcceptRequest',
    'ChannelAcceptResponse', 'ChannelPoint', 'OutPoint', 'LightningAddress',
    'EstimateFeeRequest', 'EstimateFeeResponse', 'SendManyRequest',
    'SendManyResponse', 'SendCoinsRequest', 'SendCoinsResponse',
    'ListUnspentRequest', 'ListUnspentResponse', 'NewAddressRequest',
    'NewAddressResponse', 'SignMessageRequest', 'SignMessageResponse',
    'VerifyMessageRequest', 'VerifyMessageResponse', 'ConnectPeerRequest',
    'ConnectPeerResponse', 'DisconnectPeerRequest', 'DisconnectPeerResponse',
    'HTLC', 'Channel', 'ListChannelsRequest', 'ListChannelsResponse',
    'ChannelCloseSummary', 'ClosedChannelsRequest', 'ClosedChannelsResponse',
    'Peer', 'ListPeersRequest', 'ListPeersResponse', 'GetInfoRequest',
    'GetInfoResponse', 'Chain', 'ConfirmationUpdate', 'ChannelOpenUpdate',
    'ChannelCloseUpdate', 'CloseChannelRequest', 'CloseStatusUpdate',
    'PendingUpdate', 'OpenChannelRequest', 'OpenStatusUpdate', 'PendingHTLC',
    'PendingChannelsRequest', 'PendingChannelsResponse',
    'ChannelEventSubscription', 'ChannelEventUpdate', 'WalletBalanceRequest',
    'WalletBalanceResponse', 'ChannelBalanceRequest', 'ChannelBalanceResponse',
    'QueryRoutesRequest', 'NodePair', 'EdgeLocator', 'QueryRoutesResponse',
    'Hop', 'MPPRecord', 'Route', 'NodeInfoRequest', 'NodeInfo',
    'LightningNode', 'NodeAddress', 'RoutingPolicy', 'ChannelEdge',
    'ChannelGraphRequest', 'ChannelGraph', 'ChanInfoRequest',
    'NetworkInfoRequest', 'NetworkInfo', 'StopRequest', 'StopResponse',
    'GraphTopologySubscription', 'GraphTopologyUpdate', 'NodeUpdate',
):
    _class_dict = {}
    # Nested classes go into the parent's class dict first, exactly as the
    # canonical generated expansion does.
    for _nested_name in _NESTED_MESSAGES.get(_msg_name, ()):
        _class_dict[_nested_name] = _reflection.GeneratedProtocolMessageType(
            _nested_name,
            (_message.Message,),
            {
                'DESCRIPTOR': globals()[
                    '_%s_%s' % (_msg_name.upper(), _nested_name.upper())],
                '__module__': 'rpc_pb2',
            })
    _class_dict['DESCRIPTOR'] = globals()['_' + _msg_name.upper()]
    _class_dict['__module__'] = 'rpc_pb2'
    _msg_cls = _reflection.GeneratedProtocolMessageType(
        _msg_name, (_message.Message,), _class_dict)
    # Bind the class at module level under its proto name (the canonical
    # generated file does this with a plain assignment per class).
    globals()[_msg_name] = _msg_cls
    # Registration order matches the expansion: parent first, then nested.
    _sym_db.RegisterMessage(_msg_cls)
    for _nested_name in _NESTED_MESSAGES.get(_msg_name, ()):
        _sym_db.RegisterMessage(getattr(_msg_cls, _nested_name))
ChannelEdgeUpdate = _reflection.GeneratedProtocolMessageType('ChannelEdgeUpdate', (_message.Message,), {
'DESCRIPTOR' : _CHANNELEDGEUPDATE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelEdgeUpdate)
})
_sym_db.RegisterMessage(ChannelEdgeUpdate)
ClosedChannelUpdate = _reflection.GeneratedProtocolMessageType('ClosedChannelUpdate', (_message.Message,), {
'DESCRIPTOR' : _CLOSEDCHANNELUPDATE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ClosedChannelUpdate)
})
_sym_db.RegisterMessage(ClosedChannelUpdate)
HopHint = _reflection.GeneratedProtocolMessageType('HopHint', (_message.Message,), {
'DESCRIPTOR' : _HOPHINT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.HopHint)
})
_sym_db.RegisterMessage(HopHint)
RouteHint = _reflection.GeneratedProtocolMessageType('RouteHint', (_message.Message,), {
'DESCRIPTOR' : _ROUTEHINT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.RouteHint)
})
_sym_db.RegisterMessage(RouteHint)
Invoice = _reflection.GeneratedProtocolMessageType('Invoice', (_message.Message,), {
'DESCRIPTOR' : _INVOICE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.Invoice)
})
_sym_db.RegisterMessage(Invoice)
InvoiceHTLC = _reflection.GeneratedProtocolMessageType('InvoiceHTLC', (_message.Message,), {
'DESCRIPTOR' : _INVOICEHTLC,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.InvoiceHTLC)
})
_sym_db.RegisterMessage(InvoiceHTLC)
AddInvoiceResponse = _reflection.GeneratedProtocolMessageType('AddInvoiceResponse', (_message.Message,), {
'DESCRIPTOR' : _ADDINVOICERESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.AddInvoiceResponse)
})
_sym_db.RegisterMessage(AddInvoiceResponse)
PaymentHash = _reflection.GeneratedProtocolMessageType('PaymentHash', (_message.Message,), {
'DESCRIPTOR' : _PAYMENTHASH,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PaymentHash)
})
_sym_db.RegisterMessage(PaymentHash)
ListInvoiceRequest = _reflection.GeneratedProtocolMessageType('ListInvoiceRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTINVOICEREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListInvoiceRequest)
})
_sym_db.RegisterMessage(ListInvoiceRequest)
ListInvoiceResponse = _reflection.GeneratedProtocolMessageType('ListInvoiceResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTINVOICERESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListInvoiceResponse)
})
_sym_db.RegisterMessage(ListInvoiceResponse)
InvoiceSubscription = _reflection.GeneratedProtocolMessageType('InvoiceSubscription', (_message.Message,), {
'DESCRIPTOR' : _INVOICESUBSCRIPTION,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.InvoiceSubscription)
})
_sym_db.RegisterMessage(InvoiceSubscription)
Payment = _reflection.GeneratedProtocolMessageType('Payment', (_message.Message,), {
'DESCRIPTOR' : _PAYMENT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.Payment)
})
_sym_db.RegisterMessage(Payment)
HTLCAttempt = _reflection.GeneratedProtocolMessageType('HTLCAttempt', (_message.Message,), {
'DESCRIPTOR' : _HTLCATTEMPT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.HTLCAttempt)
})
_sym_db.RegisterMessage(HTLCAttempt)
ListPaymentsRequest = _reflection.GeneratedProtocolMessageType('ListPaymentsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTPAYMENTSREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListPaymentsRequest)
})
_sym_db.RegisterMessage(ListPaymentsRequest)
ListPaymentsResponse = _reflection.GeneratedProtocolMessageType('ListPaymentsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTPAYMENTSRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListPaymentsResponse)
})
_sym_db.RegisterMessage(ListPaymentsResponse)
DeleteAllPaymentsRequest = _reflection.GeneratedProtocolMessageType('DeleteAllPaymentsRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEALLPAYMENTSREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DeleteAllPaymentsRequest)
})
_sym_db.RegisterMessage(DeleteAllPaymentsRequest)
DeleteAllPaymentsResponse = _reflection.GeneratedProtocolMessageType('DeleteAllPaymentsResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEALLPAYMENTSRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DeleteAllPaymentsResponse)
})
_sym_db.RegisterMessage(DeleteAllPaymentsResponse)
AbandonChannelRequest = _reflection.GeneratedProtocolMessageType('AbandonChannelRequest', (_message.Message,), {
'DESCRIPTOR' : _ABANDONCHANNELREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.AbandonChannelRequest)
})
_sym_db.RegisterMessage(AbandonChannelRequest)
AbandonChannelResponse = _reflection.GeneratedProtocolMessageType('AbandonChannelResponse', (_message.Message,), {
'DESCRIPTOR' : _ABANDONCHANNELRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.AbandonChannelResponse)
})
_sym_db.RegisterMessage(AbandonChannelResponse)
DebugLevelRequest = _reflection.GeneratedProtocolMessageType('DebugLevelRequest', (_message.Message,), {
'DESCRIPTOR' : _DEBUGLEVELREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DebugLevelRequest)
})
_sym_db.RegisterMessage(DebugLevelRequest)
DebugLevelResponse = _reflection.GeneratedProtocolMessageType('DebugLevelResponse', (_message.Message,), {
'DESCRIPTOR' : _DEBUGLEVELRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DebugLevelResponse)
})
_sym_db.RegisterMessage(DebugLevelResponse)
PayReqString = _reflection.GeneratedProtocolMessageType('PayReqString', (_message.Message,), {
'DESCRIPTOR' : _PAYREQSTRING,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PayReqString)
})
_sym_db.RegisterMessage(PayReqString)
PayReq = _reflection.GeneratedProtocolMessageType('PayReq', (_message.Message,), {
'DESCRIPTOR' : _PAYREQ,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PayReq)
})
_sym_db.RegisterMessage(PayReq)
FeeReportRequest = _reflection.GeneratedProtocolMessageType('FeeReportRequest', (_message.Message,), {
'DESCRIPTOR' : _FEEREPORTREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.FeeReportRequest)
})
_sym_db.RegisterMessage(FeeReportRequest)
ChannelFeeReport = _reflection.GeneratedProtocolMessageType('ChannelFeeReport', (_message.Message,), {
'DESCRIPTOR' : _CHANNELFEEREPORT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelFeeReport)
})
_sym_db.RegisterMessage(ChannelFeeReport)
FeeReportResponse = _reflection.GeneratedProtocolMessageType('FeeReportResponse', (_message.Message,), {
'DESCRIPTOR' : _FEEREPORTRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.FeeReportResponse)
})
_sym_db.RegisterMessage(FeeReportResponse)
PolicyUpdateRequest = _reflection.GeneratedProtocolMessageType('PolicyUpdateRequest', (_message.Message,), {
'DESCRIPTOR' : _POLICYUPDATEREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PolicyUpdateRequest)
})
_sym_db.RegisterMessage(PolicyUpdateRequest)
PolicyUpdateResponse = _reflection.GeneratedProtocolMessageType('PolicyUpdateResponse', (_message.Message,), {
'DESCRIPTOR' : _POLICYUPDATERESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PolicyUpdateResponse)
})
_sym_db.RegisterMessage(PolicyUpdateResponse)
ForwardingHistoryRequest = _reflection.GeneratedProtocolMessageType('ForwardingHistoryRequest', (_message.Message,), {
'DESCRIPTOR' : _FORWARDINGHISTORYREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ForwardingHistoryRequest)
})
_sym_db.RegisterMessage(ForwardingHistoryRequest)
ForwardingEvent = _reflection.GeneratedProtocolMessageType('ForwardingEvent', (_message.Message,), {
'DESCRIPTOR' : _FORWARDINGEVENT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ForwardingEvent)
})
_sym_db.RegisterMessage(ForwardingEvent)
ForwardingHistoryResponse = _reflection.GeneratedProtocolMessageType('ForwardingHistoryResponse', (_message.Message,), {
'DESCRIPTOR' : _FORWARDINGHISTORYRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ForwardingHistoryResponse)
})
_sym_db.RegisterMessage(ForwardingHistoryResponse)
ExportChannelBackupRequest = _reflection.GeneratedProtocolMessageType('ExportChannelBackupRequest', (_message.Message,), {
'DESCRIPTOR' : _EXPORTCHANNELBACKUPREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ExportChannelBackupRequest)
})
_sym_db.RegisterMessage(ExportChannelBackupRequest)
ChannelBackup = _reflection.GeneratedProtocolMessageType('ChannelBackup', (_message.Message,), {
'DESCRIPTOR' : _CHANNELBACKUP,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelBackup)
})
_sym_db.RegisterMessage(ChannelBackup)
MultiChanBackup = _reflection.GeneratedProtocolMessageType('MultiChanBackup', (_message.Message,), {
'DESCRIPTOR' : _MULTICHANBACKUP,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.MultiChanBackup)
})
_sym_db.RegisterMessage(MultiChanBackup)
ChanBackupExportRequest = _reflection.GeneratedProtocolMessageType('ChanBackupExportRequest', (_message.Message,), {
'DESCRIPTOR' : _CHANBACKUPEXPORTREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChanBackupExportRequest)
})
_sym_db.RegisterMessage(ChanBackupExportRequest)
ChanBackupSnapshot = _reflection.GeneratedProtocolMessageType('ChanBackupSnapshot', (_message.Message,), {
'DESCRIPTOR' : _CHANBACKUPSNAPSHOT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChanBackupSnapshot)
})
_sym_db.RegisterMessage(ChanBackupSnapshot)
ChannelBackups = _reflection.GeneratedProtocolMessageType('ChannelBackups', (_message.Message,), {
'DESCRIPTOR' : _CHANNELBACKUPS,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelBackups)
})
_sym_db.RegisterMessage(ChannelBackups)
RestoreChanBackupRequest = _reflection.GeneratedProtocolMessageType('RestoreChanBackupRequest', (_message.Message,), {
'DESCRIPTOR' : _RESTORECHANBACKUPREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.RestoreChanBackupRequest)
})
_sym_db.RegisterMessage(RestoreChanBackupRequest)
RestoreBackupResponse = _reflection.GeneratedProtocolMessageType('RestoreBackupResponse', (_message.Message,), {
'DESCRIPTOR' : _RESTOREBACKUPRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.RestoreBackupResponse)
})
_sym_db.RegisterMessage(RestoreBackupResponse)
ChannelBackupSubscription = _reflection.GeneratedProtocolMessageType('ChannelBackupSubscription', (_message.Message,), {
'DESCRIPTOR' : _CHANNELBACKUPSUBSCRIPTION,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelBackupSubscription)
})
_sym_db.RegisterMessage(ChannelBackupSubscription)
VerifyChanBackupResponse = _reflection.GeneratedProtocolMessageType('VerifyChanBackupResponse', (_message.Message,), {
'DESCRIPTOR' : _VERIFYCHANBACKUPRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.VerifyChanBackupResponse)
})
_sym_db.RegisterMessage(VerifyChanBackupResponse)
MacaroonPermission = _reflection.GeneratedProtocolMessageType('MacaroonPermission', (_message.Message,), {
'DESCRIPTOR' : _MACAROONPERMISSION,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.MacaroonPermission)
})
_sym_db.RegisterMessage(MacaroonPermission)
BakeMacaroonRequest = _reflection.GeneratedProtocolMessageType('BakeMacaroonRequest', (_message.Message,), {
'DESCRIPTOR' : _BAKEMACAROONREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.BakeMacaroonRequest)
})
_sym_db.RegisterMessage(BakeMacaroonRequest)
BakeMacaroonResponse = _reflection.GeneratedProtocolMessageType('BakeMacaroonResponse', (_message.Message,), {
'DESCRIPTOR' : _BAKEMACAROONRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.BakeMacaroonResponse)
})
_sym_db.RegisterMessage(BakeMacaroonResponse)
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Clear the parsed options on the file descriptor and on fields/nested
# entries that carried per-field options in the .proto (e.g. deprecated or
# custom annotations).  Protoc emits these resets after the options have
# been baked into the serialized descriptor blob earlier in the module.
# ---------------------------------------------------------------------------
DESCRIPTOR._options = None
_SENDREQUEST_DESTTLVENTRY._options = None
_SENDREQUEST.fields_by_name['dest_string']._options = None
_SENDREQUEST.fields_by_name['payment_hash_string']._options = None
_SENDREQUEST.fields_by_name['outgoing_chan_id']._options = None
_SENDTOROUTEREQUEST.fields_by_name['payment_hash_string']._options = None
_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY._options = None
_SENDMANYREQUEST_ADDRTOAMOUNTENTRY._options = None
_CHANNEL.fields_by_name['chan_id']._options = None
_CHANNELCLOSESUMMARY.fields_by_name['chan_id']._options = None
_GETINFORESPONSE.fields_by_name['testnet']._options = None
_OPENCHANNELREQUEST.fields_by_name['node_pubkey_string']._options = None
_QUERYROUTESREQUEST.fields_by_name['ignored_edges']._options = None
_EDGELOCATOR.fields_by_name['channel_id']._options = None
_HOP.fields_by_name['chan_id']._options = None
_HOP.fields_by_name['amt_to_forward']._options = None
_HOP.fields_by_name['fee']._options = None
_ROUTE.fields_by_name['total_fees']._options = None
_ROUTE.fields_by_name['total_amt']._options = None
_CHANNELEDGE.fields_by_name['channel_id']._options = None
_CHANNELEDGE.fields_by_name['last_update']._options = None
_CHANINFOREQUEST.fields_by_name['chan_id']._options = None
_CHANNELEDGEUPDATE.fields_by_name['chan_id']._options = None
_CLOSEDCHANNELUPDATE.fields_by_name['chan_id']._options = None
_HOPHINT.fields_by_name['chan_id']._options = None
_INVOICE.fields_by_name['settled']._options = None
_INVOICE.fields_by_name['amt_paid']._options = None
_INVOICEHTLC.fields_by_name['chan_id']._options = None
_PAYMENTHASH.fields_by_name['r_hash_str']._options = None
_PAYMENT.fields_by_name['value']._options = None
_PAYMENT.fields_by_name['creation_date']._options = None
_PAYMENT.fields_by_name['path']._options = None
_PAYMENT.fields_by_name['fee']._options = None
_FORWARDINGEVENT.fields_by_name['chan_id_in']._options = None
_FORWARDINGEVENT.fields_by_name['chan_id_out']._options = None
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Service descriptor for lnrpc.WalletUnlocker (wallet seed generation,
# initialisation, unlock and password change).  The serialized_start/_end
# values are byte offsets into this module's serialized file descriptor and
# must match the blob exactly; the serialized_options bytes encode the
# google.api.http REST annotations (method + URL path) for the gRPC gateway.
# ---------------------------------------------------------------------------
_WALLETUNLOCKER = _descriptor.ServiceDescriptor(
  name='WalletUnlocker',
  full_name='lnrpc.WalletUnlocker',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  serialized_start=20889,
  serialized_end=21290,
  methods=[
  _descriptor.MethodDescriptor(
    name='GenSeed',
    full_name='lnrpc.WalletUnlocker.GenSeed',
    index=0,
    containing_service=None,
    input_type=_GENSEEDREQUEST,
    output_type=_GENSEEDRESPONSE,
    serialized_options=_b('\202\323\344\223\002\r\022\013/v1/genseed'),
  ),
  _descriptor.MethodDescriptor(
    name='InitWallet',
    full_name='lnrpc.WalletUnlocker.InitWallet',
    index=1,
    containing_service=None,
    input_type=_INITWALLETREQUEST,
    output_type=_INITWALLETRESPONSE,
    serialized_options=_b('\202\323\344\223\002\023\"\016/v1/initwallet:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='UnlockWallet',
    full_name='lnrpc.WalletUnlocker.UnlockWallet',
    index=2,
    containing_service=None,
    input_type=_UNLOCKWALLETREQUEST,
    output_type=_UNLOCKWALLETRESPONSE,
    serialized_options=_b('\202\323\344\223\002\025\"\020/v1/unlockwallet:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='ChangePassword',
    full_name='lnrpc.WalletUnlocker.ChangePassword',
    index=3,
    containing_service=None,
    input_type=_CHANGEPASSWORDREQUEST,
    output_type=_CHANGEPASSWORDRESPONSE,
    serialized_options=_b('\202\323\344\223\002\027\"\022/v1/changepassword:\001*'),
  ),
])
_sym_db.RegisterServiceDescriptor(_WALLETUNLOCKER)
DESCRIPTOR.services_by_name['WalletUnlocker'] = _WALLETUNLOCKER
# ---------------------------------------------------------------------------
# Generated by protoc — DO NOT EDIT BY HAND.
# Service descriptor for the main lnrpc.Lightning service (52 RPCs covering
# balances, on-chain transactions, peers, channels, payments, invoices,
# graph queries, fee policy and channel backups).  Methods with
# serialized_options carry a google.api.http REST annotation (HTTP verb +
# URL template) for the gRPC gateway; methods with serialized_options=None
# are streaming or gRPC-only endpoints.  Offsets and option bytes must stay
# byte-exact with the serialized file descriptor in this module.
# ---------------------------------------------------------------------------
_LIGHTNING = _descriptor.ServiceDescriptor(
  name='Lightning',
  full_name='lnrpc.Lightning',
  file=DESCRIPTOR,
  index=1,
  serialized_options=None,
  serialized_start=21293,
  serialized_end=26359,
  methods=[
  _descriptor.MethodDescriptor(
    name='WalletBalance',
    full_name='lnrpc.Lightning.WalletBalance',
    index=0,
    containing_service=None,
    input_type=_WALLETBALANCEREQUEST,
    output_type=_WALLETBALANCERESPONSE,
    serialized_options=_b('\202\323\344\223\002\030\022\026/v1/balance/blockchain'),
  ),
  _descriptor.MethodDescriptor(
    name='ChannelBalance',
    full_name='lnrpc.Lightning.ChannelBalance',
    index=1,
    containing_service=None,
    input_type=_CHANNELBALANCEREQUEST,
    output_type=_CHANNELBALANCERESPONSE,
    serialized_options=_b('\202\323\344\223\002\026\022\024/v1/balance/channels'),
  ),
  _descriptor.MethodDescriptor(
    name='GetTransactions',
    full_name='lnrpc.Lightning.GetTransactions',
    index=2,
    containing_service=None,
    input_type=_GETTRANSACTIONSREQUEST,
    output_type=_TRANSACTIONDETAILS,
    serialized_options=_b('\202\323\344\223\002\022\022\020/v1/transactions'),
  ),
  _descriptor.MethodDescriptor(
    name='EstimateFee',
    full_name='lnrpc.Lightning.EstimateFee',
    index=3,
    containing_service=None,
    input_type=_ESTIMATEFEEREQUEST,
    output_type=_ESTIMATEFEERESPONSE,
    serialized_options=_b('\202\323\344\223\002\026\022\024/v1/transactions/fee'),
  ),
  _descriptor.MethodDescriptor(
    name='SendCoins',
    full_name='lnrpc.Lightning.SendCoins',
    index=4,
    containing_service=None,
    input_type=_SENDCOINSREQUEST,
    output_type=_SENDCOINSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\025\"\020/v1/transactions:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='ListUnspent',
    full_name='lnrpc.Lightning.ListUnspent',
    index=5,
    containing_service=None,
    input_type=_LISTUNSPENTREQUEST,
    output_type=_LISTUNSPENTRESPONSE,
    serialized_options=_b('\202\323\344\223\002\013\022\t/v1/utxos'),
  ),
  _descriptor.MethodDescriptor(
    name='SubscribeTransactions',
    full_name='lnrpc.Lightning.SubscribeTransactions',
    index=6,
    containing_service=None,
    input_type=_GETTRANSACTIONSREQUEST,
    output_type=_TRANSACTION,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='SendMany',
    full_name='lnrpc.Lightning.SendMany',
    index=7,
    containing_service=None,
    input_type=_SENDMANYREQUEST,
    output_type=_SENDMANYRESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='NewAddress',
    full_name='lnrpc.Lightning.NewAddress',
    index=8,
    containing_service=None,
    input_type=_NEWADDRESSREQUEST,
    output_type=_NEWADDRESSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\020\022\016/v1/newaddress'),
  ),
  _descriptor.MethodDescriptor(
    name='SignMessage',
    full_name='lnrpc.Lightning.SignMessage',
    index=9,
    containing_service=None,
    input_type=_SIGNMESSAGEREQUEST,
    output_type=_SIGNMESSAGERESPONSE,
    serialized_options=_b('\202\323\344\223\002\024\"\017/v1/signmessage:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='VerifyMessage',
    full_name='lnrpc.Lightning.VerifyMessage',
    index=10,
    containing_service=None,
    input_type=_VERIFYMESSAGEREQUEST,
    output_type=_VERIFYMESSAGERESPONSE,
    serialized_options=_b('\202\323\344\223\002\026\"\021/v1/verifymessage:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='ConnectPeer',
    full_name='lnrpc.Lightning.ConnectPeer',
    index=11,
    containing_service=None,
    input_type=_CONNECTPEERREQUEST,
    output_type=_CONNECTPEERRESPONSE,
    serialized_options=_b('\202\323\344\223\002\016\"\t/v1/peers:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='DisconnectPeer',
    full_name='lnrpc.Lightning.DisconnectPeer',
    index=12,
    containing_service=None,
    input_type=_DISCONNECTPEERREQUEST,
    output_type=_DISCONNECTPEERRESPONSE,
    serialized_options=_b('\202\323\344\223\002\025*\023/v1/peers/{pub_key}'),
  ),
  _descriptor.MethodDescriptor(
    name='ListPeers',
    full_name='lnrpc.Lightning.ListPeers',
    index=13,
    containing_service=None,
    input_type=_LISTPEERSREQUEST,
    output_type=_LISTPEERSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\013\022\t/v1/peers'),
  ),
  _descriptor.MethodDescriptor(
    name='GetInfo',
    full_name='lnrpc.Lightning.GetInfo',
    index=14,
    containing_service=None,
    input_type=_GETINFOREQUEST,
    output_type=_GETINFORESPONSE,
    serialized_options=_b('\202\323\344\223\002\r\022\013/v1/getinfo'),
  ),
  _descriptor.MethodDescriptor(
    name='PendingChannels',
    full_name='lnrpc.Lightning.PendingChannels',
    index=15,
    containing_service=None,
    input_type=_PENDINGCHANNELSREQUEST,
    output_type=_PENDINGCHANNELSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\026\022\024/v1/channels/pending'),
  ),
  _descriptor.MethodDescriptor(
    name='ListChannels',
    full_name='lnrpc.Lightning.ListChannels',
    index=16,
    containing_service=None,
    input_type=_LISTCHANNELSREQUEST,
    output_type=_LISTCHANNELSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\016\022\014/v1/channels'),
  ),
  _descriptor.MethodDescriptor(
    name='SubscribeChannelEvents',
    full_name='lnrpc.Lightning.SubscribeChannelEvents',
    index=17,
    containing_service=None,
    input_type=_CHANNELEVENTSUBSCRIPTION,
    output_type=_CHANNELEVENTUPDATE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='ClosedChannels',
    full_name='lnrpc.Lightning.ClosedChannels',
    index=18,
    containing_service=None,
    input_type=_CLOSEDCHANNELSREQUEST,
    output_type=_CLOSEDCHANNELSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\025\022\023/v1/channels/closed'),
  ),
  _descriptor.MethodDescriptor(
    name='OpenChannelSync',
    full_name='lnrpc.Lightning.OpenChannelSync',
    index=19,
    containing_service=None,
    input_type=_OPENCHANNELREQUEST,
    output_type=_CHANNELPOINT,
    serialized_options=_b('\202\323\344\223\002\021\"\014/v1/channels:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='OpenChannel',
    full_name='lnrpc.Lightning.OpenChannel',
    index=20,
    containing_service=None,
    input_type=_OPENCHANNELREQUEST,
    output_type=_OPENSTATUSUPDATE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='ChannelAcceptor',
    full_name='lnrpc.Lightning.ChannelAcceptor',
    index=21,
    containing_service=None,
    input_type=_CHANNELACCEPTRESPONSE,
    output_type=_CHANNELACCEPTREQUEST,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='CloseChannel',
    full_name='lnrpc.Lightning.CloseChannel',
    index=22,
    containing_service=None,
    input_type=_CLOSECHANNELREQUEST,
    output_type=_CLOSESTATUSUPDATE,
    serialized_options=_b('\202\323\344\223\002L*J/v1/channels/{channel_point.funding_txid_str}/{channel_point.output_index}'),
  ),
  _descriptor.MethodDescriptor(
    name='AbandonChannel',
    full_name='lnrpc.Lightning.AbandonChannel',
    index=23,
    containing_service=None,
    input_type=_ABANDONCHANNELREQUEST,
    output_type=_ABANDONCHANNELRESPONSE,
    serialized_options=_b('\202\323\344\223\002T*R/v1/channels/abandon/{channel_point.funding_txid_str}/{channel_point.output_index}'),
  ),
  _descriptor.MethodDescriptor(
    name='SendPayment',
    full_name='lnrpc.Lightning.SendPayment',
    index=24,
    containing_service=None,
    input_type=_SENDREQUEST,
    output_type=_SENDRESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='SendPaymentSync',
    full_name='lnrpc.Lightning.SendPaymentSync',
    index=25,
    containing_service=None,
    input_type=_SENDREQUEST,
    output_type=_SENDRESPONSE,
    serialized_options=_b('\202\323\344\223\002\036\"\031/v1/channels/transactions:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='SendToRoute',
    full_name='lnrpc.Lightning.SendToRoute',
    index=26,
    containing_service=None,
    input_type=_SENDTOROUTEREQUEST,
    output_type=_SENDRESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='SendToRouteSync',
    full_name='lnrpc.Lightning.SendToRouteSync',
    index=27,
    containing_service=None,
    input_type=_SENDTOROUTEREQUEST,
    output_type=_SENDRESPONSE,
    serialized_options=_b('\202\323\344\223\002$\"\037/v1/channels/transactions/route:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='AddInvoice',
    full_name='lnrpc.Lightning.AddInvoice',
    index=28,
    containing_service=None,
    input_type=_INVOICE,
    output_type=_ADDINVOICERESPONSE,
    serialized_options=_b('\202\323\344\223\002\021\"\014/v1/invoices:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='ListInvoices',
    full_name='lnrpc.Lightning.ListInvoices',
    index=29,
    containing_service=None,
    input_type=_LISTINVOICEREQUEST,
    output_type=_LISTINVOICERESPONSE,
    serialized_options=_b('\202\323\344\223\002\016\022\014/v1/invoices'),
  ),
  _descriptor.MethodDescriptor(
    name='LookupInvoice',
    full_name='lnrpc.Lightning.LookupInvoice',
    index=30,
    containing_service=None,
    input_type=_PAYMENTHASH,
    output_type=_INVOICE,
    serialized_options=_b('\202\323\344\223\002\032\022\030/v1/invoice/{r_hash_str}'),
  ),
  _descriptor.MethodDescriptor(
    name='SubscribeInvoices',
    full_name='lnrpc.Lightning.SubscribeInvoices',
    index=31,
    containing_service=None,
    input_type=_INVOICESUBSCRIPTION,
    output_type=_INVOICE,
    serialized_options=_b('\202\323\344\223\002\030\022\026/v1/invoices/subscribe'),
  ),
  _descriptor.MethodDescriptor(
    name='DecodePayReq',
    full_name='lnrpc.Lightning.DecodePayReq',
    index=32,
    containing_service=None,
    input_type=_PAYREQSTRING,
    output_type=_PAYREQ,
    serialized_options=_b('\202\323\344\223\002\026\022\024/v1/payreq/{pay_req}'),
  ),
  _descriptor.MethodDescriptor(
    name='ListPayments',
    full_name='lnrpc.Lightning.ListPayments',
    index=33,
    containing_service=None,
    input_type=_LISTPAYMENTSREQUEST,
    output_type=_LISTPAYMENTSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\016\022\014/v1/payments'),
  ),
  _descriptor.MethodDescriptor(
    name='DeleteAllPayments',
    full_name='lnrpc.Lightning.DeleteAllPayments',
    index=34,
    containing_service=None,
    input_type=_DELETEALLPAYMENTSREQUEST,
    output_type=_DELETEALLPAYMENTSRESPONSE,
    serialized_options=_b('\202\323\344\223\002\016*\014/v1/payments'),
  ),
  _descriptor.MethodDescriptor(
    name='DescribeGraph',
    full_name='lnrpc.Lightning.DescribeGraph',
    index=35,
    containing_service=None,
    input_type=_CHANNELGRAPHREQUEST,
    output_type=_CHANNELGRAPH,
    serialized_options=_b('\202\323\344\223\002\013\022\t/v1/graph'),
  ),
  _descriptor.MethodDescriptor(
    name='GetChanInfo',
    full_name='lnrpc.Lightning.GetChanInfo',
    index=36,
    containing_service=None,
    input_type=_CHANINFOREQUEST,
    output_type=_CHANNELEDGE,
    serialized_options=_b('\202\323\344\223\002\032\022\030/v1/graph/edge/{chan_id}'),
  ),
  _descriptor.MethodDescriptor(
    name='GetNodeInfo',
    full_name='lnrpc.Lightning.GetNodeInfo',
    index=37,
    containing_service=None,
    input_type=_NODEINFOREQUEST,
    output_type=_NODEINFO,
    serialized_options=_b('\202\323\344\223\002\032\022\030/v1/graph/node/{pub_key}'),
  ),
  _descriptor.MethodDescriptor(
    name='QueryRoutes',
    full_name='lnrpc.Lightning.QueryRoutes',
    index=38,
    containing_service=None,
    input_type=_QUERYROUTESREQUEST,
    output_type=_QUERYROUTESRESPONSE,
    serialized_options=_b('\202\323\344\223\002\"\022 /v1/graph/routes/{pub_key}/{amt}'),
  ),
  _descriptor.MethodDescriptor(
    name='GetNetworkInfo',
    full_name='lnrpc.Lightning.GetNetworkInfo',
    index=39,
    containing_service=None,
    input_type=_NETWORKINFOREQUEST,
    output_type=_NETWORKINFO,
    serialized_options=_b('\202\323\344\223\002\020\022\016/v1/graph/info'),
  ),
  _descriptor.MethodDescriptor(
    name='StopDaemon',
    full_name='lnrpc.Lightning.StopDaemon',
    index=40,
    containing_service=None,
    input_type=_STOPREQUEST,
    output_type=_STOPRESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='SubscribeChannelGraph',
    full_name='lnrpc.Lightning.SubscribeChannelGraph',
    index=41,
    containing_service=None,
    input_type=_GRAPHTOPOLOGYSUBSCRIPTION,
    output_type=_GRAPHTOPOLOGYUPDATE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='DebugLevel',
    full_name='lnrpc.Lightning.DebugLevel',
    index=42,
    containing_service=None,
    input_type=_DEBUGLEVELREQUEST,
    output_type=_DEBUGLEVELRESPONSE,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='FeeReport',
    full_name='lnrpc.Lightning.FeeReport',
    index=43,
    containing_service=None,
    input_type=_FEEREPORTREQUEST,
    output_type=_FEEREPORTRESPONSE,
    serialized_options=_b('\202\323\344\223\002\n\022\010/v1/fees'),
  ),
  _descriptor.MethodDescriptor(
    name='UpdateChannelPolicy',
    full_name='lnrpc.Lightning.UpdateChannelPolicy',
    index=44,
    containing_service=None,
    input_type=_POLICYUPDATEREQUEST,
    output_type=_POLICYUPDATERESPONSE,
    serialized_options=_b('\202\323\344\223\002\023\"\016/v1/chanpolicy:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='ForwardingHistory',
    full_name='lnrpc.Lightning.ForwardingHistory',
    index=45,
    containing_service=None,
    input_type=_FORWARDINGHISTORYREQUEST,
    output_type=_FORWARDINGHISTORYRESPONSE,
    serialized_options=_b('\202\323\344\223\002\017\"\n/v1/switch:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='ExportChannelBackup',
    full_name='lnrpc.Lightning.ExportChannelBackup',
    index=46,
    containing_service=None,
    input_type=_EXPORTCHANNELBACKUPREQUEST,
    output_type=_CHANNELBACKUP,
    serialized_options=_b('\202\323\344\223\002M\022K/v1/channels/backup/{chan_point.funding_txid_str}/{chan_point.output_index}'),
  ),
  _descriptor.MethodDescriptor(
    name='ExportAllChannelBackups',
    full_name='lnrpc.Lightning.ExportAllChannelBackups',
    index=47,
    containing_service=None,
    input_type=_CHANBACKUPEXPORTREQUEST,
    output_type=_CHANBACKUPSNAPSHOT,
    serialized_options=_b('\202\323\344\223\002\025\022\023/v1/channels/backup'),
  ),
  _descriptor.MethodDescriptor(
    name='VerifyChanBackup',
    full_name='lnrpc.Lightning.VerifyChanBackup',
    index=48,
    containing_service=None,
    input_type=_CHANBACKUPSNAPSHOT,
    output_type=_VERIFYCHANBACKUPRESPONSE,
    serialized_options=_b('\202\323\344\223\002\037\"\032/v1/channels/backup/verify:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='RestoreChannelBackups',
    full_name='lnrpc.Lightning.RestoreChannelBackups',
    index=49,
    containing_service=None,
    input_type=_RESTORECHANBACKUPREQUEST,
    output_type=_RESTOREBACKUPRESPONSE,
    serialized_options=_b('\202\323\344\223\002 \"\033/v1/channels/backup/restore:\001*'),
  ),
  _descriptor.MethodDescriptor(
    name='SubscribeChannelBackups',
    full_name='lnrpc.Lightning.SubscribeChannelBackups',
    index=50,
    containing_service=None,
    input_type=_CHANNELBACKUPSUBSCRIPTION,
    output_type=_CHANBACKUPSNAPSHOT,
    serialized_options=None,
  ),
  _descriptor.MethodDescriptor(
    name='BakeMacaroon',
    full_name='lnrpc.Lightning.BakeMacaroon',
    index=51,
    containing_service=None,
    input_type=_BAKEMACAROONREQUEST,
    output_type=_BAKEMACAROONRESPONSE,
    serialized_options=_b('\202\323\344\223\002\021\"\014/v1/macaroon:\001*'),
  ),
])
_sym_db.RegisterServiceDescriptor(_LIGHTNING)
DESCRIPTOR.services_by_name['Lightning'] = _LIGHTNING
# @@protoc_insertion_point(module_scope)
| 42.564947 | 43,401 | 0.752543 |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='rpc.proto',
package='lnrpc',
syntax='proto3',
serialized_options=_b('Z%github.com/lightningnetwork/lnd/lnrpc'),
serialized_pb=_b('\n\trpc.proto\x12\x05lnrpc\x1a\x1cgoogle/api/annotations.proto\"A\n\x0eGenSeedRequest\x12\x19\n\x11\x61\x65zeed_passphrase\x18\x01 \x01(\x0c\x12\x14\n\x0cseed_entropy\x18\x02 \x01(\x0c\"H\n\x0fGenSeedResponse\x12\x1c\n\x14\x63ipher_seed_mnemonic\x18\x01 \x03(\t\x12\x17\n\x0f\x65nciphered_seed\x18\x02 \x01(\x0c\"\xb2\x01\n\x11InitWalletRequest\x12\x17\n\x0fwallet_password\x18\x01 \x01(\x0c\x12\x1c\n\x14\x63ipher_seed_mnemonic\x18\x02 \x03(\t\x12\x19\n\x11\x61\x65zeed_passphrase\x18\x03 \x01(\x0c\x12\x17\n\x0frecovery_window\x18\x04 \x01(\x05\x12\x32\n\x0f\x63hannel_backups\x18\x05 \x01(\x0b\x32\x19.lnrpc.ChanBackupSnapshot\"\x14\n\x12InitWalletResponse\"{\n\x13UnlockWalletRequest\x12\x17\n\x0fwallet_password\x18\x01 \x01(\x0c\x12\x17\n\x0frecovery_window\x18\x02 \x01(\x05\x12\x32\n\x0f\x63hannel_backups\x18\x03 \x01(\x0b\x32\x19.lnrpc.ChanBackupSnapshot\"\x16\n\x14UnlockWalletResponse\"G\n\x15\x43hangePasswordRequest\x12\x18\n\x10\x63urrent_password\x18\x01 \x01(\x0c\x12\x14\n\x0cnew_password\x18\x02 \x01(\x0c\"\x18\n\x16\x43hangePasswordResponse\"\xe1\x01\n\x04Utxo\x12.\n\x04type\x18\x01 \x01(\x0e\x32\x12.lnrpc.AddressTypeR\x0c\x61\x64\x64ress_type\x12\x18\n\x07\x61\x64\x64ress\x18\x02 \x01(\tR\x07\x61\x64\x64ress\x12\x1e\n\namount_sat\x18\x03 \x01(\x03R\namount_sat\x12\x1c\n\tpk_script\x18\x04 \x01(\tR\tpk_script\x12+\n\x08outpoint\x18\x05 \x01(\x0b\x32\x0f.lnrpc.OutPointR\x08outpoint\x12$\n\rconfirmations\x18\x06 \x01(\x03R\rconfirmations\"\xb9\x02\n\x0bTransaction\x12\x18\n\x07tx_hash\x18\x01 \x01(\tR\x07tx_hash\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12,\n\x11num_confirmations\x18\x03 \x01(\x05R\x11num_confirmations\x12\x1e\n\nblock_hash\x18\x04 \x01(\tR\nblock_hash\x12\"\n\x0c\x62lock_height\x18\x05 \x01(\x05R\x0c\x62lock_height\x12\x1e\n\ntime_stamp\x18\x06 \x01(\x03R\ntime_stamp\x12\x1e\n\ntotal_fees\x18\x07 \x01(\x03R\ntotal_fees\x12&\n\x0e\x64\x65st_addresses\x18\x08 
\x03(\tR\x0e\x64\x65st_addresses\x12\x1e\n\nraw_tx_hex\x18\t \x01(\tR\nraw_tx_hex\"\x18\n\x16GetTransactionsRequest\"L\n\x12TransactionDetails\x12\x36\n\x0ctransactions\x18\x01 \x03(\x0b\x32\x12.lnrpc.TransactionR\x0ctransactions\"M\n\x08\x46\x65\x65Limit\x12\x0f\n\x05\x66ixed\x18\x01 \x01(\x03H\x00\x12\x14\n\nfixed_msat\x18\x03 \x01(\x03H\x00\x12\x11\n\x07percent\x18\x02 \x01(\x03H\x00\x42\x07\n\x05limit\"\xab\x03\n\x0bSendRequest\x12\x0c\n\x04\x64\x65st\x18\x01 \x01(\x0c\x12\x17\n\x0b\x64\x65st_string\x18\x02 \x01(\tB\x02\x18\x01\x12\x0b\n\x03\x61mt\x18\x03 \x01(\x03\x12\x10\n\x08\x61mt_msat\x18\x0c \x01(\x03\x12\x14\n\x0cpayment_hash\x18\x04 \x01(\x0c\x12\x1f\n\x13payment_hash_string\x18\x05 \x01(\tB\x02\x18\x01\x12\x17\n\x0fpayment_request\x18\x06 \x01(\t\x12\x18\n\x10\x66inal_cltv_delta\x18\x07 \x01(\x05\x12\"\n\tfee_limit\x18\x08 \x01(\x0b\x32\x0f.lnrpc.FeeLimit\x12\x1c\n\x10outgoing_chan_id\x18\t \x01(\x04\x42\x02\x30\x01\x12\x17\n\x0flast_hop_pubkey\x18\r \x01(\x0c\x12\x12\n\ncltv_limit\x18\n \x01(\r\x12\x31\n\x08\x64\x65st_tlv\x18\x0b \x03(\x0b\x32\x1f.lnrpc.SendRequest.DestTlvEntry\x12\x1a\n\x12\x61llow_self_payment\x18\x0e \x01(\x08\x1a.\n\x0c\x44\x65stTlvEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01\"\xb8\x01\n\x0cSendResponse\x12$\n\rpayment_error\x18\x01 \x01(\tR\rpayment_error\x12*\n\x10payment_preimage\x18\x02 \x01(\x0cR\x10payment_preimage\x12\x32\n\rpayment_route\x18\x03 \x01(\x0b\x32\x0c.lnrpc.RouteR\rpayment_route\x12\"\n\x0cpayment_hash\x18\x04 \x01(\x0cR\x0cpayment_hash\"n\n\x12SendToRouteRequest\x12\x14\n\x0cpayment_hash\x18\x01 \x01(\x0c\x12\x1f\n\x13payment_hash_string\x18\x02 \x01(\tB\x02\x18\x01\x12\x1b\n\x05route\x18\x04 \x01(\x0b\x32\x0c.lnrpc.RouteJ\x04\x08\x03\x10\x04\"\xb5\x02\n\x14\x43hannelAcceptRequest\x12\x13\n\x0bnode_pubkey\x18\x01 \x01(\x0c\x12\x12\n\nchain_hash\x18\x02 \x01(\x0c\x12\x17\n\x0fpending_chan_id\x18\x03 \x01(\x0c\x12\x13\n\x0b\x66unding_amt\x18\x04 
\x01(\x04\x12\x10\n\x08push_amt\x18\x05 \x01(\x04\x12\x12\n\ndust_limit\x18\x06 \x01(\x04\x12\x1b\n\x13max_value_in_flight\x18\x07 \x01(\x04\x12\x17\n\x0f\x63hannel_reserve\x18\x08 \x01(\x04\x12\x10\n\x08min_htlc\x18\t \x01(\x04\x12\x12\n\nfee_per_kw\x18\n \x01(\x04\x12\x11\n\tcsv_delay\x18\x0b \x01(\r\x12\x1a\n\x12max_accepted_htlcs\x18\x0c \x01(\r\x12\x15\n\rchannel_flags\x18\r \x01(\r\"@\n\x15\x43hannelAcceptResponse\x12\x0e\n\x06\x61\x63\x63\x65pt\x18\x01 \x01(\x08\x12\x17\n\x0fpending_chan_id\x18\x02 \x01(\x0c\"\xa2\x01\n\x0c\x43hannelPoint\x12\x30\n\x12\x66unding_txid_bytes\x18\x01 \x01(\x0cH\x00R\x12\x66unding_txid_bytes\x12,\n\x10\x66unding_txid_str\x18\x02 \x01(\tH\x00R\x10\x66unding_txid_str\x12\"\n\x0coutput_index\x18\x03 \x01(\rR\x0coutput_indexB\x0e\n\x0c\x66unding_txid\"j\n\x08OutPoint\x12\x1e\n\ntxid_bytes\x18\x01 \x01(\x0cR\ntxid_bytes\x12\x1a\n\x08txid_str\x18\x02 \x01(\tR\x08txid_str\x12\"\n\x0coutput_index\x18\x03 \x01(\rR\x0coutput_index\">\n\x10LightningAddress\x12\x16\n\x06pubkey\x18\x01 \x01(\tR\x06pubkey\x12\x12\n\x04host\x18\x02 \x01(\tR\x04host\"\xa1\x01\n\x12\x45stimateFeeRequest\x12\x41\n\x0c\x41\x64\x64rToAmount\x18\x01 \x03(\x0b\x32+.lnrpc.EstimateFeeRequest.AddrToAmountEntry\x12\x13\n\x0btarget_conf\x18\x02 \x01(\x05\x1a\x33\n\x11\x41\x64\x64rToAmountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\"c\n\x13\x45stimateFeeResponse\x12\x18\n\x07\x66\x65\x65_sat\x18\x01 \x01(\x03R\x07\x66\x65\x65_sat\x12\x32\n\x14\x66\x65\x65rate_sat_per_byte\x18\x02 \x01(\x03R\x14\x66\x65\x65rate_sat_per_byte\"\xb1\x01\n\x0fSendManyRequest\x12>\n\x0c\x41\x64\x64rToAmount\x18\x01 \x03(\x0b\x32(.lnrpc.SendManyRequest.AddrToAmountEntry\x12\x13\n\x0btarget_conf\x18\x03 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x05 \x01(\x03\x1a\x33\n\x11\x41\x64\x64rToAmountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\"&\n\x10SendManyResponse\x12\x12\n\x04txid\x18\x01 
\x01(\tR\x04txid\"m\n\x10SendCoinsRequest\x12\x0c\n\x04\x61\x64\x64r\x18\x01 \x01(\t\x12\x0e\n\x06\x61mount\x18\x02 \x01(\x03\x12\x13\n\x0btarget_conf\x18\x03 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x05 \x01(\x03\x12\x10\n\x08send_all\x18\x06 \x01(\x08\"\'\n\x11SendCoinsResponse\x12\x12\n\x04txid\x18\x01 \x01(\tR\x04txid\":\n\x12ListUnspentRequest\x12\x11\n\tmin_confs\x18\x01 \x01(\x05\x12\x11\n\tmax_confs\x18\x02 \x01(\x05\"8\n\x13ListUnspentResponse\x12!\n\x05utxos\x18\x01 \x03(\x0b\x32\x0b.lnrpc.UtxoR\x05utxos\"5\n\x11NewAddressRequest\x12 \n\x04type\x18\x01 \x01(\x0e\x32\x12.lnrpc.AddressType\".\n\x12NewAddressResponse\x12\x18\n\x07\x61\x64\x64ress\x18\x01 \x01(\tR\x07\x61\x64\x64ress\"&\n\x12SignMessageRequest\x12\x10\n\x03msg\x18\x01 \x01(\x0cR\x03msg\"3\n\x13SignMessageResponse\x12\x1c\n\tsignature\x18\x01 \x01(\tR\tsignature\"F\n\x14VerifyMessageRequest\x12\x10\n\x03msg\x18\x01 \x01(\x0cR\x03msg\x12\x1c\n\tsignature\x18\x02 \x01(\tR\tsignature\"E\n\x15VerifyMessageResponse\x12\x14\n\x05valid\x18\x01 \x01(\x08R\x05valid\x12\x16\n\x06pubkey\x18\x02 \x01(\tR\x06pubkey\"I\n\x12\x43onnectPeerRequest\x12%\n\x04\x61\x64\x64r\x18\x01 \x01(\x0b\x32\x17.lnrpc.LightningAddress\x12\x0c\n\x04perm\x18\x02 \x01(\x08\"\x15\n\x13\x43onnectPeerResponse\"1\n\x15\x44isconnectPeerRequest\x12\x18\n\x07pub_key\x18\x01 \x01(\tR\x07pub_key\"\x18\n\x16\x44isconnectPeerResponse\"\x86\x01\n\x04HTLC\x12\x1a\n\x08incoming\x18\x01 \x01(\x08R\x08incoming\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12\x1c\n\thash_lock\x18\x03 \x01(\x0cR\thash_lock\x12,\n\x11\x65xpiration_height\x18\x04 \x01(\rR\x11\x65xpiration_height\"\xa2\x07\n\x07\x43hannel\x12\x16\n\x06\x61\x63tive\x18\x01 \x01(\x08R\x06\x61\x63tive\x12$\n\rremote_pubkey\x18\x02 \x01(\tR\rremote_pubkey\x12$\n\rchannel_point\x18\x03 \x01(\tR\rchannel_point\x12\x1c\n\x07\x63han_id\x18\x04 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12\x1a\n\x08\x63\x61pacity\x18\x05 \x01(\x03R\x08\x63\x61pacity\x12$\n\rlocal_balance\x18\x06 
\x01(\x03R\rlocal_balance\x12&\n\x0eremote_balance\x18\x07 \x01(\x03R\x0eremote_balance\x12\x1e\n\ncommit_fee\x18\x08 \x01(\x03R\ncommit_fee\x12$\n\rcommit_weight\x18\t \x01(\x03R\rcommit_weight\x12\x1e\n\nfee_per_kw\x18\n \x01(\x03R\nfee_per_kw\x12,\n\x11unsettled_balance\x18\x0b \x01(\x03R\x11unsettled_balance\x12\x30\n\x13total_satoshis_sent\x18\x0c \x01(\x03R\x13total_satoshis_sent\x12\x38\n\x17total_satoshis_received\x18\r \x01(\x03R\x17total_satoshis_received\x12 \n\x0bnum_updates\x18\x0e \x01(\x04R\x0bnum_updates\x12\x31\n\rpending_htlcs\x18\x0f \x03(\x0b\x32\x0b.lnrpc.HTLCR\rpending_htlcs\x12\x1c\n\tcsv_delay\x18\x10 \x01(\rR\tcsv_delay\x12\x18\n\x07private\x18\x11 \x01(\x08R\x07private\x12\x1c\n\tinitiator\x18\x12 \x01(\x08R\tinitiator\x12,\n\x11\x63han_status_flags\x18\x13 \x01(\tR\x11\x63han_status_flags\x12\x36\n\x16local_chan_reserve_sat\x18\x14 \x01(\x03R\x16local_chan_reserve_sat\x12\x38\n\x17remote_chan_reserve_sat\x18\x15 \x01(\x03R\x17remote_chan_reserve_sat\x12,\n\x11static_remote_key\x18\x16 \x01(\x08R\x11static_remote_key\x12\x1a\n\x08lifetime\x18\x17 \x01(\x03R\x08lifetime\x12\x16\n\x06uptime\x18\x18 \x01(\x03R\x06uptime\"l\n\x13ListChannelsRequest\x12\x13\n\x0b\x61\x63tive_only\x18\x01 \x01(\x08\x12\x15\n\rinactive_only\x18\x02 \x01(\x08\x12\x13\n\x0bpublic_only\x18\x03 \x01(\x08\x12\x14\n\x0cprivate_only\x18\x04 \x01(\x08\"B\n\x14ListChannelsResponse\x12*\n\x08\x63hannels\x18\x0b \x03(\x0b\x32\x0e.lnrpc.ChannelR\x08\x63hannels\"\xba\x04\n\x13\x43hannelCloseSummary\x12$\n\rchannel_point\x18\x01 \x01(\tR\rchannel_point\x12\x1c\n\x07\x63han_id\x18\x02 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12\x1e\n\nchain_hash\x18\x03 \x01(\tR\nchain_hash\x12(\n\x0f\x63losing_tx_hash\x18\x04 \x01(\tR\x0f\x63losing_tx_hash\x12$\n\rremote_pubkey\x18\x05 \x01(\tR\rremote_pubkey\x12\x1a\n\x08\x63\x61pacity\x18\x06 \x01(\x03R\x08\x63\x61pacity\x12\"\n\x0c\x63lose_height\x18\x07 \x01(\rR\x0c\x63lose_height\x12(\n\x0fsettled_balance\x18\x08 
\x01(\x03R\x0fsettled_balance\x12\x30\n\x13time_locked_balance\x18\t \x01(\x03R\x13time_locked_balance\x12\x46\n\nclose_type\x18\n \x01(\x0e\x32&.lnrpc.ChannelCloseSummary.ClosureTypeR\nclose_type\"\x8a\x01\n\x0b\x43losureType\x12\x15\n\x11\x43OOPERATIVE_CLOSE\x10\x00\x12\x15\n\x11LOCAL_FORCE_CLOSE\x10\x01\x12\x16\n\x12REMOTE_FORCE_CLOSE\x10\x02\x12\x10\n\x0c\x42REACH_CLOSE\x10\x03\x12\x14\n\x10\x46UNDING_CANCELED\x10\x04\x12\r\n\tABANDONED\x10\x05\"\x94\x01\n\x15\x43losedChannelsRequest\x12\x13\n\x0b\x63ooperative\x18\x01 \x01(\x08\x12\x13\n\x0blocal_force\x18\x02 \x01(\x08\x12\x14\n\x0cremote_force\x18\x03 \x01(\x08\x12\x0e\n\x06\x62reach\x18\x04 \x01(\x08\x12\x18\n\x10\x66unding_canceled\x18\x05 \x01(\x08\x12\x11\n\tabandoned\x18\x06 \x01(\x08\"P\n\x16\x43losedChannelsResponse\x12\x36\n\x08\x63hannels\x18\x01 \x03(\x0b\x32\x1a.lnrpc.ChannelCloseSummaryR\x08\x63hannels\"\xdf\x02\n\x04Peer\x12\x18\n\x07pub_key\x18\x01 \x01(\tR\x07pub_key\x12\x18\n\x07\x61\x64\x64ress\x18\x03 \x01(\tR\x07\x61\x64\x64ress\x12\x1e\n\nbytes_sent\x18\x04 \x01(\x04R\nbytes_sent\x12\x1e\n\nbytes_recv\x18\x05 \x01(\x04R\nbytes_recv\x12\x1a\n\x08sat_sent\x18\x06 \x01(\x03R\x08sat_sent\x12\x1a\n\x08sat_recv\x18\x07 \x01(\x03R\x08sat_recv\x12\x18\n\x07inbound\x18\x08 \x01(\x08R\x07inbound\x12\x1c\n\tping_time\x18\t \x01(\x03R\tping_time\x12\x32\n\tsync_type\x18\n \x01(\x0e\x32\x14.lnrpc.Peer.SyncTypeR\tsync_type\"?\n\x08SyncType\x12\x10\n\x0cUNKNOWN_SYNC\x10\x00\x12\x0f\n\x0b\x41\x43TIVE_SYNC\x10\x01\x12\x10\n\x0cPASSIVE_SYNC\x10\x02\"\x12\n\x10ListPeersRequest\"6\n\x11ListPeersResponse\x12!\n\x05peers\x18\x01 \x03(\x0b\x32\x0b.lnrpc.PeerR\x05peers\"\x10\n\x0eGetInfoRequest\"\xe7\x04\n\x0fGetInfoResponse\x12(\n\x0fidentity_pubkey\x18\x01 \x01(\tR\x0fidentity_pubkey\x12\x14\n\x05\x61lias\x18\x02 \x01(\tR\x05\x61lias\x12\x32\n\x14num_pending_channels\x18\x03 \x01(\rR\x14num_pending_channels\x12\x30\n\x13num_active_channels\x18\x04 \x01(\rR\x13num_active_channels\x12\x1c\n\tnum_peers\x18\x05 
\x01(\rR\tnum_peers\x12\"\n\x0c\x62lock_height\x18\x06 \x01(\rR\x0c\x62lock_height\x12\x1e\n\nblock_hash\x18\x08 \x01(\tR\nblock_hash\x12(\n\x0fsynced_to_chain\x18\t \x01(\x08R\x0fsynced_to_chain\x12\x1c\n\x07testnet\x18\n \x01(\x08\x42\x02\x18\x01R\x07testnet\x12\x12\n\x04uris\x18\x0c \x03(\tR\x04uris\x12\x34\n\x15\x62\x65st_header_timestamp\x18\r \x01(\x03R\x15\x62\x65st_header_timestamp\x12\x18\n\x07version\x18\x0e \x01(\tR\x07version\x12\x34\n\x15num_inactive_channels\x18\x0f \x01(\rR\x15num_inactive_channels\x12$\n\x06\x63hains\x18\x10 \x03(\x0b\x32\x0c.lnrpc.ChainR\x06\x63hains\x12\x14\n\x05\x63olor\x18\x11 \x01(\tR\x05\x63olor\x12(\n\x0fsynced_to_graph\x18\x12 \x01(\x08R\x0fsynced_to_graphJ\x04\x08\x0b\x10\x0c\"7\n\x05\x43hain\x12\x14\n\x05\x63hain\x18\x01 \x01(\tR\x05\x63hain\x12\x18\n\x07network\x18\x02 \x01(\tR\x07network\"U\n\x12\x43onfirmationUpdate\x12\x11\n\tblock_sha\x18\x01 \x01(\x0c\x12\x14\n\x0c\x62lock_height\x18\x02 \x01(\x05\x12\x16\n\x0enum_confs_left\x18\x03 \x01(\r\"N\n\x11\x43hannelOpenUpdate\x12\x39\n\rchannel_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPointR\rchannel_point\"R\n\x12\x43hannelCloseUpdate\x12\"\n\x0c\x63losing_txid\x18\x01 \x01(\x0cR\x0c\x63losing_txid\x12\x18\n\x07success\x18\x02 \x01(\x08R\x07success\"{\n\x13\x43loseChannelRequest\x12*\n\rchannel_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\x12\r\n\x05\x66orce\x18\x02 \x01(\x08\x12\x13\n\x0btarget_conf\x18\x03 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x04 \x01(\x03\"\x98\x01\n\x11\x43loseStatusUpdate\x12<\n\rclose_pending\x18\x01 \x01(\x0b\x32\x14.lnrpc.PendingUpdateH\x00R\rclose_pending\x12;\n\nchan_close\x18\x03 \x01(\x0b\x32\x19.lnrpc.ChannelCloseUpdateH\x00R\nchan_closeB\x08\n\x06update\"G\n\rPendingUpdate\x12\x12\n\x04txid\x18\x01 \x01(\x0cR\x04txid\x12\"\n\x0coutput_index\x18\x02 \x01(\rR\x0coutput_index\"\x9d\x03\n\x12OpenChannelRequest\x12 \n\x0bnode_pubkey\x18\x02 \x01(\x0cR\x0bnode_pubkey\x12\x32\n\x12node_pubkey_string\x18\x03 
\x01(\tB\x02\x18\x01R\x12node_pubkey_string\x12\x32\n\x14local_funding_amount\x18\x04 \x01(\x03R\x14local_funding_amount\x12\x1a\n\x08push_sat\x18\x05 \x01(\x03R\x08push_sat\x12\x13\n\x0btarget_conf\x18\x06 \x01(\x05\x12\x14\n\x0csat_per_byte\x18\x07 \x01(\x03\x12\x18\n\x07private\x18\x08 \x01(\x08R\x07private\x12$\n\rmin_htlc_msat\x18\t \x01(\x03R\rmin_htlc_msat\x12*\n\x10remote_csv_delay\x18\n \x01(\rR\x10remote_csv_delay\x12\x1c\n\tmin_confs\x18\x0b \x01(\x05R\tmin_confs\x12,\n\x11spend_unconfirmed\x18\x0c \x01(\x08R\x11spend_unconfirmed\"\x92\x01\n\x10OpenStatusUpdate\x12:\n\x0c\x63han_pending\x18\x01 \x01(\x0b\x32\x14.lnrpc.PendingUpdateH\x00R\x0c\x63han_pending\x12\x38\n\tchan_open\x18\x03 \x01(\x0b\x32\x18.lnrpc.ChannelOpenUpdateH\x00R\tchan_openB\x08\n\x06update\"\xcf\x01\n\x0bPendingHTLC\x12\x1a\n\x08incoming\x18\x01 \x01(\x08R\x08incoming\x12\x16\n\x06\x61mount\x18\x02 \x01(\x03R\x06\x61mount\x12\x1a\n\x08outpoint\x18\x03 \x01(\tR\x08outpoint\x12(\n\x0fmaturity_height\x18\x04 \x01(\rR\x0fmaturity_height\x12\x30\n\x13\x62locks_til_maturity\x18\x05 \x01(\x05R\x13\x62locks_til_maturity\x12\x14\n\x05stage\x18\x06 \x01(\rR\x05stage\"\x18\n\x16PendingChannelsRequest\"\x9c\r\n\x17PendingChannelsResponse\x12\x30\n\x13total_limbo_balance\x18\x01 \x01(\x03R\x13total_limbo_balance\x12g\n\x15pending_open_channels\x18\x02 \x03(\x0b\x32\x31.lnrpc.PendingChannelsResponse.PendingOpenChannelR\x15pending_open_channels\x12h\n\x18pending_closing_channels\x18\x03 \x03(\x0b\x32,.lnrpc.PendingChannelsResponse.ClosedChannelR\x18pending_closing_channels\x12y\n\x1epending_force_closing_channels\x18\x04 \x03(\x0b\x32\x31.lnrpc.PendingChannelsResponse.ForceClosedChannelR\x1epending_force_closing_channels\x12j\n\x16waiting_close_channels\x18\x05 \x03(\x0b\x32\x32.lnrpc.PendingChannelsResponse.WaitingCloseChannelR\x16waiting_close_channels\x1a\xbc\x02\n\x0ePendingChannel\x12(\n\x0fremote_node_pub\x18\x01 \x01(\tR\x0fremote_node_pub\x12$\n\rchannel_point\x18\x02 
\x01(\tR\rchannel_point\x12\x1a\n\x08\x63\x61pacity\x18\x03 \x01(\x03R\x08\x63\x61pacity\x12$\n\rlocal_balance\x18\x04 \x01(\x03R\rlocal_balance\x12&\n\x0eremote_balance\x18\x05 \x01(\x03R\x0eremote_balance\x12\x36\n\x16local_chan_reserve_sat\x18\x06 \x01(\x03R\x16local_chan_reserve_sat\x12\x38\n\x17remote_chan_reserve_sat\x18\x07 \x01(\x03R\x17remote_chan_reserve_sat\x1a\xf5\x01\n\x12PendingOpenChannel\x12G\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannelR\x07\x63hannel\x12\x30\n\x13\x63onfirmation_height\x18\x02 \x01(\rR\x13\x63onfirmation_height\x12\x1e\n\ncommit_fee\x18\x04 \x01(\x03R\ncommit_fee\x12$\n\rcommit_weight\x18\x05 \x01(\x03R\rcommit_weight\x12\x1e\n\nfee_per_kw\x18\x06 \x01(\x03R\nfee_per_kw\x1a{\n\x13WaitingCloseChannel\x12>\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannel\x12$\n\rlimbo_balance\x18\x02 \x01(\x03R\rlimbo_balance\x1as\n\rClosedChannel\x12>\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannel\x12\"\n\x0c\x63losing_txid\x18\x02 \x01(\tR\x0c\x63losing_txid\x1a\xeb\x02\n\x12\x46orceClosedChannel\x12G\n\x07\x63hannel\x18\x01 \x01(\x0b\x32-.lnrpc.PendingChannelsResponse.PendingChannelR\x07\x63hannel\x12\"\n\x0c\x63losing_txid\x18\x02 \x01(\tR\x0c\x63losing_txid\x12$\n\rlimbo_balance\x18\x03 \x01(\x03R\rlimbo_balance\x12(\n\x0fmaturity_height\x18\x04 \x01(\rR\x0fmaturity_height\x12\x30\n\x13\x62locks_til_maturity\x18\x05 \x01(\x05R\x13\x62locks_til_maturity\x12,\n\x11recovered_balance\x18\x06 \x01(\x03R\x11recovered_balance\x12\x38\n\rpending_htlcs\x18\x08 \x03(\x0b\x32\x12.lnrpc.PendingHTLCR\rpending_htlcs\"\x1a\n\x18\x43hannelEventSubscription\"\xb5\x03\n\x12\x43hannelEventUpdate\x12\x34\n\x0copen_channel\x18\x01 \x01(\x0b\x32\x0e.lnrpc.ChannelH\x00R\x0copen_channel\x12\x44\n\x0e\x63losed_channel\x18\x02 \x01(\x0b\x32\x1a.lnrpc.ChannelCloseSummaryH\x00R\x0e\x63losed_channel\x12=\n\x0e\x61\x63tive_channel\x18\x03 
\x01(\x0b\x32\x13.lnrpc.ChannelPointH\x00R\x0e\x61\x63tive_channel\x12\x41\n\x10inactive_channel\x18\x04 \x01(\x0b\x32\x13.lnrpc.ChannelPointH\x00R\x10inactive_channel\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32$.lnrpc.ChannelEventUpdate.UpdateTypeR\x04type\"\\\n\nUpdateType\x12\x10\n\x0cOPEN_CHANNEL\x10\x00\x12\x12\n\x0e\x43LOSED_CHANNEL\x10\x01\x12\x12\n\x0e\x41\x43TIVE_CHANNEL\x10\x02\x12\x14\n\x10INACTIVE_CHANNEL\x10\x03\x42\t\n\x07\x63hannel\"\x16\n\x14WalletBalanceRequest\"\x9d\x01\n\x15WalletBalanceResponse\x12$\n\rtotal_balance\x18\x01 \x01(\x03R\rtotal_balance\x12,\n\x11\x63onfirmed_balance\x18\x02 \x01(\x03R\x11\x63onfirmed_balance\x12\x30\n\x13unconfirmed_balance\x18\x03 \x01(\x03R\x13unconfirmed_balance\"\x17\n\x15\x43hannelBalanceRequest\"f\n\x16\x43hannelBalanceResponse\x12\x18\n\x07\x62\x61lance\x18\x01 \x01(\x03R\x07\x62\x61lance\x12\x32\n\x14pending_open_balance\x18\x02 \x01(\x03R\x14pending_open_balance\"\xbf\x02\n\x12QueryRoutesRequest\x12\x0f\n\x07pub_key\x18\x01 \x01(\t\x12\x0b\n\x03\x61mt\x18\x02 \x01(\x03\x12\x10\n\x08\x61mt_msat\x18\x0c \x01(\x03\x12\x18\n\x10\x66inal_cltv_delta\x18\x04 \x01(\x05\x12\"\n\tfee_limit\x18\x05 \x01(\x0b\x32\x0f.lnrpc.FeeLimit\x12\x15\n\rignored_nodes\x18\x06 \x03(\x0c\x12-\n\rignored_edges\x18\x07 \x03(\x0b\x32\x12.lnrpc.EdgeLocatorB\x02\x18\x01\x12\x16\n\x0esource_pub_key\x18\x08 \x01(\t\x12\x1b\n\x13use_mission_control\x18\t \x01(\x08\x12&\n\rignored_pairs\x18\n \x03(\x0b\x32\x0f.lnrpc.NodePair\x12\x12\n\ncltv_limit\x18\x0b \x01(\rJ\x04\x08\x03\x10\x04\"$\n\x08NodePair\x12\x0c\n\x04\x66rom\x18\x01 \x01(\x0c\x12\n\n\x02to\x18\x02 \x01(\x0c\"@\n\x0b\x45\x64geLocator\x12\x16\n\nchannel_id\x18\x01 \x01(\x04\x42\x02\x30\x01\x12\x19\n\x11\x64irection_reverse\x18\x02 \x01(\x08\"_\n\x13QueryRoutesResponse\x12$\n\x06routes\x18\x01 \x03(\x0b\x32\x0c.lnrpc.RouteR\x06routes\x12\"\n\x0csuccess_prob\x18\x02 \x01(\x01R\x0csuccess_prob\"\xdf\x02\n\x03Hop\x12\x1c\n\x07\x63han_id\x18\x01 
\x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12$\n\rchan_capacity\x18\x02 \x01(\x03R\rchan_capacity\x12*\n\x0e\x61mt_to_forward\x18\x03 \x01(\x03\x42\x02\x18\x01R\x0e\x61mt_to_forward\x12\x14\n\x03\x66\x65\x65\x18\x04 \x01(\x03\x42\x02\x18\x01R\x03\x66\x65\x65\x12\x16\n\x06\x65xpiry\x18\x05 \x01(\rR\x06\x65xpiry\x12\x30\n\x13\x61mt_to_forward_msat\x18\x06 \x01(\x03R\x13\x61mt_to_forward_msat\x12\x1a\n\x08\x66\x65\x65_msat\x18\x07 \x01(\x03R\x08\x66\x65\x65_msat\x12\x18\n\x07pub_key\x18\x08 \x01(\tR\x07pub_key\x12 \n\x0btlv_payload\x18\t \x01(\x08R\x0btlv_payload\x12\x30\n\nmpp_record\x18\n \x01(\x0b\x32\x10.lnrpc.MPPRecordR\nmpp_record\"W\n\tMPPRecord\x12\"\n\x0cpayment_addr\x18\x0b \x01(\x0cR\x0cpayment_addr\x12&\n\x0etotal_amt_msat\x18\n \x01(\x03R\x0etotal_amt_msat\"\xe9\x01\n\x05Route\x12(\n\x0ftotal_time_lock\x18\x01 \x01(\rR\x0ftotal_time_lock\x12\"\n\ntotal_fees\x18\x02 \x01(\x03\x42\x02\x18\x01R\ntotal_fees\x12 \n\ttotal_amt\x18\x03 \x01(\x03\x42\x02\x18\x01R\ttotal_amt\x12\x1e\n\x04hops\x18\x04 \x03(\x0b\x32\n.lnrpc.HopR\x04hops\x12(\n\x0ftotal_fees_msat\x18\x05 \x01(\x03R\x0ftotal_fees_msat\x12&\n\x0etotal_amt_msat\x18\x06 \x01(\x03R\x0etotal_amt_msat\"<\n\x0fNodeInfoRequest\x12\x0f\n\x07pub_key\x18\x01 \x01(\t\x12\x18\n\x10include_channels\x18\x02 \x01(\x08\"\xb0\x01\n\x08NodeInfo\x12(\n\x04node\x18\x01 \x01(\x0b\x32\x14.lnrpc.LightningNodeR\x04node\x12\"\n\x0cnum_channels\x18\x02 \x01(\rR\x0cnum_channels\x12&\n\x0etotal_capacity\x18\x03 \x01(\x03R\x0etotal_capacity\x12.\n\x08\x63hannels\x18\x04 \x03(\x0b\x32\x12.lnrpc.ChannelEdgeR\x08\x63hannels\"\xa9\x01\n\rLightningNode\x12 \n\x0blast_update\x18\x01 \x01(\rR\x0blast_update\x12\x18\n\x07pub_key\x18\x02 \x01(\tR\x07pub_key\x12\x14\n\x05\x61lias\x18\x03 \x01(\tR\x05\x61lias\x12\x30\n\taddresses\x18\x04 \x03(\x0b\x32\x12.lnrpc.NodeAddressR\taddresses\x12\x14\n\x05\x63olor\x18\x05 \x01(\tR\x05\x63olor\";\n\x0bNodeAddress\x12\x18\n\x07network\x18\x01 \x01(\tR\x07network\x12\x12\n\x04\x61\x64\x64r\x18\x02 
\x01(\tR\x04\x61\x64\x64r\"\x91\x02\n\rRoutingPolicy\x12(\n\x0ftime_lock_delta\x18\x01 \x01(\rR\x0ftime_lock_delta\x12\x1a\n\x08min_htlc\x18\x02 \x01(\x03R\x08min_htlc\x12$\n\rfee_base_msat\x18\x03 \x01(\x03R\rfee_base_msat\x12\x30\n\x13\x66\x65\x65_rate_milli_msat\x18\x04 \x01(\x03R\x13\x66\x65\x65_rate_milli_msat\x12\x1a\n\x08\x64isabled\x18\x05 \x01(\x08R\x08\x64isabled\x12$\n\rmax_htlc_msat\x18\x06 \x01(\x04R\rmax_htlc_msat\x12 \n\x0blast_update\x18\x07 \x01(\rR\x0blast_update\"\xc3\x02\n\x0b\x43hannelEdge\x12\"\n\nchannel_id\x18\x01 \x01(\x04\x42\x02\x30\x01R\nchannel_id\x12\x1e\n\nchan_point\x18\x02 \x01(\tR\nchan_point\x12$\n\x0blast_update\x18\x03 \x01(\rB\x02\x18\x01R\x0blast_update\x12\x1c\n\tnode1_pub\x18\x04 \x01(\tR\tnode1_pub\x12\x1c\n\tnode2_pub\x18\x05 \x01(\tR\tnode2_pub\x12\x1a\n\x08\x63\x61pacity\x18\x06 \x01(\x03R\x08\x63\x61pacity\x12\x38\n\x0cnode1_policy\x18\x07 \x01(\x0b\x32\x14.lnrpc.RoutingPolicyR\x0cnode1_policy\x12\x38\n\x0cnode2_policy\x18\x08 \x01(\x0b\x32\x14.lnrpc.RoutingPolicyR\x0cnode2_policy\"G\n\x13\x43hannelGraphRequest\x12\x30\n\x13include_unannounced\x18\x01 \x01(\x08R\x13include_unannounced\"d\n\x0c\x43hannelGraph\x12*\n\x05nodes\x18\x01 \x03(\x0b\x32\x14.lnrpc.LightningNodeR\x05nodes\x12(\n\x05\x65\x64ges\x18\x02 \x03(\x0b\x32\x12.lnrpc.ChannelEdgeR\x05\x65\x64ges\"&\n\x0f\x43hanInfoRequest\x12\x13\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01\"\x14\n\x12NetworkInfoRequest\"\xe9\x03\n\x0bNetworkInfo\x12&\n\x0egraph_diameter\x18\x01 \x01(\rR\x0egraph_diameter\x12&\n\x0e\x61vg_out_degree\x18\x02 \x01(\x01R\x0e\x61vg_out_degree\x12&\n\x0emax_out_degree\x18\x03 \x01(\rR\x0emax_out_degree\x12\x1c\n\tnum_nodes\x18\x04 \x01(\rR\tnum_nodes\x12\"\n\x0cnum_channels\x18\x05 \x01(\rR\x0cnum_channels\x12\x36\n\x16total_network_capacity\x18\x06 \x01(\x03R\x16total_network_capacity\x12*\n\x10\x61vg_channel_size\x18\x07 \x01(\x01R\x10\x61vg_channel_size\x12*\n\x10min_channel_size\x18\x08 
\x01(\x03R\x10min_channel_size\x12*\n\x10max_channel_size\x18\t \x01(\x03R\x10max_channel_size\x12\x38\n\x17median_channel_size_sat\x18\n \x01(\x03R\x17median_channel_size_sat\x12*\n\x10num_zombie_chans\x18\x0b \x01(\x04R\x10num_zombie_chans\"\r\n\x0bStopRequest\"\x0e\n\x0cStopResponse\"\x1b\n\x19GraphTopologySubscription\"\xa3\x01\n\x13GraphTopologyUpdate\x12\'\n\x0cnode_updates\x18\x01 \x03(\x0b\x32\x11.lnrpc.NodeUpdate\x12\x31\n\x0f\x63hannel_updates\x18\x02 \x03(\x0b\x32\x18.lnrpc.ChannelEdgeUpdate\x12\x30\n\x0c\x63losed_chans\x18\x03 \x03(\x0b\x32\x1a.lnrpc.ClosedChannelUpdate\"l\n\nNodeUpdate\x12\x11\n\taddresses\x18\x01 \x03(\t\x12\x14\n\x0cidentity_key\x18\x02 \x01(\t\x12\x17\n\x0fglobal_features\x18\x03 \x01(\x0c\x12\r\n\x05\x61lias\x18\x04 \x01(\t\x12\r\n\x05\x63olor\x18\x05 \x01(\t\"\xc4\x01\n\x11\x43hannelEdgeUpdate\x12\x13\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01\x12\'\n\nchan_point\x18\x02 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\x12\x10\n\x08\x63\x61pacity\x18\x03 \x01(\x03\x12,\n\x0erouting_policy\x18\x04 \x01(\x0b\x32\x14.lnrpc.RoutingPolicy\x12\x18\n\x10\x61\x64vertising_node\x18\x05 \x01(\t\x12\x17\n\x0f\x63onnecting_node\x18\x06 \x01(\t\"|\n\x13\x43losedChannelUpdate\x12\x13\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01\x12\x10\n\x08\x63\x61pacity\x18\x02 \x01(\x03\x12\x15\n\rclosed_height\x18\x03 \x01(\r\x12\'\n\nchan_point\x18\x04 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\"\xd7\x01\n\x07HopHint\x12\x18\n\x07node_id\x18\x01 \x01(\tR\x07node_id\x12\x1c\n\x07\x63han_id\x18\x02 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12$\n\rfee_base_msat\x18\x03 \x01(\rR\rfee_base_msat\x12@\n\x1b\x66\x65\x65_proportional_millionths\x18\x04 \x01(\rR\x1b\x66\x65\x65_proportional_millionths\x12,\n\x11\x63ltv_expiry_delta\x18\x05 \x01(\rR\x11\x63ltv_expiry_delta\"9\n\tRouteHint\x12,\n\thop_hints\x18\x01 \x03(\x0b\x32\x0e.lnrpc.HopHintR\thop_hints\"\xc7\x06\n\x07Invoice\x12\x12\n\x04memo\x18\x01 \x01(\tR\x04memo\x12\x1e\n\nr_preimage\x18\x03 
\x01(\x0cR\nr_preimage\x12\x16\n\x06r_hash\x18\x04 \x01(\x0cR\x06r_hash\x12\x14\n\x05value\x18\x05 \x01(\x03R\x05value\x12\x1e\n\nvalue_msat\x18\x17 \x01(\x03R\nvalue_msat\x12\x1c\n\x07settled\x18\x06 \x01(\x08\x42\x02\x18\x01R\x07settled\x12$\n\rcreation_date\x18\x07 \x01(\x03R\rcreation_date\x12 \n\x0bsettle_date\x18\x08 \x01(\x03R\x0bsettle_date\x12(\n\x0fpayment_request\x18\t \x01(\tR\x0fpayment_request\x12*\n\x10\x64\x65scription_hash\x18\n \x01(\x0cR\x10\x64\x65scription_hash\x12\x16\n\x06\x65xpiry\x18\x0b \x01(\x03R\x06\x65xpiry\x12$\n\rfallback_addr\x18\x0c \x01(\tR\rfallback_addr\x12 \n\x0b\x63ltv_expiry\x18\r \x01(\x04R\x0b\x63ltv_expiry\x12\x32\n\x0broute_hints\x18\x0e \x03(\x0b\x32\x10.lnrpc.RouteHintR\x0broute_hints\x12\x18\n\x07private\x18\x0f \x01(\x08R\x07private\x12\x1c\n\tadd_index\x18\x10 \x01(\x04R\tadd_index\x12\"\n\x0csettle_index\x18\x11 \x01(\x04R\x0csettle_index\x12\x1e\n\x08\x61mt_paid\x18\x12 \x01(\x03\x42\x02\x18\x01R\x08\x61mt_paid\x12\"\n\x0c\x61mt_paid_sat\x18\x13 \x01(\x03R\x0c\x61mt_paid_sat\x12$\n\ramt_paid_msat\x18\x14 \x01(\x03R\ramt_paid_msat\x12\x31\n\x05state\x18\x15 \x01(\x0e\x32\x1b.lnrpc.Invoice.InvoiceStateR\x05state\x12(\n\x05htlcs\x18\x16 \x03(\x0b\x32\x12.lnrpc.InvoiceHTLCR\x05htlcs\"A\n\x0cInvoiceState\x12\x08\n\x04OPEN\x10\x00\x12\x0b\n\x07SETTLED\x10\x01\x12\x0c\n\x08\x43\x41NCELED\x10\x02\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x03J\x04\x08\x02\x10\x03\"\xa8\x02\n\x0bInvoiceHTLC\x12\x1c\n\x07\x63han_id\x18\x01 \x01(\x04\x42\x02\x30\x01R\x07\x63han_id\x12\x1e\n\nhtlc_index\x18\x02 \x01(\x04R\nhtlc_index\x12\x1a\n\x08\x61mt_msat\x18\x03 \x01(\x04R\x08\x61mt_msat\x12$\n\raccept_height\x18\x04 \x01(\x05R\raccept_height\x12 \n\x0b\x61\x63\x63\x65pt_time\x18\x05 \x01(\x03R\x0b\x61\x63\x63\x65pt_time\x12\"\n\x0cresolve_time\x18\x06 \x01(\x03R\x0cresolve_time\x12$\n\rexpiry_height\x18\x07 \x01(\x05R\rexpiry_height\x12-\n\x05state\x18\x08 
\x01(\x0e\x32\x17.lnrpc.InvoiceHTLCStateR\x05state\"t\n\x12\x41\x64\x64InvoiceResponse\x12\x16\n\x06r_hash\x18\x01 \x01(\x0cR\x06r_hash\x12(\n\x0fpayment_request\x18\x02 \x01(\tR\x0fpayment_request\x12\x1c\n\tadd_index\x18\x10 \x01(\x04R\tadd_index\"I\n\x0bPaymentHash\x12\"\n\nr_hash_str\x18\x01 \x01(\tB\x02\x18\x01R\nr_hash_str\x12\x16\n\x06r_hash\x18\x02 \x01(\x0cR\x06r_hash\"\xa4\x01\n\x12ListInvoiceRequest\x12\"\n\x0cpending_only\x18\x01 \x01(\x08R\x0cpending_only\x12\"\n\x0cindex_offset\x18\x04 \x01(\x04R\x0cindex_offset\x12*\n\x10num_max_invoices\x18\x05 \x01(\x04R\x10num_max_invoices\x12\x1a\n\x08reversed\x18\x06 \x01(\x08R\x08reversed\"\x9f\x01\n\x13ListInvoiceResponse\x12*\n\x08invoices\x18\x01 \x03(\x0b\x32\x0e.lnrpc.InvoiceR\x08invoices\x12,\n\x11last_index_offset\x18\x02 \x01(\x04R\x11last_index_offset\x12.\n\x12\x66irst_index_offset\x18\x03 \x01(\x04R\x12\x66irst_index_offset\"W\n\x13InvoiceSubscription\x12\x1c\n\tadd_index\x18\x01 \x01(\x04R\tadd_index\x12\"\n\x0csettle_index\x18\x02 \x01(\x04R\x0csettle_index\"\xbd\x04\n\x07Payment\x12\"\n\x0cpayment_hash\x18\x01 \x01(\tR\x0cpayment_hash\x12\x18\n\x05value\x18\x02 \x01(\x03\x42\x02\x18\x01R\x05value\x12(\n\rcreation_date\x18\x03 \x01(\x03\x42\x02\x18\x01R\rcreation_date\x12\x16\n\x04path\x18\x04 \x03(\tB\x02\x18\x01R\x04path\x12\x14\n\x03\x66\x65\x65\x18\x05 \x01(\x03\x42\x02\x18\x01R\x03\x66\x65\x65\x12*\n\x10payment_preimage\x18\x06 \x01(\tR\x10payment_preimage\x12\x1c\n\tvalue_sat\x18\x07 \x01(\x03R\tvalue_sat\x12\x1e\n\nvalue_msat\x18\x08 \x01(\x03R\nvalue_msat\x12(\n\x0fpayment_request\x18\t \x01(\tR\x0fpayment_request\x12\x34\n\x06status\x18\n \x01(\x0e\x32\x1c.lnrpc.Payment.PaymentStatusR\x06status\x12\x18\n\x07\x66\x65\x65_sat\x18\x0b \x01(\x03R\x07\x66\x65\x65_sat\x12\x1a\n\x08\x66\x65\x65_msat\x18\x0c \x01(\x03R\x08\x66\x65\x65_msat\x12*\n\x10\x63reation_time_ns\x18\r \x01(\x03R\x10\x63reation_time_ns\x12(\n\x05htlcs\x18\x0e 
\x03(\x0b\x32\x12.lnrpc.HTLCAttemptR\x05htlcs\"F\n\rPaymentStatus\x12\x0b\n\x07UNKNOWN\x10\x00\x12\r\n\tIN_FLIGHT\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\"\xf4\x01\n\x0bHTLCAttempt\x12\x35\n\x06status\x18\x01 \x01(\x0e\x32\x1d.lnrpc.HTLCAttempt.HTLCStatusR\x06status\x12\"\n\x05route\x18\x02 \x01(\x0b\x32\x0c.lnrpc.RouteR\x05route\x12(\n\x0f\x61ttempt_time_ns\x18\x03 \x01(\x03R\x0f\x61ttempt_time_ns\x12(\n\x0fresolve_time_ns\x18\x04 \x01(\x03R\x0fresolve_time_ns\"6\n\nHTLCStatus\x12\r\n\tIN_FLIGHT\x10\x00\x12\r\n\tSUCCEEDED\x10\x01\x12\n\n\x06\x46\x41ILED\x10\x02\"1\n\x13ListPaymentsRequest\x12\x1a\n\x12include_incomplete\x18\x01 \x01(\x08\"B\n\x14ListPaymentsResponse\x12*\n\x08payments\x18\x01 \x03(\x0b\x32\x0e.lnrpc.PaymentR\x08payments\"\x1a\n\x18\x44\x65leteAllPaymentsRequest\"\x1b\n\x19\x44\x65leteAllPaymentsResponse\"C\n\x15\x41\x62\x61ndonChannelRequest\x12*\n\rchannel_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\"\x18\n\x16\x41\x62\x61ndonChannelResponse\"5\n\x11\x44\x65\x62ugLevelRequest\x12\x0c\n\x04show\x18\x01 \x01(\x08\x12\x12\n\nlevel_spec\x18\x02 \x01(\t\"6\n\x12\x44\x65\x62ugLevelResponse\x12 \n\x0bsub_systems\x18\x01 \x01(\tR\x0bsub_systems\"\x1f\n\x0cPayReqString\x12\x0f\n\x07pay_req\x18\x01 \x01(\t\"\xf2\x02\n\x06PayReq\x12 \n\x0b\x64\x65stination\x18\x01 \x01(\tR\x0b\x64\x65stination\x12\"\n\x0cpayment_hash\x18\x02 \x01(\tR\x0cpayment_hash\x12\"\n\x0cnum_satoshis\x18\x03 \x01(\x03R\x0cnum_satoshis\x12\x1c\n\ttimestamp\x18\x04 \x01(\x03R\ttimestamp\x12\x16\n\x06\x65xpiry\x18\x05 \x01(\x03R\x06\x65xpiry\x12 \n\x0b\x64\x65scription\x18\x06 \x01(\tR\x0b\x64\x65scription\x12*\n\x10\x64\x65scription_hash\x18\x07 \x01(\tR\x10\x64\x65scription_hash\x12$\n\rfallback_addr\x18\x08 \x01(\tR\rfallback_addr\x12 \n\x0b\x63ltv_expiry\x18\t \x01(\x03R\x0b\x63ltv_expiry\x12\x32\n\x0broute_hints\x18\n 
\x03(\x0b\x32\x10.lnrpc.RouteHintR\x0broute_hints\"\x12\n\x10\x46\x65\x65ReportRequest\"\x99\x01\n\x10\x43hannelFeeReport\x12!\n\nchan_point\x18\x01 \x01(\tR\rchannel_point\x12$\n\rbase_fee_msat\x18\x02 \x01(\x03R\rbase_fee_msat\x12 \n\x0b\x66\x65\x65_per_mil\x18\x03 \x01(\x03R\x0b\x66\x65\x65_per_mil\x12\x1a\n\x08\x66\x65\x65_rate\x18\x04 \x01(\x01R\x08\x66\x65\x65_rate\"\xbc\x01\n\x11\x46\x65\x65ReportResponse\x12;\n\x0c\x63hannel_fees\x18\x01 \x03(\x0b\x32\x17.lnrpc.ChannelFeeReportR\x0c\x63hannel_fees\x12 \n\x0b\x64\x61y_fee_sum\x18\x02 \x01(\x04R\x0b\x64\x61y_fee_sum\x12\"\n\x0cweek_fee_sum\x18\x03 \x01(\x04R\x0cweek_fee_sum\x12$\n\rmonth_fee_sum\x18\x04 \x01(\x04R\rmonth_fee_sum\"\x81\x02\n\x13PolicyUpdateRequest\x12\x18\n\x06global\x18\x01 \x01(\x08H\x00R\x06global\x12\x35\n\nchan_point\x18\x02 \x01(\x0b\x32\x13.lnrpc.ChannelPointH\x00R\nchan_point\x12$\n\rbase_fee_msat\x18\x03 \x01(\x03R\rbase_fee_msat\x12\x1a\n\x08\x66\x65\x65_rate\x18\x04 \x01(\x01R\x08\x66\x65\x65_rate\x12(\n\x0ftime_lock_delta\x18\x05 \x01(\rR\x0ftime_lock_delta\x12$\n\rmax_htlc_msat\x18\x06 \x01(\x04R\rmax_htlc_msatB\x07\n\x05scope\"\x16\n\x14PolicyUpdateResponse\"\xa2\x01\n\x18\x46orwardingHistoryRequest\x12\x1e\n\nstart_time\x18\x01 \x01(\x04R\nstart_time\x12\x1a\n\x08\x65nd_time\x18\x02 \x01(\x04R\x08\x65nd_time\x12\"\n\x0cindex_offset\x18\x03 \x01(\rR\x0cindex_offset\x12&\n\x0enum_max_events\x18\x04 \x01(\rR\x0enum_max_events\"\x9f\x02\n\x0f\x46orwardingEvent\x12\x1c\n\ttimestamp\x18\x01 \x01(\x04R\ttimestamp\x12\"\n\nchan_id_in\x18\x02 \x01(\x04\x42\x02\x30\x01R\nchan_id_in\x12$\n\x0b\x63han_id_out\x18\x04 \x01(\x04\x42\x02\x30\x01R\x0b\x63han_id_out\x12\x16\n\x06\x61mt_in\x18\x05 \x01(\x04R\x06\x61mt_in\x12\x18\n\x07\x61mt_out\x18\x06 \x01(\x04R\x07\x61mt_out\x12\x10\n\x03\x66\x65\x65\x18\x07 \x01(\x04R\x03\x66\x65\x65\x12\x1a\n\x08\x66\x65\x65_msat\x18\x08 \x01(\x04R\x08\x66\x65\x65_msat\x12 \n\x0b\x61mt_in_msat\x18\t \x01(\x04R\x0b\x61mt_in_msat\x12\"\n\x0c\x61mt_out_msat\x18\n 
\x01(\x04R\x0c\x61mt_out_msat\"\x8f\x01\n\x19\x46orwardingHistoryResponse\x12\x44\n\x11\x66orwarding_events\x18\x01 \x03(\x0b\x32\x16.lnrpc.ForwardingEventR\x11\x66orwarding_events\x12,\n\x11last_offset_index\x18\x02 \x01(\rR\x11last_offset_index\"E\n\x1a\x45xportChannelBackupRequest\x12\'\n\nchan_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPoint\"f\n\rChannelBackup\x12\x33\n\nchan_point\x18\x01 \x01(\x0b\x32\x13.lnrpc.ChannelPointR\nchan_point\x12 \n\x0b\x63han_backup\x18\x02 \x01(\x0cR\x0b\x63han_backup\"v\n\x0fMultiChanBackup\x12\x35\n\x0b\x63han_points\x18\x01 \x03(\x0b\x32\x13.lnrpc.ChannelPointR\x0b\x63han_points\x12,\n\x11multi_chan_backup\x18\x02 \x01(\x0cR\x11multi_chan_backup\"\x19\n\x17\x43hanBackupExportRequest\"\xa3\x01\n\x12\x43hanBackupSnapshot\x12G\n\x13single_chan_backups\x18\x01 \x01(\x0b\x32\x15.lnrpc.ChannelBackupsR\x13single_chan_backups\x12\x44\n\x11multi_chan_backup\x18\x02 \x01(\x0b\x32\x16.lnrpc.MultiChanBackupR\x11multi_chan_backup\"J\n\x0e\x43hannelBackups\x12\x38\n\x0c\x63han_backups\x18\x01 \x03(\x0b\x32\x14.lnrpc.ChannelBackupR\x0c\x63han_backups\"\x91\x01\n\x18RestoreChanBackupRequest\x12;\n\x0c\x63han_backups\x18\x01 \x01(\x0b\x32\x15.lnrpc.ChannelBackupsH\x00R\x0c\x63han_backups\x12.\n\x11multi_chan_backup\x18\x02 \x01(\x0cH\x00R\x11multi_chan_backupB\x08\n\x06\x62\x61\x63kup\"\x17\n\x15RestoreBackupResponse\"\x1b\n\x19\x43hannelBackupSubscription\"\x1a\n\x18VerifyChanBackupResponse\"D\n\x12MacaroonPermission\x12\x16\n\x06\x65ntity\x18\x01 \x01(\tR\x06\x65ntity\x12\x16\n\x06\x61\x63tion\x18\x02 \x01(\tR\x06\x61\x63tion\"R\n\x13\x42\x61keMacaroonRequest\x12;\n\x0bpermissions\x18\x01 \x03(\x0b\x32\x19.lnrpc.MacaroonPermissionR\x0bpermissions\"2\n\x14\x42\x61keMacaroonResponse\x12\x1a\n\x08macaroon\x18\x01 
\x01(\tR\x08macaroon*}\n\x0b\x41\x64\x64ressType\x12\x17\n\x13WITNESS_PUBKEY_HASH\x10\x00\x12\x16\n\x12NESTED_PUBKEY_HASH\x10\x01\x12\x1e\n\x1aUNUSED_WITNESS_PUBKEY_HASH\x10\x02\x12\x1d\n\x19UNUSED_NESTED_PUBKEY_HASH\x10\x03*;\n\x10InvoiceHTLCState\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x00\x12\x0b\n\x07SETTLED\x10\x01\x12\x0c\n\x08\x43\x41NCELED\x10\x02\x32\x91\x03\n\x0eWalletUnlocker\x12M\n\x07GenSeed\x12\x15.lnrpc.GenSeedRequest\x1a\x16.lnrpc.GenSeedResponse\"\x13\x82\xd3\xe4\x93\x02\r\x12\x0b/v1/genseed\x12\\\n\nInitWallet\x12\x18.lnrpc.InitWalletRequest\x1a\x19.lnrpc.InitWalletResponse\"\x19\x82\xd3\xe4\x93\x02\x13\"\x0e/v1/initwallet:\x01*\x12\x64\n\x0cUnlockWallet\x12\x1a.lnrpc.UnlockWalletRequest\x1a\x1b.lnrpc.UnlockWalletResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/unlockwallet:\x01*\x12l\n\x0e\x43hangePassword\x12\x1c.lnrpc.ChangePasswordRequest\x1a\x1d.lnrpc.ChangePasswordResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/changepassword:\x01*2\xca\'\n\tLightning\x12j\n\rWalletBalance\x12\x1b.lnrpc.WalletBalanceRequest\x1a\x1c.lnrpc.WalletBalanceResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/balance/blockchain\x12k\n\x0e\x43hannelBalance\x12\x1c.lnrpc.ChannelBalanceRequest\x1a\x1d.lnrpc.ChannelBalanceResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/balance/channels\x12\x65\n\x0fGetTransactions\x12\x1d.lnrpc.GetTransactionsRequest\x1a\x19.lnrpc.TransactionDetails\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/v1/transactions\x12\x62\n\x0b\x45stimateFee\x12\x19.lnrpc.EstimateFeeRequest\x1a\x1a.lnrpc.EstimateFeeResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/transactions/fee\x12[\n\tSendCoins\x12\x17.lnrpc.SendCoinsRequest\x1a\x18.lnrpc.SendCoinsResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/transactions:\x01*\x12W\n\x0bListUnspent\x12\x19.lnrpc.ListUnspentRequest\x1a\x1a.lnrpc.ListUnspentResponse\"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/v1/utxos\x12L\n\x15SubscribeTransactions\x12\x1d.lnrpc.GetTransactionsRequest\x1a\x12.lnrpc.Transaction0\x01\x12;\n\x08SendMany\x12
\x16.lnrpc.SendManyRequest\x1a\x17.lnrpc.SendManyResponse\x12Y\n\nNewAddress\x12\x18.lnrpc.NewAddressRequest\x1a\x19.lnrpc.NewAddressResponse\"\x16\x82\xd3\xe4\x93\x02\x10\x12\x0e/v1/newaddress\x12`\n\x0bSignMessage\x12\x19.lnrpc.SignMessageRequest\x1a\x1a.lnrpc.SignMessageResponse\"\x1a\x82\xd3\xe4\x93\x02\x14\"\x0f/v1/signmessage:\x01*\x12h\n\rVerifyMessage\x12\x1b.lnrpc.VerifyMessageRequest\x1a\x1c.lnrpc.VerifyMessageResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/v1/verifymessage:\x01*\x12Z\n\x0b\x43onnectPeer\x12\x19.lnrpc.ConnectPeerRequest\x1a\x1a.lnrpc.ConnectPeerResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\"\t/v1/peers:\x01*\x12j\n\x0e\x44isconnectPeer\x12\x1c.lnrpc.DisconnectPeerRequest\x1a\x1d.lnrpc.DisconnectPeerResponse\"\x1b\x82\xd3\xe4\x93\x02\x15*\x13/v1/peers/{pub_key}\x12Q\n\tListPeers\x12\x17.lnrpc.ListPeersRequest\x1a\x18.lnrpc.ListPeersResponse\"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/v1/peers\x12M\n\x07GetInfo\x12\x15.lnrpc.GetInfoRequest\x1a\x16.lnrpc.GetInfoResponse\"\x13\x82\xd3\xe4\x93\x02\r\x12\x0b/v1/getinfo\x12n\n\x0fPendingChannels\x12\x1d.lnrpc.PendingChannelsRequest\x1a\x1e.lnrpc.PendingChannelsResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/channels/pending\x12]\n\x0cListChannels\x12\x1a.lnrpc.ListChannelsRequest\x1a\x1b.lnrpc.ListChannelsResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\x0c/v1/channels\x12V\n\x16SubscribeChannelEvents\x12\x1f.lnrpc.ChannelEventSubscription\x1a\x19.lnrpc.ChannelEventUpdate0\x01\x12j\n\x0e\x43losedChannels\x12\x1c.lnrpc.ClosedChannelsRequest\x1a\x1d.lnrpc.ClosedChannelsResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\x12\x13/v1/channels/closed\x12Z\n\x0fOpenChannelSync\x12\x19.lnrpc.OpenChannelRequest\x1a\x13.lnrpc.ChannelPoint\"\x17\x82\xd3\xe4\x93\x02\x11\"\x0c/v1/channels:\x01*\x12\x43\n\x0bOpenChannel\x12\x19.lnrpc.OpenChannelRequest\x1a\x17.lnrpc.OpenStatusUpdate0\x01\x12P\n\x0f\x43hannelAcceptor\x12\x1c.lnrpc.ChannelAcceptResponse\x1a\x1b.lnrpc.ChannelAcceptRequest(\x01\x30\x01\x12\x9a\x01\n\x0c\x43loseChannel\x12\x1a.lnrpc
.CloseChannelRequest\x1a\x18.lnrpc.CloseStatusUpdate\"R\x82\xd3\xe4\x93\x02L*J/v1/channels/{channel_point.funding_txid_str}/{channel_point.output_index}0\x01\x12\xa9\x01\n\x0e\x41\x62\x61ndonChannel\x12\x1c.lnrpc.AbandonChannelRequest\x1a\x1d.lnrpc.AbandonChannelResponse\"Z\x82\xd3\xe4\x93\x02T*R/v1/channels/abandon/{channel_point.funding_txid_str}/{channel_point.output_index}\x12:\n\x0bSendPayment\x12\x12.lnrpc.SendRequest\x1a\x13.lnrpc.SendResponse(\x01\x30\x01\x12`\n\x0fSendPaymentSync\x12\x12.lnrpc.SendRequest\x1a\x13.lnrpc.SendResponse\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/v1/channels/transactions:\x01*\x12\x41\n\x0bSendToRoute\x12\x19.lnrpc.SendToRouteRequest\x1a\x13.lnrpc.SendResponse(\x01\x30\x01\x12m\n\x0fSendToRouteSync\x12\x19.lnrpc.SendToRouteRequest\x1a\x13.lnrpc.SendResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/channels/transactions/route:\x01*\x12P\n\nAddInvoice\x12\x0e.lnrpc.Invoice\x1a\x19.lnrpc.AddInvoiceResponse\"\x17\x82\xd3\xe4\x93\x02\x11\"\x0c/v1/invoices:\x01*\x12[\n\x0cListInvoices\x12\x19.lnrpc.ListInvoiceRequest\x1a\x1a.lnrpc.ListInvoiceResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\x0c/v1/invoices\x12U\n\rLookupInvoice\x12\x12.lnrpc.PaymentHash\x1a\x0e.lnrpc.Invoice\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/invoice/{r_hash_str}\x12\x61\n\x11SubscribeInvoices\x12\x1a.lnrpc.InvoiceSubscription\x1a\x0e.lnrpc.Invoice\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1/invoices/subscribe0\x01\x12P\n\x0c\x44\x65\x63odePayReq\x12\x13.lnrpc.PayReqString\x1a\r.lnrpc.PayReq\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1/payreq/{pay_req}\x12]\n\x0cListPayments\x12\x1a.lnrpc.ListPaymentsRequest\x1a\x1b.lnrpc.ListPaymentsResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\x0c/v1/payments\x12l\n\x11\x44\x65leteAllPayments\x12\x1f.lnrpc.DeleteAllPaymentsRequest\x1a 
.lnrpc.DeleteAllPaymentsResponse\"\x14\x82\xd3\xe4\x93\x02\x0e*\x0c/v1/payments\x12S\n\rDescribeGraph\x12\x1a.lnrpc.ChannelGraphRequest\x1a\x13.lnrpc.ChannelGraph\"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/v1/graph\x12[\n\x0bGetChanInfo\x12\x16.lnrpc.ChanInfoRequest\x1a\x12.lnrpc.ChannelEdge\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/graph/edge/{chan_id}\x12X\n\x0bGetNodeInfo\x12\x16.lnrpc.NodeInfoRequest\x1a\x0f.lnrpc.NodeInfo\" \x82\xd3\xe4\x93\x02\x1a\x12\x18/v1/graph/node/{pub_key}\x12n\n\x0bQueryRoutes\x12\x19.lnrpc.QueryRoutesRequest\x1a\x1a.lnrpc.QueryRoutesResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v1/graph/routes/{pub_key}/{amt}\x12W\n\x0eGetNetworkInfo\x12\x19.lnrpc.NetworkInfoRequest\x1a\x12.lnrpc.NetworkInfo\"\x16\x82\xd3\xe4\x93\x02\x10\x12\x0e/v1/graph/info\x12\x35\n\nStopDaemon\x12\x12.lnrpc.StopRequest\x1a\x13.lnrpc.StopResponse\x12W\n\x15SubscribeChannelGraph\x12 .lnrpc.GraphTopologySubscription\x1a\x1a.lnrpc.GraphTopologyUpdate0\x01\x12\x41\n\nDebugLevel\x12\x18.lnrpc.DebugLevelRequest\x1a\x19.lnrpc.DebugLevelResponse\x12P\n\tFeeReport\x12\x17.lnrpc.FeeReportRequest\x1a\x18.lnrpc.FeeReportResponse\"\x10\x82\xd3\xe4\x93\x02\n\x12\x08/v1/fees\x12i\n\x13UpdateChannelPolicy\x12\x1a.lnrpc.PolicyUpdateRequest\x1a\x1b.lnrpc.PolicyUpdateResponse\"\x19\x82\xd3\xe4\x93\x02\x13\"\x0e/v1/chanpolicy:\x01*\x12m\n\x11\x46orwardingHistory\x12\x1f.lnrpc.ForwardingHistoryRequest\x1a 
.lnrpc.ForwardingHistoryResponse\"\x15\x82\xd3\xe4\x93\x02\x0f\"\n/v1/switch:\x01*\x12\xa3\x01\n\x13\x45xportChannelBackup\x12!.lnrpc.ExportChannelBackupRequest\x1a\x14.lnrpc.ChannelBackup\"S\x82\xd3\xe4\x93\x02M\x12K/v1/channels/backup/{chan_point.funding_txid_str}/{chan_point.output_index}\x12q\n\x17\x45xportAllChannelBackups\x12\x1e.lnrpc.ChanBackupExportRequest\x1a\x19.lnrpc.ChanBackupSnapshot\"\x1b\x82\xd3\xe4\x93\x02\x15\x12\x13/v1/channels/backup\x12u\n\x10VerifyChanBackup\x12\x19.lnrpc.ChanBackupSnapshot\x1a\x1f.lnrpc.VerifyChanBackupResponse\"%\x82\xd3\xe4\x93\x02\x1f\"\x1a/v1/channels/backup/verify:\x01*\x12~\n\x15RestoreChannelBackups\x12\x1f.lnrpc.RestoreChanBackupRequest\x1a\x1c.lnrpc.RestoreBackupResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/channels/backup/restore:\x01*\x12Z\n\x17SubscribeChannelBackups\x12 .lnrpc.ChannelBackupSubscription\x1a\x19.lnrpc.ChanBackupSnapshot\"\x00\x30\x01\x12`\n\x0c\x42\x61keMacaroon\x12\x1a.lnrpc.BakeMacaroonRequest\x1a\x1b.lnrpc.BakeMacaroonResponse\"\x17\x82\xd3\xe4\x93\x02\x11\"\x0c/v1/macaroon:\x01*B\'Z%github.com/lightningnetwork/lnd/lnrpcb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
# NOTE(review): this file is protoc-generated protobuf binding code (lnrpc);
# the serialized_start/serialized_end values are offsets into
# DESCRIPTOR.serialized_pb, so do not hand-edit — regenerate from the .proto.
# EnumDescriptor for the top-level lnrpc.AddressType enum (4 values).
_ADDRESSTYPE = _descriptor.EnumDescriptor(
  name='AddressType',
  full_name='lnrpc.AddressType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='WITNESS_PUBKEY_HASH', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='NESTED_PUBKEY_HASH', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UNUSED_WITNESS_PUBKEY_HASH', index=2, number=2,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UNUSED_NESTED_PUBKEY_HASH', index=3, number=3,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=20700,
  serialized_end=20825,
)
_sym_db.RegisterEnumDescriptor(_ADDRESSTYPE)
# Public enum wrapper: exposes AddressType.WITNESS_PUBKEY_HASH etc. to callers.
AddressType = enum_type_wrapper.EnumTypeWrapper(_ADDRESSTYPE)
# EnumDescriptor for the top-level lnrpc.InvoiceHTLCState enum (3 values).
# Generated by protoc — do not hand-edit; regenerate from the .proto instead.
_INVOICEHTLCSTATE = _descriptor.EnumDescriptor(
  name='InvoiceHTLCState',
  full_name='lnrpc.InvoiceHTLCState',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='ACCEPTED', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SETTLED', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='CANCELED', index=2, number=2,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=20827,
  serialized_end=20886,
)
_sym_db.RegisterEnumDescriptor(_INVOICEHTLCSTATE)
# Public enum wrapper for InvoiceHTLCState.
InvoiceHTLCState = enum_type_wrapper.EnumTypeWrapper(_INVOICEHTLCSTATE)
# Module-level aliases for the AddressType enum values above.
WITNESS_PUBKEY_HASH = 0
NESTED_PUBKEY_HASH = 1
UNUSED_WITNESS_PUBKEY_HASH = 2
UNUSED_NESTED_PUBKEY_HASH = 3
# Module-level aliases for the InvoiceHTLCState enum values above.
ACCEPTED = 0
SETTLED = 1
CANCELED = 2
# --- Nested (message-scoped) enum descriptors ------------------------------
# Each of the following EnumDescriptors belongs to a message type defined
# elsewhere in this generated module; protoc emits them here and links them
# into their containing Descriptor later. Do not hand-edit.
# Enum lnrpc.ChannelCloseSummary.ClosureType (6 values).
_CHANNELCLOSESUMMARY_CLOSURETYPE = _descriptor.EnumDescriptor(
  name='ClosureType',
  full_name='lnrpc.ChannelCloseSummary.ClosureType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='COOPERATIVE_CLOSE', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='LOCAL_FORCE_CLOSE', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='REMOTE_FORCE_CLOSE', index=2, number=2,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='BREACH_CLOSE', index=3, number=3,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='FUNDING_CANCELED', index=4, number=4,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ABANDONED', index=5, number=5,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=5763,
  serialized_end=5901,
)
_sym_db.RegisterEnumDescriptor(_CHANNELCLOSESUMMARY_CLOSURETYPE)
# Enum lnrpc.Peer.SyncType (3 values).
_PEER_SYNCTYPE = _descriptor.EnumDescriptor(
  name='SyncType',
  full_name='lnrpc.Peer.SyncType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNKNOWN_SYNC', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ACTIVE_SYNC', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='PASSIVE_SYNC', index=2, number=2,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=6425,
  serialized_end=6488,
)
_sym_db.RegisterEnumDescriptor(_PEER_SYNCTYPE)
# Enum lnrpc.ChannelEventUpdate.UpdateType (4 values).
_CHANNELEVENTUPDATE_UPDATETYPE = _descriptor.EnumDescriptor(
  name='UpdateType',
  full_name='lnrpc.ChannelEventUpdate.UpdateType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='OPEN_CHANNEL', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='CLOSED_CHANNEL', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ACTIVE_CHANNEL', index=2, number=2,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='INACTIVE_CHANNEL', index=3, number=3,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=10722,
  serialized_end=10814,
)
_sym_db.RegisterEnumDescriptor(_CHANNELEVENTUPDATE_UPDATETYPE)
# Enum lnrpc.Invoice.InvoiceState (4 values).
_INVOICE_INVOICESTATE = _descriptor.EnumDescriptor(
  name='InvoiceState',
  full_name='lnrpc.Invoice.InvoiceState',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='OPEN', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SETTLED', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='CANCELED', index=2, number=2,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ACCEPTED', index=3, number=3,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=15854,
  serialized_end=15919,
)
_sym_db.RegisterEnumDescriptor(_INVOICE_INVOICESTATE)
# Enum lnrpc.Payment.PaymentStatus (4 values).
_PAYMENT_PAYMENTSTATUS = _descriptor.EnumDescriptor(
  name='PaymentStatus',
  full_name='lnrpc.Payment.PaymentStatus',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNKNOWN', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='IN_FLIGHT', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SUCCEEDED', index=2, number=2,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='FAILED', index=3, number=3,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=17341,
  serialized_end=17411,
)
_sym_db.RegisterEnumDescriptor(_PAYMENT_PAYMENTSTATUS)
# Enum lnrpc.HTLCAttempt.HTLCStatus (3 values).
_HTLCATTEMPT_HTLCSTATUS = _descriptor.EnumDescriptor(
  name='HTLCStatus',
  full_name='lnrpc.HTLCAttempt.HTLCStatus',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='IN_FLIGHT', index=0, number=0,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='SUCCEEDED', index=1, number=1,
      serialized_options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='FAILED', index=2, number=2,
      serialized_options=None,
      type=None),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=17604,
  serialized_end=17658,
)
_sym_db.RegisterEnumDescriptor(_HTLCATTEMPT_HTLCSTATUS)
# --- WalletUnlocker message descriptors: GenSeed / InitWallet --------------
# protoc-generated Descriptor objects; field `type`/`cpp_type` codes are the
# protobuf wire-type constants (e.g. type=12 bytes, type=9 string, type=5
# int32, type=11 message). Do not hand-edit; regenerate from the .proto.
# Message lnrpc.GenSeedRequest (2 bytes fields).
_GENSEEDREQUEST = _descriptor.Descriptor(
  name='GenSeedRequest',
  full_name='lnrpc.GenSeedRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='aezeed_passphrase', full_name='lnrpc.GenSeedRequest.aezeed_passphrase', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='seed_entropy', full_name='lnrpc.GenSeedRequest.seed_entropy', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=50,
  serialized_end=115,
)
# Message lnrpc.GenSeedResponse (repeated string mnemonic + bytes seed).
_GENSEEDRESPONSE = _descriptor.Descriptor(
  name='GenSeedResponse',
  full_name='lnrpc.GenSeedResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='cipher_seed_mnemonic', full_name='lnrpc.GenSeedResponse.cipher_seed_mnemonic', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='enciphered_seed', full_name='lnrpc.GenSeedResponse.enciphered_seed', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=117,
  serialized_end=189,
)
# Message lnrpc.InitWalletRequest (5 fields; channel_backups is a
# sub-message field, linked to its Descriptor later by protoc's wiring code).
_INITWALLETREQUEST = _descriptor.Descriptor(
  name='InitWalletRequest',
  full_name='lnrpc.InitWalletRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='wallet_password', full_name='lnrpc.InitWalletRequest.wallet_password', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cipher_seed_mnemonic', full_name='lnrpc.InitWalletRequest.cipher_seed_mnemonic', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='aezeed_passphrase', full_name='lnrpc.InitWalletRequest.aezeed_passphrase', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='recovery_window', full_name='lnrpc.InitWalletRequest.recovery_window', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_backups', full_name='lnrpc.InitWalletRequest.channel_backups', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=192,
  serialized_end=370,
)
# Message lnrpc.InitWalletResponse (empty message).
_INITWALLETRESPONSE = _descriptor.Descriptor(
  name='InitWalletResponse',
  full_name='lnrpc.InitWalletResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=372,
  serialized_end=392,
)
# --- WalletUnlocker message descriptors: UnlockWallet / ChangePassword -----
# protoc-generated; do not hand-edit. Regenerate from the .proto instead.
# Message lnrpc.UnlockWalletRequest (password, recovery window, optional
# channel_backups sub-message).
_UNLOCKWALLETREQUEST = _descriptor.Descriptor(
  name='UnlockWalletRequest',
  full_name='lnrpc.UnlockWalletRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='wallet_password', full_name='lnrpc.UnlockWalletRequest.wallet_password', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='recovery_window', full_name='lnrpc.UnlockWalletRequest.recovery_window', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_backups', full_name='lnrpc.UnlockWalletRequest.channel_backups', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=394,
  serialized_end=517,
)
# Message lnrpc.UnlockWalletResponse (empty message).
_UNLOCKWALLETRESPONSE = _descriptor.Descriptor(
  name='UnlockWalletResponse',
  full_name='lnrpc.UnlockWalletResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=519,
  serialized_end=541,
)
# Message lnrpc.ChangePasswordRequest (current + new password, both bytes).
_CHANGEPASSWORDREQUEST = _descriptor.Descriptor(
  name='ChangePasswordRequest',
  full_name='lnrpc.ChangePasswordRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='current_password', full_name='lnrpc.ChangePasswordRequest.current_password', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='new_password', full_name='lnrpc.ChangePasswordRequest.new_password', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=543,
  serialized_end=614,
)
# Message lnrpc.ChangePasswordResponse (empty message).
_CHANGEPASSWORDRESPONSE = _descriptor.Descriptor(
  name='ChangePasswordResponse',
  full_name='lnrpc.ChangePasswordResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=616,
  serialized_end=640,
)
# --- On-chain message descriptors: Utxo / Transaction ----------------------
# protoc-generated; these two messages carry explicit json_name overrides
# (snake_case preserved in JSON output). Do not hand-edit.
# Message lnrpc.Utxo (6 fields; `type` is the AddressType enum, `outpoint`
# is a sub-message field).
_UTXO = _descriptor.Descriptor(
  name='Utxo',
  full_name='lnrpc.Utxo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='type', full_name='lnrpc.Utxo.type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='address_type', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='address', full_name='lnrpc.Utxo.address', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='address', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amount_sat', full_name='lnrpc.Utxo.amount_sat', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amount_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pk_script', full_name='lnrpc.Utxo.pk_script', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pk_script', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='outpoint', full_name='lnrpc.Utxo.outpoint', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='outpoint', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='confirmations', full_name='lnrpc.Utxo.confirmations', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='confirmations', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=643,
  serialized_end=868,
)
# Message lnrpc.Transaction (9 fields describing an on-chain transaction).
_TRANSACTION = _descriptor.Descriptor(
  name='Transaction',
  full_name='lnrpc.Transaction',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='tx_hash', full_name='lnrpc.Transaction.tx_hash', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='tx_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amount', full_name='lnrpc.Transaction.amount', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amount', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_confirmations', full_name='lnrpc.Transaction.num_confirmations', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_confirmations', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='block_hash', full_name='lnrpc.Transaction.block_hash', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='block_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='block_height', full_name='lnrpc.Transaction.block_height', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='block_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='time_stamp', full_name='lnrpc.Transaction.time_stamp', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='time_stamp', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_fees', full_name='lnrpc.Transaction.total_fees', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_fees', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='dest_addresses', full_name='lnrpc.Transaction.dest_addresses', index=7,
      number=8, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='dest_addresses', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='raw_tx_hex', full_name='lnrpc.Transaction.raw_tx_hex', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='raw_tx_hex', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=871,
  serialized_end=1184,
)
# Message lnrpc.GetTransactionsRequest (empty message).
_GETTRANSACTIONSREQUEST = _descriptor.Descriptor(
  name='GetTransactionsRequest',
  full_name='lnrpc.GetTransactionsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1186,
  serialized_end=1210,
)
# Message lnrpc.TransactionDetails (repeated Transaction sub-messages).
_TRANSACTIONDETAILS = _descriptor.Descriptor(
  name='TransactionDetails',
  full_name='lnrpc.TransactionDetails',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='transactions', full_name='lnrpc.TransactionDetails.transactions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='transactions', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1212,
  serialized_end=1288,
)
# --- Payment message descriptors: FeeLimit / SendRequest.DestTlvEntry ------
# protoc-generated; do not hand-edit.
# Message lnrpc.FeeLimit: three int64 alternatives (fixed / fixed_msat /
# percent) grouped under the `limit` oneof, so at most one may be set.
# NOTE: field `fixed_msat` uses wire number 3 but appears at index 1 —
# descriptor index follows declaration order in the .proto, not field number.
_FEELIMIT = _descriptor.Descriptor(
  name='FeeLimit',
  full_name='lnrpc.FeeLimit',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='fixed', full_name='lnrpc.FeeLimit.fixed', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fixed_msat', full_name='lnrpc.FeeLimit.fixed_msat', index=1,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='percent', full_name='lnrpc.FeeLimit.percent', index=2,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='limit', full_name='lnrpc.FeeLimit.limit',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=1290,
  serialized_end=1367,
)
# Nested message lnrpc.SendRequest.DestTlvEntry — a synthesized map entry
# (serialized_options=_b('8\001') is the map_entry=true message option);
# uint64 key -> bytes value for SendRequest's dest TLV map field.
_SENDREQUEST_DESTTLVENTRY = _descriptor.Descriptor(
  name='DestTlvEntry',
  full_name='lnrpc.SendRequest.DestTlvEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='lnrpc.SendRequest.DestTlvEntry.key', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.SendRequest.DestTlvEntry.value', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=_b('8\001'),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1751,
  serialized_end=1797,
)
_SENDREQUEST = _descriptor.Descriptor(
name='SendRequest',
full_name='lnrpc.SendRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dest', full_name='lnrpc.SendRequest.dest', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dest_string', full_name='lnrpc.SendRequest.dest_string', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt', full_name='lnrpc.SendRequest.amt', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_msat', full_name='lnrpc.SendRequest.amt_msat', index=3,
number=12, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_hash', full_name='lnrpc.SendRequest.payment_hash', index=4,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_hash_string', full_name='lnrpc.SendRequest.payment_hash_string', index=5,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_request', full_name='lnrpc.SendRequest.payment_request', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='final_cltv_delta', full_name='lnrpc.SendRequest.final_cltv_delta', index=7,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_limit', full_name='lnrpc.SendRequest.fee_limit', index=8,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='outgoing_chan_id', full_name='lnrpc.SendRequest.outgoing_chan_id', index=9,
number=9, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('0\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='last_hop_pubkey', full_name='lnrpc.SendRequest.last_hop_pubkey', index=10,
number=13, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cltv_limit', full_name='lnrpc.SendRequest.cltv_limit', index=11,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dest_tlv', full_name='lnrpc.SendRequest.dest_tlv', index=12,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='allow_self_payment', full_name='lnrpc.SendRequest.allow_self_payment', index=13,
number=14, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_SENDREQUEST_DESTTLVENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1370,
serialized_end=1797,
)
_SENDRESPONSE = _descriptor.Descriptor(
name='SendResponse',
full_name='lnrpc.SendResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='payment_error', full_name='lnrpc.SendResponse.payment_error', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_error', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_preimage', full_name='lnrpc.SendResponse.payment_preimage', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_preimage', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_route', full_name='lnrpc.SendResponse.payment_route', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_route', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_hash', full_name='lnrpc.SendResponse.payment_hash', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='payment_hash', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1800,
serialized_end=1984,
)
_SENDTOROUTEREQUEST = _descriptor.Descriptor(
name='SendToRouteRequest',
full_name='lnrpc.SendToRouteRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='payment_hash', full_name='lnrpc.SendToRouteRequest.payment_hash', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_hash_string', full_name='lnrpc.SendToRouteRequest.payment_hash_string', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='route', full_name='lnrpc.SendToRouteRequest.route', index=2,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1986,
serialized_end=2096,
)
_CHANNELACCEPTREQUEST = _descriptor.Descriptor(
name='ChannelAcceptRequest',
full_name='lnrpc.ChannelAcceptRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node_pubkey', full_name='lnrpc.ChannelAcceptRequest.node_pubkey', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chain_hash', full_name='lnrpc.ChannelAcceptRequest.chain_hash', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pending_chan_id', full_name='lnrpc.ChannelAcceptRequest.pending_chan_id', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='funding_amt', full_name='lnrpc.ChannelAcceptRequest.funding_amt', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='push_amt', full_name='lnrpc.ChannelAcceptRequest.push_amt', index=4,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dust_limit', full_name='lnrpc.ChannelAcceptRequest.dust_limit', index=5,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_value_in_flight', full_name='lnrpc.ChannelAcceptRequest.max_value_in_flight', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel_reserve', full_name='lnrpc.ChannelAcceptRequest.channel_reserve', index=7,
number=8, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_htlc', full_name='lnrpc.ChannelAcceptRequest.min_htlc', index=8,
number=9, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_per_kw', full_name='lnrpc.ChannelAcceptRequest.fee_per_kw', index=9,
number=10, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='csv_delay', full_name='lnrpc.ChannelAcceptRequest.csv_delay', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_accepted_htlcs', full_name='lnrpc.ChannelAcceptRequest.max_accepted_htlcs', index=11,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel_flags', full_name='lnrpc.ChannelAcceptRequest.channel_flags', index=12,
number=13, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2099,
serialized_end=2408,
)
_CHANNELACCEPTRESPONSE = _descriptor.Descriptor(
name='ChannelAcceptResponse',
full_name='lnrpc.ChannelAcceptResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='accept', full_name='lnrpc.ChannelAcceptResponse.accept', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pending_chan_id', full_name='lnrpc.ChannelAcceptResponse.pending_chan_id', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2410,
serialized_end=2474,
)
_CHANNELPOINT = _descriptor.Descriptor(
name='ChannelPoint',
full_name='lnrpc.ChannelPoint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='funding_txid_bytes', full_name='lnrpc.ChannelPoint.funding_txid_bytes', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='funding_txid_bytes', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='funding_txid_str', full_name='lnrpc.ChannelPoint.funding_txid_str', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='funding_txid_str', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='output_index', full_name='lnrpc.ChannelPoint.output_index', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='output_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='funding_txid', full_name='lnrpc.ChannelPoint.funding_txid',
index=0, containing_type=None, fields=[]),
],
serialized_start=2477,
serialized_end=2639,
)
_OUTPOINT = _descriptor.Descriptor(
name='OutPoint',
full_name='lnrpc.OutPoint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txid_bytes', full_name='lnrpc.OutPoint.txid_bytes', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='txid_bytes', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='txid_str', full_name='lnrpc.OutPoint.txid_str', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='txid_str', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='output_index', full_name='lnrpc.OutPoint.output_index', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='output_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2641,
serialized_end=2747,
)
_LIGHTNINGADDRESS = _descriptor.Descriptor(
name='LightningAddress',
full_name='lnrpc.LightningAddress',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pubkey', full_name='lnrpc.LightningAddress.pubkey', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='pubkey', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='host', full_name='lnrpc.LightningAddress.host', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='host', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2749,
serialized_end=2811,
)
_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY = _descriptor.Descriptor(
name='AddrToAmountEntry',
full_name='lnrpc.EstimateFeeRequest.AddrToAmountEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='lnrpc.EstimateFeeRequest.AddrToAmountEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='lnrpc.EstimateFeeRequest.AddrToAmountEntry.value', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2924,
serialized_end=2975,
)
_ESTIMATEFEEREQUEST = _descriptor.Descriptor(
name='EstimateFeeRequest',
full_name='lnrpc.EstimateFeeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='AddrToAmount', full_name='lnrpc.EstimateFeeRequest.AddrToAmount', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_conf', full_name='lnrpc.EstimateFeeRequest.target_conf', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2814,
serialized_end=2975,
)
_ESTIMATEFEERESPONSE = _descriptor.Descriptor(
name='EstimateFeeResponse',
full_name='lnrpc.EstimateFeeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='fee_sat', full_name='lnrpc.EstimateFeeResponse.fee_sat', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_sat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feerate_sat_per_byte', full_name='lnrpc.EstimateFeeResponse.feerate_sat_per_byte', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='feerate_sat_per_byte', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2977,
serialized_end=3076,
)
_SENDMANYREQUEST_ADDRTOAMOUNTENTRY = _descriptor.Descriptor(
name='AddrToAmountEntry',
full_name='lnrpc.SendManyRequest.AddrToAmountEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='lnrpc.SendManyRequest.AddrToAmountEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='lnrpc.SendManyRequest.AddrToAmountEntry.value', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2924,
serialized_end=2975,
)
_SENDMANYREQUEST = _descriptor.Descriptor(
name='SendManyRequest',
full_name='lnrpc.SendManyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='AddrToAmount', full_name='lnrpc.SendManyRequest.AddrToAmount', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_conf', full_name='lnrpc.SendManyRequest.target_conf', index=1,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sat_per_byte', full_name='lnrpc.SendManyRequest.sat_per_byte', index=2,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_SENDMANYREQUEST_ADDRTOAMOUNTENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3079,
serialized_end=3256,
)
_SENDMANYRESPONSE = _descriptor.Descriptor(
name='SendManyResponse',
full_name='lnrpc.SendManyResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txid', full_name='lnrpc.SendManyResponse.txid', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='txid', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3258,
serialized_end=3296,
)
_SENDCOINSREQUEST = _descriptor.Descriptor(
name='SendCoinsRequest',
full_name='lnrpc.SendCoinsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='addr', full_name='lnrpc.SendCoinsRequest.addr', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount', full_name='lnrpc.SendCoinsRequest.amount', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_conf', full_name='lnrpc.SendCoinsRequest.target_conf', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sat_per_byte', full_name='lnrpc.SendCoinsRequest.sat_per_byte', index=3,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='send_all', full_name='lnrpc.SendCoinsRequest.send_all', index=4,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3298,
serialized_end=3407,
)
_SENDCOINSRESPONSE = _descriptor.Descriptor(
name='SendCoinsResponse',
full_name='lnrpc.SendCoinsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txid', full_name='lnrpc.SendCoinsResponse.txid', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='txid', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3409,
serialized_end=3448,
)
_LISTUNSPENTREQUEST = _descriptor.Descriptor(
name='ListUnspentRequest',
full_name='lnrpc.ListUnspentRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_confs', full_name='lnrpc.ListUnspentRequest.min_confs', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_confs', full_name='lnrpc.ListUnspentRequest.max_confs', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3450,
serialized_end=3508,
)
_LISTUNSPENTRESPONSE = _descriptor.Descriptor(
name='ListUnspentResponse',
full_name='lnrpc.ListUnspentResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='utxos', full_name='lnrpc.ListUnspentResponse.utxos', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='utxos', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3510,
serialized_end=3566,
)
_NEWADDRESSREQUEST = _descriptor.Descriptor(
name='NewAddressRequest',
full_name='lnrpc.NewAddressRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='lnrpc.NewAddressRequest.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3568,
serialized_end=3621,
)
_NEWADDRESSRESPONSE = _descriptor.Descriptor(
name='NewAddressResponse',
full_name='lnrpc.NewAddressResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='lnrpc.NewAddressResponse.address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='address', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3623,
serialized_end=3669,
)
# Descriptor for lnrpc.SignMessageRequest: one bytes field 'msg'
# (field number 1; type=12 is TYPE_BYTES, hence the _b("") default).
# NOTE(review): protoc-generated metadata — do not hand-edit; regenerate
# from the .proto source.
_SIGNMESSAGEREQUEST = _descriptor.Descriptor(
  name='SignMessageRequest',
  full_name='lnrpc.SignMessageRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='msg', full_name='lnrpc.SignMessageRequest.msg', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='msg', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3671,
  serialized_end=3709,
)
# Descriptor for lnrpc.SignMessageResponse: one string field 'signature'
# (field number 1).
_SIGNMESSAGERESPONSE = _descriptor.Descriptor(
  name='SignMessageResponse',
  full_name='lnrpc.SignMessageResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='signature', full_name='lnrpc.SignMessageResponse.signature', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='signature', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3711,
  serialized_end=3762,
)
# Descriptor for lnrpc.VerifyMessageRequest: bytes field 'msg' (number 1)
# and string field 'signature' (number 2).
# NOTE(review): protoc-generated metadata — do not hand-edit; regenerate
# from the .proto source.
_VERIFYMESSAGEREQUEST = _descriptor.Descriptor(
  name='VerifyMessageRequest',
  full_name='lnrpc.VerifyMessageRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='msg', full_name='lnrpc.VerifyMessageRequest.msg', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='msg', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='signature', full_name='lnrpc.VerifyMessageRequest.signature', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='signature', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3764,
  serialized_end=3834,
)
# Descriptor for lnrpc.VerifyMessageResponse: bool field 'valid' (number 1;
# type=8 is TYPE_BOOL) and string field 'pubkey' (number 2).
_VERIFYMESSAGERESPONSE = _descriptor.Descriptor(
  name='VerifyMessageResponse',
  full_name='lnrpc.VerifyMessageResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='valid', full_name='lnrpc.VerifyMessageResponse.valid', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='valid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pubkey', full_name='lnrpc.VerifyMessageResponse.pubkey', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pubkey', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3836,
  serialized_end=3905,
)
# Descriptor for lnrpc.ConnectPeerRequest: message-typed field 'addr'
# (number 1; type=11 is TYPE_MESSAGE — message_type is linked up later by the
# generated code) and bool field 'perm' (number 2).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_CONNECTPEERREQUEST = _descriptor.Descriptor(
  name='ConnectPeerRequest',
  full_name='lnrpc.ConnectPeerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='addr', full_name='lnrpc.ConnectPeerRequest.addr', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='perm', full_name='lnrpc.ConnectPeerRequest.perm', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3907,
  serialized_end=3980,
)
# Descriptor for lnrpc.ConnectPeerResponse: an empty message (no fields).
_CONNECTPEERRESPONSE = _descriptor.Descriptor(
  name='ConnectPeerResponse',
  full_name='lnrpc.ConnectPeerResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3982,
  serialized_end=4003,
)
# Descriptor for lnrpc.DisconnectPeerRequest: one string field 'pub_key'
# (number 1). json_name is the snake_case original here rather than the usual
# camelCase protoc default — presumably emitted with original-name JSON
# mapping; confirm against the generator flags if this matters.
# NOTE(review): protoc-generated metadata — do not hand-edit.
_DISCONNECTPEERREQUEST = _descriptor.Descriptor(
  name='DisconnectPeerRequest',
  full_name='lnrpc.DisconnectPeerRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.DisconnectPeerRequest.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4005,
  serialized_end=4054,
)
# Descriptor for lnrpc.DisconnectPeerResponse: an empty message (no fields).
_DISCONNECTPEERRESPONSE = _descriptor.Descriptor(
  name='DisconnectPeerResponse',
  full_name='lnrpc.DisconnectPeerResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4056,
  serialized_end=4080,
)
# Descriptor for the lnrpc.HTLC message: bool 'incoming' (1), int64 'amount'
# (2; type=3 is TYPE_INT64), bytes 'hash_lock' (3), uint32
# 'expiration_height' (4; type=13 is TYPE_UINT32).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_HTLC = _descriptor.Descriptor(
  name='HTLC',
  full_name='lnrpc.HTLC',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='incoming', full_name='lnrpc.HTLC.incoming', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='incoming', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amount', full_name='lnrpc.HTLC.amount', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amount', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='hash_lock', full_name='lnrpc.HTLC.hash_lock', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='hash_lock', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiration_height', full_name='lnrpc.HTLC.expiration_height', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiration_height', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4083,
  serialized_end=4217,
)
# Descriptor for the lnrpc.Channel message (fields 1-24). Mostly scalar
# bool/string/int64 fields plus: 'chan_id' (4; uint64 with the jstype option
# bytes _b('0\001'), so JSON/JS treats it as a string to avoid 64-bit
# precision loss) and repeated message field 'pending_htlcs' (15; label=3 is
# LABEL_REPEATED, message_type linked up later by the generated code).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_CHANNEL = _descriptor.Descriptor(
  name='Channel',
  full_name='lnrpc.Channel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='active', full_name='lnrpc.Channel.active', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='active', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_pubkey', full_name='lnrpc.Channel.remote_pubkey', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_pubkey', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.Channel.channel_point', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.Channel.chan_id', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.Channel.capacity', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_balance', full_name='lnrpc.Channel.local_balance', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_balance', full_name='lnrpc.Channel.remote_balance', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commit_fee', full_name='lnrpc.Channel.commit_fee', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='commit_fee', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commit_weight', full_name='lnrpc.Channel.commit_weight', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='commit_weight', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_per_kw', full_name='lnrpc.Channel.fee_per_kw', index=9,
      number=10, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_per_kw', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='unsettled_balance', full_name='lnrpc.Channel.unsettled_balance', index=10,
      number=11, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='unsettled_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_satoshis_sent', full_name='lnrpc.Channel.total_satoshis_sent', index=11,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_satoshis_sent', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_satoshis_received', full_name='lnrpc.Channel.total_satoshis_received', index=12,
      number=13, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_satoshis_received', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_updates', full_name='lnrpc.Channel.num_updates', index=13,
      number=14, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_updates', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_htlcs', full_name='lnrpc.Channel.pending_htlcs', index=14,
      number=15, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_htlcs', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='csv_delay', full_name='lnrpc.Channel.csv_delay', index=15,
      number=16, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='csv_delay', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private', full_name='lnrpc.Channel.private', index=16,
      number=17, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='private', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='initiator', full_name='lnrpc.Channel.initiator', index=17,
      number=18, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='initiator', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_status_flags', full_name='lnrpc.Channel.chan_status_flags', index=18,
      number=19, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_status_flags', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_chan_reserve_sat', full_name='lnrpc.Channel.local_chan_reserve_sat', index=19,
      number=20, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='local_chan_reserve_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_chan_reserve_sat', full_name='lnrpc.Channel.remote_chan_reserve_sat', index=20,
      number=21, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_chan_reserve_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='static_remote_key', full_name='lnrpc.Channel.static_remote_key', index=21,
      number=22, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='static_remote_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lifetime', full_name='lnrpc.Channel.lifetime', index=22,
      number=23, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='lifetime', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='uptime', full_name='lnrpc.Channel.uptime', index=23,
      number=24, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='uptime', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4220,
  serialized_end=5150,
)
# Descriptor for lnrpc.ListChannelsRequest: four bool filter fields
# (active_only, inactive_only, public_only, private_only; numbers 1-4).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_LISTCHANNELSREQUEST = _descriptor.Descriptor(
  name='ListChannelsRequest',
  full_name='lnrpc.ListChannelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='active_only', full_name='lnrpc.ListChannelsRequest.active_only', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inactive_only', full_name='lnrpc.ListChannelsRequest.inactive_only', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='public_only', full_name='lnrpc.ListChannelsRequest.public_only', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private_only', full_name='lnrpc.ListChannelsRequest.private_only', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5152,
  serialized_end=5260,
)
# Descriptor for lnrpc.ListChannelsResponse: repeated message field
# 'channels' — note the unusual field number 11 (not 1), as declared in the
# source .proto.
_LISTCHANNELSRESPONSE = _descriptor.Descriptor(
  name='ListChannelsResponse',
  full_name='lnrpc.ListChannelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channels', full_name='lnrpc.ListChannelsResponse.channels', index=0,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5262,
  serialized_end=5328,
)
# Descriptor for lnrpc.ChannelCloseSummary (fields 1-10). 'chan_id' (2)
# carries the jstype option bytes _b('0\001') like Channel.chan_id;
# 'close_type' (10) is an enum backed by the nested ClosureType descriptor
# referenced in enum_types below (defined elsewhere in this file).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_CHANNELCLOSESUMMARY = _descriptor.Descriptor(
  name='ChannelCloseSummary',
  full_name='lnrpc.ChannelCloseSummary',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_point', full_name='lnrpc.ChannelCloseSummary.channel_point', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ChannelCloseSummary.chan_id', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chain_hash', full_name='lnrpc.ChannelCloseSummary.chain_hash', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chain_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closing_tx_hash', full_name='lnrpc.ChannelCloseSummary.closing_tx_hash', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closing_tx_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_pubkey', full_name='lnrpc.ChannelCloseSummary.remote_pubkey', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='remote_pubkey', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ChannelCloseSummary.capacity', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='close_height', full_name='lnrpc.ChannelCloseSummary.close_height', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='close_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settled_balance', full_name='lnrpc.ChannelCloseSummary.settled_balance', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settled_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='time_locked_balance', full_name='lnrpc.ChannelCloseSummary.time_locked_balance', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='time_locked_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='close_type', full_name='lnrpc.ChannelCloseSummary.close_type', index=9,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='close_type', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CHANNELCLOSESUMMARY_CLOSURETYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5331,
  serialized_end=5901,
)
# Descriptor for lnrpc.ClosedChannelsRequest: six bool filter fields
# (cooperative, local_force, remote_force, breach, funding_canceled,
# abandoned; numbers 1-6).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_CLOSEDCHANNELSREQUEST = _descriptor.Descriptor(
  name='ClosedChannelsRequest',
  full_name='lnrpc.ClosedChannelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='cooperative', full_name='lnrpc.ClosedChannelsRequest.cooperative', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_force', full_name='lnrpc.ClosedChannelsRequest.local_force', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='remote_force', full_name='lnrpc.ClosedChannelsRequest.remote_force', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='breach', full_name='lnrpc.ClosedChannelsRequest.breach', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='funding_canceled', full_name='lnrpc.ClosedChannelsRequest.funding_canceled', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='abandoned', full_name='lnrpc.ClosedChannelsRequest.abandoned', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5904,
  serialized_end=6052,
)
# Descriptor for lnrpc.ClosedChannelsResponse: repeated message field
# 'channels' (number 1).
_CLOSEDCHANNELSRESPONSE = _descriptor.Descriptor(
  name='ClosedChannelsResponse',
  full_name='lnrpc.ClosedChannelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channels', full_name='lnrpc.ClosedChannelsResponse.channels', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6054,
  serialized_end=6134,
)
# Descriptor for the lnrpc.Peer message. Field numbers are 1 and 3-10 (no
# field 2 — presumably a reserved/removed field in the source .proto).
# 'sync_type' (10) is an enum backed by the nested SyncType descriptor
# referenced in enum_types below (defined elsewhere in this file).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_PEER = _descriptor.Descriptor(
  name='Peer',
  full_name='lnrpc.Peer',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.Peer.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='address', full_name='lnrpc.Peer.address', index=1,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='address', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='bytes_sent', full_name='lnrpc.Peer.bytes_sent', index=2,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='bytes_sent', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='bytes_recv', full_name='lnrpc.Peer.bytes_recv', index=3,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='bytes_recv', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_sent', full_name='lnrpc.Peer.sat_sent', index=4,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sat_sent', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sat_recv', full_name='lnrpc.Peer.sat_recv', index=5,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sat_recv', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inbound', full_name='lnrpc.Peer.inbound', index=6,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='inbound', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ping_time', full_name='lnrpc.Peer.ping_time', index=7,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='ping_time', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sync_type', full_name='lnrpc.Peer.sync_type', index=8,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sync_type', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _PEER_SYNCTYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6137,
  serialized_end=6488,
)
# Descriptor for lnrpc.ListPeersRequest: an empty message (no fields).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_LISTPEERSREQUEST = _descriptor.Descriptor(
  name='ListPeersRequest',
  full_name='lnrpc.ListPeersRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6490,
  serialized_end=6508,
)
# Descriptor for lnrpc.ListPeersResponse: repeated message field 'peers'
# (number 1).
_LISTPEERSRESPONSE = _descriptor.Descriptor(
  name='ListPeersResponse',
  full_name='lnrpc.ListPeersResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='peers', full_name='lnrpc.ListPeersResponse.peers', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='peers', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6510,
  serialized_end=6564,
)
# Descriptor for lnrpc.GetInfoRequest: an empty message (no fields).
# NOTE(review): protoc-generated metadata — do not hand-edit.
_GETINFOREQUEST = _descriptor.Descriptor(
  name='GetInfoRequest',
  full_name='lnrpc.GetInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6566,
  serialized_end=6582,
)
_GETINFORESPONSE = _descriptor.Descriptor(
name='GetInfoResponse',
full_name='lnrpc.GetInfoResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='identity_pubkey', full_name='lnrpc.GetInfoResponse.identity_pubkey', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='identity_pubkey', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alias', full_name='lnrpc.GetInfoResponse.alias', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='alias', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_pending_channels', full_name='lnrpc.GetInfoResponse.num_pending_channels', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_pending_channels', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_active_channels', full_name='lnrpc.GetInfoResponse.num_active_channels', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_active_channels', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_peers', full_name='lnrpc.GetInfoResponse.num_peers', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_peers', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='block_height', full_name='lnrpc.GetInfoResponse.block_height', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='block_height', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='block_hash', full_name='lnrpc.GetInfoResponse.block_hash', index=6,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='block_hash', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='synced_to_chain', full_name='lnrpc.GetInfoResponse.synced_to_chain', index=7,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='synced_to_chain', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='testnet', full_name='lnrpc.GetInfoResponse.testnet', index=8,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='testnet', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='uris', full_name='lnrpc.GetInfoResponse.uris', index=9,
number=12, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='uris', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='best_header_timestamp', full_name='lnrpc.GetInfoResponse.best_header_timestamp', index=10,
number=13, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='best_header_timestamp', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='lnrpc.GetInfoResponse.version', index=11,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='version', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_inactive_channels', full_name='lnrpc.GetInfoResponse.num_inactive_channels', index=12,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_inactive_channels', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chains', full_name='lnrpc.GetInfoResponse.chains', index=13,
number=16, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chains', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='color', full_name='lnrpc.GetInfoResponse.color', index=14,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='color', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='synced_to_graph', full_name='lnrpc.GetInfoResponse.synced_to_graph', index=15,
number=18, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='synced_to_graph', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6585,
serialized_end=7200,
)
_CHAIN = _descriptor.Descriptor(
name='Chain',
full_name='lnrpc.Chain',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chain', full_name='lnrpc.Chain.chain', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chain', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='network', full_name='lnrpc.Chain.network', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='network', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7202,
serialized_end=7257,
)
_CONFIRMATIONUPDATE = _descriptor.Descriptor(
name='ConfirmationUpdate',
full_name='lnrpc.ConfirmationUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='block_sha', full_name='lnrpc.ConfirmationUpdate.block_sha', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='block_height', full_name='lnrpc.ConfirmationUpdate.block_height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_confs_left', full_name='lnrpc.ConfirmationUpdate.num_confs_left', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7259,
serialized_end=7344,
)
_CHANNELOPENUPDATE = _descriptor.Descriptor(
name='ChannelOpenUpdate',
full_name='lnrpc.ChannelOpenUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel_point', full_name='lnrpc.ChannelOpenUpdate.channel_point', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7346,
serialized_end=7424,
)
_CHANNELCLOSEUPDATE = _descriptor.Descriptor(
name='ChannelCloseUpdate',
full_name='lnrpc.ChannelCloseUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='closing_txid', full_name='lnrpc.ChannelCloseUpdate.closing_txid', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='closing_txid', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='success', full_name='lnrpc.ChannelCloseUpdate.success', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='success', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7426,
serialized_end=7508,
)
_CLOSECHANNELREQUEST = _descriptor.Descriptor(
name='CloseChannelRequest',
full_name='lnrpc.CloseChannelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel_point', full_name='lnrpc.CloseChannelRequest.channel_point', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='force', full_name='lnrpc.CloseChannelRequest.force', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_conf', full_name='lnrpc.CloseChannelRequest.target_conf', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sat_per_byte', full_name='lnrpc.CloseChannelRequest.sat_per_byte', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7510,
serialized_end=7633,
)
_CLOSESTATUSUPDATE = _descriptor.Descriptor(
name='CloseStatusUpdate',
full_name='lnrpc.CloseStatusUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='close_pending', full_name='lnrpc.CloseStatusUpdate.close_pending', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='close_pending', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_close', full_name='lnrpc.CloseStatusUpdate.chan_close', index=1,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_close', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='update', full_name='lnrpc.CloseStatusUpdate.update',
index=0, containing_type=None, fields=[]),
],
serialized_start=7636,
serialized_end=7788,
)
_PENDINGUPDATE = _descriptor.Descriptor(
name='PendingUpdate',
full_name='lnrpc.PendingUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='txid', full_name='lnrpc.PendingUpdate.txid', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='txid', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='output_index', full_name='lnrpc.PendingUpdate.output_index', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='output_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7790,
serialized_end=7861,
)
_OPENCHANNELREQUEST = _descriptor.Descriptor(
name='OpenChannelRequest',
full_name='lnrpc.OpenChannelRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node_pubkey', full_name='lnrpc.OpenChannelRequest.node_pubkey', index=0,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='node_pubkey', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='node_pubkey_string', full_name='lnrpc.OpenChannelRequest.node_pubkey_string', index=1,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), json_name='node_pubkey_string', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='local_funding_amount', full_name='lnrpc.OpenChannelRequest.local_funding_amount', index=2,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='local_funding_amount', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='push_sat', full_name='lnrpc.OpenChannelRequest.push_sat', index=3,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='push_sat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_conf', full_name='lnrpc.OpenChannelRequest.target_conf', index=4,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sat_per_byte', full_name='lnrpc.OpenChannelRequest.sat_per_byte', index=5,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='private', full_name='lnrpc.OpenChannelRequest.private', index=6,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='private', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_htlc_msat', full_name='lnrpc.OpenChannelRequest.min_htlc_msat', index=7,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='min_htlc_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote_csv_delay', full_name='lnrpc.OpenChannelRequest.remote_csv_delay', index=8,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='remote_csv_delay', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='min_confs', full_name='lnrpc.OpenChannelRequest.min_confs', index=9,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='min_confs', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='spend_unconfirmed', full_name='lnrpc.OpenChannelRequest.spend_unconfirmed', index=10,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='spend_unconfirmed', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7864,
serialized_end=8277,
)
_OPENSTATUSUPDATE = _descriptor.Descriptor(
name='OpenStatusUpdate',
full_name='lnrpc.OpenStatusUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='chan_pending', full_name='lnrpc.OpenStatusUpdate.chan_pending', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_pending', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_open', full_name='lnrpc.OpenStatusUpdate.chan_open', index=1,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='chan_open', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='update', full_name='lnrpc.OpenStatusUpdate.update',
index=0, containing_type=None, fields=[]),
],
serialized_start=8280,
serialized_end=8426,
)
_PENDINGHTLC = _descriptor.Descriptor(
name='PendingHTLC',
full_name='lnrpc.PendingHTLC',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='incoming', full_name='lnrpc.PendingHTLC.incoming', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='incoming', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount', full_name='lnrpc.PendingHTLC.amount', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amount', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='outpoint', full_name='lnrpc.PendingHTLC.outpoint', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='outpoint', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='maturity_height', full_name='lnrpc.PendingHTLC.maturity_height', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='maturity_height', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blocks_til_maturity', full_name='lnrpc.PendingHTLC.blocks_til_maturity', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='blocks_til_maturity', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stage', full_name='lnrpc.PendingHTLC.stage', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='stage', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8429,
serialized_end=8636,
)
_PENDINGCHANNELSREQUEST = _descriptor.Descriptor(
name='PendingChannelsRequest',
full_name='lnrpc.PendingChannelsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8638,
serialized_end=8662,
)
_PENDINGCHANNELSRESPONSE_PENDINGCHANNEL = _descriptor.Descriptor(
name='PendingChannel',
full_name='lnrpc.PendingChannelsResponse.PendingChannel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='remote_node_pub', full_name='lnrpc.PendingChannelsResponse.PendingChannel.remote_node_pub', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='remote_node_pub', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel_point', full_name='lnrpc.PendingChannelsResponse.PendingChannel.channel_point', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='capacity', full_name='lnrpc.PendingChannelsResponse.PendingChannel.capacity', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='capacity', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='local_balance', full_name='lnrpc.PendingChannelsResponse.PendingChannel.local_balance', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='local_balance', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote_balance', full_name='lnrpc.PendingChannelsResponse.PendingChannel.remote_balance', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='remote_balance', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='local_chan_reserve_sat', full_name='lnrpc.PendingChannelsResponse.PendingChannel.local_chan_reserve_sat', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='local_chan_reserve_sat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='remote_chan_reserve_sat', full_name='lnrpc.PendingChannelsResponse.PendingChannel.remote_chan_reserve_sat', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='remote_chan_reserve_sat', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9185,
serialized_end=9501,
)
_PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL = _descriptor.Descriptor(
name='PendingOpenChannel',
full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.channel', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='channel', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='confirmation_height', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.confirmation_height', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='confirmation_height', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commit_fee', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.commit_fee', index=2,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='commit_fee', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commit_weight', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.commit_weight', index=3,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='commit_weight', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_per_kw', full_name='lnrpc.PendingChannelsResponse.PendingOpenChannel.fee_per_kw', index=4,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_per_kw', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9504,
serialized_end=9749,
)
_PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL = _descriptor.Descriptor(
name='WaitingCloseChannel',
full_name='lnrpc.PendingChannelsResponse.WaitingCloseChannel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='lnrpc.PendingChannelsResponse.WaitingCloseChannel.channel', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='limbo_balance', full_name='lnrpc.PendingChannelsResponse.WaitingCloseChannel.limbo_balance', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='limbo_balance', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9751,
serialized_end=9874,
)
_PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL = _descriptor.Descriptor(
name='ClosedChannel',
full_name='lnrpc.PendingChannelsResponse.ClosedChannel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='lnrpc.PendingChannelsResponse.ClosedChannel.channel', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='closing_txid', full_name='lnrpc.PendingChannelsResponse.ClosedChannel.closing_txid', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='closing_txid', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9876,
serialized_end=9991,
)
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL = _descriptor.Descriptor(
name='ForceClosedChannel',
full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.channel', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='channel', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='closing_txid', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.closing_txid', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='closing_txid', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='limbo_balance', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.limbo_balance', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='limbo_balance', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='maturity_height', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.maturity_height', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='maturity_height', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blocks_til_maturity', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.blocks_til_maturity', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='blocks_til_maturity', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='recovered_balance', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.recovered_balance', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='recovered_balance', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pending_htlcs', full_name='lnrpc.PendingChannelsResponse.ForceClosedChannel.pending_htlcs', index=6,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='pending_htlcs', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=9994,
serialized_end=10357,
)
# Generated descriptor for the lnrpc.PendingChannelsResponse message.
# This file is protoc output: edit the .proto source, not this Python.
# Field `type` codes follow descriptor.proto (3=int64, 11=message);
# label=3 marks a `repeated` field.
_PENDINGCHANNELSRESPONSE = _descriptor.Descriptor(
  name='PendingChannelsResponse',
  full_name='lnrpc.PendingChannelsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total_limbo_balance', full_name='lnrpc.PendingChannelsResponse.total_limbo_balance', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_limbo_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_open_channels', full_name='lnrpc.PendingChannelsResponse.pending_open_channels', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_open_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_closing_channels', full_name='lnrpc.PendingChannelsResponse.pending_closing_channels', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_closing_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_force_closing_channels', full_name='lnrpc.PendingChannelsResponse.pending_force_closing_channels', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_force_closing_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='waiting_close_channels', full_name='lnrpc.PendingChannelsResponse.waiting_close_channels', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='waiting_close_channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  # The five channel-state messages nested inside PendingChannelsResponse.
  nested_types=[_PENDINGCHANNELSRESPONSE_PENDINGCHANNEL, _PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL, _PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL, _PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL, _PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL, ],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside the file's serialized descriptor blob.
  serialized_start=8665,
  serialized_end=10357,
)
# Generated descriptor for lnrpc.ChannelEventSubscription — an empty
# request message (no fields). Protoc output; do not edit by hand.
_CHANNELEVENTSUBSCRIPTION = _descriptor.Descriptor(
  name='ChannelEventSubscription',
  full_name='lnrpc.ChannelEventSubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10359,
  serialized_end=10385,
)
# Generated descriptor for lnrpc.ChannelEventUpdate. The four message
# fields (open/closed/active/inactive channel) are members of the
# `channel` oneof declared below; `type` (type=14) is an enum field
# backed by _CHANNELEVENTUPDATE_UPDATETYPE. Protoc output; do not edit.
_CHANNELEVENTUPDATE = _descriptor.Descriptor(
  name='ChannelEventUpdate',
  full_name='lnrpc.ChannelEventUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='open_channel', full_name='lnrpc.ChannelEventUpdate.open_channel', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='open_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closed_channel', full_name='lnrpc.ChannelEventUpdate.closed_channel', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='closed_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='active_channel', full_name='lnrpc.ChannelEventUpdate.active_channel', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='active_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inactive_channel', full_name='lnrpc.ChannelEventUpdate.inactive_channel', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='inactive_channel', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='type', full_name='lnrpc.ChannelEventUpdate.type', index=4,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='type', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CHANNELEVENTUPDATE_UPDATETYPE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # The oneof grouping for the four channel payload fields above.
    _descriptor.OneofDescriptor(
      name='channel', full_name='lnrpc.ChannelEventUpdate.channel',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=10388,
  serialized_end=10825,
)
# Generated descriptor for lnrpc.WalletBalanceRequest — an empty request
# message (no fields). Protoc output; do not edit by hand.
_WALLETBALANCEREQUEST = _descriptor.Descriptor(
  name='WalletBalanceRequest',
  full_name='lnrpc.WalletBalanceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10827,
  serialized_end=10849,
)
# Generated descriptor for lnrpc.WalletBalanceResponse: three int64
# (type=3) balance fields. Protoc output; do not edit by hand.
_WALLETBALANCERESPONSE = _descriptor.Descriptor(
  name='WalletBalanceResponse',
  full_name='lnrpc.WalletBalanceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total_balance', full_name='lnrpc.WalletBalanceResponse.total_balance', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='confirmed_balance', full_name='lnrpc.WalletBalanceResponse.confirmed_balance', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='confirmed_balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='unconfirmed_balance', full_name='lnrpc.WalletBalanceResponse.unconfirmed_balance', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='unconfirmed_balance', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10852,
  serialized_end=11009,
)
# Generated descriptor for lnrpc.ChannelBalanceRequest — an empty request
# message (no fields). Protoc output; do not edit by hand.
_CHANNELBALANCEREQUEST = _descriptor.Descriptor(
  name='ChannelBalanceRequest',
  full_name='lnrpc.ChannelBalanceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11011,
  serialized_end=11034,
)
# Generated descriptor for lnrpc.ChannelBalanceResponse: two int64
# (type=3) balance fields. Protoc output; do not edit by hand.
_CHANNELBALANCERESPONSE = _descriptor.Descriptor(
  name='ChannelBalanceResponse',
  full_name='lnrpc.ChannelBalanceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='balance', full_name='lnrpc.ChannelBalanceResponse.balance', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='balance', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pending_open_balance', full_name='lnrpc.ChannelBalanceResponse.pending_open_balance', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_open_balance', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11036,
  serialized_end=11138,
)
# Generated descriptor for lnrpc.QueryRoutesRequest. Note field numbers
# are not sequential with declaration order (amt_msat=12, cltv_limit=11
# were added later). Protoc output; do not edit by hand.
_QUERYROUTESREQUEST = _descriptor.Descriptor(
  name='QueryRoutesRequest',
  full_name='lnrpc.QueryRoutesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.QueryRoutesRequest.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt', full_name='lnrpc.QueryRoutesRequest.amt', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_msat', full_name='lnrpc.QueryRoutesRequest.amt_msat', index=2,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='final_cltv_delta', full_name='lnrpc.QueryRoutesRequest.final_cltv_delta', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_limit', full_name='lnrpc.QueryRoutesRequest.fee_limit', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ignored_nodes', full_name='lnrpc.QueryRoutesRequest.ignored_nodes', index=5,
      number=6, type=12, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ignored_edges', full_name='lnrpc.QueryRoutesRequest.ignored_edges', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # Option bytes \030\001 encode field option 3 (deprecated) = true —
      # presumably `[deprecated = true]` in the .proto; confirm there.
      serialized_options=_b('\030\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='source_pub_key', full_name='lnrpc.QueryRoutesRequest.source_pub_key', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='use_mission_control', full_name='lnrpc.QueryRoutesRequest.use_mission_control', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ignored_pairs', full_name='lnrpc.QueryRoutesRequest.ignored_pairs', index=9,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_limit', full_name='lnrpc.QueryRoutesRequest.cltv_limit', index=10,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11141,
  serialized_end=11460,
)
# Generated descriptor for lnrpc.NodePair: two bytes (type=12) fields.
# The proto field is named `from` (a Python keyword); the generated
# message exposes it via getattr/setattr rather than attribute syntax.
_NODEPAIR = _descriptor.Descriptor(
  name='NodePair',
  full_name='lnrpc.NodePair',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='from', full_name='lnrpc.NodePair.from', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='to', full_name='lnrpc.NodePair.to', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11462,
  serialized_end=11498,
)
# Generated descriptor for lnrpc.EdgeLocator: a uint64 (type=4) channel
# id plus a bool direction flag. Protoc output; do not edit by hand.
_EDGELOCATOR = _descriptor.Descriptor(
  name='EdgeLocator',
  full_name='lnrpc.EdgeLocator',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_id', full_name='lnrpc.EdgeLocator.channel_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # Option bytes 0\001 (0x30 0x01) encode field option 6 (jstype) = 1 —
      # presumably `[jstype = JS_STRING]`; confirm against the .proto.
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='direction_reverse', full_name='lnrpc.EdgeLocator.direction_reverse', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11500,
  serialized_end=11564,
)
# Generated descriptor for lnrpc.QueryRoutesResponse: repeated Route
# messages plus a double (type=1) success probability. Protoc output.
_QUERYROUTESRESPONSE = _descriptor.Descriptor(
  name='QueryRoutesResponse',
  full_name='lnrpc.QueryRoutesResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='routes', full_name='lnrpc.QueryRoutesResponse.routes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='routes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='success_prob', full_name='lnrpc.QueryRoutesResponse.success_prob', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='success_prob', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11566,
  serialized_end=11661,
)
# Generated descriptor for lnrpc.Hop (one hop of a payment route).
# `amt_to_forward` and `fee` carry deprecation option bytes in favor of
# the *_msat variants. Protoc output; do not edit by hand.
_HOP = _descriptor.Descriptor(
  name='Hop',
  full_name='lnrpc.Hop',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.Hop.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # 0\001 option bytes — presumably `[jstype = JS_STRING]`; confirm in .proto.
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_capacity', full_name='lnrpc.Hop.chan_capacity', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_to_forward', full_name='lnrpc.Hop.amt_to_forward', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # \030\001 option bytes — presumably `[deprecated = true]`.
      serialized_options=_b('\030\001'), json_name='amt_to_forward', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee', full_name='lnrpc.Hop.fee', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # \030\001 option bytes — presumably `[deprecated = true]`.
      serialized_options=_b('\030\001'), json_name='fee', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry', full_name='lnrpc.Hop.expiry', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_to_forward_msat', full_name='lnrpc.Hop.amt_to_forward_msat', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_to_forward_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_msat', full_name='lnrpc.Hop.fee_msat', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.Hop.pub_key', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='tlv_payload', full_name='lnrpc.Hop.tlv_payload', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='tlv_payload', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='mpp_record', full_name='lnrpc.Hop.mpp_record', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='mpp_record', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11664,
  serialized_end=12015,
)
# Generated descriptor for lnrpc.MPPRecord (multi-path payment record).
# Note field numbers (11, 10) intentionally differ from declaration
# order; wire compatibility depends on them staying as-is.
_MPPRECORD = _descriptor.Descriptor(
  name='MPPRecord',
  full_name='lnrpc.MPPRecord',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='payment_addr', full_name='lnrpc.MPPRecord.payment_addr', index=0,
      number=11, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_addr', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_amt_msat', full_name='lnrpc.MPPRecord.total_amt_msat', index=1,
      number=10, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_amt_msat', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12017,
  serialized_end=12104,
)
# Generated descriptor for lnrpc.Route. `total_fees` and `total_amt`
# carry deprecation option bytes in favor of the *_msat variants.
_ROUTE = _descriptor.Descriptor(
  name='Route',
  full_name='lnrpc.Route',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='total_time_lock', full_name='lnrpc.Route.total_time_lock', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_time_lock', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_fees', full_name='lnrpc.Route.total_fees', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # \030\001 option bytes — presumably `[deprecated = true]`.
      serialized_options=_b('\030\001'), json_name='total_fees', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_amt', full_name='lnrpc.Route.total_amt', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      # \030\001 option bytes — presumably `[deprecated = true]`.
      serialized_options=_b('\030\001'), json_name='total_amt', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='hops', full_name='lnrpc.Route.hops', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='hops', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_fees_msat', full_name='lnrpc.Route.total_fees_msat', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_fees_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_amt_msat', full_name='lnrpc.Route.total_amt_msat', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_amt_msat', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12107,
  serialized_end=12340,
)
# Generated descriptor for lnrpc.NodeInfoRequest: a string pubkey and a
# bool flag requesting channel details. Protoc output; do not edit.
_NODEINFOREQUEST = _descriptor.Descriptor(
  name='NodeInfoRequest',
  full_name='lnrpc.NodeInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.NodeInfoRequest.pub_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='include_channels', full_name='lnrpc.NodeInfoRequest.include_channels', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12342,
  serialized_end=12402,
)
# Generated descriptor for lnrpc.NodeInfo: node summary plus its
# repeated channel edges. Protoc output; do not edit by hand.
_NODEINFO = _descriptor.Descriptor(
  name='NodeInfo',
  full_name='lnrpc.NodeInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node', full_name='lnrpc.NodeInfo.node', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_channels', full_name='lnrpc.NodeInfo.num_channels', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_capacity', full_name='lnrpc.NodeInfo.total_capacity', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channels', full_name='lnrpc.NodeInfo.channels', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channels', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12405,
  serialized_end=12581,
)
# Generated descriptor for lnrpc.LightningNode: graph-node metadata
# (last update, pubkey, alias, addresses, color). Protoc output.
_LIGHTNINGNODE = _descriptor.Descriptor(
  name='LightningNode',
  full_name='lnrpc.LightningNode',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='last_update', full_name='lnrpc.LightningNode.last_update', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='last_update', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pub_key', full_name='lnrpc.LightningNode.pub_key', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pub_key', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='alias', full_name='lnrpc.LightningNode.alias', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='alias', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='addresses', full_name='lnrpc.LightningNode.addresses', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='addresses', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='color', full_name='lnrpc.LightningNode.color', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='color', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12584,
  serialized_end=12753,
)
# Generated descriptor for lnrpc.NodeAddress: two string (type=9)
# fields describing one network address. Protoc output; do not edit.
_NODEADDRESS = _descriptor.Descriptor(
  name='NodeAddress',
  full_name='lnrpc.NodeAddress',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='network', full_name='lnrpc.NodeAddress.network', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='network', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='addr', full_name='lnrpc.NodeAddress.addr', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='addr', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12755,
  serialized_end=12814,
)
# Generated descriptor for lnrpc.RoutingPolicy: per-direction channel
# routing parameters (timelock delta, HTLC bounds, fees, disabled flag).
_ROUTINGPOLICY = _descriptor.Descriptor(
  name='RoutingPolicy',
  full_name='lnrpc.RoutingPolicy',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='time_lock_delta', full_name='lnrpc.RoutingPolicy.time_lock_delta', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='time_lock_delta', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_htlc', full_name='lnrpc.RoutingPolicy.min_htlc', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='min_htlc', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_base_msat', full_name='lnrpc.RoutingPolicy.fee_base_msat', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_base_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_rate_milli_msat', full_name='lnrpc.RoutingPolicy.fee_rate_milli_msat', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_rate_milli_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='disabled', full_name='lnrpc.RoutingPolicy.disabled', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='disabled', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_htlc_msat', full_name='lnrpc.RoutingPolicy.max_htlc_msat', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_htlc_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_update', full_name='lnrpc.RoutingPolicy.last_update', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='last_update', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12817,
  serialized_end=13090,
)
# Machine-generated protobuf Descriptor for the lnrpc.ChannelEdge message
# (channel topology entry: the two endpoint node pubkeys, the funding
# outpoint, capacity, and each side's routing policy).  Do not edit by
# hand; regenerate from the .proto file.  serialized_start/serialized_end
# are byte offsets of this message inside the file's serialized
# FileDescriptorProto (DESCRIPTOR).
# NOTE(review): serialized_options=_b('0\001') on channel_id appears to be
# an encoded field option (likely jstype) — confirm against the .proto;
# _b('\030\001') on last_update looks like the deprecated=true option.
_CHANNELEDGE = _descriptor.Descriptor(
  name='ChannelEdge',
  full_name='lnrpc.ChannelEdge',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='channel_id', full_name='lnrpc.ChannelEdge.channel_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='channel_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ChannelEdge.chan_point', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_update', full_name='lnrpc.ChannelEdge.last_update', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='last_update', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node1_pub', full_name='lnrpc.ChannelEdge.node1_pub', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node1_pub', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node2_pub', full_name='lnrpc.ChannelEdge.node2_pub', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node2_pub', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ChannelEdge.capacity', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node1_policy', full_name='lnrpc.ChannelEdge.node1_policy', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node1_policy', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='node2_policy', full_name='lnrpc.ChannelEdge.node2_policy', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node2_policy', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13093,
  serialized_end=13416,
  )
# Machine-generated protobuf Descriptor for the lnrpc.ChannelGraphRequest
# message (single bool flag include_unannounced).  Do not edit by hand;
# regenerate from the .proto file.
_CHANNELGRAPHREQUEST = _descriptor.Descriptor(
  name='ChannelGraphRequest',
  full_name='lnrpc.ChannelGraphRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='include_unannounced', full_name='lnrpc.ChannelGraphRequest.include_unannounced', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='include_unannounced', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13418,
  serialized_end=13489,
  )
# Machine-generated protobuf Descriptor for the lnrpc.ChannelGraph message
# (two repeated message fields: nodes and edges; label=3 marks a repeated
# field).  Do not edit by hand; regenerate from the .proto file.
_CHANNELGRAPH = _descriptor.Descriptor(
  name='ChannelGraph',
  full_name='lnrpc.ChannelGraph',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='nodes', full_name='lnrpc.ChannelGraph.nodes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='nodes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='edges', full_name='lnrpc.ChannelGraph.edges', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='edges', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13491,
  serialized_end=13591,
  )
# Machine-generated protobuf Descriptor for the lnrpc.ChanInfoRequest
# message (single uint64 chan_id).  Do not edit by hand; regenerate from
# the .proto file.
# NOTE(review): serialized_options=_b('0\001') appears to be an encoded
# field option (likely jstype) — confirm against the .proto.
_CHANINFOREQUEST = _descriptor.Descriptor(
  name='ChanInfoRequest',
  full_name='lnrpc.ChanInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ChanInfoRequest.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13593,
  serialized_end=13631,
  )
# Machine-generated protobuf Descriptor for the empty
# lnrpc.NetworkInfoRequest message (no fields).  Do not edit by hand;
# regenerate from the .proto file.
_NETWORKINFOREQUEST = _descriptor.Descriptor(
  name='NetworkInfoRequest',
  full_name='lnrpc.NetworkInfoRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13633,
  serialized_end=13653,
  )
# Machine-generated protobuf Descriptor for the lnrpc.NetworkInfo message
# (aggregate graph statistics: node/channel counts, degree and channel-size
# statistics, total capacity, zombie-channel count).  Do not edit by hand;
# regenerate from the .proto file.
_NETWORKINFO = _descriptor.Descriptor(
  name='NetworkInfo',
  full_name='lnrpc.NetworkInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='graph_diameter', full_name='lnrpc.NetworkInfo.graph_diameter', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='graph_diameter', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='avg_out_degree', full_name='lnrpc.NetworkInfo.avg_out_degree', index=1,
      number=2, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='avg_out_degree', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_out_degree', full_name='lnrpc.NetworkInfo.max_out_degree', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_out_degree', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_nodes', full_name='lnrpc.NetworkInfo.num_nodes', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_nodes', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_channels', full_name='lnrpc.NetworkInfo.num_channels', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_channels', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='total_network_capacity', full_name='lnrpc.NetworkInfo.total_network_capacity', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='total_network_capacity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='avg_channel_size', full_name='lnrpc.NetworkInfo.avg_channel_size', index=6,
      number=7, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='avg_channel_size', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='min_channel_size', full_name='lnrpc.NetworkInfo.min_channel_size', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='min_channel_size', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_channel_size', full_name='lnrpc.NetworkInfo.max_channel_size', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_channel_size', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='median_channel_size_sat', full_name='lnrpc.NetworkInfo.median_channel_size_sat', index=9,
      number=10, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='median_channel_size_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_zombie_chans', full_name='lnrpc.NetworkInfo.num_zombie_chans', index=10,
      number=11, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_zombie_chans', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13656,
  serialized_end=14145,
  )
# Machine-generated protobuf Descriptor for the empty lnrpc.StopRequest
# message (no fields).  Do not edit by hand; regenerate from the .proto
# file.
_STOPREQUEST = _descriptor.Descriptor(
  name='StopRequest',
  full_name='lnrpc.StopRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14147,
  serialized_end=14160,
  )
# Machine-generated protobuf Descriptor for the empty lnrpc.StopResponse
# message (no fields).  Do not edit by hand; regenerate from the .proto
# file.
_STOPRESPONSE = _descriptor.Descriptor(
  name='StopResponse',
  full_name='lnrpc.StopResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14162,
  serialized_end=14176,
  )
# Machine-generated protobuf Descriptor for the empty
# lnrpc.GraphTopologySubscription message (no fields).  Do not edit by
# hand; regenerate from the .proto file.
_GRAPHTOPOLOGYSUBSCRIPTION = _descriptor.Descriptor(
  name='GraphTopologySubscription',
  full_name='lnrpc.GraphTopologySubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14178,
  serialized_end=14205,
  )
# Machine-generated protobuf Descriptor for the lnrpc.GraphTopologyUpdate
# message (three repeated message fields: node_updates, channel_updates,
# closed_chans).  Do not edit by hand; regenerate from the .proto file.
_GRAPHTOPOLOGYUPDATE = _descriptor.Descriptor(
  name='GraphTopologyUpdate',
  full_name='lnrpc.GraphTopologyUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_updates', full_name='lnrpc.GraphTopologyUpdate.node_updates', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='channel_updates', full_name='lnrpc.GraphTopologyUpdate.channel_updates', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closed_chans', full_name='lnrpc.GraphTopologyUpdate.closed_chans', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14208,
  serialized_end=14371,
  )
# Machine-generated protobuf Descriptor for the lnrpc.NodeUpdate message
# (node announcement contents: addresses, identity key, feature bytes,
# alias, color).  Do not edit by hand; regenerate from the .proto file.
_NODEUPDATE = _descriptor.Descriptor(
  name='NodeUpdate',
  full_name='lnrpc.NodeUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='addresses', full_name='lnrpc.NodeUpdate.addresses', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='identity_key', full_name='lnrpc.NodeUpdate.identity_key', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='global_features', full_name='lnrpc.NodeUpdate.global_features', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='alias', full_name='lnrpc.NodeUpdate.alias', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='color', full_name='lnrpc.NodeUpdate.color', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14373,
  serialized_end=14481,
  )
# Machine-generated protobuf Descriptor for the lnrpc.ChannelEdgeUpdate
# message (channel announcement/update contents: channel id, outpoint,
# capacity, routing policy, advertising/connecting node keys).  Do not
# edit by hand; regenerate from the .proto file.
# NOTE(review): serialized_options=_b('0\001') on chan_id appears to be an
# encoded field option (likely jstype) — confirm against the .proto.
_CHANNELEDGEUPDATE = _descriptor.Descriptor(
  name='ChannelEdgeUpdate',
  full_name='lnrpc.ChannelEdgeUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ChannelEdgeUpdate.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ChannelEdgeUpdate.chan_point', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ChannelEdgeUpdate.capacity', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='routing_policy', full_name='lnrpc.ChannelEdgeUpdate.routing_policy', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='advertising_node', full_name='lnrpc.ChannelEdgeUpdate.advertising_node', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='connecting_node', full_name='lnrpc.ChannelEdgeUpdate.connecting_node', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14484,
  serialized_end=14680,
  )
# Machine-generated protobuf Descriptor for the lnrpc.ClosedChannelUpdate
# message (channel close notification: channel id, capacity, close height,
# outpoint).  Do not edit by hand; regenerate from the .proto file.
# NOTE(review): serialized_options=_b('0\001') on chan_id appears to be an
# encoded field option (likely jstype) — confirm against the .proto.
_CLOSEDCHANNELUPDATE = _descriptor.Descriptor(
  name='ClosedChannelUpdate',
  full_name='lnrpc.ClosedChannelUpdate',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.ClosedChannelUpdate.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='capacity', full_name='lnrpc.ClosedChannelUpdate.capacity', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closed_height', full_name='lnrpc.ClosedChannelUpdate.closed_height', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ClosedChannelUpdate.chan_point', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14682,
  serialized_end=14806,
  )
# Machine-generated protobuf Descriptor for the lnrpc.HopHint message
# (one hop of a route hint: node id, channel id, fee parameters, CLTV
# expiry delta).  Do not edit by hand; regenerate from the .proto file.
_HOPHINT = _descriptor.Descriptor(
  name='HopHint',
  full_name='lnrpc.HopHint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_id', full_name='lnrpc.HopHint.node_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='node_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.HopHint.chan_id', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_base_msat', full_name='lnrpc.HopHint.fee_base_msat', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_base_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_proportional_millionths', full_name='lnrpc.HopHint.fee_proportional_millionths', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_proportional_millionths', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_expiry_delta', full_name='lnrpc.HopHint.cltv_expiry_delta', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='cltv_expiry_delta', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14809,
  serialized_end=15024,
  )
# Machine-generated protobuf Descriptor for the lnrpc.RouteHint message
# (a repeated list of HopHint entries).  Do not edit by hand; regenerate
# from the .proto file.
_ROUTEHINT = _descriptor.Descriptor(
  name='RouteHint',
  full_name='lnrpc.RouteHint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='hop_hints', full_name='lnrpc.RouteHint.hop_hints', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='hop_hints', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=15026,
  serialized_end=15083,
  )
# Machine-generated protobuf Descriptor for the lnrpc.Invoice message
# (payment invoice: preimage/hash, value, settle state and timestamps,
# payment request string, route hints, add/settle indices, paid amounts,
# state enum, and per-invoice HTLC list).  References the
# _INVOICE_INVOICESTATE enum descriptor defined earlier in this file.
# Do not edit by hand; regenerate from the .proto file.
# NOTE(review): serialized_options=_b('\030\001') on `settled` and
# `amt_paid` looks like the encoded deprecated=true field option —
# confirm against the .proto.
_INVOICE = _descriptor.Descriptor(
  name='Invoice',
  full_name='lnrpc.Invoice',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='memo', full_name='lnrpc.Invoice.memo', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='memo', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='r_preimage', full_name='lnrpc.Invoice.r_preimage', index=1,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='r_preimage', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='r_hash', full_name='lnrpc.Invoice.r_hash', index=2,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='r_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.Invoice.value', index=3,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value_msat', full_name='lnrpc.Invoice.value_msat', index=4,
      number=23, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settled', full_name='lnrpc.Invoice.settled', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='settled', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='creation_date', full_name='lnrpc.Invoice.creation_date', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='creation_date', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settle_date', full_name='lnrpc.Invoice.settle_date', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settle_date', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_request', full_name='lnrpc.Invoice.payment_request', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_request', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='description_hash', full_name='lnrpc.Invoice.description_hash', index=9,
      number=10, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='description_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry', full_name='lnrpc.Invoice.expiry', index=10,
      number=11, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fallback_addr', full_name='lnrpc.Invoice.fallback_addr', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fallback_addr', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_expiry', full_name='lnrpc.Invoice.cltv_expiry', index=12,
      number=13, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='cltv_expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='route_hints', full_name='lnrpc.Invoice.route_hints', index=13,
      number=14, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='route_hints', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='private', full_name='lnrpc.Invoice.private', index=14,
      number=15, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='private', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='add_index', full_name='lnrpc.Invoice.add_index', index=15,
      number=16, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='add_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settle_index', full_name='lnrpc.Invoice.settle_index', index=16,
      number=17, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settle_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_paid', full_name='lnrpc.Invoice.amt_paid', index=17,
      number=18, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='amt_paid', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_paid_sat', full_name='lnrpc.Invoice.amt_paid_sat', index=18,
      number=19, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_paid_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_paid_msat', full_name='lnrpc.Invoice.amt_paid_msat', index=19,
      number=20, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_paid_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='lnrpc.Invoice.state', index=20,
      number=21, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='state', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='htlcs', full_name='lnrpc.Invoice.htlcs', index=21,
      number=22, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='htlcs', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _INVOICE_INVOICESTATE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=15086,
  serialized_end=15925,
  )
# Machine-generated protobuf Descriptor for the lnrpc.InvoiceHTLC message
# (one HTLC paying an invoice: channel/htlc identifiers, amount, accept
# and resolve times, expiry height, HTLC state enum).  Do not edit by
# hand; regenerate from the .proto file.
# NOTE(review): serialized_options=_b('0\001') on chan_id appears to be an
# encoded field option (likely jstype) — confirm against the .proto.
_INVOICEHTLC = _descriptor.Descriptor(
  name='InvoiceHTLC',
  full_name='lnrpc.InvoiceHTLC',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_id', full_name='lnrpc.InvoiceHTLC.chan_id', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('0\001'), json_name='chan_id', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='htlc_index', full_name='lnrpc.InvoiceHTLC.htlc_index', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='htlc_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='amt_msat', full_name='lnrpc.InvoiceHTLC.amt_msat', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='amt_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='accept_height', full_name='lnrpc.InvoiceHTLC.accept_height', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='accept_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='accept_time', full_name='lnrpc.InvoiceHTLC.accept_time', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='accept_time', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='resolve_time', full_name='lnrpc.InvoiceHTLC.resolve_time', index=5,
      number=6, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='resolve_time', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry_height', full_name='lnrpc.InvoiceHTLC.expiry_height', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry_height', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='state', full_name='lnrpc.InvoiceHTLC.state', index=7,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='state', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=15928,
  serialized_end=16224,
  )
# Generated descriptor for the lnrpc.AddInvoiceResponse message.
# Machine-generated protobuf metadata — do not edit by hand.
_ADDINVOICERESPONSE = _descriptor.Descriptor(
  name='AddInvoiceResponse',
  full_name='lnrpc.AddInvoiceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='r_hash', full_name='lnrpc.AddInvoiceResponse.r_hash', index=0,
      number=1, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='r_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_request', full_name='lnrpc.AddInvoiceResponse.payment_request', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_request', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='add_index', full_name='lnrpc.AddInvoiceResponse.add_index', index=2,
      number=16, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='add_index', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16226,
  serialized_end=16342,
)
# Generated descriptor for the lnrpc.PaymentHash message.
_PAYMENTHASH = _descriptor.Descriptor(
  name='PaymentHash',
  full_name='lnrpc.PaymentHash',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # serialized_options=_b('\030\001') is an encoded FieldOptions blob —
      # presumably marking this field deprecated; confirm against the .proto.
      name='r_hash_str', full_name='lnrpc.PaymentHash.r_hash_str', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='r_hash_str', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='r_hash', full_name='lnrpc.PaymentHash.r_hash', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='r_hash', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16344,
  serialized_end=16417,
)
# Generated descriptor for the lnrpc.ListInvoiceRequest message
# (paging parameters for listing invoices).  Do not edit by hand.
_LISTINVOICEREQUEST = _descriptor.Descriptor(
  name='ListInvoiceRequest',
  full_name='lnrpc.ListInvoiceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pending_only', full_name='lnrpc.ListInvoiceRequest.pending_only', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='pending_only', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='index_offset', full_name='lnrpc.ListInvoiceRequest.index_offset', index=1,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='index_offset', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_max_invoices', full_name='lnrpc.ListInvoiceRequest.num_max_invoices', index=2,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_max_invoices', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='reversed', full_name='lnrpc.ListInvoiceRequest.reversed', index=3,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='reversed', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16420,
  serialized_end=16584,
)
# Generated descriptor for the lnrpc.ListInvoiceResponse message.
_LISTINVOICERESPONSE = _descriptor.Descriptor(
  name='ListInvoiceResponse',
  full_name='lnrpc.ListInvoiceResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # label=3 (repeated) message field; the Invoice message type is linked
      # elsewhere in this generated module.
      name='invoices', full_name='lnrpc.ListInvoiceResponse.invoices', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='invoices', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_index_offset', full_name='lnrpc.ListInvoiceResponse.last_index_offset', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='last_index_offset', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='first_index_offset', full_name='lnrpc.ListInvoiceResponse.first_index_offset', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='first_index_offset', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16587,
  serialized_end=16746,
)
# Generated descriptor for the lnrpc.InvoiceSubscription message.
_INVOICESUBSCRIPTION = _descriptor.Descriptor(
  name='InvoiceSubscription',
  full_name='lnrpc.InvoiceSubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='add_index', full_name='lnrpc.InvoiceSubscription.add_index', index=0,
      number=1, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='add_index', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='settle_index', full_name='lnrpc.InvoiceSubscription.settle_index', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='settle_index', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16748,
  serialized_end=16835,
)
# Generated descriptor for the lnrpc.Payment message.  Several legacy fields
# (value, creation_date, path, fee) carry serialized_options=_b('\030\001'),
# an encoded FieldOptions blob — presumably deprecation markers superseded by
# the *_sat/*_msat and htlcs fields; confirm against the .proto source.
_PAYMENT = _descriptor.Descriptor(
  name='Payment',
  full_name='lnrpc.Payment',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='payment_hash', full_name='lnrpc.Payment.payment_hash', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value', full_name='lnrpc.Payment.value', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='value', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='creation_date', full_name='lnrpc.Payment.creation_date', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='creation_date', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='path', full_name='lnrpc.Payment.path', index=3,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='path', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee', full_name='lnrpc.Payment.fee', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\030\001'), json_name='fee', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_preimage', full_name='lnrpc.Payment.payment_preimage', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_preimage', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value_sat', full_name='lnrpc.Payment.value_sat', index=6,
      number=7, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='value_msat', full_name='lnrpc.Payment.value_msat', index=7,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='value_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_request', full_name='lnrpc.Payment.payment_request', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_request', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      # Enum field; its values are declared by _PAYMENT_PAYMENTSTATUS below.
      name='status', full_name='lnrpc.Payment.status', index=9,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='status', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_sat', full_name='lnrpc.Payment.fee_sat', index=10,
      number=11, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_sat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_msat', full_name='lnrpc.Payment.fee_msat', index=11,
      number=12, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='creation_time_ns', full_name='lnrpc.Payment.creation_time_ns', index=12,
      number=13, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='creation_time_ns', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='htlcs', full_name='lnrpc.Payment.htlcs', index=13,
      number=14, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='htlcs', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    # PaymentStatus enum is nested inside the Payment message.
    _PAYMENT_PAYMENTSTATUS,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16838,
  serialized_end=17411,
)
# Generated descriptor for the lnrpc.HTLCAttempt message (one HTLC attempt
# of a payment).  Machine-generated protobuf metadata — do not edit by hand.
_HTLCATTEMPT = _descriptor.Descriptor(
  name='HTLCAttempt',
  full_name='lnrpc.HTLCAttempt',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # Enum field; values come from the nested _HTLCATTEMPT_HTLCSTATUS below.
      name='status', full_name='lnrpc.HTLCAttempt.status', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='status', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      # Singular message field (type=11); the Route message type is linked
      # elsewhere in this generated module.
      name='route', full_name='lnrpc.HTLCAttempt.route', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='route', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='attempt_time_ns', full_name='lnrpc.HTLCAttempt.attempt_time_ns', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='attempt_time_ns', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='resolve_time_ns', full_name='lnrpc.HTLCAttempt.resolve_time_ns', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='resolve_time_ns', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _HTLCATTEMPT_HTLCSTATUS,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17414,
  serialized_end=17658,
)
# Generated descriptor for the lnrpc.ListPaymentsRequest message.
_LISTPAYMENTSREQUEST = _descriptor.Descriptor(
  name='ListPaymentsRequest',
  full_name='lnrpc.ListPaymentsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='include_incomplete', full_name='lnrpc.ListPaymentsRequest.include_incomplete', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17660,
  serialized_end=17709,
)
# Generated descriptor for the lnrpc.ListPaymentsResponse message.
# Machine-generated protobuf metadata — do not edit by hand.
_LISTPAYMENTSRESPONSE = _descriptor.Descriptor(
  name='ListPaymentsResponse',
  full_name='lnrpc.ListPaymentsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # Repeated message field (label=3, type=11); the Payment message type
      # is linked elsewhere in this generated module.
      name='payments', full_name='lnrpc.ListPaymentsResponse.payments', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payments', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17711,
  serialized_end=17777,
)
# Generated descriptor for the (field-less) lnrpc.DeleteAllPaymentsRequest message.
_DELETEALLPAYMENTSREQUEST = _descriptor.Descriptor(
  name='DeleteAllPaymentsRequest',
  full_name='lnrpc.DeleteAllPaymentsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17779,
  serialized_end=17805,
)
# Generated descriptor for the (field-less) lnrpc.DeleteAllPaymentsResponse message.
_DELETEALLPAYMENTSRESPONSE = _descriptor.Descriptor(
  name='DeleteAllPaymentsResponse',
  full_name='lnrpc.DeleteAllPaymentsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17807,
  serialized_end=17834,
)
# Generated descriptor for the lnrpc.AbandonChannelRequest message.
_ABANDONCHANNELREQUEST = _descriptor.Descriptor(
  name='AbandonChannelRequest',
  full_name='lnrpc.AbandonChannelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # Singular message field; the ChannelPoint message type is linked
      # elsewhere in this generated module.
      name='channel_point', full_name='lnrpc.AbandonChannelRequest.channel_point', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17836,
  serialized_end=17903,
)
# Generated descriptor for the (field-less) lnrpc.AbandonChannelResponse message.
_ABANDONCHANNELRESPONSE = _descriptor.Descriptor(
  name='AbandonChannelResponse',
  full_name='lnrpc.AbandonChannelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17905,
  serialized_end=17929,
)
# Generated descriptor for the lnrpc.DebugLevelRequest message.
# Machine-generated protobuf metadata — do not edit by hand.
_DEBUGLEVELREQUEST = _descriptor.Descriptor(
  name='DebugLevelRequest',
  full_name='lnrpc.DebugLevelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='show', full_name='lnrpc.DebugLevelRequest.show', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='level_spec', full_name='lnrpc.DebugLevelRequest.level_spec', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17931,
  serialized_end=17984,
)
# Generated descriptor for the lnrpc.DebugLevelResponse message.
_DEBUGLEVELRESPONSE = _descriptor.Descriptor(
  name='DebugLevelResponse',
  full_name='lnrpc.DebugLevelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='sub_systems', full_name='lnrpc.DebugLevelResponse.sub_systems', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='sub_systems', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17986,
  serialized_end=18040,
)
# Generated descriptor for the lnrpc.PayReqString message (a raw BOLT-11
# payment request string to be decoded).
_PAYREQSTRING = _descriptor.Descriptor(
  name='PayReqString',
  full_name='lnrpc.PayReqString',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pay_req', full_name='lnrpc.PayReqString.pay_req', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18042,
  serialized_end=18073,
)
# Generated descriptor for the lnrpc.PayReq message (a decoded payment
# request).  Machine-generated protobuf metadata — do not edit by hand.
_PAYREQ = _descriptor.Descriptor(
  name='PayReq',
  full_name='lnrpc.PayReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='destination', full_name='lnrpc.PayReq.destination', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='destination', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_hash', full_name='lnrpc.PayReq.payment_hash', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='payment_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_satoshis', full_name='lnrpc.PayReq.num_satoshis', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='num_satoshis', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='lnrpc.PayReq.timestamp', index=3,
      number=4, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='timestamp', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='expiry', full_name='lnrpc.PayReq.expiry', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='description', full_name='lnrpc.PayReq.description', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='description', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='description_hash', full_name='lnrpc.PayReq.description_hash', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='description_hash', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fallback_addr', full_name='lnrpc.PayReq.fallback_addr', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fallback_addr', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='cltv_expiry', full_name='lnrpc.PayReq.cltv_expiry', index=8,
      number=9, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='cltv_expiry', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      # Repeated message field; the RouteHint message type is linked
      # elsewhere in this generated module.
      name='route_hints', full_name='lnrpc.PayReq.route_hints', index=9,
      number=10, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='route_hints', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18076,
  serialized_end=18446,
)
# Generated descriptor for the (field-less) lnrpc.FeeReportRequest message.
# Machine-generated protobuf metadata — do not edit by hand.
_FEEREPORTREQUEST = _descriptor.Descriptor(
  name='FeeReportRequest',
  full_name='lnrpc.FeeReportRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18448,
  serialized_end=18466,
)
# Generated descriptor for the lnrpc.ChannelFeeReport message.
_CHANNELFEEREPORT = _descriptor.Descriptor(
  name='ChannelFeeReport',
  full_name='lnrpc.ChannelFeeReport',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # NOTE: the JSON name ('channel_point') intentionally differs from the
      # proto field name ('chan_point') here, per the generated json_name.
      name='chan_point', full_name='lnrpc.ChannelFeeReport.chan_point', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='base_fee_msat', full_name='lnrpc.ChannelFeeReport.base_fee_msat', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='base_fee_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_per_mil', full_name='lnrpc.ChannelFeeReport.fee_per_mil', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_per_mil', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_rate', full_name='lnrpc.ChannelFeeReport.fee_rate', index=3,
      number=4, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_rate', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18469,
  serialized_end=18622,
)
# Generated descriptor for the lnrpc.FeeReportResponse message.
# Machine-generated protobuf metadata — do not edit by hand.
_FEEREPORTRESPONSE = _descriptor.Descriptor(
  name='FeeReportResponse',
  full_name='lnrpc.FeeReportResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # Repeated message field; the ChannelFeeReport message type is linked
      # elsewhere in this generated module.
      name='channel_fees', full_name='lnrpc.FeeReportResponse.channel_fees', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='channel_fees', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='day_fee_sum', full_name='lnrpc.FeeReportResponse.day_fee_sum', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='day_fee_sum', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='week_fee_sum', full_name='lnrpc.FeeReportResponse.week_fee_sum', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='week_fee_sum', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='month_fee_sum', full_name='lnrpc.FeeReportResponse.month_fee_sum', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='month_fee_sum', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18625,
  serialized_end=18813,
)
# Generated descriptor for the lnrpc.PolicyUpdateRequest message.  Contains a
# 'scope' oneof (declared below) whose member fields are wired up elsewhere
# in this generated module.  Do not edit by hand.
_POLICYUPDATEREQUEST = _descriptor.Descriptor(
  name='PolicyUpdateRequest',
  full_name='lnrpc.PolicyUpdateRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      # Proto field named 'global' — a Python keyword, which is why the
      # generated accessor layer refers to it via the descriptor, not an
      # attribute literal.
      name='global', full_name='lnrpc.PolicyUpdateRequest.global', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='global', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.PolicyUpdateRequest.chan_point', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='base_fee_msat', full_name='lnrpc.PolicyUpdateRequest.base_fee_msat', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='base_fee_msat', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='fee_rate', full_name='lnrpc.PolicyUpdateRequest.fee_rate', index=3,
      number=4, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='fee_rate', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='time_lock_delta', full_name='lnrpc.PolicyUpdateRequest.time_lock_delta', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='time_lock_delta', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='max_htlc_msat', full_name='lnrpc.PolicyUpdateRequest.max_htlc_msat', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='max_htlc_msat', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # 'scope' oneof; its member fields are attached after all descriptors
    # are declared (fields=[] here is filled in later by generated code).
    _descriptor.OneofDescriptor(
      name='scope', full_name='lnrpc.PolicyUpdateRequest.scope',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=18816,
  serialized_end=19073,
)
# Generated descriptor for the (field-less) lnrpc.PolicyUpdateResponse message.
_POLICYUPDATERESPONSE = _descriptor.Descriptor(
  name='PolicyUpdateResponse',
  full_name='lnrpc.PolicyUpdateResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=19075,
  serialized_end=19097,
)
_FORWARDINGHISTORYREQUEST = _descriptor.Descriptor(
name='ForwardingHistoryRequest',
full_name='lnrpc.ForwardingHistoryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='start_time', full_name='lnrpc.ForwardingHistoryRequest.start_time', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='start_time', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_time', full_name='lnrpc.ForwardingHistoryRequest.end_time', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='end_time', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='index_offset', full_name='lnrpc.ForwardingHistoryRequest.index_offset', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='index_offset', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='num_max_events', full_name='lnrpc.ForwardingHistoryRequest.num_max_events', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='num_max_events', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19100,
serialized_end=19262,
)
_FORWARDINGEVENT = _descriptor.Descriptor(
name='ForwardingEvent',
full_name='lnrpc.ForwardingEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='lnrpc.ForwardingEvent.timestamp', index=0,
number=1, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='timestamp', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_id_in', full_name='lnrpc.ForwardingEvent.chan_id_in', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('0\001'), json_name='chan_id_in', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='chan_id_out', full_name='lnrpc.ForwardingEvent.chan_id_out', index=2,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('0\001'), json_name='chan_id_out', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_in', full_name='lnrpc.ForwardingEvent.amt_in', index=3,
number=5, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_in', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_out', full_name='lnrpc.ForwardingEvent.amt_out', index=4,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_out', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee', full_name='lnrpc.ForwardingEvent.fee', index=5,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fee_msat', full_name='lnrpc.ForwardingEvent.fee_msat', index=6,
number=8, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='fee_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_in_msat', full_name='lnrpc.ForwardingEvent.amt_in_msat', index=7,
number=9, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_in_msat', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amt_out_msat', full_name='lnrpc.ForwardingEvent.amt_out_msat', index=8,
number=10, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='amt_out_msat', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19265,
serialized_end=19552,
)
_FORWARDINGHISTORYRESPONSE = _descriptor.Descriptor(
name='ForwardingHistoryResponse',
full_name='lnrpc.ForwardingHistoryResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='forwarding_events', full_name='lnrpc.ForwardingHistoryResponse.forwarding_events', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='forwarding_events', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='last_offset_index', full_name='lnrpc.ForwardingHistoryResponse.last_offset_index', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='last_offset_index', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19555,
serialized_end=19698,
)
# --- Channel-backup message descriptors --------------------------------------
# Generated (protoc-style) descriptors for the static channel backup (SCB)
# messages. Message-typed fields are declared with message_type=None here and
# are cross-linked after all descriptors exist (see the wiring section below).

# lnrpc.ExportChannelBackupRequest: identifies the channel to back up by its
# ChannelPoint. (protoc emitted no json_name for this field — keep as-is.)
_EXPORTCHANNELBACKUPREQUEST = _descriptor.Descriptor(
  name='ExportChannelBackupRequest',
  full_name='lnrpc.ExportChannelBackupRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ExportChannelBackupRequest.chan_point', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=19700,
  serialized_end=19769,
)

# lnrpc.ChannelBackup: a single channel's backup — its ChannelPoint plus the
# opaque encrypted backup bytes.
_CHANNELBACKUP = _descriptor.Descriptor(
  name='ChannelBackup',
  full_name='lnrpc.ChannelBackup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_point', full_name='lnrpc.ChannelBackup.chan_point', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_point', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='chan_backup', full_name='lnrpc.ChannelBackup.chan_backup', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_backup', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=19771,
  serialized_end=19873,
)

# lnrpc.MultiChanBackup: backup bytes covering several channels, with the
# list of covered ChannelPoints.
_MULTICHANBACKUP = _descriptor.Descriptor(
  name='MultiChanBackup',
  full_name='lnrpc.MultiChanBackup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_points', full_name='lnrpc.MultiChanBackup.chan_points', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_points', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='multi_chan_backup', full_name='lnrpc.MultiChanBackup.multi_chan_backup', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='multi_chan_backup', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=19875,
  serialized_end=19993,
)

# lnrpc.ChanBackupExportRequest: empty request message.
_CHANBACKUPEXPORTREQUEST = _descriptor.Descriptor(
  name='ChanBackupExportRequest',
  full_name='lnrpc.ChanBackupExportRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=19995,
  serialized_end=20020,
)

# lnrpc.ChanBackupSnapshot: per-channel backups (ChannelBackups) alongside a
# combined MultiChanBackup.
_CHANBACKUPSNAPSHOT = _descriptor.Descriptor(
  name='ChanBackupSnapshot',
  full_name='lnrpc.ChanBackupSnapshot',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='single_chan_backups', full_name='lnrpc.ChanBackupSnapshot.single_chan_backups', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='single_chan_backups', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='multi_chan_backup', full_name='lnrpc.ChanBackupSnapshot.multi_chan_backup', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='multi_chan_backup', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20023,
  serialized_end=20186,
)

# lnrpc.ChannelBackups: a repeated list of ChannelBackup messages.
_CHANNELBACKUPS = _descriptor.Descriptor(
  name='ChannelBackups',
  full_name='lnrpc.ChannelBackups',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_backups', full_name='lnrpc.ChannelBackups.chan_backups', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_backups', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20188,
  serialized_end=20262,
)

# lnrpc.RestoreChanBackupRequest: oneof 'backup' — either structured
# per-channel backups (chan_backups) or raw multi-channel backup bytes
# (multi_chan_backup). The oneof membership is wired up below.
_RESTORECHANBACKUPREQUEST = _descriptor.Descriptor(
  name='RestoreChanBackupRequest',
  full_name='lnrpc.RestoreChanBackupRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chan_backups', full_name='lnrpc.RestoreChanBackupRequest.chan_backups', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='chan_backups', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='multi_chan_backup', full_name='lnrpc.RestoreChanBackupRequest.multi_chan_backup', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='multi_chan_backup', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='backup', full_name='lnrpc.RestoreChanBackupRequest.backup',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=20265,
  serialized_end=20410,
)

# lnrpc.RestoreBackupResponse: empty response message.
_RESTOREBACKUPRESPONSE = _descriptor.Descriptor(
  name='RestoreBackupResponse',
  full_name='lnrpc.RestoreBackupResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20412,
  serialized_end=20435,
)

# lnrpc.ChannelBackupSubscription: empty subscription request message.
_CHANNELBACKUPSUBSCRIPTION = _descriptor.Descriptor(
  name='ChannelBackupSubscription',
  full_name='lnrpc.ChannelBackupSubscription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20437,
  serialized_end=20464,
)

# lnrpc.VerifyChanBackupResponse: empty response message.
_VERIFYCHANBACKUPRESPONSE = _descriptor.Descriptor(
  name='VerifyChanBackupResponse',
  full_name='lnrpc.VerifyChanBackupResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20466,
  serialized_end=20492,
)
# --- Macaroon message descriptors --------------------------------------------

# lnrpc.MacaroonPermission: one (entity, action) permission pair; both fields
# are proto3 strings (stored as unicode, hence the _b("").decode defaults).
_MACAROONPERMISSION = _descriptor.Descriptor(
  name='MacaroonPermission',
  full_name='lnrpc.MacaroonPermission',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='entity', full_name='lnrpc.MacaroonPermission.entity', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='entity', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='action', full_name='lnrpc.MacaroonPermission.action', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='action', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20494,
  serialized_end=20562,
)

# lnrpc.BakeMacaroonRequest: repeated MacaroonPermission entries for the
# macaroon to be baked (message type linked below).
_BAKEMACAROONREQUEST = _descriptor.Descriptor(
  name='BakeMacaroonRequest',
  full_name='lnrpc.BakeMacaroonRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='permissions', full_name='lnrpc.BakeMacaroonRequest.permissions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='permissions', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20564,
  serialized_end=20646,
)

# lnrpc.BakeMacaroonResponse: the baked macaroon as a string.
_BAKEMACAROONRESPONSE = _descriptor.Descriptor(
  name='BakeMacaroonResponse',
  full_name='lnrpc.BakeMacaroonResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='macaroon', full_name='lnrpc.BakeMacaroonResponse.macaroon', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, json_name='macaroon', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20648,
  serialized_end=20698,
)
# --- Cross-reference wiring (generated) --------------------------------------
# Now that every Descriptor object exists, resolve the references that were
# left as None above: message-typed fields get their message_type, enum fields
# their enum_type, nested messages their containing_type, and each oneof is
# linked to its member fields in both directions (oneof.fields list and
# field.containing_oneof). Statement order follows protoc output; keep as-is.
_INITWALLETREQUEST.fields_by_name['channel_backups'].message_type = _CHANBACKUPSNAPSHOT
_UNLOCKWALLETREQUEST.fields_by_name['channel_backups'].message_type = _CHANBACKUPSNAPSHOT
_UTXO.fields_by_name['type'].enum_type = _ADDRESSTYPE
_UTXO.fields_by_name['outpoint'].message_type = _OUTPOINT
_TRANSACTIONDETAILS.fields_by_name['transactions'].message_type = _TRANSACTION
# FeeLimit.limit oneof: fixed | fixed_msat | percent.
_FEELIMIT.oneofs_by_name['limit'].fields.append(
  _FEELIMIT.fields_by_name['fixed'])
_FEELIMIT.fields_by_name['fixed'].containing_oneof = _FEELIMIT.oneofs_by_name['limit']
_FEELIMIT.oneofs_by_name['limit'].fields.append(
  _FEELIMIT.fields_by_name['fixed_msat'])
_FEELIMIT.fields_by_name['fixed_msat'].containing_oneof = _FEELIMIT.oneofs_by_name['limit']
_FEELIMIT.oneofs_by_name['limit'].fields.append(
  _FEELIMIT.fields_by_name['percent'])
_FEELIMIT.fields_by_name['percent'].containing_oneof = _FEELIMIT.oneofs_by_name['limit']
_SENDREQUEST_DESTTLVENTRY.containing_type = _SENDREQUEST
_SENDREQUEST.fields_by_name['fee_limit'].message_type = _FEELIMIT
_SENDREQUEST.fields_by_name['dest_tlv'].message_type = _SENDREQUEST_DESTTLVENTRY
_SENDRESPONSE.fields_by_name['payment_route'].message_type = _ROUTE
_SENDTOROUTEREQUEST.fields_by_name['route'].message_type = _ROUTE
# ChannelPoint.funding_txid oneof: funding_txid_bytes | funding_txid_str.
_CHANNELPOINT.oneofs_by_name['funding_txid'].fields.append(
  _CHANNELPOINT.fields_by_name['funding_txid_bytes'])
_CHANNELPOINT.fields_by_name['funding_txid_bytes'].containing_oneof = _CHANNELPOINT.oneofs_by_name['funding_txid']
_CHANNELPOINT.oneofs_by_name['funding_txid'].fields.append(
  _CHANNELPOINT.fields_by_name['funding_txid_str'])
_CHANNELPOINT.fields_by_name['funding_txid_str'].containing_oneof = _CHANNELPOINT.oneofs_by_name['funding_txid']
_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY.containing_type = _ESTIMATEFEEREQUEST
_ESTIMATEFEEREQUEST.fields_by_name['AddrToAmount'].message_type = _ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY
_SENDMANYREQUEST_ADDRTOAMOUNTENTRY.containing_type = _SENDMANYREQUEST
_SENDMANYREQUEST.fields_by_name['AddrToAmount'].message_type = _SENDMANYREQUEST_ADDRTOAMOUNTENTRY
_LISTUNSPENTRESPONSE.fields_by_name['utxos'].message_type = _UTXO
_NEWADDRESSREQUEST.fields_by_name['type'].enum_type = _ADDRESSTYPE
_CONNECTPEERREQUEST.fields_by_name['addr'].message_type = _LIGHTNINGADDRESS
_CHANNEL.fields_by_name['pending_htlcs'].message_type = _HTLC
_LISTCHANNELSRESPONSE.fields_by_name['channels'].message_type = _CHANNEL
_CHANNELCLOSESUMMARY.fields_by_name['close_type'].enum_type = _CHANNELCLOSESUMMARY_CLOSURETYPE
_CHANNELCLOSESUMMARY_CLOSURETYPE.containing_type = _CHANNELCLOSESUMMARY
_CLOSEDCHANNELSRESPONSE.fields_by_name['channels'].message_type = _CHANNELCLOSESUMMARY
_PEER.fields_by_name['sync_type'].enum_type = _PEER_SYNCTYPE
_PEER_SYNCTYPE.containing_type = _PEER
_LISTPEERSRESPONSE.fields_by_name['peers'].message_type = _PEER
_GETINFORESPONSE.fields_by_name['chains'].message_type = _CHAIN
_CHANNELOPENUPDATE.fields_by_name['channel_point'].message_type = _CHANNELPOINT
_CLOSECHANNELREQUEST.fields_by_name['channel_point'].message_type = _CHANNELPOINT
_CLOSESTATUSUPDATE.fields_by_name['close_pending'].message_type = _PENDINGUPDATE
_CLOSESTATUSUPDATE.fields_by_name['chan_close'].message_type = _CHANNELCLOSEUPDATE
# CloseStatusUpdate.update oneof: close_pending | chan_close.
_CLOSESTATUSUPDATE.oneofs_by_name['update'].fields.append(
  _CLOSESTATUSUPDATE.fields_by_name['close_pending'])
_CLOSESTATUSUPDATE.fields_by_name['close_pending'].containing_oneof = _CLOSESTATUSUPDATE.oneofs_by_name['update']
_CLOSESTATUSUPDATE.oneofs_by_name['update'].fields.append(
  _CLOSESTATUSUPDATE.fields_by_name['chan_close'])
_CLOSESTATUSUPDATE.fields_by_name['chan_close'].containing_oneof = _CLOSESTATUSUPDATE.oneofs_by_name['update']
_OPENSTATUSUPDATE.fields_by_name['chan_pending'].message_type = _PENDINGUPDATE
_OPENSTATUSUPDATE.fields_by_name['chan_open'].message_type = _CHANNELOPENUPDATE
# OpenStatusUpdate.update oneof: chan_pending | chan_open.
_OPENSTATUSUPDATE.oneofs_by_name['update'].fields.append(
  _OPENSTATUSUPDATE.fields_by_name['chan_pending'])
_OPENSTATUSUPDATE.fields_by_name['chan_pending'].containing_oneof = _OPENSTATUSUPDATE.oneofs_by_name['update']
_OPENSTATUSUPDATE.oneofs_by_name['update'].fields.append(
  _OPENSTATUSUPDATE.fields_by_name['chan_open'])
_OPENSTATUSUPDATE.fields_by_name['chan_open'].containing_oneof = _OPENSTATUSUPDATE.oneofs_by_name['update']
# Nested message types under PendingChannelsResponse.
_PENDINGCHANNELSRESPONSE_PENDINGCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL.fields_by_name['channel'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL.fields_by_name['pending_htlcs'].message_type = _PENDINGHTLC
_PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL.containing_type = _PENDINGCHANNELSRESPONSE
_PENDINGCHANNELSRESPONSE.fields_by_name['pending_open_channels'].message_type = _PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL
_PENDINGCHANNELSRESPONSE.fields_by_name['pending_closing_channels'].message_type = _PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL
_PENDINGCHANNELSRESPONSE.fields_by_name['pending_force_closing_channels'].message_type = _PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL
_PENDINGCHANNELSRESPONSE.fields_by_name['waiting_close_channels'].message_type = _PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL
_CHANNELEVENTUPDATE.fields_by_name['open_channel'].message_type = _CHANNEL
_CHANNELEVENTUPDATE.fields_by_name['closed_channel'].message_type = _CHANNELCLOSESUMMARY
_CHANNELEVENTUPDATE.fields_by_name['active_channel'].message_type = _CHANNELPOINT
_CHANNELEVENTUPDATE.fields_by_name['inactive_channel'].message_type = _CHANNELPOINT
_CHANNELEVENTUPDATE.fields_by_name['type'].enum_type = _CHANNELEVENTUPDATE_UPDATETYPE
_CHANNELEVENTUPDATE_UPDATETYPE.containing_type = _CHANNELEVENTUPDATE
# ChannelEventUpdate.channel oneof: open | closed | active | inactive channel.
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
  _CHANNELEVENTUPDATE.fields_by_name['open_channel'])
_CHANNELEVENTUPDATE.fields_by_name['open_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
  _CHANNELEVENTUPDATE.fields_by_name['closed_channel'])
_CHANNELEVENTUPDATE.fields_by_name['closed_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
  _CHANNELEVENTUPDATE.fields_by_name['active_channel'])
_CHANNELEVENTUPDATE.fields_by_name['active_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_CHANNELEVENTUPDATE.oneofs_by_name['channel'].fields.append(
  _CHANNELEVENTUPDATE.fields_by_name['inactive_channel'])
_CHANNELEVENTUPDATE.fields_by_name['inactive_channel'].containing_oneof = _CHANNELEVENTUPDATE.oneofs_by_name['channel']
_QUERYROUTESREQUEST.fields_by_name['fee_limit'].message_type = _FEELIMIT
_QUERYROUTESREQUEST.fields_by_name['ignored_edges'].message_type = _EDGELOCATOR
_QUERYROUTESREQUEST.fields_by_name['ignored_pairs'].message_type = _NODEPAIR
_QUERYROUTESRESPONSE.fields_by_name['routes'].message_type = _ROUTE
_HOP.fields_by_name['mpp_record'].message_type = _MPPRECORD
_ROUTE.fields_by_name['hops'].message_type = _HOP
_NODEINFO.fields_by_name['node'].message_type = _LIGHTNINGNODE
_NODEINFO.fields_by_name['channels'].message_type = _CHANNELEDGE
_LIGHTNINGNODE.fields_by_name['addresses'].message_type = _NODEADDRESS
_CHANNELEDGE.fields_by_name['node1_policy'].message_type = _ROUTINGPOLICY
_CHANNELEDGE.fields_by_name['node2_policy'].message_type = _ROUTINGPOLICY
_CHANNELGRAPH.fields_by_name['nodes'].message_type = _LIGHTNINGNODE
_CHANNELGRAPH.fields_by_name['edges'].message_type = _CHANNELEDGE
_GRAPHTOPOLOGYUPDATE.fields_by_name['node_updates'].message_type = _NODEUPDATE
_GRAPHTOPOLOGYUPDATE.fields_by_name['channel_updates'].message_type = _CHANNELEDGEUPDATE
_GRAPHTOPOLOGYUPDATE.fields_by_name['closed_chans'].message_type = _CLOSEDCHANNELUPDATE
_CHANNELEDGEUPDATE.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_CHANNELEDGEUPDATE.fields_by_name['routing_policy'].message_type = _ROUTINGPOLICY
_CLOSEDCHANNELUPDATE.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_ROUTEHINT.fields_by_name['hop_hints'].message_type = _HOPHINT
_INVOICE.fields_by_name['route_hints'].message_type = _ROUTEHINT
_INVOICE.fields_by_name['state'].enum_type = _INVOICE_INVOICESTATE
_INVOICE.fields_by_name['htlcs'].message_type = _INVOICEHTLC
_INVOICE_INVOICESTATE.containing_type = _INVOICE
_INVOICEHTLC.fields_by_name['state'].enum_type = _INVOICEHTLCSTATE
_LISTINVOICERESPONSE.fields_by_name['invoices'].message_type = _INVOICE
_PAYMENT.fields_by_name['status'].enum_type = _PAYMENT_PAYMENTSTATUS
_PAYMENT.fields_by_name['htlcs'].message_type = _HTLCATTEMPT
_PAYMENT_PAYMENTSTATUS.containing_type = _PAYMENT
_HTLCATTEMPT.fields_by_name['status'].enum_type = _HTLCATTEMPT_HTLCSTATUS
_HTLCATTEMPT.fields_by_name['route'].message_type = _ROUTE
_HTLCATTEMPT_HTLCSTATUS.containing_type = _HTLCATTEMPT
_LISTPAYMENTSRESPONSE.fields_by_name['payments'].message_type = _PAYMENT
_ABANDONCHANNELREQUEST.fields_by_name['channel_point'].message_type = _CHANNELPOINT
_PAYREQ.fields_by_name['route_hints'].message_type = _ROUTEHINT
_FEEREPORTRESPONSE.fields_by_name['channel_fees'].message_type = _CHANNELFEEREPORT
_POLICYUPDATEREQUEST.fields_by_name['chan_point'].message_type = _CHANNELPOINT
# PolicyUpdateRequest.scope oneof: global | chan_point.
_POLICYUPDATEREQUEST.oneofs_by_name['scope'].fields.append(
  _POLICYUPDATEREQUEST.fields_by_name['global'])
_POLICYUPDATEREQUEST.fields_by_name['global'].containing_oneof = _POLICYUPDATEREQUEST.oneofs_by_name['scope']
_POLICYUPDATEREQUEST.oneofs_by_name['scope'].fields.append(
  _POLICYUPDATEREQUEST.fields_by_name['chan_point'])
_POLICYUPDATEREQUEST.fields_by_name['chan_point'].containing_oneof = _POLICYUPDATEREQUEST.oneofs_by_name['scope']
_FORWARDINGHISTORYRESPONSE.fields_by_name['forwarding_events'].message_type = _FORWARDINGEVENT
_EXPORTCHANNELBACKUPREQUEST.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_CHANNELBACKUP.fields_by_name['chan_point'].message_type = _CHANNELPOINT
_MULTICHANBACKUP.fields_by_name['chan_points'].message_type = _CHANNELPOINT
_CHANBACKUPSNAPSHOT.fields_by_name['single_chan_backups'].message_type = _CHANNELBACKUPS
_CHANBACKUPSNAPSHOT.fields_by_name['multi_chan_backup'].message_type = _MULTICHANBACKUP
_CHANNELBACKUPS.fields_by_name['chan_backups'].message_type = _CHANNELBACKUP
_RESTORECHANBACKUPREQUEST.fields_by_name['chan_backups'].message_type = _CHANNELBACKUPS
# RestoreChanBackupRequest.backup oneof: chan_backups | multi_chan_backup.
_RESTORECHANBACKUPREQUEST.oneofs_by_name['backup'].fields.append(
  _RESTORECHANBACKUPREQUEST.fields_by_name['chan_backups'])
_RESTORECHANBACKUPREQUEST.fields_by_name['chan_backups'].containing_oneof = _RESTORECHANBACKUPREQUEST.oneofs_by_name['backup']
_RESTORECHANBACKUPREQUEST.oneofs_by_name['backup'].fields.append(
  _RESTORECHANBACKUPREQUEST.fields_by_name['multi_chan_backup'])
_RESTORECHANBACKUPREQUEST.fields_by_name['multi_chan_backup'].containing_oneof = _RESTORECHANBACKUPREQUEST.oneofs_by_name['backup']
_BAKEMACAROONREQUEST.fields_by_name['permissions'].message_type = _MACAROONPERMISSION
# --- Top-level message registration (generated) ------------------------------
# Register each top-level message descriptor on the file DESCRIPTOR under its
# .proto message name, so message classes can be built from it by name.
# (This run of registrations continues beyond this section of the file.)
DESCRIPTOR.message_types_by_name['GenSeedRequest'] = _GENSEEDREQUEST
DESCRIPTOR.message_types_by_name['GenSeedResponse'] = _GENSEEDRESPONSE
DESCRIPTOR.message_types_by_name['InitWalletRequest'] = _INITWALLETREQUEST
DESCRIPTOR.message_types_by_name['InitWalletResponse'] = _INITWALLETRESPONSE
DESCRIPTOR.message_types_by_name['UnlockWalletRequest'] = _UNLOCKWALLETREQUEST
DESCRIPTOR.message_types_by_name['UnlockWalletResponse'] = _UNLOCKWALLETRESPONSE
DESCRIPTOR.message_types_by_name['ChangePasswordRequest'] = _CHANGEPASSWORDREQUEST
DESCRIPTOR.message_types_by_name['ChangePasswordResponse'] = _CHANGEPASSWORDRESPONSE
DESCRIPTOR.message_types_by_name['Utxo'] = _UTXO
DESCRIPTOR.message_types_by_name['Transaction'] = _TRANSACTION
DESCRIPTOR.message_types_by_name['GetTransactionsRequest'] = _GETTRANSACTIONSREQUEST
DESCRIPTOR.message_types_by_name['TransactionDetails'] = _TRANSACTIONDETAILS
DESCRIPTOR.message_types_by_name['FeeLimit'] = _FEELIMIT
DESCRIPTOR.message_types_by_name['SendRequest'] = _SENDREQUEST
DESCRIPTOR.message_types_by_name['SendResponse'] = _SENDRESPONSE
DESCRIPTOR.message_types_by_name['SendToRouteRequest'] = _SENDTOROUTEREQUEST
DESCRIPTOR.message_types_by_name['ChannelAcceptRequest'] = _CHANNELACCEPTREQUEST
DESCRIPTOR.message_types_by_name['ChannelAcceptResponse'] = _CHANNELACCEPTRESPONSE
DESCRIPTOR.message_types_by_name['ChannelPoint'] = _CHANNELPOINT
DESCRIPTOR.message_types_by_name['OutPoint'] = _OUTPOINT
DESCRIPTOR.message_types_by_name['LightningAddress'] = _LIGHTNINGADDRESS
DESCRIPTOR.message_types_by_name['EstimateFeeRequest'] = _ESTIMATEFEEREQUEST
DESCRIPTOR.message_types_by_name['EstimateFeeResponse'] = _ESTIMATEFEERESPONSE
DESCRIPTOR.message_types_by_name['SendManyRequest'] = _SENDMANYREQUEST
DESCRIPTOR.message_types_by_name['SendManyResponse'] = _SENDMANYRESPONSE
DESCRIPTOR.message_types_by_name['SendCoinsRequest'] = _SENDCOINSREQUEST
DESCRIPTOR.message_types_by_name['SendCoinsResponse'] = _SENDCOINSRESPONSE
DESCRIPTOR.message_types_by_name['ListUnspentRequest'] = _LISTUNSPENTREQUEST
DESCRIPTOR.message_types_by_name['ListUnspentResponse'] = _LISTUNSPENTRESPONSE
DESCRIPTOR.message_types_by_name['NewAddressRequest'] = _NEWADDRESSREQUEST
DESCRIPTOR.message_types_by_name['NewAddressResponse'] = _NEWADDRESSRESPONSE
DESCRIPTOR.message_types_by_name['SignMessageRequest'] = _SIGNMESSAGEREQUEST
DESCRIPTOR.message_types_by_name['SignMessageResponse'] = _SIGNMESSAGERESPONSE
DESCRIPTOR.message_types_by_name['VerifyMessageRequest'] = _VERIFYMESSAGEREQUEST
DESCRIPTOR.message_types_by_name['VerifyMessageResponse'] = _VERIFYMESSAGERESPONSE
DESCRIPTOR.message_types_by_name['ConnectPeerRequest'] = _CONNECTPEERREQUEST
DESCRIPTOR.message_types_by_name['ConnectPeerResponse'] = _CONNECTPEERRESPONSE
DESCRIPTOR.message_types_by_name['DisconnectPeerRequest'] = _DISCONNECTPEERREQUEST
DESCRIPTOR.message_types_by_name['DisconnectPeerResponse'] = _DISCONNECTPEERRESPONSE
DESCRIPTOR.message_types_by_name['HTLC'] = _HTLC
DESCRIPTOR.message_types_by_name['Channel'] = _CHANNEL
DESCRIPTOR.message_types_by_name['ListChannelsRequest'] = _LISTCHANNELSREQUEST
DESCRIPTOR.message_types_by_name['ListChannelsResponse'] = _LISTCHANNELSRESPONSE
DESCRIPTOR.message_types_by_name['ChannelCloseSummary'] = _CHANNELCLOSESUMMARY
DESCRIPTOR.message_types_by_name['ClosedChannelsRequest'] = _CLOSEDCHANNELSREQUEST
DESCRIPTOR.message_types_by_name['ClosedChannelsResponse'] = _CLOSEDCHANNELSRESPONSE
DESCRIPTOR.message_types_by_name['Peer'] = _PEER
DESCRIPTOR.message_types_by_name['ListPeersRequest'] = _LISTPEERSREQUEST
DESCRIPTOR.message_types_by_name['ListPeersResponse'] = _LISTPEERSRESPONSE
DESCRIPTOR.message_types_by_name['GetInfoRequest'] = _GETINFOREQUEST
DESCRIPTOR.message_types_by_name['GetInfoResponse'] = _GETINFORESPONSE
DESCRIPTOR.message_types_by_name['Chain'] = _CHAIN
DESCRIPTOR.message_types_by_name['ConfirmationUpdate'] = _CONFIRMATIONUPDATE
DESCRIPTOR.message_types_by_name['ChannelOpenUpdate'] = _CHANNELOPENUPDATE
DESCRIPTOR.message_types_by_name['ChannelCloseUpdate'] = _CHANNELCLOSEUPDATE
DESCRIPTOR.message_types_by_name['CloseChannelRequest'] = _CLOSECHANNELREQUEST
DESCRIPTOR.message_types_by_name['CloseStatusUpdate'] = _CLOSESTATUSUPDATE
DESCRIPTOR.message_types_by_name['PendingUpdate'] = _PENDINGUPDATE
DESCRIPTOR.message_types_by_name['OpenChannelRequest'] = _OPENCHANNELREQUEST
DESCRIPTOR.message_types_by_name['OpenStatusUpdate'] = _OPENSTATUSUPDATE
DESCRIPTOR.message_types_by_name['PendingHTLC'] = _PENDINGHTLC
DESCRIPTOR.message_types_by_name['PendingChannelsRequest'] = _PENDINGCHANNELSREQUEST
DESCRIPTOR.message_types_by_name['PendingChannelsResponse'] = _PENDINGCHANNELSRESPONSE
DESCRIPTOR.message_types_by_name['ChannelEventSubscription'] = _CHANNELEVENTSUBSCRIPTION
DESCRIPTOR.message_types_by_name['ChannelEventUpdate'] = _CHANNELEVENTUPDATE
DESCRIPTOR.message_types_by_name['WalletBalanceRequest'] = _WALLETBALANCEREQUEST
DESCRIPTOR.message_types_by_name['WalletBalanceResponse'] = _WALLETBALANCERESPONSE
DESCRIPTOR.message_types_by_name['ChannelBalanceRequest'] = _CHANNELBALANCEREQUEST
DESCRIPTOR.message_types_by_name['ChannelBalanceResponse'] = _CHANNELBALANCERESPONSE
DESCRIPTOR.message_types_by_name['QueryRoutesRequest'] = _QUERYROUTESREQUEST
DESCRIPTOR.message_types_by_name['NodePair'] = _NODEPAIR
DESCRIPTOR.message_types_by_name['EdgeLocator'] = _EDGELOCATOR
DESCRIPTOR.message_types_by_name['QueryRoutesResponse'] = _QUERYROUTESRESPONSE
DESCRIPTOR.message_types_by_name['Hop'] = _HOP
DESCRIPTOR.message_types_by_name['MPPRecord'] = _MPPRECORD
DESCRIPTOR.message_types_by_name['Route'] = _ROUTE
DESCRIPTOR.message_types_by_name['NodeInfoRequest'] = _NODEINFOREQUEST
DESCRIPTOR.message_types_by_name['NodeInfo'] = _NODEINFO
DESCRIPTOR.message_types_by_name['LightningNode'] = _LIGHTNINGNODE
DESCRIPTOR.message_types_by_name['NodeAddress'] = _NODEADDRESS
DESCRIPTOR.message_types_by_name['RoutingPolicy'] = _ROUTINGPOLICY
DESCRIPTOR.message_types_by_name['ChannelEdge'] = _CHANNELEDGE
DESCRIPTOR.message_types_by_name['ChannelGraphRequest'] = _CHANNELGRAPHREQUEST
DESCRIPTOR.message_types_by_name['ChannelGraph'] = _CHANNELGRAPH
DESCRIPTOR.message_types_by_name['ChanInfoRequest'] = _CHANINFOREQUEST
DESCRIPTOR.message_types_by_name['NetworkInfoRequest'] = _NETWORKINFOREQUEST
DESCRIPTOR.message_types_by_name['NetworkInfo'] = _NETWORKINFO
DESCRIPTOR.message_types_by_name['StopRequest'] = _STOPREQUEST
DESCRIPTOR.message_types_by_name['StopResponse'] = _STOPRESPONSE
DESCRIPTOR.message_types_by_name['GraphTopologySubscription'] = _GRAPHTOPOLOGYSUBSCRIPTION
DESCRIPTOR.message_types_by_name['GraphTopologyUpdate'] = _GRAPHTOPOLOGYUPDATE
DESCRIPTOR.message_types_by_name['NodeUpdate'] = _NODEUPDATE
DESCRIPTOR.message_types_by_name['ChannelEdgeUpdate'] = _CHANNELEDGEUPDATE
DESCRIPTOR.message_types_by_name['ClosedChannelUpdate'] = _CLOSEDCHANNELUPDATE
DESCRIPTOR.message_types_by_name['HopHint'] = _HOPHINT
DESCRIPTOR.message_types_by_name['RouteHint'] = _ROUTEHINT
DESCRIPTOR.message_types_by_name['Invoice'] = _INVOICE
DESCRIPTOR.message_types_by_name['InvoiceHTLC'] = _INVOICEHTLC
DESCRIPTOR.message_types_by_name['AddInvoiceResponse'] = _ADDINVOICERESPONSE
DESCRIPTOR.message_types_by_name['PaymentHash'] = _PAYMENTHASH
DESCRIPTOR.message_types_by_name['ListInvoiceRequest'] = _LISTINVOICEREQUEST
DESCRIPTOR.message_types_by_name['ListInvoiceResponse'] = _LISTINVOICERESPONSE
DESCRIPTOR.message_types_by_name['InvoiceSubscription'] = _INVOICESUBSCRIPTION
DESCRIPTOR.message_types_by_name['Payment'] = _PAYMENT
DESCRIPTOR.message_types_by_name['HTLCAttempt'] = _HTLCATTEMPT
DESCRIPTOR.message_types_by_name['ListPaymentsRequest'] = _LISTPAYMENTSREQUEST
DESCRIPTOR.message_types_by_name['ListPaymentsResponse'] = _LISTPAYMENTSRESPONSE
DESCRIPTOR.message_types_by_name['DeleteAllPaymentsRequest'] = _DELETEALLPAYMENTSREQUEST
DESCRIPTOR.message_types_by_name['DeleteAllPaymentsResponse'] = _DELETEALLPAYMENTSRESPONSE
DESCRIPTOR.message_types_by_name['AbandonChannelRequest'] = _ABANDONCHANNELREQUEST
DESCRIPTOR.message_types_by_name['AbandonChannelResponse'] = _ABANDONCHANNELRESPONSE
DESCRIPTOR.message_types_by_name['DebugLevelRequest'] = _DEBUGLEVELREQUEST
DESCRIPTOR.message_types_by_name['DebugLevelResponse'] = _DEBUGLEVELRESPONSE
DESCRIPTOR.message_types_by_name['PayReqString'] = _PAYREQSTRING
DESCRIPTOR.message_types_by_name['PayReq'] = _PAYREQ
DESCRIPTOR.message_types_by_name['FeeReportRequest'] = _FEEREPORTREQUEST
DESCRIPTOR.message_types_by_name['ChannelFeeReport'] = _CHANNELFEEREPORT
DESCRIPTOR.message_types_by_name['FeeReportResponse'] = _FEEREPORTRESPONSE
DESCRIPTOR.message_types_by_name['PolicyUpdateRequest'] = _POLICYUPDATEREQUEST
DESCRIPTOR.message_types_by_name['PolicyUpdateResponse'] = _POLICYUPDATERESPONSE
DESCRIPTOR.message_types_by_name['ForwardingHistoryRequest'] = _FORWARDINGHISTORYREQUEST
DESCRIPTOR.message_types_by_name['ForwardingEvent'] = _FORWARDINGEVENT
DESCRIPTOR.message_types_by_name['ForwardingHistoryResponse'] = _FORWARDINGHISTORYRESPONSE
DESCRIPTOR.message_types_by_name['ExportChannelBackupRequest'] = _EXPORTCHANNELBACKUPREQUEST
DESCRIPTOR.message_types_by_name['ChannelBackup'] = _CHANNELBACKUP
DESCRIPTOR.message_types_by_name['MultiChanBackup'] = _MULTICHANBACKUP
DESCRIPTOR.message_types_by_name['ChanBackupExportRequest'] = _CHANBACKUPEXPORTREQUEST
DESCRIPTOR.message_types_by_name['ChanBackupSnapshot'] = _CHANBACKUPSNAPSHOT
DESCRIPTOR.message_types_by_name['ChannelBackups'] = _CHANNELBACKUPS
DESCRIPTOR.message_types_by_name['RestoreChanBackupRequest'] = _RESTORECHANBACKUPREQUEST
DESCRIPTOR.message_types_by_name['RestoreBackupResponse'] = _RESTOREBACKUPRESPONSE
DESCRIPTOR.message_types_by_name['ChannelBackupSubscription'] = _CHANNELBACKUPSUBSCRIPTION
DESCRIPTOR.message_types_by_name['VerifyChanBackupResponse'] = _VERIFYCHANBACKUPRESPONSE
DESCRIPTOR.message_types_by_name['MacaroonPermission'] = _MACAROONPERMISSION
DESCRIPTOR.message_types_by_name['BakeMacaroonRequest'] = _BAKEMACAROONREQUEST
DESCRIPTOR.message_types_by_name['BakeMacaroonResponse'] = _BAKEMACAROONRESPONSE
DESCRIPTOR.enum_types_by_name['AddressType'] = _ADDRESSTYPE
DESCRIPTOR.enum_types_by_name['InvoiceHTLCState'] = _INVOICEHTLCSTATE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# --- Generated message classes (machine-generated by protoc; do not edit by hand) ---
# Each call synthesizes a concrete protobuf Message subclass from its descriptor
# via the reflection metaclass, then registers it in the default symbol database.
# Nested dict entries (map entries, sub-messages) become nested classes on the
# containing type. The "@@protoc_insertion_point" comments are protoc anchors
# and must be preserved verbatim.
GenSeedRequest = _reflection.GeneratedProtocolMessageType('GenSeedRequest', (_message.Message,), {
  'DESCRIPTOR' : _GENSEEDREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GenSeedRequest)
  })
_sym_db.RegisterMessage(GenSeedRequest)
GenSeedResponse = _reflection.GeneratedProtocolMessageType('GenSeedResponse', (_message.Message,), {
  'DESCRIPTOR' : _GENSEEDRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GenSeedResponse)
  })
_sym_db.RegisterMessage(GenSeedResponse)
InitWalletRequest = _reflection.GeneratedProtocolMessageType('InitWalletRequest', (_message.Message,), {
  'DESCRIPTOR' : _INITWALLETREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.InitWalletRequest)
  })
_sym_db.RegisterMessage(InitWalletRequest)
InitWalletResponse = _reflection.GeneratedProtocolMessageType('InitWalletResponse', (_message.Message,), {
  'DESCRIPTOR' : _INITWALLETRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.InitWalletResponse)
  })
_sym_db.RegisterMessage(InitWalletResponse)
UnlockWalletRequest = _reflection.GeneratedProtocolMessageType('UnlockWalletRequest', (_message.Message,), {
  'DESCRIPTOR' : _UNLOCKWALLETREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.UnlockWalletRequest)
  })
_sym_db.RegisterMessage(UnlockWalletRequest)
UnlockWalletResponse = _reflection.GeneratedProtocolMessageType('UnlockWalletResponse', (_message.Message,), {
  'DESCRIPTOR' : _UNLOCKWALLETRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.UnlockWalletResponse)
  })
_sym_db.RegisterMessage(UnlockWalletResponse)
ChangePasswordRequest = _reflection.GeneratedProtocolMessageType('ChangePasswordRequest', (_message.Message,), {
  'DESCRIPTOR' : _CHANGEPASSWORDREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChangePasswordRequest)
  })
_sym_db.RegisterMessage(ChangePasswordRequest)
ChangePasswordResponse = _reflection.GeneratedProtocolMessageType('ChangePasswordResponse', (_message.Message,), {
  'DESCRIPTOR' : _CHANGEPASSWORDRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChangePasswordResponse)
  })
_sym_db.RegisterMessage(ChangePasswordResponse)
Utxo = _reflection.GeneratedProtocolMessageType('Utxo', (_message.Message,), {
  'DESCRIPTOR' : _UTXO,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Utxo)
  })
_sym_db.RegisterMessage(Utxo)
Transaction = _reflection.GeneratedProtocolMessageType('Transaction', (_message.Message,), {
  'DESCRIPTOR' : _TRANSACTION,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Transaction)
  })
_sym_db.RegisterMessage(Transaction)
GetTransactionsRequest = _reflection.GeneratedProtocolMessageType('GetTransactionsRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETTRANSACTIONSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GetTransactionsRequest)
  })
_sym_db.RegisterMessage(GetTransactionsRequest)
TransactionDetails = _reflection.GeneratedProtocolMessageType('TransactionDetails', (_message.Message,), {
  'DESCRIPTOR' : _TRANSACTIONDETAILS,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.TransactionDetails)
  })
_sym_db.RegisterMessage(TransactionDetails)
FeeLimit = _reflection.GeneratedProtocolMessageType('FeeLimit', (_message.Message,), {
  'DESCRIPTOR' : _FEELIMIT,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.FeeLimit)
  })
_sym_db.RegisterMessage(FeeLimit)
# SendRequest carries a nested map-entry type (dest_custom_records / dest_tlv).
SendRequest = _reflection.GeneratedProtocolMessageType('SendRequest', (_message.Message,), {
  'DestTlvEntry' : _reflection.GeneratedProtocolMessageType('DestTlvEntry', (_message.Message,), {
    'DESCRIPTOR' : _SENDREQUEST_DESTTLVENTRY,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.SendRequest.DestTlvEntry)
    })
  ,
  'DESCRIPTOR' : _SENDREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendRequest)
  })
_sym_db.RegisterMessage(SendRequest)
_sym_db.RegisterMessage(SendRequest.DestTlvEntry)
SendResponse = _reflection.GeneratedProtocolMessageType('SendResponse', (_message.Message,), {
  'DESCRIPTOR' : _SENDRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendResponse)
  })
_sym_db.RegisterMessage(SendResponse)
SendToRouteRequest = _reflection.GeneratedProtocolMessageType('SendToRouteRequest', (_message.Message,), {
  'DESCRIPTOR' : _SENDTOROUTEREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendToRouteRequest)
  })
_sym_db.RegisterMessage(SendToRouteRequest)
ChannelAcceptRequest = _reflection.GeneratedProtocolMessageType('ChannelAcceptRequest', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELACCEPTREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelAcceptRequest)
  })
_sym_db.RegisterMessage(ChannelAcceptRequest)
ChannelAcceptResponse = _reflection.GeneratedProtocolMessageType('ChannelAcceptResponse', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELACCEPTRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelAcceptResponse)
  })
_sym_db.RegisterMessage(ChannelAcceptResponse)
ChannelPoint = _reflection.GeneratedProtocolMessageType('ChannelPoint', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELPOINT,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelPoint)
  })
_sym_db.RegisterMessage(ChannelPoint)
OutPoint = _reflection.GeneratedProtocolMessageType('OutPoint', (_message.Message,), {
  'DESCRIPTOR' : _OUTPOINT,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.OutPoint)
  })
_sym_db.RegisterMessage(OutPoint)
LightningAddress = _reflection.GeneratedProtocolMessageType('LightningAddress', (_message.Message,), {
  'DESCRIPTOR' : _LIGHTNINGADDRESS,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.LightningAddress)
  })
_sym_db.RegisterMessage(LightningAddress)
# EstimateFeeRequest carries a nested map-entry type (AddrToAmount map field).
EstimateFeeRequest = _reflection.GeneratedProtocolMessageType('EstimateFeeRequest', (_message.Message,), {
  'AddrToAmountEntry' : _reflection.GeneratedProtocolMessageType('AddrToAmountEntry', (_message.Message,), {
    'DESCRIPTOR' : _ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.EstimateFeeRequest.AddrToAmountEntry)
    })
  ,
  'DESCRIPTOR' : _ESTIMATEFEEREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.EstimateFeeRequest)
  })
_sym_db.RegisterMessage(EstimateFeeRequest)
_sym_db.RegisterMessage(EstimateFeeRequest.AddrToAmountEntry)
EstimateFeeResponse = _reflection.GeneratedProtocolMessageType('EstimateFeeResponse', (_message.Message,), {
  'DESCRIPTOR' : _ESTIMATEFEERESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.EstimateFeeResponse)
  })
_sym_db.RegisterMessage(EstimateFeeResponse)
# SendManyRequest carries a nested map-entry type (AddrToAmount map field).
SendManyRequest = _reflection.GeneratedProtocolMessageType('SendManyRequest', (_message.Message,), {
  'AddrToAmountEntry' : _reflection.GeneratedProtocolMessageType('AddrToAmountEntry', (_message.Message,), {
    'DESCRIPTOR' : _SENDMANYREQUEST_ADDRTOAMOUNTENTRY,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.SendManyRequest.AddrToAmountEntry)
    })
  ,
  'DESCRIPTOR' : _SENDMANYREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendManyRequest)
  })
_sym_db.RegisterMessage(SendManyRequest)
_sym_db.RegisterMessage(SendManyRequest.AddrToAmountEntry)
SendManyResponse = _reflection.GeneratedProtocolMessageType('SendManyResponse', (_message.Message,), {
  'DESCRIPTOR' : _SENDMANYRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendManyResponse)
  })
_sym_db.RegisterMessage(SendManyResponse)
SendCoinsRequest = _reflection.GeneratedProtocolMessageType('SendCoinsRequest', (_message.Message,), {
  'DESCRIPTOR' : _SENDCOINSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendCoinsRequest)
  })
_sym_db.RegisterMessage(SendCoinsRequest)
SendCoinsResponse = _reflection.GeneratedProtocolMessageType('SendCoinsResponse', (_message.Message,), {
  'DESCRIPTOR' : _SENDCOINSRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SendCoinsResponse)
  })
_sym_db.RegisterMessage(SendCoinsResponse)
ListUnspentRequest = _reflection.GeneratedProtocolMessageType('ListUnspentRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTUNSPENTREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ListUnspentRequest)
  })
_sym_db.RegisterMessage(ListUnspentRequest)
ListUnspentResponse = _reflection.GeneratedProtocolMessageType('ListUnspentResponse', (_message.Message,), {
  'DESCRIPTOR' : _LISTUNSPENTRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ListUnspentResponse)
  })
_sym_db.RegisterMessage(ListUnspentResponse)
NewAddressRequest = _reflection.GeneratedProtocolMessageType('NewAddressRequest', (_message.Message,), {
  'DESCRIPTOR' : _NEWADDRESSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NewAddressRequest)
  })
_sym_db.RegisterMessage(NewAddressRequest)
NewAddressResponse = _reflection.GeneratedProtocolMessageType('NewAddressResponse', (_message.Message,), {
  'DESCRIPTOR' : _NEWADDRESSRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NewAddressResponse)
  })
_sym_db.RegisterMessage(NewAddressResponse)
SignMessageRequest = _reflection.GeneratedProtocolMessageType('SignMessageRequest', (_message.Message,), {
  'DESCRIPTOR' : _SIGNMESSAGEREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SignMessageRequest)
  })
_sym_db.RegisterMessage(SignMessageRequest)
SignMessageResponse = _reflection.GeneratedProtocolMessageType('SignMessageResponse', (_message.Message,), {
  'DESCRIPTOR' : _SIGNMESSAGERESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.SignMessageResponse)
  })
_sym_db.RegisterMessage(SignMessageResponse)
VerifyMessageRequest = _reflection.GeneratedProtocolMessageType('VerifyMessageRequest', (_message.Message,), {
  'DESCRIPTOR' : _VERIFYMESSAGEREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.VerifyMessageRequest)
  })
_sym_db.RegisterMessage(VerifyMessageRequest)
VerifyMessageResponse = _reflection.GeneratedProtocolMessageType('VerifyMessageResponse', (_message.Message,), {
  'DESCRIPTOR' : _VERIFYMESSAGERESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.VerifyMessageResponse)
  })
_sym_db.RegisterMessage(VerifyMessageResponse)
ConnectPeerRequest = _reflection.GeneratedProtocolMessageType('ConnectPeerRequest', (_message.Message,), {
  'DESCRIPTOR' : _CONNECTPEERREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ConnectPeerRequest)
  })
_sym_db.RegisterMessage(ConnectPeerRequest)
ConnectPeerResponse = _reflection.GeneratedProtocolMessageType('ConnectPeerResponse', (_message.Message,), {
  'DESCRIPTOR' : _CONNECTPEERRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ConnectPeerResponse)
  })
_sym_db.RegisterMessage(ConnectPeerResponse)
DisconnectPeerRequest = _reflection.GeneratedProtocolMessageType('DisconnectPeerRequest', (_message.Message,), {
  'DESCRIPTOR' : _DISCONNECTPEERREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.DisconnectPeerRequest)
  })
_sym_db.RegisterMessage(DisconnectPeerRequest)
DisconnectPeerResponse = _reflection.GeneratedProtocolMessageType('DisconnectPeerResponse', (_message.Message,), {
  'DESCRIPTOR' : _DISCONNECTPEERRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.DisconnectPeerResponse)
  })
_sym_db.RegisterMessage(DisconnectPeerResponse)
HTLC = _reflection.GeneratedProtocolMessageType('HTLC', (_message.Message,), {
  'DESCRIPTOR' : _HTLC,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.HTLC)
  })
_sym_db.RegisterMessage(HTLC)
Channel = _reflection.GeneratedProtocolMessageType('Channel', (_message.Message,), {
  'DESCRIPTOR' : _CHANNEL,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Channel)
  })
_sym_db.RegisterMessage(Channel)
ListChannelsRequest = _reflection.GeneratedProtocolMessageType('ListChannelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTCHANNELSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ListChannelsRequest)
  })
_sym_db.RegisterMessage(ListChannelsRequest)
ListChannelsResponse = _reflection.GeneratedProtocolMessageType('ListChannelsResponse', (_message.Message,), {
  'DESCRIPTOR' : _LISTCHANNELSRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ListChannelsResponse)
  })
_sym_db.RegisterMessage(ListChannelsResponse)
ChannelCloseSummary = _reflection.GeneratedProtocolMessageType('ChannelCloseSummary', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELCLOSESUMMARY,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelCloseSummary)
  })
_sym_db.RegisterMessage(ChannelCloseSummary)
ClosedChannelsRequest = _reflection.GeneratedProtocolMessageType('ClosedChannelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _CLOSEDCHANNELSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ClosedChannelsRequest)
  })
_sym_db.RegisterMessage(ClosedChannelsRequest)
ClosedChannelsResponse = _reflection.GeneratedProtocolMessageType('ClosedChannelsResponse', (_message.Message,), {
  'DESCRIPTOR' : _CLOSEDCHANNELSRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ClosedChannelsResponse)
  })
_sym_db.RegisterMessage(ClosedChannelsResponse)
Peer = _reflection.GeneratedProtocolMessageType('Peer', (_message.Message,), {
  'DESCRIPTOR' : _PEER,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Peer)
  })
_sym_db.RegisterMessage(Peer)
ListPeersRequest = _reflection.GeneratedProtocolMessageType('ListPeersRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTPEERSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ListPeersRequest)
  })
_sym_db.RegisterMessage(ListPeersRequest)
ListPeersResponse = _reflection.GeneratedProtocolMessageType('ListPeersResponse', (_message.Message,), {
  'DESCRIPTOR' : _LISTPEERSRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ListPeersResponse)
  })
_sym_db.RegisterMessage(ListPeersResponse)
GetInfoRequest = _reflection.GeneratedProtocolMessageType('GetInfoRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETINFOREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GetInfoRequest)
  })
_sym_db.RegisterMessage(GetInfoRequest)
GetInfoResponse = _reflection.GeneratedProtocolMessageType('GetInfoResponse', (_message.Message,), {
  'DESCRIPTOR' : _GETINFORESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GetInfoResponse)
  })
_sym_db.RegisterMessage(GetInfoResponse)
Chain = _reflection.GeneratedProtocolMessageType('Chain', (_message.Message,), {
  'DESCRIPTOR' : _CHAIN,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Chain)
  })
_sym_db.RegisterMessage(Chain)
ConfirmationUpdate = _reflection.GeneratedProtocolMessageType('ConfirmationUpdate', (_message.Message,), {
  'DESCRIPTOR' : _CONFIRMATIONUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ConfirmationUpdate)
  })
_sym_db.RegisterMessage(ConfirmationUpdate)
ChannelOpenUpdate = _reflection.GeneratedProtocolMessageType('ChannelOpenUpdate', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELOPENUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelOpenUpdate)
  })
_sym_db.RegisterMessage(ChannelOpenUpdate)
ChannelCloseUpdate = _reflection.GeneratedProtocolMessageType('ChannelCloseUpdate', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELCLOSEUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelCloseUpdate)
  })
_sym_db.RegisterMessage(ChannelCloseUpdate)
CloseChannelRequest = _reflection.GeneratedProtocolMessageType('CloseChannelRequest', (_message.Message,), {
  'DESCRIPTOR' : _CLOSECHANNELREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.CloseChannelRequest)
  })
_sym_db.RegisterMessage(CloseChannelRequest)
CloseStatusUpdate = _reflection.GeneratedProtocolMessageType('CloseStatusUpdate', (_message.Message,), {
  'DESCRIPTOR' : _CLOSESTATUSUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.CloseStatusUpdate)
  })
_sym_db.RegisterMessage(CloseStatusUpdate)
PendingUpdate = _reflection.GeneratedProtocolMessageType('PendingUpdate', (_message.Message,), {
  'DESCRIPTOR' : _PENDINGUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.PendingUpdate)
  })
_sym_db.RegisterMessage(PendingUpdate)
OpenChannelRequest = _reflection.GeneratedProtocolMessageType('OpenChannelRequest', (_message.Message,), {
  'DESCRIPTOR' : _OPENCHANNELREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.OpenChannelRequest)
  })
_sym_db.RegisterMessage(OpenChannelRequest)
OpenStatusUpdate = _reflection.GeneratedProtocolMessageType('OpenStatusUpdate', (_message.Message,), {
  'DESCRIPTOR' : _OPENSTATUSUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.OpenStatusUpdate)
  })
_sym_db.RegisterMessage(OpenStatusUpdate)
PendingHTLC = _reflection.GeneratedProtocolMessageType('PendingHTLC', (_message.Message,), {
  'DESCRIPTOR' : _PENDINGHTLC,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.PendingHTLC)
  })
_sym_db.RegisterMessage(PendingHTLC)
PendingChannelsRequest = _reflection.GeneratedProtocolMessageType('PendingChannelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PENDINGCHANNELSREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsRequest)
  })
_sym_db.RegisterMessage(PendingChannelsRequest)
# PendingChannelsResponse declares five nested sub-message classes, one per
# pending-channel state; each is registered individually below.
PendingChannelsResponse = _reflection.GeneratedProtocolMessageType('PendingChannelsResponse', (_message.Message,), {
  'PendingChannel' : _reflection.GeneratedProtocolMessageType('PendingChannel', (_message.Message,), {
    'DESCRIPTOR' : _PENDINGCHANNELSRESPONSE_PENDINGCHANNEL,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsResponse.PendingChannel)
    })
  ,
  'PendingOpenChannel' : _reflection.GeneratedProtocolMessageType('PendingOpenChannel', (_message.Message,), {
    'DESCRIPTOR' : _PENDINGCHANNELSRESPONSE_PENDINGOPENCHANNEL,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsResponse.PendingOpenChannel)
    })
  ,
  'WaitingCloseChannel' : _reflection.GeneratedProtocolMessageType('WaitingCloseChannel', (_message.Message,), {
    'DESCRIPTOR' : _PENDINGCHANNELSRESPONSE_WAITINGCLOSECHANNEL,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsResponse.WaitingCloseChannel)
    })
  ,
  'ClosedChannel' : _reflection.GeneratedProtocolMessageType('ClosedChannel', (_message.Message,), {
    'DESCRIPTOR' : _PENDINGCHANNELSRESPONSE_CLOSEDCHANNEL,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsResponse.ClosedChannel)
    })
  ,
  'ForceClosedChannel' : _reflection.GeneratedProtocolMessageType('ForceClosedChannel', (_message.Message,), {
    'DESCRIPTOR' : _PENDINGCHANNELSRESPONSE_FORCECLOSEDCHANNEL,
    '__module__' : 'rpc_pb2'
    # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsResponse.ForceClosedChannel)
    })
  ,
  'DESCRIPTOR' : _PENDINGCHANNELSRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.PendingChannelsResponse)
  })
_sym_db.RegisterMessage(PendingChannelsResponse)
_sym_db.RegisterMessage(PendingChannelsResponse.PendingChannel)
_sym_db.RegisterMessage(PendingChannelsResponse.PendingOpenChannel)
_sym_db.RegisterMessage(PendingChannelsResponse.WaitingCloseChannel)
_sym_db.RegisterMessage(PendingChannelsResponse.ClosedChannel)
_sym_db.RegisterMessage(PendingChannelsResponse.ForceClosedChannel)
ChannelEventSubscription = _reflection.GeneratedProtocolMessageType('ChannelEventSubscription', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELEVENTSUBSCRIPTION,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelEventSubscription)
  })
_sym_db.RegisterMessage(ChannelEventSubscription)
ChannelEventUpdate = _reflection.GeneratedProtocolMessageType('ChannelEventUpdate', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELEVENTUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelEventUpdate)
  })
_sym_db.RegisterMessage(ChannelEventUpdate)
WalletBalanceRequest = _reflection.GeneratedProtocolMessageType('WalletBalanceRequest', (_message.Message,), {
  'DESCRIPTOR' : _WALLETBALANCEREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.WalletBalanceRequest)
  })
_sym_db.RegisterMessage(WalletBalanceRequest)
WalletBalanceResponse = _reflection.GeneratedProtocolMessageType('WalletBalanceResponse', (_message.Message,), {
  'DESCRIPTOR' : _WALLETBALANCERESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.WalletBalanceResponse)
  })
_sym_db.RegisterMessage(WalletBalanceResponse)
ChannelBalanceRequest = _reflection.GeneratedProtocolMessageType('ChannelBalanceRequest', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELBALANCEREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelBalanceRequest)
  })
_sym_db.RegisterMessage(ChannelBalanceRequest)
ChannelBalanceResponse = _reflection.GeneratedProtocolMessageType('ChannelBalanceResponse', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELBALANCERESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelBalanceResponse)
  })
_sym_db.RegisterMessage(ChannelBalanceResponse)
QueryRoutesRequest = _reflection.GeneratedProtocolMessageType('QueryRoutesRequest', (_message.Message,), {
  'DESCRIPTOR' : _QUERYROUTESREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.QueryRoutesRequest)
  })
_sym_db.RegisterMessage(QueryRoutesRequest)
NodePair = _reflection.GeneratedProtocolMessageType('NodePair', (_message.Message,), {
  'DESCRIPTOR' : _NODEPAIR,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NodePair)
  })
_sym_db.RegisterMessage(NodePair)
EdgeLocator = _reflection.GeneratedProtocolMessageType('EdgeLocator', (_message.Message,), {
  'DESCRIPTOR' : _EDGELOCATOR,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.EdgeLocator)
  })
_sym_db.RegisterMessage(EdgeLocator)
QueryRoutesResponse = _reflection.GeneratedProtocolMessageType('QueryRoutesResponse', (_message.Message,), {
  'DESCRIPTOR' : _QUERYROUTESRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.QueryRoutesResponse)
  })
_sym_db.RegisterMessage(QueryRoutesResponse)
Hop = _reflection.GeneratedProtocolMessageType('Hop', (_message.Message,), {
  'DESCRIPTOR' : _HOP,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Hop)
  })
_sym_db.RegisterMessage(Hop)
MPPRecord = _reflection.GeneratedProtocolMessageType('MPPRecord', (_message.Message,), {
  'DESCRIPTOR' : _MPPRECORD,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.MPPRecord)
  })
_sym_db.RegisterMessage(MPPRecord)
Route = _reflection.GeneratedProtocolMessageType('Route', (_message.Message,), {
  'DESCRIPTOR' : _ROUTE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.Route)
  })
_sym_db.RegisterMessage(Route)
NodeInfoRequest = _reflection.GeneratedProtocolMessageType('NodeInfoRequest', (_message.Message,), {
  'DESCRIPTOR' : _NODEINFOREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NodeInfoRequest)
  })
_sym_db.RegisterMessage(NodeInfoRequest)
NodeInfo = _reflection.GeneratedProtocolMessageType('NodeInfo', (_message.Message,), {
  'DESCRIPTOR' : _NODEINFO,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NodeInfo)
  })
_sym_db.RegisterMessage(NodeInfo)
LightningNode = _reflection.GeneratedProtocolMessageType('LightningNode', (_message.Message,), {
  'DESCRIPTOR' : _LIGHTNINGNODE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.LightningNode)
  })
_sym_db.RegisterMessage(LightningNode)
NodeAddress = _reflection.GeneratedProtocolMessageType('NodeAddress', (_message.Message,), {
  'DESCRIPTOR' : _NODEADDRESS,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NodeAddress)
  })
_sym_db.RegisterMessage(NodeAddress)
RoutingPolicy = _reflection.GeneratedProtocolMessageType('RoutingPolicy', (_message.Message,), {
  'DESCRIPTOR' : _ROUTINGPOLICY,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.RoutingPolicy)
  })
_sym_db.RegisterMessage(RoutingPolicy)
ChannelEdge = _reflection.GeneratedProtocolMessageType('ChannelEdge', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELEDGE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelEdge)
  })
_sym_db.RegisterMessage(ChannelEdge)
ChannelGraphRequest = _reflection.GeneratedProtocolMessageType('ChannelGraphRequest', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELGRAPHREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelGraphRequest)
  })
_sym_db.RegisterMessage(ChannelGraphRequest)
ChannelGraph = _reflection.GeneratedProtocolMessageType('ChannelGraph', (_message.Message,), {
  'DESCRIPTOR' : _CHANNELGRAPH,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChannelGraph)
  })
_sym_db.RegisterMessage(ChannelGraph)
ChanInfoRequest = _reflection.GeneratedProtocolMessageType('ChanInfoRequest', (_message.Message,), {
  'DESCRIPTOR' : _CHANINFOREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.ChanInfoRequest)
  })
_sym_db.RegisterMessage(ChanInfoRequest)
NetworkInfoRequest = _reflection.GeneratedProtocolMessageType('NetworkInfoRequest', (_message.Message,), {
  'DESCRIPTOR' : _NETWORKINFOREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NetworkInfoRequest)
  })
_sym_db.RegisterMessage(NetworkInfoRequest)
NetworkInfo = _reflection.GeneratedProtocolMessageType('NetworkInfo', (_message.Message,), {
  'DESCRIPTOR' : _NETWORKINFO,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NetworkInfo)
  })
_sym_db.RegisterMessage(NetworkInfo)
StopRequest = _reflection.GeneratedProtocolMessageType('StopRequest', (_message.Message,), {
  'DESCRIPTOR' : _STOPREQUEST,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.StopRequest)
  })
_sym_db.RegisterMessage(StopRequest)
StopResponse = _reflection.GeneratedProtocolMessageType('StopResponse', (_message.Message,), {
  'DESCRIPTOR' : _STOPRESPONSE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.StopResponse)
  })
_sym_db.RegisterMessage(StopResponse)
GraphTopologySubscription = _reflection.GeneratedProtocolMessageType('GraphTopologySubscription', (_message.Message,), {
  'DESCRIPTOR' : _GRAPHTOPOLOGYSUBSCRIPTION,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GraphTopologySubscription)
  })
_sym_db.RegisterMessage(GraphTopologySubscription)
GraphTopologyUpdate = _reflection.GeneratedProtocolMessageType('GraphTopologyUpdate', (_message.Message,), {
  'DESCRIPTOR' : _GRAPHTOPOLOGYUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.GraphTopologyUpdate)
  })
_sym_db.RegisterMessage(GraphTopologyUpdate)
NodeUpdate = _reflection.GeneratedProtocolMessageType('NodeUpdate', (_message.Message,), {
  'DESCRIPTOR' : _NODEUPDATE,
  '__module__' : 'rpc_pb2'
  # @@protoc_insertion_point(class_scope:lnrpc.NodeUpdate)
  })
_sym_db.RegisterMessage(NodeUpdate)
ChannelEdgeUpdate = _reflection.GeneratedProtocolMessageType('ChannelEdgeUpdate', (_message.Message,), {
'DESCRIPTOR' : _CHANNELEDGEUPDATE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelEdgeUpdate)
})
_sym_db.RegisterMessage(ChannelEdgeUpdate)
ClosedChannelUpdate = _reflection.GeneratedProtocolMessageType('ClosedChannelUpdate', (_message.Message,), {
'DESCRIPTOR' : _CLOSEDCHANNELUPDATE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ClosedChannelUpdate)
})
_sym_db.RegisterMessage(ClosedChannelUpdate)
HopHint = _reflection.GeneratedProtocolMessageType('HopHint', (_message.Message,), {
'DESCRIPTOR' : _HOPHINT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.HopHint)
})
_sym_db.RegisterMessage(HopHint)
RouteHint = _reflection.GeneratedProtocolMessageType('RouteHint', (_message.Message,), {
'DESCRIPTOR' : _ROUTEHINT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.RouteHint)
})
_sym_db.RegisterMessage(RouteHint)
Invoice = _reflection.GeneratedProtocolMessageType('Invoice', (_message.Message,), {
'DESCRIPTOR' : _INVOICE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.Invoice)
})
_sym_db.RegisterMessage(Invoice)
InvoiceHTLC = _reflection.GeneratedProtocolMessageType('InvoiceHTLC', (_message.Message,), {
'DESCRIPTOR' : _INVOICEHTLC,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.InvoiceHTLC)
})
_sym_db.RegisterMessage(InvoiceHTLC)
AddInvoiceResponse = _reflection.GeneratedProtocolMessageType('AddInvoiceResponse', (_message.Message,), {
'DESCRIPTOR' : _ADDINVOICERESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.AddInvoiceResponse)
})
_sym_db.RegisterMessage(AddInvoiceResponse)
PaymentHash = _reflection.GeneratedProtocolMessageType('PaymentHash', (_message.Message,), {
'DESCRIPTOR' : _PAYMENTHASH,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PaymentHash)
})
_sym_db.RegisterMessage(PaymentHash)
ListInvoiceRequest = _reflection.GeneratedProtocolMessageType('ListInvoiceRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTINVOICEREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListInvoiceRequest)
})
_sym_db.RegisterMessage(ListInvoiceRequest)
ListInvoiceResponse = _reflection.GeneratedProtocolMessageType('ListInvoiceResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTINVOICERESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListInvoiceResponse)
})
_sym_db.RegisterMessage(ListInvoiceResponse)
InvoiceSubscription = _reflection.GeneratedProtocolMessageType('InvoiceSubscription', (_message.Message,), {
'DESCRIPTOR' : _INVOICESUBSCRIPTION,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.InvoiceSubscription)
})
_sym_db.RegisterMessage(InvoiceSubscription)
Payment = _reflection.GeneratedProtocolMessageType('Payment', (_message.Message,), {
'DESCRIPTOR' : _PAYMENT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.Payment)
})
_sym_db.RegisterMessage(Payment)
HTLCAttempt = _reflection.GeneratedProtocolMessageType('HTLCAttempt', (_message.Message,), {
'DESCRIPTOR' : _HTLCATTEMPT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.HTLCAttempt)
})
_sym_db.RegisterMessage(HTLCAttempt)
ListPaymentsRequest = _reflection.GeneratedProtocolMessageType('ListPaymentsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTPAYMENTSREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListPaymentsRequest)
})
_sym_db.RegisterMessage(ListPaymentsRequest)
ListPaymentsResponse = _reflection.GeneratedProtocolMessageType('ListPaymentsResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTPAYMENTSRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ListPaymentsResponse)
})
_sym_db.RegisterMessage(ListPaymentsResponse)
DeleteAllPaymentsRequest = _reflection.GeneratedProtocolMessageType('DeleteAllPaymentsRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEALLPAYMENTSREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DeleteAllPaymentsRequest)
})
_sym_db.RegisterMessage(DeleteAllPaymentsRequest)
DeleteAllPaymentsResponse = _reflection.GeneratedProtocolMessageType('DeleteAllPaymentsResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEALLPAYMENTSRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DeleteAllPaymentsResponse)
})
_sym_db.RegisterMessage(DeleteAllPaymentsResponse)
AbandonChannelRequest = _reflection.GeneratedProtocolMessageType('AbandonChannelRequest', (_message.Message,), {
'DESCRIPTOR' : _ABANDONCHANNELREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.AbandonChannelRequest)
})
_sym_db.RegisterMessage(AbandonChannelRequest)
AbandonChannelResponse = _reflection.GeneratedProtocolMessageType('AbandonChannelResponse', (_message.Message,), {
'DESCRIPTOR' : _ABANDONCHANNELRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.AbandonChannelResponse)
})
_sym_db.RegisterMessage(AbandonChannelResponse)
DebugLevelRequest = _reflection.GeneratedProtocolMessageType('DebugLevelRequest', (_message.Message,), {
'DESCRIPTOR' : _DEBUGLEVELREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DebugLevelRequest)
})
_sym_db.RegisterMessage(DebugLevelRequest)
DebugLevelResponse = _reflection.GeneratedProtocolMessageType('DebugLevelResponse', (_message.Message,), {
'DESCRIPTOR' : _DEBUGLEVELRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.DebugLevelResponse)
})
_sym_db.RegisterMessage(DebugLevelResponse)
PayReqString = _reflection.GeneratedProtocolMessageType('PayReqString', (_message.Message,), {
'DESCRIPTOR' : _PAYREQSTRING,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PayReqString)
})
_sym_db.RegisterMessage(PayReqString)
PayReq = _reflection.GeneratedProtocolMessageType('PayReq', (_message.Message,), {
'DESCRIPTOR' : _PAYREQ,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PayReq)
})
_sym_db.RegisterMessage(PayReq)
FeeReportRequest = _reflection.GeneratedProtocolMessageType('FeeReportRequest', (_message.Message,), {
'DESCRIPTOR' : _FEEREPORTREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.FeeReportRequest)
})
_sym_db.RegisterMessage(FeeReportRequest)
ChannelFeeReport = _reflection.GeneratedProtocolMessageType('ChannelFeeReport', (_message.Message,), {
'DESCRIPTOR' : _CHANNELFEEREPORT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelFeeReport)
})
_sym_db.RegisterMessage(ChannelFeeReport)
FeeReportResponse = _reflection.GeneratedProtocolMessageType('FeeReportResponse', (_message.Message,), {
'DESCRIPTOR' : _FEEREPORTRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.FeeReportResponse)
})
_sym_db.RegisterMessage(FeeReportResponse)
PolicyUpdateRequest = _reflection.GeneratedProtocolMessageType('PolicyUpdateRequest', (_message.Message,), {
'DESCRIPTOR' : _POLICYUPDATEREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PolicyUpdateRequest)
})
_sym_db.RegisterMessage(PolicyUpdateRequest)
PolicyUpdateResponse = _reflection.GeneratedProtocolMessageType('PolicyUpdateResponse', (_message.Message,), {
'DESCRIPTOR' : _POLICYUPDATERESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.PolicyUpdateResponse)
})
_sym_db.RegisterMessage(PolicyUpdateResponse)
ForwardingHistoryRequest = _reflection.GeneratedProtocolMessageType('ForwardingHistoryRequest', (_message.Message,), {
'DESCRIPTOR' : _FORWARDINGHISTORYREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ForwardingHistoryRequest)
})
_sym_db.RegisterMessage(ForwardingHistoryRequest)
ForwardingEvent = _reflection.GeneratedProtocolMessageType('ForwardingEvent', (_message.Message,), {
'DESCRIPTOR' : _FORWARDINGEVENT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ForwardingEvent)
})
_sym_db.RegisterMessage(ForwardingEvent)
ForwardingHistoryResponse = _reflection.GeneratedProtocolMessageType('ForwardingHistoryResponse', (_message.Message,), {
'DESCRIPTOR' : _FORWARDINGHISTORYRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ForwardingHistoryResponse)
})
_sym_db.RegisterMessage(ForwardingHistoryResponse)
ExportChannelBackupRequest = _reflection.GeneratedProtocolMessageType('ExportChannelBackupRequest', (_message.Message,), {
'DESCRIPTOR' : _EXPORTCHANNELBACKUPREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ExportChannelBackupRequest)
})
_sym_db.RegisterMessage(ExportChannelBackupRequest)
ChannelBackup = _reflection.GeneratedProtocolMessageType('ChannelBackup', (_message.Message,), {
'DESCRIPTOR' : _CHANNELBACKUP,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelBackup)
})
_sym_db.RegisterMessage(ChannelBackup)
MultiChanBackup = _reflection.GeneratedProtocolMessageType('MultiChanBackup', (_message.Message,), {
'DESCRIPTOR' : _MULTICHANBACKUP,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.MultiChanBackup)
})
_sym_db.RegisterMessage(MultiChanBackup)
ChanBackupExportRequest = _reflection.GeneratedProtocolMessageType('ChanBackupExportRequest', (_message.Message,), {
'DESCRIPTOR' : _CHANBACKUPEXPORTREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChanBackupExportRequest)
})
_sym_db.RegisterMessage(ChanBackupExportRequest)
ChanBackupSnapshot = _reflection.GeneratedProtocolMessageType('ChanBackupSnapshot', (_message.Message,), {
'DESCRIPTOR' : _CHANBACKUPSNAPSHOT,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChanBackupSnapshot)
})
_sym_db.RegisterMessage(ChanBackupSnapshot)
ChannelBackups = _reflection.GeneratedProtocolMessageType('ChannelBackups', (_message.Message,), {
'DESCRIPTOR' : _CHANNELBACKUPS,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelBackups)
})
_sym_db.RegisterMessage(ChannelBackups)
RestoreChanBackupRequest = _reflection.GeneratedProtocolMessageType('RestoreChanBackupRequest', (_message.Message,), {
'DESCRIPTOR' : _RESTORECHANBACKUPREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.RestoreChanBackupRequest)
})
_sym_db.RegisterMessage(RestoreChanBackupRequest)
RestoreBackupResponse = _reflection.GeneratedProtocolMessageType('RestoreBackupResponse', (_message.Message,), {
'DESCRIPTOR' : _RESTOREBACKUPRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.RestoreBackupResponse)
})
_sym_db.RegisterMessage(RestoreBackupResponse)
ChannelBackupSubscription = _reflection.GeneratedProtocolMessageType('ChannelBackupSubscription', (_message.Message,), {
'DESCRIPTOR' : _CHANNELBACKUPSUBSCRIPTION,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.ChannelBackupSubscription)
})
_sym_db.RegisterMessage(ChannelBackupSubscription)
VerifyChanBackupResponse = _reflection.GeneratedProtocolMessageType('VerifyChanBackupResponse', (_message.Message,), {
'DESCRIPTOR' : _VERIFYCHANBACKUPRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.VerifyChanBackupResponse)
})
_sym_db.RegisterMessage(VerifyChanBackupResponse)
MacaroonPermission = _reflection.GeneratedProtocolMessageType('MacaroonPermission', (_message.Message,), {
'DESCRIPTOR' : _MACAROONPERMISSION,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.MacaroonPermission)
})
_sym_db.RegisterMessage(MacaroonPermission)
BakeMacaroonRequest = _reflection.GeneratedProtocolMessageType('BakeMacaroonRequest', (_message.Message,), {
'DESCRIPTOR' : _BAKEMACAROONREQUEST,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.BakeMacaroonRequest)
})
_sym_db.RegisterMessage(BakeMacaroonRequest)
BakeMacaroonResponse = _reflection.GeneratedProtocolMessageType('BakeMacaroonResponse', (_message.Message,), {
'DESCRIPTOR' : _BAKEMACAROONRESPONSE,
'__module__' : 'rpc_pb2'
# @@protoc_insertion_point(class_scope:lnrpc.BakeMacaroonResponse)
})
_sym_db.RegisterMessage(BakeMacaroonResponse)
DESCRIPTOR._options = None
_SENDREQUEST_DESTTLVENTRY._options = None
_SENDREQUEST.fields_by_name['dest_string']._options = None
_SENDREQUEST.fields_by_name['payment_hash_string']._options = None
_SENDREQUEST.fields_by_name['outgoing_chan_id']._options = None
_SENDTOROUTEREQUEST.fields_by_name['payment_hash_string']._options = None
_ESTIMATEFEEREQUEST_ADDRTOAMOUNTENTRY._options = None
_SENDMANYREQUEST_ADDRTOAMOUNTENTRY._options = None
_CHANNEL.fields_by_name['chan_id']._options = None
_CHANNELCLOSESUMMARY.fields_by_name['chan_id']._options = None
_GETINFORESPONSE.fields_by_name['testnet']._options = None
_OPENCHANNELREQUEST.fields_by_name['node_pubkey_string']._options = None
_QUERYROUTESREQUEST.fields_by_name['ignored_edges']._options = None
_EDGELOCATOR.fields_by_name['channel_id']._options = None
_HOP.fields_by_name['chan_id']._options = None
_HOP.fields_by_name['amt_to_forward']._options = None
_HOP.fields_by_name['fee']._options = None
_ROUTE.fields_by_name['total_fees']._options = None
_ROUTE.fields_by_name['total_amt']._options = None
_CHANNELEDGE.fields_by_name['channel_id']._options = None
_CHANNELEDGE.fields_by_name['last_update']._options = None
_CHANINFOREQUEST.fields_by_name['chan_id']._options = None
_CHANNELEDGEUPDATE.fields_by_name['chan_id']._options = None
_CLOSEDCHANNELUPDATE.fields_by_name['chan_id']._options = None
_HOPHINT.fields_by_name['chan_id']._options = None
_INVOICE.fields_by_name['settled']._options = None
_INVOICE.fields_by_name['amt_paid']._options = None
_INVOICEHTLC.fields_by_name['chan_id']._options = None
_PAYMENTHASH.fields_by_name['r_hash_str']._options = None
_PAYMENT.fields_by_name['value']._options = None
_PAYMENT.fields_by_name['creation_date']._options = None
_PAYMENT.fields_by_name['path']._options = None
_PAYMENT.fields_by_name['fee']._options = None
_FORWARDINGEVENT.fields_by_name['chan_id_in']._options = None
_FORWARDINGEVENT.fields_by_name['chan_id_out']._options = None
_WALLETUNLOCKER = _descriptor.ServiceDescriptor(
name='WalletUnlocker',
full_name='lnrpc.WalletUnlocker',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=20889,
serialized_end=21290,
methods=[
_descriptor.MethodDescriptor(
name='GenSeed',
full_name='lnrpc.WalletUnlocker.GenSeed',
index=0,
containing_service=None,
input_type=_GENSEEDREQUEST,
output_type=_GENSEEDRESPONSE,
serialized_options=_b('\202\323\344\223\002\r\022\013/v1/genseed'),
),
_descriptor.MethodDescriptor(
name='InitWallet',
full_name='lnrpc.WalletUnlocker.InitWallet',
index=1,
containing_service=None,
input_type=_INITWALLETREQUEST,
output_type=_INITWALLETRESPONSE,
serialized_options=_b('\202\323\344\223\002\023\"\016/v1/initwallet:\001*'),
),
_descriptor.MethodDescriptor(
name='UnlockWallet',
full_name='lnrpc.WalletUnlocker.UnlockWallet',
index=2,
containing_service=None,
input_type=_UNLOCKWALLETREQUEST,
output_type=_UNLOCKWALLETRESPONSE,
serialized_options=_b('\202\323\344\223\002\025\"\020/v1/unlockwallet:\001*'),
),
_descriptor.MethodDescriptor(
name='ChangePassword',
full_name='lnrpc.WalletUnlocker.ChangePassword',
index=3,
containing_service=None,
input_type=_CHANGEPASSWORDREQUEST,
output_type=_CHANGEPASSWORDRESPONSE,
serialized_options=_b('\202\323\344\223\002\027\"\022/v1/changepassword:\001*'),
),
])
_sym_db.RegisterServiceDescriptor(_WALLETUNLOCKER)
DESCRIPTOR.services_by_name['WalletUnlocker'] = _WALLETUNLOCKER
_LIGHTNING = _descriptor.ServiceDescriptor(
name='Lightning',
full_name='lnrpc.Lightning',
file=DESCRIPTOR,
index=1,
serialized_options=None,
serialized_start=21293,
serialized_end=26359,
methods=[
_descriptor.MethodDescriptor(
name='WalletBalance',
full_name='lnrpc.Lightning.WalletBalance',
index=0,
containing_service=None,
input_type=_WALLETBALANCEREQUEST,
output_type=_WALLETBALANCERESPONSE,
serialized_options=_b('\202\323\344\223\002\030\022\026/v1/balance/blockchain'),
),
_descriptor.MethodDescriptor(
name='ChannelBalance',
full_name='lnrpc.Lightning.ChannelBalance',
index=1,
containing_service=None,
input_type=_CHANNELBALANCEREQUEST,
output_type=_CHANNELBALANCERESPONSE,
serialized_options=_b('\202\323\344\223\002\026\022\024/v1/balance/channels'),
),
_descriptor.MethodDescriptor(
name='GetTransactions',
full_name='lnrpc.Lightning.GetTransactions',
index=2,
containing_service=None,
input_type=_GETTRANSACTIONSREQUEST,
output_type=_TRANSACTIONDETAILS,
serialized_options=_b('\202\323\344\223\002\022\022\020/v1/transactions'),
),
_descriptor.MethodDescriptor(
name='EstimateFee',
full_name='lnrpc.Lightning.EstimateFee',
index=3,
containing_service=None,
input_type=_ESTIMATEFEEREQUEST,
output_type=_ESTIMATEFEERESPONSE,
serialized_options=_b('\202\323\344\223\002\026\022\024/v1/transactions/fee'),
),
_descriptor.MethodDescriptor(
name='SendCoins',
full_name='lnrpc.Lightning.SendCoins',
index=4,
containing_service=None,
input_type=_SENDCOINSREQUEST,
output_type=_SENDCOINSRESPONSE,
serialized_options=_b('\202\323\344\223\002\025\"\020/v1/transactions:\001*'),
),
_descriptor.MethodDescriptor(
name='ListUnspent',
full_name='lnrpc.Lightning.ListUnspent',
index=5,
containing_service=None,
input_type=_LISTUNSPENTREQUEST,
output_type=_LISTUNSPENTRESPONSE,
serialized_options=_b('\202\323\344\223\002\013\022\t/v1/utxos'),
),
_descriptor.MethodDescriptor(
name='SubscribeTransactions',
full_name='lnrpc.Lightning.SubscribeTransactions',
index=6,
containing_service=None,
input_type=_GETTRANSACTIONSREQUEST,
output_type=_TRANSACTION,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SendMany',
full_name='lnrpc.Lightning.SendMany',
index=7,
containing_service=None,
input_type=_SENDMANYREQUEST,
output_type=_SENDMANYRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='NewAddress',
full_name='lnrpc.Lightning.NewAddress',
index=8,
containing_service=None,
input_type=_NEWADDRESSREQUEST,
output_type=_NEWADDRESSRESPONSE,
serialized_options=_b('\202\323\344\223\002\020\022\016/v1/newaddress'),
),
_descriptor.MethodDescriptor(
name='SignMessage',
full_name='lnrpc.Lightning.SignMessage',
index=9,
containing_service=None,
input_type=_SIGNMESSAGEREQUEST,
output_type=_SIGNMESSAGERESPONSE,
serialized_options=_b('\202\323\344\223\002\024\"\017/v1/signmessage:\001*'),
),
_descriptor.MethodDescriptor(
name='VerifyMessage',
full_name='lnrpc.Lightning.VerifyMessage',
index=10,
containing_service=None,
input_type=_VERIFYMESSAGEREQUEST,
output_type=_VERIFYMESSAGERESPONSE,
serialized_options=_b('\202\323\344\223\002\026\"\021/v1/verifymessage:\001*'),
),
_descriptor.MethodDescriptor(
name='ConnectPeer',
full_name='lnrpc.Lightning.ConnectPeer',
index=11,
containing_service=None,
input_type=_CONNECTPEERREQUEST,
output_type=_CONNECTPEERRESPONSE,
serialized_options=_b('\202\323\344\223\002\016\"\t/v1/peers:\001*'),
),
_descriptor.MethodDescriptor(
name='DisconnectPeer',
full_name='lnrpc.Lightning.DisconnectPeer',
index=12,
containing_service=None,
input_type=_DISCONNECTPEERREQUEST,
output_type=_DISCONNECTPEERRESPONSE,
serialized_options=_b('\202\323\344\223\002\025*\023/v1/peers/{pub_key}'),
),
_descriptor.MethodDescriptor(
name='ListPeers',
full_name='lnrpc.Lightning.ListPeers',
index=13,
containing_service=None,
input_type=_LISTPEERSREQUEST,
output_type=_LISTPEERSRESPONSE,
serialized_options=_b('\202\323\344\223\002\013\022\t/v1/peers'),
),
_descriptor.MethodDescriptor(
name='GetInfo',
full_name='lnrpc.Lightning.GetInfo',
index=14,
containing_service=None,
input_type=_GETINFOREQUEST,
output_type=_GETINFORESPONSE,
serialized_options=_b('\202\323\344\223\002\r\022\013/v1/getinfo'),
),
_descriptor.MethodDescriptor(
name='PendingChannels',
full_name='lnrpc.Lightning.PendingChannels',
index=15,
containing_service=None,
input_type=_PENDINGCHANNELSREQUEST,
output_type=_PENDINGCHANNELSRESPONSE,
serialized_options=_b('\202\323\344\223\002\026\022\024/v1/channels/pending'),
),
_descriptor.MethodDescriptor(
name='ListChannels',
full_name='lnrpc.Lightning.ListChannels',
index=16,
containing_service=None,
input_type=_LISTCHANNELSREQUEST,
output_type=_LISTCHANNELSRESPONSE,
serialized_options=_b('\202\323\344\223\002\016\022\014/v1/channels'),
),
_descriptor.MethodDescriptor(
name='SubscribeChannelEvents',
full_name='lnrpc.Lightning.SubscribeChannelEvents',
index=17,
containing_service=None,
input_type=_CHANNELEVENTSUBSCRIPTION,
output_type=_CHANNELEVENTUPDATE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='ClosedChannels',
full_name='lnrpc.Lightning.ClosedChannels',
index=18,
containing_service=None,
input_type=_CLOSEDCHANNELSREQUEST,
output_type=_CLOSEDCHANNELSRESPONSE,
serialized_options=_b('\202\323\344\223\002\025\022\023/v1/channels/closed'),
),
_descriptor.MethodDescriptor(
name='OpenChannelSync',
full_name='lnrpc.Lightning.OpenChannelSync',
index=19,
containing_service=None,
input_type=_OPENCHANNELREQUEST,
output_type=_CHANNELPOINT,
serialized_options=_b('\202\323\344\223\002\021\"\014/v1/channels:\001*'),
),
_descriptor.MethodDescriptor(
name='OpenChannel',
full_name='lnrpc.Lightning.OpenChannel',
index=20,
containing_service=None,
input_type=_OPENCHANNELREQUEST,
output_type=_OPENSTATUSUPDATE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='ChannelAcceptor',
full_name='lnrpc.Lightning.ChannelAcceptor',
index=21,
containing_service=None,
input_type=_CHANNELACCEPTRESPONSE,
output_type=_CHANNELACCEPTREQUEST,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='CloseChannel',
full_name='lnrpc.Lightning.CloseChannel',
index=22,
containing_service=None,
input_type=_CLOSECHANNELREQUEST,
output_type=_CLOSESTATUSUPDATE,
serialized_options=_b('\202\323\344\223\002L*J/v1/channels/{channel_point.funding_txid_str}/{channel_point.output_index}'),
),
_descriptor.MethodDescriptor(
name='AbandonChannel',
full_name='lnrpc.Lightning.AbandonChannel',
index=23,
containing_service=None,
input_type=_ABANDONCHANNELREQUEST,
output_type=_ABANDONCHANNELRESPONSE,
serialized_options=_b('\202\323\344\223\002T*R/v1/channels/abandon/{channel_point.funding_txid_str}/{channel_point.output_index}'),
),
_descriptor.MethodDescriptor(
name='SendPayment',
full_name='lnrpc.Lightning.SendPayment',
index=24,
containing_service=None,
input_type=_SENDREQUEST,
output_type=_SENDRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SendPaymentSync',
full_name='lnrpc.Lightning.SendPaymentSync',
index=25,
containing_service=None,
input_type=_SENDREQUEST,
output_type=_SENDRESPONSE,
serialized_options=_b('\202\323\344\223\002\036\"\031/v1/channels/transactions:\001*'),
),
_descriptor.MethodDescriptor(
name='SendToRoute',
full_name='lnrpc.Lightning.SendToRoute',
index=26,
containing_service=None,
input_type=_SENDTOROUTEREQUEST,
output_type=_SENDRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SendToRouteSync',
full_name='lnrpc.Lightning.SendToRouteSync',
index=27,
containing_service=None,
input_type=_SENDTOROUTEREQUEST,
output_type=_SENDRESPONSE,
serialized_options=_b('\202\323\344\223\002$\"\037/v1/channels/transactions/route:\001*'),
),
_descriptor.MethodDescriptor(
name='AddInvoice',
full_name='lnrpc.Lightning.AddInvoice',
index=28,
containing_service=None,
input_type=_INVOICE,
output_type=_ADDINVOICERESPONSE,
serialized_options=_b('\202\323\344\223\002\021\"\014/v1/invoices:\001*'),
),
_descriptor.MethodDescriptor(
name='ListInvoices',
full_name='lnrpc.Lightning.ListInvoices',
index=29,
containing_service=None,
input_type=_LISTINVOICEREQUEST,
output_type=_LISTINVOICERESPONSE,
serialized_options=_b('\202\323\344\223\002\016\022\014/v1/invoices'),
),
_descriptor.MethodDescriptor(
name='LookupInvoice',
full_name='lnrpc.Lightning.LookupInvoice',
index=30,
containing_service=None,
input_type=_PAYMENTHASH,
output_type=_INVOICE,
serialized_options=_b('\202\323\344\223\002\032\022\030/v1/invoice/{r_hash_str}'),
),
_descriptor.MethodDescriptor(
name='SubscribeInvoices',
full_name='lnrpc.Lightning.SubscribeInvoices',
index=31,
containing_service=None,
input_type=_INVOICESUBSCRIPTION,
output_type=_INVOICE,
serialized_options=_b('\202\323\344\223\002\030\022\026/v1/invoices/subscribe'),
),
_descriptor.MethodDescriptor(
name='DecodePayReq',
full_name='lnrpc.Lightning.DecodePayReq',
index=32,
containing_service=None,
input_type=_PAYREQSTRING,
output_type=_PAYREQ,
serialized_options=_b('\202\323\344\223\002\026\022\024/v1/payreq/{pay_req}'),
),
_descriptor.MethodDescriptor(
name='ListPayments',
full_name='lnrpc.Lightning.ListPayments',
index=33,
containing_service=None,
input_type=_LISTPAYMENTSREQUEST,
output_type=_LISTPAYMENTSRESPONSE,
serialized_options=_b('\202\323\344\223\002\016\022\014/v1/payments'),
),
_descriptor.MethodDescriptor(
name='DeleteAllPayments',
full_name='lnrpc.Lightning.DeleteAllPayments',
index=34,
containing_service=None,
input_type=_DELETEALLPAYMENTSREQUEST,
output_type=_DELETEALLPAYMENTSRESPONSE,
serialized_options=_b('\202\323\344\223\002\016*\014/v1/payments'),
),
_descriptor.MethodDescriptor(
name='DescribeGraph',
full_name='lnrpc.Lightning.DescribeGraph',
index=35,
containing_service=None,
input_type=_CHANNELGRAPHREQUEST,
output_type=_CHANNELGRAPH,
serialized_options=_b('\202\323\344\223\002\013\022\t/v1/graph'),
),
_descriptor.MethodDescriptor(
name='GetChanInfo',
full_name='lnrpc.Lightning.GetChanInfo',
index=36,
containing_service=None,
input_type=_CHANINFOREQUEST,
output_type=_CHANNELEDGE,
serialized_options=_b('\202\323\344\223\002\032\022\030/v1/graph/edge/{chan_id}'),
),
_descriptor.MethodDescriptor(
name='GetNodeInfo',
full_name='lnrpc.Lightning.GetNodeInfo',
index=37,
containing_service=None,
input_type=_NODEINFOREQUEST,
output_type=_NODEINFO,
serialized_options=_b('\202\323\344\223\002\032\022\030/v1/graph/node/{pub_key}'),
),
_descriptor.MethodDescriptor(
name='QueryRoutes',
full_name='lnrpc.Lightning.QueryRoutes',
index=38,
containing_service=None,
input_type=_QUERYROUTESREQUEST,
output_type=_QUERYROUTESRESPONSE,
serialized_options=_b('\202\323\344\223\002\"\022 /v1/graph/routes/{pub_key}/{amt}'),
),
_descriptor.MethodDescriptor(
name='GetNetworkInfo',
full_name='lnrpc.Lightning.GetNetworkInfo',
index=39,
containing_service=None,
input_type=_NETWORKINFOREQUEST,
output_type=_NETWORKINFO,
serialized_options=_b('\202\323\344\223\002\020\022\016/v1/graph/info'),
),
_descriptor.MethodDescriptor(
name='StopDaemon',
full_name='lnrpc.Lightning.StopDaemon',
index=40,
containing_service=None,
input_type=_STOPREQUEST,
output_type=_STOPRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='SubscribeChannelGraph',
full_name='lnrpc.Lightning.SubscribeChannelGraph',
index=41,
containing_service=None,
input_type=_GRAPHTOPOLOGYSUBSCRIPTION,
output_type=_GRAPHTOPOLOGYUPDATE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='DebugLevel',
full_name='lnrpc.Lightning.DebugLevel',
index=42,
containing_service=None,
input_type=_DEBUGLEVELREQUEST,
output_type=_DEBUGLEVELRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='FeeReport',
full_name='lnrpc.Lightning.FeeReport',
index=43,
containing_service=None,
input_type=_FEEREPORTREQUEST,
output_type=_FEEREPORTRESPONSE,
serialized_options=_b('\202\323\344\223\002\n\022\010/v1/fees'),
),
_descriptor.MethodDescriptor(
name='UpdateChannelPolicy',
full_name='lnrpc.Lightning.UpdateChannelPolicy',
index=44,
containing_service=None,
input_type=_POLICYUPDATEREQUEST,
output_type=_POLICYUPDATERESPONSE,
serialized_options=_b('\202\323\344\223\002\023\"\016/v1/chanpolicy:\001*'),
),
_descriptor.MethodDescriptor(
name='ForwardingHistory',
full_name='lnrpc.Lightning.ForwardingHistory',
index=45,
containing_service=None,
input_type=_FORWARDINGHISTORYREQUEST,
output_type=_FORWARDINGHISTORYRESPONSE,
serialized_options=_b('\202\323\344\223\002\017\"\n/v1/switch:\001*'),
),
_descriptor.MethodDescriptor(
name='ExportChannelBackup',
full_name='lnrpc.Lightning.ExportChannelBackup',
index=46,
containing_service=None,
input_type=_EXPORTCHANNELBACKUPREQUEST,
output_type=_CHANNELBACKUP,
serialized_options=_b('\202\323\344\223\002M\022K/v1/channels/backup/{chan_point.funding_txid_str}/{chan_point.output_index}'),
),
_descriptor.MethodDescriptor(
name='ExportAllChannelBackups',
full_name='lnrpc.Lightning.ExportAllChannelBackups',
index=47,
containing_service=None,
input_type=_CHANBACKUPEXPORTREQUEST,
output_type=_CHANBACKUPSNAPSHOT,
serialized_options=_b('\202\323\344\223\002\025\022\023/v1/channels/backup'),
),
_descriptor.MethodDescriptor(
name='VerifyChanBackup',
full_name='lnrpc.Lightning.VerifyChanBackup',
index=48,
containing_service=None,
input_type=_CHANBACKUPSNAPSHOT,
output_type=_VERIFYCHANBACKUPRESPONSE,
serialized_options=_b('\202\323\344\223\002\037\"\032/v1/channels/backup/verify:\001*'),
),
_descriptor.MethodDescriptor(
name='RestoreChannelBackups',
full_name='lnrpc.Lightning.RestoreChannelBackups',
index=49,
containing_service=None,
input_type=_RESTORECHANBACKUPREQUEST,
output_type=_RESTOREBACKUPRESPONSE,
serialized_options=_b('\202\323\344\223\002 \"\033/v1/channels/backup/restore:\001*'),
),
_descriptor.MethodDescriptor(
name='SubscribeChannelBackups',
full_name='lnrpc.Lightning.SubscribeChannelBackups',
index=50,
containing_service=None,
input_type=_CHANNELBACKUPSUBSCRIPTION,
output_type=_CHANBACKUPSNAPSHOT,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='BakeMacaroon',
full_name='lnrpc.Lightning.BakeMacaroon',
index=51,
containing_service=None,
input_type=_BAKEMACAROONREQUEST,
output_type=_BAKEMACAROONRESPONSE,
serialized_options=_b('\202\323\344\223\002\021\"\014/v1/macaroon:\001*'),
),
])
_sym_db.RegisterServiceDescriptor(_LIGHTNING)
DESCRIPTOR.services_by_name['Lightning'] = _LIGHTNING
# @@protoc_insertion_point(module_scope)
| true | true |
f72e123db1b697c426bdff7098e5de232a420d59 | 398 | py | Python | jobTracker/wsgi.py | TGAC/grassroots-job-tracker | 5391bff1f1088c656b8a052e0c5a106598f82abd | [
"Apache-2.0"
] | null | null | null | jobTracker/wsgi.py | TGAC/grassroots-job-tracker | 5391bff1f1088c656b8a052e0c5a106598f82abd | [
"Apache-2.0"
] | 1 | 2021-06-11T00:03:37.000Z | 2021-06-11T00:03:37.000Z | jobTracker/wsgi.py | xbian/job-tracker | 5391bff1f1088c656b8a052e0c5a106598f82abd | [
"Apache-2.0"
] | null | null | null | """
WSGI config for jobTracker project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings before building the WSGI app;
# setdefault so an externally supplied DJANGO_SETTINGS_MODULE still wins.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jobTracker.settings")
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) look up.
application = get_wsgi_application()
| 23.411765 | 78 | 0.788945 |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jobTracker.settings")
application = get_wsgi_application()
| true | true |
f72e1274b370c3c9fbb6dec7d88bd5fee4343d62 | 11,039 | py | Python | mailchimp_marketing_asyncio/models/campaign_tracking_options.py | john-parton/mailchimp-asyncio | 3865ca0867bec8f537dc1e3256aa3a160c00f8a2 | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing_asyncio/models/campaign_tracking_options.py | john-parton/mailchimp-asyncio | 3865ca0867bec8f537dc1e3256aa3a160c00f8a2 | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing_asyncio/models/campaign_tracking_options.py | john-parton/mailchimp-asyncio | 3865ca0867bec8f537dc1e3256aa3a160c00f8a2 | [
"Apache-2.0"
] | 1 | 2022-03-09T14:52:22.000Z | 2022-03-09T14:52:22.000Z | # coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.74
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class CampaignTrackingOptions(object):
    """Swagger model describing a campaign's tracking options.

    Values live in underscore-prefixed backing attributes and are exposed
    through simple pass-through properties; ``swagger_types`` and
    ``attribute_map`` describe how an instance maps onto the JSON payload.
    """

    # attribute name -> declared Swagger type
    swagger_types = {
        'opens': 'bool',
        'html_clicks': 'bool',
        'text_clicks': 'bool',
        'goal_tracking': 'bool',
        'ecomm360': 'bool',
        'google_analytics': 'str',
        'clicktale': 'str',
        'salesforce': 'SalesforceCRMTracking',
        'capsule': 'CapsuleCRMTracking1'
    }

    # attribute name -> key used in the serialized JSON definition
    attribute_map = {
        'opens': 'opens',
        'html_clicks': 'html_clicks',
        'text_clicks': 'text_clicks',
        'goal_tracking': 'goal_tracking',
        'ecomm360': 'ecomm360',
        'google_analytics': 'google_analytics',
        'clicktale': 'clicktale',
        'salesforce': 'salesforce',
        'capsule': 'capsule'
    }

    def __init__(self, opens=None, html_clicks=None, text_clicks=None,
                 goal_tracking=None, ecomm360=None, google_analytics=None,
                 clicktale=None, salesforce=None, capsule=None):
        """CampaignTrackingOptions - a model defined in Swagger."""
        # Create every backing slot first, then route only the explicitly
        # supplied (non-None) values through the property setters below.
        for slot in self.swagger_types:
            setattr(self, '_' + slot, None)
        self.discriminator = None

        provided = {
            'opens': opens,
            'html_clicks': html_clicks,
            'text_clicks': text_clicks,
            'goal_tracking': goal_tracking,
            'ecomm360': ecomm360,
            'google_analytics': google_analytics,
            'clicktale': clicktale,
            'salesforce': salesforce,
            'capsule': capsule,
        }
        for name, value in provided.items():
            if value is not None:
                setattr(self, name, value)

    @property
    def opens(self):
        """bool: Whether to track opens (API default: `true`)."""
        return self._opens

    @opens.setter
    def opens(self, value):
        self._opens = value

    @property
    def html_clicks(self):
        """bool: Whether to track clicks in the HTML version (API default: `true`)."""
        return self._html_clicks

    @html_clicks.setter
    def html_clicks(self, value):
        self._html_clicks = value

    @property
    def text_clicks(self):
        """bool: Whether to track clicks in the plain-text version (API default: `true`)."""
        return self._text_clicks

    @text_clicks.setter
    def text_clicks(self, value):
        self._text_clicks = value

    @property
    def goal_tracking(self):
        """bool: Deprecated."""
        return self._goal_tracking

    @goal_tracking.setter
    def goal_tracking(self, value):
        self._goal_tracking = value

    @property
    def ecomm360(self):
        """bool: Whether to enable e-commerce tracking."""
        return self._ecomm360

    @ecomm360.setter
    def ecomm360(self, value):
        self._ecomm360 = value

    @property
    def google_analytics(self):
        """str: Custom slug for Google Analytics tracking (max 50 bytes)."""
        return self._google_analytics

    @google_analytics.setter
    def google_analytics(self, value):
        self._google_analytics = value

    @property
    def clicktale(self):
        """str: Custom slug for Click Tale tracking (max 50 bytes)."""
        return self._clicktale

    @clicktale.setter
    def clicktale(self, value):
        self._clicktale = value

    @property
    def salesforce(self):
        """SalesforceCRMTracking: Salesforce CRM tracking options."""
        return self._salesforce

    @salesforce.setter
    def salesforce(self, value):
        self._salesforce = value

    @property
    def capsule(self):
        """CapsuleCRMTracking1: Capsule CRM tracking options."""
        return self._capsule

    @capsule.setter
    def capsule(self, value):
        self._capsule = value

    def to_dict(self):
        """Return the model properties as a dict, recursing into nested models."""
        result = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [
                    item.to_dict() if hasattr(item, 'to_dict') else item
                    for item in value
                ]
            elif hasattr(value, 'to_dict'):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {
                    key: item.to_dict() if hasattr(item, 'to_dict') else item
                    for key, item in value.items()
                }
            else:
                result[name] = value
        # Generated-code convention: merge dict items when the model
        # subclasses dict (it does not here, so this branch is inert).
        if issubclass(CampaignTrackingOptions, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two instances are equal when all attributes match."""
        return (isinstance(other, CampaignTrackingOptions)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of `__eq__`."""
        return not self == other
| 32.659763 | 190 | 0.625872 |
import pprint
import re
import six
class CampaignTrackingOptions(object):
swagger_types = {
'opens': 'bool',
'html_clicks': 'bool',
'text_clicks': 'bool',
'goal_tracking': 'bool',
'ecomm360': 'bool',
'google_analytics': 'str',
'clicktale': 'str',
'salesforce': 'SalesforceCRMTracking',
'capsule': 'CapsuleCRMTracking1'
}
attribute_map = {
'opens': 'opens',
'html_clicks': 'html_clicks',
'text_clicks': 'text_clicks',
'goal_tracking': 'goal_tracking',
'ecomm360': 'ecomm360',
'google_analytics': 'google_analytics',
'clicktale': 'clicktale',
'salesforce': 'salesforce',
'capsule': 'capsule'
}
def __init__(self, opens=None, html_clicks=None, text_clicks=None, goal_tracking=None, ecomm360=None, google_analytics=None, clicktale=None, salesforce=None, capsule=None):
self._opens = None
self._html_clicks = None
self._text_clicks = None
self._goal_tracking = None
self._ecomm360 = None
self._google_analytics = None
self._clicktale = None
self._salesforce = None
self._capsule = None
self.discriminator = None
if opens is not None:
self.opens = opens
if html_clicks is not None:
self.html_clicks = html_clicks
if text_clicks is not None:
self.text_clicks = text_clicks
if goal_tracking is not None:
self.goal_tracking = goal_tracking
if ecomm360 is not None:
self.ecomm360 = ecomm360
if google_analytics is not None:
self.google_analytics = google_analytics
if clicktale is not None:
self.clicktale = clicktale
if salesforce is not None:
self.salesforce = salesforce
if capsule is not None:
self.capsule = capsule
@property
def opens(self):
return self._opens
@opens.setter
def opens(self, opens):
self._opens = opens
@property
def html_clicks(self):
return self._html_clicks
@html_clicks.setter
def html_clicks(self, html_clicks):
self._html_clicks = html_clicks
@property
def text_clicks(self):
return self._text_clicks
@text_clicks.setter
def text_clicks(self, text_clicks):
self._text_clicks = text_clicks
@property
def goal_tracking(self):
return self._goal_tracking
@goal_tracking.setter
def goal_tracking(self, goal_tracking):
self._goal_tracking = goal_tracking
@property
def ecomm360(self):
return self._ecomm360
@ecomm360.setter
def ecomm360(self, ecomm360):
self._ecomm360 = ecomm360
@property
def google_analytics(self):
return self._google_analytics
@google_analytics.setter
def google_analytics(self, google_analytics):
self._google_analytics = google_analytics
@property
def clicktale(self):
return self._clicktale
@clicktale.setter
def clicktale(self, clicktale):
self._clicktale = clicktale
@property
def salesforce(self):
return self._salesforce
@salesforce.setter
def salesforce(self, salesforce):
self._salesforce = salesforce
@property
def capsule(self):
return self._capsule
@capsule.setter
def capsule(self, capsule):
self._capsule = capsule
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(CampaignTrackingOptions, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, CampaignTrackingOptions):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f72e12a2ece435b15b1dfd4dd0e12f3fbd260fc4 | 6,160 | py | Python | lib/airflow/tests/providers/amazon/aws/operators/test_batch.py | SteNicholas/ai-flow | 2c70547981f1516f0e37bbe6936a1b7cccd31822 | [
"Apache-2.0"
] | 79 | 2021-10-15T07:32:27.000Z | 2022-03-28T04:10:19.000Z | lib/airflow/tests/providers/amazon/aws/operators/test_batch.py | SteNicholas/ai-flow | 2c70547981f1516f0e37bbe6936a1b7cccd31822 | [
"Apache-2.0"
] | 153 | 2021-10-15T05:23:46.000Z | 2022-02-23T06:07:10.000Z | lib/airflow/tests/providers/amazon/aws/operators/test_batch.py | SteNicholas/ai-flow | 2c70547981f1516f0e37bbe6936a1b7cccd31822 | [
"Apache-2.0"
] | 23 | 2021-10-15T02:36:37.000Z | 2022-03-17T02:59:27.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# pylint: disable=missing-docstring
import unittest
from unittest import mock
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.batch_client import AwsBatchClientHook
from airflow.providers.amazon.aws.operators.batch import AwsBatchOperator
# Use dummy AWS credentials
# (injected into os.environ via mock.patch.dict in TestAwsBatchOperator.setUp)
AWS_REGION = "eu-west-1"
AWS_ACCESS_KEY_ID = "airflow_dummy_key"
AWS_SECRET_ACCESS_KEY = "airflow_dummy_secret"
# Fixed job identifiers reused across the tests below.
JOB_NAME = "51455483-c62c-48ac-9b88-53a6a725baa3"
JOB_ID = "8ba9d676-4108-4474-9dca-8bbac1da9b19"
# Canned successful submit_job response returned by the mocked client.
RESPONSE_WITHOUT_FAILURES = {
    "jobName": JOB_NAME,
    "jobId": JOB_ID,
}
class TestAwsBatchOperator(unittest.TestCase):
    """Unit tests for AwsBatchOperator with a fully mocked boto3 Batch client."""

    # Small retry budgets passed to the operator so tests stay fast.
    MAX_RETRIES = 2
    STATUS_RETRIES = 3

    @mock.patch.dict("os.environ", AWS_DEFAULT_REGION=AWS_REGION)
    @mock.patch.dict("os.environ", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID)
    @mock.patch.dict("os.environ", AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY)
    @mock.patch("airflow.providers.amazon.aws.hooks.batch_client.AwsBaseHook.get_client_type")
    def setUp(self, get_client_type_mock):
        """Create an operator wired to a mocked client; disable all delays."""
        self.get_client_type_mock = get_client_type_mock
        self.batch = AwsBatchOperator(
            task_id="task",
            job_name=JOB_NAME,
            job_queue="queue",
            job_definition="hello-world",
            max_retries=self.MAX_RETRIES,
            status_retries=self.STATUS_RETRIES,
            parameters=None,
            overrides={},
            array_properties=None,
            aws_conn_id='airflow_test',
            region_name="eu-west-1",
        )
        self.client_mock = self.get_client_type_mock.return_value
        self.assertEqual(self.batch.hook.client, self.client_mock)  # setup client property
        # don't pause in unit tests
        self.mock_delay = mock.Mock(return_value=None)
        self.batch.delay = self.mock_delay
        self.mock_exponential_delay = mock.Mock(return_value=0)
        self.batch.exponential_delay = self.mock_exponential_delay
        # Assign a job ID for most tests, so they don't depend on a job submission.
        self.assertIsNone(self.batch.job_id)
        self.batch.job_id = JOB_ID

    def test_init(self):
        """setUp's constructor arguments are reflected on the operator and its hook."""
        self.assertEqual(self.batch.job_id, JOB_ID)
        self.assertEqual(self.batch.job_name, JOB_NAME)
        self.assertEqual(self.batch.job_queue, "queue")
        self.assertEqual(self.batch.job_definition, "hello-world")
        self.assertEqual(self.batch.waiters, None)
        self.assertEqual(self.batch.hook.max_retries, self.MAX_RETRIES)
        self.assertEqual(self.batch.hook.status_retries, self.STATUS_RETRIES)
        # parameters/array_properties passed as None come back as empty dicts
        self.assertEqual(self.batch.parameters, {})
        self.assertEqual(self.batch.overrides, {})
        self.assertEqual(self.batch.array_properties, {})
        self.assertEqual(self.batch.hook.region_name, "eu-west-1")
        self.assertEqual(self.batch.hook.aws_conn_id, "airflow_test")
        self.assertEqual(self.batch.hook.client, self.client_mock)
        self.get_client_type_mock.assert_called_once_with("batch", region_name="eu-west-1")

    def test_template_fields_overrides(self):
        """The operator exposes exactly these template_fields."""
        self.assertEqual(
            self.batch.template_fields,
            (
                "job_name",
                "overrides",
                "parameters",
            ),
        )

    @mock.patch.object(AwsBatchClientHook, "wait_for_job")
    @mock.patch.object(AwsBatchClientHook, "check_job_success")
    def test_execute_without_failures(self, check_mock, wait_mock):
        """A successful submit_job stores the job id, then waits on and checks it."""
        # JOB_ID is in RESPONSE_WITHOUT_FAILURES
        self.client_mock.submit_job.return_value = RESPONSE_WITHOUT_FAILURES
        self.batch.job_id = None
        self.batch.waiters = None  # use default wait
        self.batch.execute(None)
        self.client_mock.submit_job.assert_called_once_with(
            jobQueue="queue",
            jobName=JOB_NAME,
            containerOverrides={},
            jobDefinition="hello-world",
            arrayProperties={},
            parameters={},
        )
        self.assertEqual(self.batch.job_id, JOB_ID)
        wait_mock.assert_called_once_with(JOB_ID)
        check_mock.assert_called_once_with(JOB_ID)

    def test_execute_with_failures(self):
        """An empty submit_job response raises AirflowException."""
        self.client_mock.submit_job.return_value = ""
        with self.assertRaises(AirflowException):
            self.batch.execute(None)
        self.client_mock.submit_job.assert_called_once_with(
            jobQueue="queue",
            jobName=JOB_NAME,
            containerOverrides={},
            jobDefinition="hello-world",
            arrayProperties={},
            parameters={},
        )

    @mock.patch.object(AwsBatchClientHook, "check_job_success")
    def test_wait_job_complete_using_waiters(self, check_mock):
        """Custom waiters, when supplied, are used instead of the default wait."""
        mock_waiters = mock.Mock()
        self.batch.waiters = mock_waiters
        self.client_mock.submit_job.return_value = RESPONSE_WITHOUT_FAILURES
        self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
        self.batch.execute(None)
        mock_waiters.wait_for_job.assert_called_once_with(JOB_ID)
        check_mock.assert_called_once_with(JOB_ID)

    def test_kill_job(self):
        """on_kill terminates the running AWS Batch job."""
        self.client_mock.terminate_job.return_value = {}
        self.batch.on_kill()
        self.client_mock.terminate_job.assert_called_once_with(jobId=JOB_ID, reason="Task killed by the user")
| 38.26087 | 110 | 0.69513 |
import unittest
from unittest import mock
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.batch_client import AwsBatchClientHook
from airflow.providers.amazon.aws.operators.batch import AwsBatchOperator
AWS_REGION = "eu-west-1"
AWS_ACCESS_KEY_ID = "airflow_dummy_key"
AWS_SECRET_ACCESS_KEY = "airflow_dummy_secret"
JOB_NAME = "51455483-c62c-48ac-9b88-53a6a725baa3"
JOB_ID = "8ba9d676-4108-4474-9dca-8bbac1da9b19"
RESPONSE_WITHOUT_FAILURES = {
"jobName": JOB_NAME,
"jobId": JOB_ID,
}
class TestAwsBatchOperator(unittest.TestCase):
MAX_RETRIES = 2
STATUS_RETRIES = 3
@mock.patch.dict("os.environ", AWS_DEFAULT_REGION=AWS_REGION)
@mock.patch.dict("os.environ", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID)
@mock.patch.dict("os.environ", AWS_SECRET_ACCESS_KEY=AWS_SECRET_ACCESS_KEY)
@mock.patch("airflow.providers.amazon.aws.hooks.batch_client.AwsBaseHook.get_client_type")
def setUp(self, get_client_type_mock):
self.get_client_type_mock = get_client_type_mock
self.batch = AwsBatchOperator(
task_id="task",
job_name=JOB_NAME,
job_queue="queue",
job_definition="hello-world",
max_retries=self.MAX_RETRIES,
status_retries=self.STATUS_RETRIES,
parameters=None,
overrides={},
array_properties=None,
aws_conn_id='airflow_test',
region_name="eu-west-1",
)
self.client_mock = self.get_client_type_mock.return_value
self.assertEqual(self.batch.hook.client, self.client_mock)
self.mock_delay = mock.Mock(return_value=None)
self.batch.delay = self.mock_delay
self.mock_exponential_delay = mock.Mock(return_value=0)
self.batch.exponential_delay = self.mock_exponential_delay
# Assign a job ID for most tests, so they don't depend on a job submission.
self.assertIsNone(self.batch.job_id)
self.batch.job_id = JOB_ID
def test_init(self):
self.assertEqual(self.batch.job_id, JOB_ID)
self.assertEqual(self.batch.job_name, JOB_NAME)
self.assertEqual(self.batch.job_queue, "queue")
self.assertEqual(self.batch.job_definition, "hello-world")
self.assertEqual(self.batch.waiters, None)
self.assertEqual(self.batch.hook.max_retries, self.MAX_RETRIES)
self.assertEqual(self.batch.hook.status_retries, self.STATUS_RETRIES)
self.assertEqual(self.batch.parameters, {})
self.assertEqual(self.batch.overrides, {})
self.assertEqual(self.batch.array_properties, {})
self.assertEqual(self.batch.hook.region_name, "eu-west-1")
self.assertEqual(self.batch.hook.aws_conn_id, "airflow_test")
self.assertEqual(self.batch.hook.client, self.client_mock)
self.get_client_type_mock.assert_called_once_with("batch", region_name="eu-west-1")
def test_template_fields_overrides(self):
self.assertEqual(
self.batch.template_fields,
(
"job_name",
"overrides",
"parameters",
),
)
@mock.patch.object(AwsBatchClientHook, "wait_for_job")
@mock.patch.object(AwsBatchClientHook, "check_job_success")
def test_execute_without_failures(self, check_mock, wait_mock):
self.client_mock.submit_job.return_value = RESPONSE_WITHOUT_FAILURES
self.batch.job_id = None
self.batch.waiters = None
self.batch.execute(None)
self.client_mock.submit_job.assert_called_once_with(
jobQueue="queue",
jobName=JOB_NAME,
containerOverrides={},
jobDefinition="hello-world",
arrayProperties={},
parameters={},
)
self.assertEqual(self.batch.job_id, JOB_ID)
wait_mock.assert_called_once_with(JOB_ID)
check_mock.assert_called_once_with(JOB_ID)
def test_execute_with_failures(self):
self.client_mock.submit_job.return_value = ""
with self.assertRaises(AirflowException):
self.batch.execute(None)
self.client_mock.submit_job.assert_called_once_with(
jobQueue="queue",
jobName=JOB_NAME,
containerOverrides={},
jobDefinition="hello-world",
arrayProperties={},
parameters={},
)
@mock.patch.object(AwsBatchClientHook, "check_job_success")
def test_wait_job_complete_using_waiters(self, check_mock):
mock_waiters = mock.Mock()
self.batch.waiters = mock_waiters
self.client_mock.submit_job.return_value = RESPONSE_WITHOUT_FAILURES
self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "SUCCEEDED"}]}
self.batch.execute(None)
mock_waiters.wait_for_job.assert_called_once_with(JOB_ID)
check_mock.assert_called_once_with(JOB_ID)
def test_kill_job(self):
self.client_mock.terminate_job.return_value = {}
self.batch.on_kill()
self.client_mock.terminate_job.assert_called_once_with(jobId=JOB_ID, reason="Task killed by the user")
| true | true |
f72e12b1839be243a177c4893f6437b629c907c1 | 365 | py | Python | azsc/handlers/az/ResourceGroup.py | jongio/azure-script | acd5c18e16184814ead49029605dcc2dcd04313e | [
"MIT"
] | null | null | null | azsc/handlers/az/ResourceGroup.py | jongio/azure-script | acd5c18e16184814ead49029605dcc2dcd04313e | [
"MIT"
] | null | null | null | azsc/handlers/az/ResourceGroup.py | jongio/azure-script | acd5c18e16184814ead49029605dcc2dcd04313e | [
"MIT"
] | null | null | null | from azsc.handlers.Handler import Handler
from azsc.handlers.az.Generic import GenericHandler
class ResourceGroupHandler(GenericHandler):
    """Handler for `az group` commands (Azure resource groups)."""

    # CLI object name this handler is responsible for.
    azure_object = "group"

    def execute(self):
        """Build the command via GenericHandler, registering the 'location'
        context parameter first and saving this handler's state to the shared
        context afterwards (exact context semantics live in the base class).
        """
        self.add_context_parameter("location", "location")
        cmd = super(ResourceGroupHandler, self).execute()
        self.save_to_context()
        return cmd
| 22.8125 | 58 | 0.709589 | from azsc.handlers.Handler import Handler
from azsc.handlers.az.Generic import GenericHandler
class ResourceGroupHandler(GenericHandler):
azure_object = "group"
def execute(self):
self.add_context_parameter("location", "location")
cmd = super(ResourceGroupHandler, self).execute()
self.save_to_context()
return cmd
| true | true |
f72e1342fa58b465f538a36f253ae2803b1ae833 | 1,696 | py | Python | setup.py | rynge/straxen | 7546177c8d79c8570e0c6e005c5ffdd6eb4c54f6 | [
"BSD-3-Clause"
] | null | null | null | setup.py | rynge/straxen | 7546177c8d79c8570e0c6e005c5ffdd6eb4c54f6 | [
"BSD-3-Clause"
] | null | null | null | setup.py | rynge/straxen | 7546177c8d79c8570e0c6e005c5ffdd6eb4c54f6 | [
"BSD-3-Clause"
] | null | null | null | import setuptools
# Get requirement names from requirements.txt, stripping the version tags.
# A plain split('=') leaves the comparison operator behind for pins such as
# "strax>=0.8" (yielding "strax>"), so split on the first of < > = ~ ! instead;
# blank lines and comment lines are skipped entirely.
import re

with open('requirements.txt') as f:
    requires = [re.split(r'[<>=~!]', line.strip(), maxsplit=1)[0].strip()
                for line in f
                if line.strip() and not line.strip().startswith('#')]
# README plus changelog together form the PyPI long description.
with open('README.md') as file:
    readme = file.read()
with open('HISTORY.md') as file:
    history = file.read()

# Package metadata; runtime dependencies come from requirements.txt above.
setuptools.setup(name='straxen',
                 version='0.2.0',
                 description='Streaming analysis for XENON',
                 author='Straxen contributors, the XENON collaboration',
                 url='https://github.com/XENONnT/straxen',
                 install_requires=requires,
                 long_description=readme + '\n\n' + history,
                 long_description_content_type="text/markdown",
                 python_requires=">=3.6",
                 extras_require={
                     'docs': ['sphinx',
                              'sphinx_rtd_theme',
                              'nbsphinx',
                              'recommonmark',
                              'graphviz'],
                 },
                 scripts=['bin/bootstrax'],
                 packages=setuptools.find_packages(),
                 classifiers=[
                     'Development Status :: 4 - Beta',
                     'License :: OSI Approved :: BSD License',
                     'Natural Language :: English',
                     'Programming Language :: Python :: 3.6',
                     'Intended Audience :: Science/Research',
                     'Programming Language :: Python :: Implementation :: CPython',
                     'Topic :: Scientific/Engineering :: Physics',
                 ],
                 zip_safe=False)
| 40.380952 | 83 | 0.48467 | import setuptools
with open('requirements.txt') as f:
requires = [x.strip().split('=')[0]
for x in f.readlines()]
with open('README.md') as file:
readme = file.read()
with open('HISTORY.md') as file:
history = file.read()
setuptools.setup(name='straxen',
version='0.2.0',
description='Streaming analysis for XENON',
author='Straxen contributors, the XENON collaboration',
url='https://github.com/XENONnT/straxen',
install_requires=requires,
long_description=readme + '\n\n' + history,
long_description_content_type="text/markdown",
python_requires=">=3.6",
extras_require={
'docs': ['sphinx',
'sphinx_rtd_theme',
'nbsphinx',
'recommonmark',
'graphviz'],
},
scripts=['bin/bootstrax'],
packages=setuptools.find_packages(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 3.6',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering :: Physics',
],
zip_safe=False)
| true | true |
f72e136a621c00c16de3e45e9a8425b17e893742 | 126 | py | Python | data_analysis/monthly_aggregator.py | nehiljain/rhime | 0e2b7001fcb52b743fdd396e69a290de5c1a7aa3 | [
"MIT"
] | null | null | null | data_analysis/monthly_aggregator.py | nehiljain/rhime | 0e2b7001fcb52b743fdd396e69a290de5c1a7aa3 | [
"MIT"
] | null | null | null | data_analysis/monthly_aggregator.py | nehiljain/rhime | 0e2b7001fcb52b743fdd396e69a290de5c1a7aa3 | [
"MIT"
] | null | null | null | from pymongo import MongoClient
# NOTE(review): production MongoDB reached via a hard-coded IP and default
# port, with no credentials shown here -- consider loading these from
# configuration instead of source.
client = MongoClient('128.199.138.180',27017)
db = client.rhime_prod       # 'rhime_prod' database
articles = db.articles       # handle to the 'articles' collection
| 18 | 45 | 0.777778 | from pymongo import MongoClient
client = MongoClient('128.199.138.180',27017)
db = client.rhime_prod
articles = db.articles
| true | true |
f72e13ad4ec48f6b9e13ee14b34d5349b1a8e5e7 | 1,603 | py | Python | misc/decorators/instantiate.py | mverleg/django_misc | e5a6f6d8c16ecfdaab634eb4af812921d532181f | [
"BSD-3-Clause"
] | null | null | null | misc/decorators/instantiate.py | mverleg/django_misc | e5a6f6d8c16ecfdaab634eb4af812921d532181f | [
"BSD-3-Clause"
] | null | null | null | misc/decorators/instantiate.py | mverleg/django_misc | e5a6f6d8c16ecfdaab634eb4af812921d532181f | [
"BSD-3-Clause"
] | null | null | null |
"""
decorator for functions that take a unique model attribute as
url parameter (or other string argument), and convert that
into an actual instance of that model as argument for the function
e.g. django url sees this function:
my_view(request, example_pk)
which is defined as
@instantiate(Example)
def my_view(request, example)
uses primary key by default, with modelname as instance kwarg
and modelname_pk as input string kwargs (can all be changed)
"""
import functools

from django.http import Http404
def instantiate(Model, in_kw_name = None, out_kw_name = None, model_attr_name = 'pk', model_attr_type = int):
	"""
	Decorator factory: convert a unique-attribute string kwarg into a model
	instance kwarg for a view.

	The raw identifier arrives as ``in_kw_name`` (default:
	'<modelname>_<attr>', e.g. 'example_pk') and the looked-up instance is
	passed on as ``out_kw_name`` (default: lowercased model name).

	Raises Http404 when the identifier cannot be converted to
	``model_attr_type`` or when no matching instance exists, so bad URLs
	yield a 404 instead of a 500.
	"""
	if out_kw_name is None:
		out_kw_name = Model.__name__.lower()
	if in_kw_name is None:
		in_kw_name = '%s_%s' % (out_kw_name, model_attr_name)
	def convert_to_instance_decorator(func):
		@functools.wraps(func)  # keep the view's name/docstring for introspection
		def func_with_instance(request, *args, **kwargs):
			identifier = kwargs.pop(in_kw_name)
			try:
				typed_identifier = model_attr_type(identifier)
			except (TypeError, ValueError):
				# e.g. a non-numeric pk in the URL: treat as "not found"
				# rather than letting the conversion error become a 500
				raise Http404('This page expects a %s with %s = %s, but that is not a valid %s.' % (
					Model.__name__, model_attr_name, identifier, model_attr_name))
			try:
				instance = Model.objects.get(**{model_attr_name: typed_identifier})
			except Model.DoesNotExist:
				message = 'This page expects a %s with %s = %s, but no such %s was found.' % (Model.__name__, model_attr_name, identifier, Model.__name__)
				raise Http404(message)
			kwargs[out_kw_name] = instance
			return func(request, *args, **kwargs)
		return func_with_instance
	return convert_to_instance_decorator
def instantiate_slug(Model, in_kw_name = None, out_kw_name = None, model_attr_name = 'slug', model_attr_type = str):
	"""Variant of ``instantiate`` that looks instances up by slug (a string) by default."""
	return instantiate(
		Model,
		in_kw_name=in_kw_name,
		out_kw_name=out_kw_name,
		model_attr_name=model_attr_name,
		model_attr_type=model_attr_type,
	)
| 39.097561 | 156 | 0.760449 |
from django.http import Http404
def instantiate(Model, in_kw_name = None, out_kw_name = None, model_attr_name = 'pk', model_attr_type = int):
if out_kw_name is None:
out_kw_name = Model.__name__.lower()
if in_kw_name is None:
in_kw_name = '%s_%s' % (out_kw_name, model_attr_name)
def convert_to_instance_decorator(func):
def func_with_instance(request, *args, **kwargs):
identifier = kwargs.pop(in_kw_name)
try:
instance = Model.objects.get(**{model_attr_name: model_attr_type(identifier)})
except Model.DoesNotExist:
message = 'This page expectes a %s with %s = %s, but no such %s was found.' % (Model.__name__, model_attr_name, identifier, Model.__name__)
raise Http404(message)
kwargs[out_kw_name] = instance
return func(request, *args, **kwargs)
return func_with_instance
return convert_to_instance_decorator
def instantiate_slug(Model, in_kw_name = None, out_kw_name = None, model_attr_name = 'slug', model_attr_type = str):
return instantiate(Model = Model, in_kw_name = in_kw_name, out_kw_name = out_kw_name, model_attr_name = model_attr_name, model_attr_type = model_attr_type)
| true | true |
f72e13cb96f53044636422cfec0972192051a964 | 8,201 | py | Python | babyai/arguments.py | MathijsMul/babyai-emergent-guidance | 9e37535134c89bd019affa51c7f199d1672811b6 | [
"BSD-3-Clause"
] | null | null | null | babyai/arguments.py | MathijsMul/babyai-emergent-guidance | 9e37535134c89bd019affa51c7f199d1672811b6 | [
"BSD-3-Clause"
] | null | null | null | babyai/arguments.py | MathijsMul/babyai-emergent-guidance | 9e37535134c89bd019affa51c7f199d1672811b6 | [
"BSD-3-Clause"
] | null | null | null | """
Common arguments for BabyAI training scripts
"""
import os
import argparse
import numpy as np
class ArgumentParser(argparse.ArgumentParser):
def __init__(self):
super().__init__()
# Base arguments
self.add_argument("--env", default=None,
help="name of the environment to train on (REQUIRED)")
self.add_argument("--model", default=None,
help="name of the model (default: ENV_ALGO_TIME)")
self.add_argument("--pretrained-model", default=None,
help='If you\'re using a pre-trained model and want the fine-tuned one to have a new name')
self.add_argument("--seed", type=int, default=1,
help="random seed; if 0, a random random seed will be used (default: 1)")
self.add_argument("--task-id-seed", action='store_true',
help="use the task id within a Slurm job array as the seed")
self.add_argument("--procs", type=int, default=64,
help="number of processes (default: 64)")
self.add_argument("--tb", action="store_true", default=False,
help="log into Tensorboard")
# Training arguments
self.add_argument("--log-interval", type=int, default=1,
help="number of updates between two logs (default(Mathijs): 1, used to be 10)")
self.add_argument("--save-interval", type=int, default=1000,
help="number of updates between two saves (default: 1000, 0 means no saving)")
self.add_argument("--frames", type=int, default=int(9e10),
help="number of frames of training (default: 9e10)")
self.add_argument("--patience", type=int, default=100,
help="patience for early stopping (default: 100)")
self.add_argument("--epochs", type=int, default=1000000,
help="maximum number of epochs")
self.add_argument("--frames-per-proc", type=int, default=40,
help="number of frames per process before update (default: 40)")
self.add_argument("--lr", type=float, default=1e-4,
help="learning rate (default: 1e-4)")
self.add_argument("--beta1", type=float, default=0.9,
help="beta1 for Adam (default: 0.9)")
self.add_argument("--beta2", type=float, default=0.999,
help="beta2 for Adam (default: 0.999)")
self.add_argument("--recurrence", type=int, default=20,
help="number of timesteps gradient is backpropagated (default: 20)")
self.add_argument("--optim-eps", type=float, default=1e-5,
help="Adam and RMSprop optimizer epsilon (default: 1e-5)")
self.add_argument("--optim-alpha", type=float, default=0.99,
help="RMSprop optimizer apha (default: 0.99)")
self.add_argument("--batch-size", type=int, default=1280,
help="batch size for PPO (default: 1280)")
self.add_argument("--entropy-coef", type=float, default=0.01,
help="entropy term coefficient (default: 0.01)")
self.add_argument("--dropout", type=float, default=0.5,
help="dropout probability for processed corrections (default: 0.5)")
self.add_argument("--save-each-epoch", action="store_true", default=False,
help="store model at each epoch")
self.add_argument("--class-weights", action="store_true", default=False,
help="use class weights in loss function")
self.add_argument("--compute-cic", action="store_true", default=False,
help="compute and log causal influence of communication metric after each epoch")
# Model parameters
self.add_argument("--image-dim", type=int, default=128,
help="dimensionality of the image embedding")
self.add_argument("--memory-dim", type=int, default=128,
help="dimensionality of the memory LSTM")
self.add_argument("--instr-dim", type=int, default=128,
help="dimensionality of the memory LSTM")
self.add_argument("--no-instr", action="store_true", default=False,
help="don't use instructions in the model")
self.add_argument("--instr-arch", default="gru",
help="arch to encode instructions, possible values: gru, bigru, conv, bow (default: gru)")
self.add_argument("--no-mem", action="store_true", default=False,
help="don't use memory in the model")
self.add_argument("--arch", default='expert_filmcnn',
help="image embedding architecture")
self.add_argument("--learner", action="store_true", default=False,
help="use ordinary learner")
# Corrector parameters
self.add_argument("--corrector", action="store_true", default=False,
help="use correction module")
self.add_argument("--corr-length", type=int, default=2,
help="length of correction messages (max length if --var-corr-length true)")
self.add_argument("--corr-own-vocab", action="store_true", default=False,
help="corrector uses its own vocabulary instead of instruction vocabulary")
self.add_argument("--corr-embedding-dim", type=int, default=0,
help="embedding dimensionality for corrector")
self.add_argument("--corr-vocab-size", type=int, default=3,
help="vocabulary size of corrector")
self.add_argument("--pretrained-corrector", type=str, default=None,
help="location of pretrained corrector to use and freeze")
self.add_argument("--show-corrections", action="store_true", default=False,
help="show correction messages")
self.add_argument("--corrector-frozen", action="store_true", default=False,
help="freeze pretrained corrector")
self.add_argument("--random-corrector", action="store_true", default=False,
help="randomize correction messages")
self.add_argument("--var-corr-length", action="store_true", default=False,
help="variable length correction messages with penalty for longer ones")
self.add_argument("--corr-loss-coef", type=float, default=0.1,
help="correction loss coefficient (untested default: 0.1)")
self.add_argument("--weigh-corrections", action="store_true", default=False,
help="weigh corrections depending on entropy of previous timestep")
self.add_argument("--correction-weight-loss-coef", type=float, default=1.0,
help="coefficient for correction weight loss")
# Validation parameters
self.add_argument("--val-seed", type=int, default=0,
help="seed for environment used for validation (default: 0)")
self.add_argument("--val-interval", type=int, default=1,
help="number of epochs between two validation checks (default: 1)")
self.add_argument("--val-episodes", type=int, default=500,
help="number of episodes used to evaluate the agent, and to evaluate validation accuracy")
def parse_args(self):
"""
Parse the arguments and perform some basic validation
"""
args = super().parse_args()
# Set seed for all randomness sources
if args.seed == 0:
args.seed = np.random.randint(10000)
if args.task_id_seed:
args.seed = int(os.environ['SLURM_ARRAY_TASK_ID'])
print('set seed to {}'.format(args.seed))
# TODO: more validation
return args
| 58.163121 | 119 | 0.57871 |
import os
import argparse
import numpy as np
class ArgumentParser(argparse.ArgumentParser):
def __init__(self):
super().__init__()
self.add_argument("--env", default=None,
help="name of the environment to train on (REQUIRED)")
self.add_argument("--model", default=None,
help="name of the model (default: ENV_ALGO_TIME)")
self.add_argument("--pretrained-model", default=None,
help='If you\'re using a pre-trained model and want the fine-tuned one to have a new name')
self.add_argument("--seed", type=int, default=1,
help="random seed; if 0, a random random seed will be used (default: 1)")
self.add_argument("--task-id-seed", action='store_true',
help="use the task id within a Slurm job array as the seed")
self.add_argument("--procs", type=int, default=64,
help="number of processes (default: 64)")
self.add_argument("--tb", action="store_true", default=False,
help="log into Tensorboard")
# Training arguments
self.add_argument("--log-interval", type=int, default=1,
help="number of updates between two logs (default(Mathijs): 1, used to be 10)")
self.add_argument("--save-interval", type=int, default=1000,
help="number of updates between two saves (default: 1000, 0 means no saving)")
self.add_argument("--frames", type=int, default=int(9e10),
help="number of frames of training (default: 9e10)")
self.add_argument("--patience", type=int, default=100,
help="patience for early stopping (default: 100)")
self.add_argument("--epochs", type=int, default=1000000,
help="maximum number of epochs")
self.add_argument("--frames-per-proc", type=int, default=40,
help="number of frames per process before update (default: 40)")
self.add_argument("--lr", type=float, default=1e-4,
help="learning rate (default: 1e-4)")
self.add_argument("--beta1", type=float, default=0.9,
help="beta1 for Adam (default: 0.9)")
self.add_argument("--beta2", type=float, default=0.999,
help="beta2 for Adam (default: 0.999)")
self.add_argument("--recurrence", type=int, default=20,
help="number of timesteps gradient is backpropagated (default: 20)")
self.add_argument("--optim-eps", type=float, default=1e-5,
help="Adam and RMSprop optimizer epsilon (default: 1e-5)")
self.add_argument("--optim-alpha", type=float, default=0.99,
help="RMSprop optimizer apha (default: 0.99)")
self.add_argument("--batch-size", type=int, default=1280,
help="batch size for PPO (default: 1280)")
self.add_argument("--entropy-coef", type=float, default=0.01,
help="entropy term coefficient (default: 0.01)")
self.add_argument("--dropout", type=float, default=0.5,
help="dropout probability for processed corrections (default: 0.5)")
self.add_argument("--save-each-epoch", action="store_true", default=False,
help="store model at each epoch")
self.add_argument("--class-weights", action="store_true", default=False,
help="use class weights in loss function")
self.add_argument("--compute-cic", action="store_true", default=False,
help="compute and log causal influence of communication metric after each epoch")
# Model parameters
self.add_argument("--image-dim", type=int, default=128,
help="dimensionality of the image embedding")
self.add_argument("--memory-dim", type=int, default=128,
help="dimensionality of the memory LSTM")
self.add_argument("--instr-dim", type=int, default=128,
help="dimensionality of the memory LSTM")
self.add_argument("--no-instr", action="store_true", default=False,
help="don't use instructions in the model")
self.add_argument("--instr-arch", default="gru",
help="arch to encode instructions, possible values: gru, bigru, conv, bow (default: gru)")
self.add_argument("--no-mem", action="store_true", default=False,
help="don't use memory in the model")
self.add_argument("--arch", default='expert_filmcnn',
help="image embedding architecture")
self.add_argument("--learner", action="store_true", default=False,
help="use ordinary learner")
# Corrector parameters
self.add_argument("--corrector", action="store_true", default=False,
help="use correction module")
self.add_argument("--corr-length", type=int, default=2,
help="length of correction messages (max length if --var-corr-length true)")
self.add_argument("--corr-own-vocab", action="store_true", default=False,
help="corrector uses its own vocabulary instead of instruction vocabulary")
self.add_argument("--corr-embedding-dim", type=int, default=0,
help="embedding dimensionality for corrector")
self.add_argument("--corr-vocab-size", type=int, default=3,
help="vocabulary size of corrector")
self.add_argument("--pretrained-corrector", type=str, default=None,
help="location of pretrained corrector to use and freeze")
self.add_argument("--show-corrections", action="store_true", default=False,
help="show correction messages")
self.add_argument("--corrector-frozen", action="store_true", default=False,
help="freeze pretrained corrector")
self.add_argument("--random-corrector", action="store_true", default=False,
help="randomize correction messages")
self.add_argument("--var-corr-length", action="store_true", default=False,
help="variable length correction messages with penalty for longer ones")
self.add_argument("--corr-loss-coef", type=float, default=0.1,
help="correction loss coefficient (untested default: 0.1)")
self.add_argument("--weigh-corrections", action="store_true", default=False,
help="weigh corrections depending on entropy of previous timestep")
self.add_argument("--correction-weight-loss-coef", type=float, default=1.0,
help="coefficient for correction weight loss")
# Validation parameters
self.add_argument("--val-seed", type=int, default=0,
help="seed for environment used for validation (default: 0)")
self.add_argument("--val-interval", type=int, default=1,
help="number of epochs between two validation checks (default: 1)")
self.add_argument("--val-episodes", type=int, default=500,
help="number of episodes used to evaluate the agent, and to evaluate validation accuracy")
def parse_args(self):
args = super().parse_args()
# Set seed for all randomness sources
if args.seed == 0:
args.seed = np.random.randint(10000)
if args.task_id_seed:
args.seed = int(os.environ['SLURM_ARRAY_TASK_ID'])
print('set seed to {}'.format(args.seed))
# TODO: more validation
return args
| true | true |
f72e143947c6de4070ab9a4373d8b1dcda8d2087 | 4,490 | py | Python | code/processing/growth_rates/2021-08-12_r1_DoubleKO_acetate/processing.py | cremerlab/useless_expression | a6020674f0ae73b4cc6173de60a0ea93016ee562 | [
"MIT"
] | null | null | null | code/processing/growth_rates/2021-08-12_r1_DoubleKO_acetate/processing.py | cremerlab/useless_expression | a6020674f0ae73b4cc6173de60a0ea93016ee562 | [
"MIT"
] | null | null | null | code/processing/growth_rates/2021-08-12_r1_DoubleKO_acetate/processing.py | cremerlab/useless_expression | a6020674f0ae73b4cc6173de60a0ea93016ee562 | [
"MIT"
] | null | null | null | #%%
import numpy as np
import pandas as pd
import futileprot.io
import futileprot.viz
import altair as alt
import altair_saver
colors, palette = futileprot.viz.altair_style()
# Define experiment parameters
DATE = '2021-08-12'
STRAINS = 'DoubleKO'
MEDIUM = 'acetate'
RUN_NO = 1
ROOT = '../../../..'
SKIPROWS = 36
OD_BOUNDS = [0.03, 0.18]
# Add the well identifiers
MAP = {'GC073': ['C3', 'D3', 'E3'],
'GC069': ['C4', 'D4', 'E4'],
'GC075': ['C5', 'D5', 'E5'],
'GC070': ['C6', 'D6', 'E6'],
'GC065': ['C7', 'D7', 'E7'],
'GC098': ['C8', 'D8', 'E8'],
'GC074': ['C9', 'D9', 'E9'],
'GC097': ['C10', 'D10' ,'E10'],
'GC084': ['F3', 'F4', 'F5'],
'GC106': ['F6', 'F7', 'F8'],
'GC100': ['F9', 'F10', 'F11']}
# Generate a list of all valid wells
wells = [f'{letter}{number}' for letter in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] for number in np.arange(1,13)]
# Load the data
data = pd.read_csv(f'{ROOT}/data/growth_rates/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}/{DATE}_r{RUN_NO}.csv',
skiprows=SKIPROWS)
# Melt and drop unnecessary stuff
melted = data.melt(id_vars=['Time'], var_name='well', value_name='od_600nm')
melted = melted.loc[melted['well'].isin(wells)]
melted.dropna(inplace=True)
# Add strain identifier and replicates
melted['strain'] = 'blank'
melted['replicate'] = 0
for strain, wells in MAP.items():
for idx, well in enumerate(wells):
melted.loc[melted['well']==well, 'strain'] = strain
melted.loc[melted['well']==well, 'replicate'] = idx + 1
# Add information regarding date and growth medium
melted['growth_medium'] = MEDIUM
melted['date'] = DATE
melted['run_number'] = RUN_NO
# Convert time to elapsed time
melted['time_sec'] = pd.to_timedelta(melted['Time'].values)
melted['time_sec'] = melted['time_sec'].dt.total_seconds()
melted['elapsed_time_hr'] = (melted['time_sec'] - melted['time_sec'].min())/3600
# Drop unnecessary Time columns
melted.drop(columns=['Time', 'time_sec'], inplace=True)
# Reformat blank value as average eentry per time
measurement = []
for g, d in melted.groupby(['elapsed_time_hr']):
d = d.copy()
avg_blank = d[d['strain']=='blank']
meas = d[d['strain']!='blank']
meas['avg_blank_value'] = avg_blank['od_600nm'].mean()
measurement.append(meas)
measurement = pd.concat(measurement, sort=False)
measurement.rename(columns={'strain':'identifier'}, inplace=True)
# Add shorthand strain information and class identifier
strain_shorthand, _, strain_class = futileprot.io.standardize_strains(measurement['identifier'].values)
measurement['strain'] = strain_shorthand
measurement['class'] = strain_class
# measurement = pd.concat(measurements, sort=False)
# Save to disk
measurement.to_csv(f'./output/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}_measurements.csv', index=False)
#%%
# Perform the blank subtraction
measurement['od_600nm_subtracted'] = measurement['od_600nm'].values - measurement['avg_blank_value'].values
# Given truncation, recalculated elapsed time and save truncated data
trunc = []
for g, d in measurement.groupby(['strain', 'replicate']):
d = d.copy()
d = d[(d['od_600nm_subtracted'] >= OD_BOUNDS[0]) &
(d['od_600nm_subtracted'] <= OD_BOUNDS[1])]
d['elapsed_time_hr'] -= d['elapsed_time_hr'].min()
trunc.append(d)
trunc = pd.concat(trunc, sort=False)
trunc = trunc[['strain', 'elapsed_time_hr',
'od_600nm_subtracted', 'replicate', 'growth_medium',
'date', 'run_number', 'identifier', 'class']]
trunc.rename(columns={'od_600nm_subtracted':'od_600nm',
'replicate':'technical_replicate'}, inplace=True)
trunc.to_csv(f'./output/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}_exponential_phase.csv', index=False)
# %%
# Generate a figure of all of the raw traces
raw_traces = alt.Chart(
data=measurement,
width=400,
height=200
).mark_line(
point=True,
opacity=0.75
).encode(
x=alt.X('elapsed_time_hr:Q', title='elapsed time [hr]'),
y=alt.Y('od_600nm:Q', title='optical density [a.u.]'),
color=alt.Color('replicate:N', title='technical replicate')
).facet(
row='strain'
)
altair_saver.save(raw_traces, f'output/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}_raw_traces.png',
scale_factor=2)
# %%
| 35.634921 | 114 | 0.625167 |
import numpy as np
import pandas as pd
import futileprot.io
import futileprot.viz
import altair as alt
import altair_saver
colors, palette = futileprot.viz.altair_style()
DATE = '2021-08-12'
STRAINS = 'DoubleKO'
MEDIUM = 'acetate'
RUN_NO = 1
ROOT = '../../../..'
SKIPROWS = 36
OD_BOUNDS = [0.03, 0.18]
MAP = {'GC073': ['C3', 'D3', 'E3'],
'GC069': ['C4', 'D4', 'E4'],
'GC075': ['C5', 'D5', 'E5'],
'GC070': ['C6', 'D6', 'E6'],
'GC065': ['C7', 'D7', 'E7'],
'GC098': ['C8', 'D8', 'E8'],
'GC074': ['C9', 'D9', 'E9'],
'GC097': ['C10', 'D10' ,'E10'],
'GC084': ['F3', 'F4', 'F5'],
'GC106': ['F6', 'F7', 'F8'],
'GC100': ['F9', 'F10', 'F11']}
wells = [f'{letter}{number}' for letter in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] for number in np.arange(1,13)]
data = pd.read_csv(f'{ROOT}/data/growth_rates/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}/{DATE}_r{RUN_NO}.csv',
skiprows=SKIPROWS)
melted = data.melt(id_vars=['Time'], var_name='well', value_name='od_600nm')
melted = melted.loc[melted['well'].isin(wells)]
melted.dropna(inplace=True)
melted['strain'] = 'blank'
melted['replicate'] = 0
for strain, wells in MAP.items():
for idx, well in enumerate(wells):
melted.loc[melted['well']==well, 'strain'] = strain
melted.loc[melted['well']==well, 'replicate'] = idx + 1
melted['growth_medium'] = MEDIUM
melted['date'] = DATE
melted['run_number'] = RUN_NO
melted['time_sec'] = pd.to_timedelta(melted['Time'].values)
melted['time_sec'] = melted['time_sec'].dt.total_seconds()
melted['elapsed_time_hr'] = (melted['time_sec'] - melted['time_sec'].min())/3600
melted.drop(columns=['Time', 'time_sec'], inplace=True)
measurement = []
for g, d in melted.groupby(['elapsed_time_hr']):
d = d.copy()
avg_blank = d[d['strain']=='blank']
meas = d[d['strain']!='blank']
meas['avg_blank_value'] = avg_blank['od_600nm'].mean()
measurement.append(meas)
measurement = pd.concat(measurement, sort=False)
measurement.rename(columns={'strain':'identifier'}, inplace=True)
strain_shorthand, _, strain_class = futileprot.io.standardize_strains(measurement['identifier'].values)
measurement['strain'] = strain_shorthand
measurement['class'] = strain_class
measurement.to_csv(f'./output/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}_measurements.csv', index=False)
measurement['od_600nm_subtracted'] = measurement['od_600nm'].values - measurement['avg_blank_value'].values
trunc = []
for g, d in measurement.groupby(['strain', 'replicate']):
d = d.copy()
d = d[(d['od_600nm_subtracted'] >= OD_BOUNDS[0]) &
(d['od_600nm_subtracted'] <= OD_BOUNDS[1])]
d['elapsed_time_hr'] -= d['elapsed_time_hr'].min()
trunc.append(d)
trunc = pd.concat(trunc, sort=False)
trunc = trunc[['strain', 'elapsed_time_hr',
'od_600nm_subtracted', 'replicate', 'growth_medium',
'date', 'run_number', 'identifier', 'class']]
trunc.rename(columns={'od_600nm_subtracted':'od_600nm',
'replicate':'technical_replicate'}, inplace=True)
trunc.to_csv(f'./output/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}_exponential_phase.csv', index=False)
raw_traces = alt.Chart(
data=measurement,
width=400,
height=200
).mark_line(
point=True,
opacity=0.75
).encode(
x=alt.X('elapsed_time_hr:Q', title='elapsed time [hr]'),
y=alt.Y('od_600nm:Q', title='optical density [a.u.]'),
color=alt.Color('replicate:N', title='technical replicate')
).facet(
row='strain'
)
altair_saver.save(raw_traces, f'output/{DATE}_r{RUN_NO}_{STRAINS}_{MEDIUM}_raw_traces.png',
scale_factor=2)
| true | true |
f72e15605c30b10b83c14db6cee2837591f28a1f | 21,806 | py | Python | pajbot/models/stream.py | jardg/pajbot | e1fca604fe25e12dd4761cb0bfc15c140e7cf012 | [
"MIT"
] | null | null | null | pajbot/models/stream.py | jardg/pajbot | e1fca604fe25e12dd4761cb0bfc15c140e7cf012 | [
"MIT"
] | null | null | null | pajbot/models/stream.py | jardg/pajbot | e1fca604fe25e12dd4761cb0bfc15c140e7cf012 | [
"MIT"
] | null | null | null | import argparse
import collections
import datetime
import json
import logging
import math
import urllib
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.dialects.mysql import BIGINT
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import reconstructor
from sqlalchemy.orm import relationship
from pajbot.managers.db import Base
from pajbot.managers.db import DBManager
from pajbot.managers.handler import HandlerManager
from pajbot.managers.redis import RedisManager
log = logging.getLogger('pajbot')
def parse_twitch_datetime(datetime_str):
    """Parse a Twitch API timestamp string (e.g. '2018-01-01T12:34:56Z') into a naive datetime."""
    twitch_format = '%Y-%m-%dT%H:%M:%SZ'
    return datetime.datetime.strptime(datetime_str, twitch_format)
class Stream(Base):
    """ORM model for one broadcast session on the streamer's channel.

    A session is opened when the channel is first seen online and closed
    (``ended``/``stream_end`` set) once it has been offline long enough.
    """

    __tablename__ = 'tb_stream'

    id = Column(Integer, primary_key=True)
    title = Column(String(256), nullable=False)  # channel title captured when the stream was created
    stream_start = Column(DateTime, nullable=False)
    stream_end = Column(DateTime, nullable=True)  # NULL while the stream is still live
    ended = Column(Boolean, nullable=False, default=False)

    # One stream consists of one or more chunks (one per Twitch broadcast id);
    # a drop + reconnect produces a new chunk under the same stream.
    stream_chunks = relationship('StreamChunk',
            backref='stream',
            cascade='save-update, merge, expunge',
            lazy='joined')

    def __init__(self, created_at, **options):
        # created_at: Twitch API timestamp string, e.g. '2018-01-01T12:34:56Z'
        self.id = None
        self.title = options.get('title', 'NO TITLE')
        self.stream_start = parse_twitch_datetime(created_at)
        self.stream_end = None
        self.ended = False

    @property
    def uptime(self):
        """
        Returns a TimeDelta for how long the stream was online, or is online.
        """
        # NOTE(review): stream_start is parsed from a Twitch timestamp (UTC) while
        # now() is local time — this delta presumably assumes the host runs in
        # UTC; confirm before relying on it across timezones.
        if self.ended is False:
            return datetime.datetime.now() - self.stream_start
        else:
            return self.stream_end - self.stream_start
class StreamChunk(Base):
    """ORM model for a single Twitch broadcast (VOD) belonging to a Stream."""

    __tablename__ = 'tb_stream_chunk'

    id = Column(Integer, primary_key=True)
    stream_id = Column(Integer, ForeignKey('tb_stream.id'), nullable=False)
    broadcast_id = Column(BIGINT(unsigned=True), nullable=False)  # Twitch broadcast id for this chunk
    video_url = Column(String(128), nullable=True)  # VOD URL; filled in later once the VOD exists
    video_preview_image_url = Column(String(256), nullable=True)
    chunk_start = Column(DateTime, nullable=False)
    chunk_end = Column(DateTime, nullable=True)  # NULL while the chunk is ongoing

    # Highlights created while this chunk was live.
    highlights = relationship('StreamChunkHighlight',
            backref='stream_chunk',
            cascade='save-update, merge, expunge',
            lazy='joined')

    def __init__(self, stream, broadcast_id, created_at, **options):
        # created_at: Twitch API timestamp string for the broadcast start.
        self.id = None
        self.stream_id = stream.id
        self.broadcast_id = broadcast_id
        self.video_url = None
        self.video_preview_image_url = None
        self.chunk_start = parse_twitch_datetime(created_at)
        self.chunk_end = None

        self.stream = stream
        self.highlights = []
class StreamChunkHighlight(Base):
    """ORM model for a user-created highlight inside a stream chunk.

    The highlight is stored as a timestamp (``created_at``) plus an offset in
    seconds; ``video_url`` is a derived, non-persisted attribute pointing into
    the chunk's VOD at the highlighted moment.
    """

    __tablename__ = 'tb_stream_chunk_highlight'

    id = Column(Integer, primary_key=True)
    stream_chunk_id = Column(Integer, ForeignKey('tb_stream_chunk.id'), nullable=False)
    created_by = Column(Integer, nullable=True)  # user id of the creator
    last_edited_by = Column(Integer, nullable=True)  # user id of the last editor
    created_at = Column(DateTime, nullable=False)
    highlight_offset = Column(Integer, nullable=False)  # seconds to rewind from created_at
    description = Column(String(128), nullable=True)
    override_link = Column(String(256), nullable=True)  # manual URL that takes precedence over the computed one
    thumbnail = Column(Boolean, nullable=True, default=None)
    # Derived VOD link (not a column); recomputed by refresh_video_url().
    video_url = None

    created_by_user = relationship('User',
            lazy='noload',
            primaryjoin='User.id==StreamChunkHighlight.created_by',
            foreign_keys='StreamChunkHighlight.created_by',
            cascade='save-update, merge, expunge',
            uselist=False)

    last_edited_by_user = relationship('User',
            lazy='noload',
            primaryjoin='User.id==StreamChunkHighlight.last_edited_by',
            foreign_keys='StreamChunkHighlight.last_edited_by',
            cascade='save-update, merge, expunge',
            uselist=False)

    DEFAULT_OFFSET = 0

    def __init__(self, stream_chunk, **options):
        self.stream_chunk_id = stream_chunk.id
        self.created_at = datetime.datetime.now()
        self.highlight_offset = options.get('offset', self.DEFAULT_OFFSET)
        self.description = options.get('description', None)
        self.override_link = options.get('override_link', None)
        self.thumbnail = None
        self.created_by = options.get('created_by', None)
        self.last_edited_by = options.get('last_edited_by', None)

        self.stream_chunk = stream_chunk

        self.refresh_video_url()

        stream_chunk.highlights.append(self)

    @reconstructor
    def on_load(self):
        # SQLAlchemy calls this after loading from the DB; recompute the
        # derived video_url since it is not persisted.
        self.refresh_video_url()

    @hybrid_property
    def created_at_with_offset(self):
        # NOTE(review): at the Python level `datetime - int` raises TypeError;
        # presumably this hybrid is only ever evaluated as a SQL expression —
        # confirm before calling it on an instance.
        return self.created_at - self.highlight_offset

    def refresh_video_url(self):
        """Recompute ``video_url`` from the chunk's VOD URL and the highlight time.

        Prefers the manual ``override_link``; yields None when the chunk has
        no VOD yet; otherwise appends a `?t=XXhYYmZZs`-style timestamp.
        """
        if self.override_link is not None:
            self.video_url = self.override_link
        elif self.stream_chunk.video_url is None:
            self.video_url = None
        else:
            # Seconds into the VOD, adjusted backwards by the highlight offset.
            date_diff = self.created_at - self.stream_chunk.chunk_start
            total_seconds = date_diff.total_seconds()
            total_seconds -= abs(self.highlight_offset)
            # Ordered h/m/s parts for the Twitch ?t= timestamp.
            timedata = collections.OrderedDict()
            timedata['h'] = math.trunc(total_seconds / 3600)
            timedata['m'] = math.trunc(total_seconds / 60 % 60)
            timedata['s'] = math.trunc(total_seconds % 60)
            # pretimedata[k] > 0 forces a unit to be emitted (as 00x) whenever a
            # larger unit was emitted, so e.g. 1h 0m 5s still prints the "00m".
            pretimedata = {
                'h': 0,
                'm': timedata['h'],
                's': timedata['h'] + timedata['m']
                }
            # XXX: Is it an issue if the format is like this: ?t=03m
            # i.e. a time format with minutes but _not_ seconds? try it out
            timestamp = ''.join(['{value:02d}{key}'.format(value=value, key=key) for key, value in timedata.items() if value > 0 or pretimedata[key] > 0])
            self.video_url = '{stream_chunk.video_url}?t={timestamp}'.format(stream_chunk=self.stream_chunk, timestamp=timestamp)
class StreamManager:
NUM_OFFLINES_REQUIRED = 10
STATUS_CHECK_INTERVAL = 20 # seconds
VIDEO_URL_CHECK_INTERVAL = 60 * 5 # seconds
def fetch_video_url_stage1(self):
if self.online is False:
return
try:
data = self.bot.twitchapi.get(['channels', self.bot.streamer, 'videos'], parameters={'broadcasts': 'true'}, base='https://api.twitch.tv/kraken/')
self.bot.mainthread_queue.add(self.refresh_video_url_stage2,
args=[data])
except urllib.error.HTTPError as e:
raw_data = e.read().decode('utf-8')
log.exception('OMGScoots')
log.info(raw_data)
except:
log.exception('Uncaught exception in fetch_video_url')
def fetch_video_url_stage2(self, data):
stream_chunk = self.current_stream_chunk if self.current_stream_chunk.video_url is None else None
try:
for video in data['videos']:
if video['broadcast_type'] == 'archive':
recorded_at = parse_twitch_datetime(video['recorded_at'])
if stream_chunk is not None:
time_diff = stream_chunk.chunk_start - recorded_at
if abs(time_diff.total_seconds()) < 60 * 5:
# we found the relevant video!
return video['url'], video['preview'], video['recorded_at']
else:
if video['status'] == 'recording':
return video['url'], video['preview'], video['recorded_at']
except urllib.error.HTTPError as e:
raw_data = e.read().decode('utf-8')
log.exception('OMGScoots')
log.info(raw_data)
except:
log.exception('Uncaught exception in fetch_video_url')
return None, None, None
    def __init__(self, bot):
        """Set up stream state and schedule the periodic status/VOD polls."""
        self.bot = bot

        self.current_stream_chunk = None  # should this even exist?

        # Offline debouncing state: count of consecutive offline polls and
        # the timestamp of the first one (used as stream_end when we flip).
        self.num_offlines = 0
        self.first_offline = None

        # Last-known channel status, refreshed by refresh_stream_status_stage2.
        self.num_viewers = 0
        self.game = 'Loading...'
        self.title = 'Loading...'

        self.bot.execute_every(self.STATUS_CHECK_INTERVAL,
                               self.bot.action_queue.add,
                               (self.refresh_stream_status_stage1, ))

        self.bot.execute_every(self.VIDEO_URL_CHECK_INTERVAL,
                               self.bot.action_queue.add,
                               (self.refresh_video_url_stage1, ))

        """
        This will load the latest stream so we can post an accurate
        "time since last online" figure.
        """

        with DBManager.create_session_scope(expire_on_commit=False) as db_session:
            self.current_stream = db_session.query(Stream).filter_by(ended=False).order_by(Stream.stream_start.desc()).first()
            self.last_stream = db_session.query(Stream).filter_by(ended=True).order_by(Stream.stream_end.desc()).first()
            if self.current_stream:
                self.current_stream_chunk = db_session.query(StreamChunk).filter_by(stream_id=self.current_stream.id).order_by(StreamChunk.chunk_start.desc()).first()
                log.info('Set current stream chunk here to {0}'.format(self.current_stream_chunk))
            # Detach loaded rows so they stay usable outside the session.
            db_session.expunge_all()
def get_viewer_data(self, redis=None):
if self.offline:
return False
if not redis:
redis = RedisManager.get()
data = redis.hget(
'{streamer}:viewer_data'.format(streamer=self.bot.streamer),
self.current_stream.id)
if data is None:
data = {}
else:
data = json.loads(data)
return data
def update_chatters(self, chatters, minutes):
"""
chatters is a list of usernames
"""
if self.offline:
return False
redis = RedisManager.get()
data = self.get_viewer_data(redis=redis)
for chatter in chatters:
if chatter in data:
data[chatter] += minutes
else:
data[chatter] = minutes
redis.hset(
'{streamer}:viewer_data'.format(streamer=self.bot.streamer),
self.current_stream.id,
json.dumps(data, separators=(',', ':')))
    @property
    def online(self):
        """True while a stream is currently being tracked as live."""
        return self.current_stream is not None
    @property
    def offline(self):
        """True while no stream is being tracked (channel considered offline)."""
        return self.current_stream is None
    def commit(self):
        # Stub: nothing is actually committed here; it only logs that it was
        # called. Presumably kept to satisfy a save-hook interface — verify.
        log.info('commiting something?')
    def create_stream_chunk(self, status):
        """Open a new StreamChunk for the broadcast id in `status`.

        Closes the previous chunk (if any), reuses an existing DB row for the
        same broadcast id, and updates self.current_stream_chunk either way.
        """
        if self.current_stream_chunk is not None:
            # There's already a stream chunk started!
            self.current_stream_chunk.chunk_end = datetime.datetime.now()
            DBManager.session_add_expunge(self.current_stream_chunk)

        stream_chunk = None

        with DBManager.create_session_scope(expire_on_commit=False) as db_session:
            stream_chunk = db_session.query(StreamChunk).filter_by(broadcast_id=status['broadcast_id']).one_or_none()
            if stream_chunk is None:
                log.info('Creating stream chunk, from create_stream_chunk')
                stream_chunk = StreamChunk(self.current_stream, status['broadcast_id'], status['created_at'])
                self.current_stream_chunk = stream_chunk
                db_session.add(stream_chunk)
                db_session.commit()
            else:
                log.info('We already have a stream chunk!')
                self.current_stream_chunk = stream_chunk
                # Reset to None so the append below only runs for new chunks.
                stream_chunk = None
            db_session.expunge_all()

        if stream_chunk:
            # Link the freshly created chunk to the in-memory stream object.
            self.current_stream.stream_chunks.append(stream_chunk)
    def create_stream(self, status):
        """Create (or re-attach to) a Stream for the status dict's broadcast.

        Reuses an existing chunk's stream when the broadcast id is known,
        otherwise resumes the latest un-ended stream or creates a brand new
        one. Fires on_stream_start only for genuinely new streams.
        """
        log.info('Attempting to create a stream!')
        with DBManager.create_session_scope(expire_on_commit=False) as db_session:
            stream_chunk = db_session.query(StreamChunk).filter_by(broadcast_id=status['broadcast_id']).one_or_none()
            new_stream = False
            if stream_chunk is not None:
                # Known broadcast: re-attach to its stream.
                stream = stream_chunk.stream
            else:
                log.info('checking if there is an active stream already')
                stream = db_session.query(Stream).filter_by(ended=False).order_by(Stream.stream_start.desc()).first()
                new_stream = stream is None

                if new_stream:
                    log.info('No active stream, create new!')
                    stream = Stream(status['created_at'],
                                    title=status['title'])
                    db_session.add(stream)
                    db_session.commit()
                    log.info('Successfully added stream!')
                stream_chunk = StreamChunk(stream, status['broadcast_id'], status['created_at'])
                db_session.add(stream_chunk)
                db_session.commit()
                stream.stream_chunks.append(stream_chunk)
                log.info('Created stream chunk')

            self.current_stream = stream
            self.current_stream_chunk = stream_chunk
            # Detach loaded rows so they stay usable outside the session.
            db_session.expunge_all()

        if new_stream:
            HandlerManager.trigger('on_stream_start', stop_on_false=False)

        log.info('Successfully created a stream')
    def go_offline(self):
        """Close the current stream/chunk and switch to the offline state.

        Uses first_offline (the time of the first failed online poll) as the
        end timestamp, then fires the on_stream_stop handlers.
        """
        with DBManager.create_session_scope(expire_on_commit=False) as db_session:
            self.current_stream.ended = True
            self.current_stream.stream_end = self.first_offline
            self.current_stream_chunk.chunk_end = self.first_offline

            db_session.add(self.current_stream)
            db_session.add(self.current_stream_chunk)

            db_session.commit()

            db_session.expunge_all()

        # The just-ended stream becomes "last"; clear the live references.
        self.last_stream = self.current_stream
        self.current_stream = None
        self.current_stream_chunk = None

        HandlerManager.trigger('on_stream_stop', stop_on_false=False)
def refresh_stream_status_stage1(self):
    """Poll the Twitch API for the streamer's status (worker-thread half).

    Runs off the main thread; on success it queues stage 2 onto the main
    thread so all state mutation happens there.
    """
    try:
        status = self.bot.twitchapi.get_status(self.bot.streamer)
        if status['error'] is True:
            # All API errors are posted live anyway, so no extra logging here.
            return
        self.bot.mainthread_queue.add(self.refresh_stream_status_stage2,
                                      args=[status])
    except Exception:
        # Was a bare `except:` which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        log.exception('Uncaught exception while refreshing stream status (Stage 1)')
def refresh_stream_status_stage2(self, status):
    """Apply a freshly fetched Twitch status dict (main-thread half).

    Mirrors the status into redis, updates cached viewer/game/title fields,
    and drives the online/offline state machine: a stream is only declared
    offline after NUM_OFFLINES_REQUIRED consecutive offline polls.

    :param status: dict with 'online', 'viewers', 'game', 'title' and
                   'broadcast_id' keys.
    """
    try:
        redis = RedisManager.get()
        redis.hmset('stream_data', {
            '{streamer}:online'.format(streamer=self.bot.streamer): status['online'],
            '{streamer}:viewers'.format(streamer=self.bot.streamer): status['viewers'],
            '{streamer}:game'.format(streamer=self.bot.streamer): status['game'],
        })
        self.num_viewers = status['viewers']
        self.game = status['game']
        self.title = status['title']
        if status['online']:
            if self.current_stream is None:
                self.create_stream(status)
            if self.current_stream_chunk is None:
                self.create_stream_chunk(status)
            if self.current_stream_chunk.broadcast_id != status['broadcast_id']:
                log.debug('Detected a new chunk!')
                self.create_stream_chunk(status)
            # Any online poll resets the offline debounce counter.
            self.num_offlines = 0
            self.first_offline = None
        else:
            if self.online is True:
                log.info('Offline. {0}'.format(self.num_offlines))
                if self.first_offline is None:
                    self.first_offline = datetime.datetime.now()
                # Use the class constant instead of the previously hard-coded 10
                # so the debounce threshold is defined in exactly one place.
                if self.num_offlines >= self.NUM_OFFLINES_REQUIRED:
                    log.info('Switching to offline state!')
                    self.go_offline()
                self.num_offlines += 1
    except Exception:
        # Narrowed from a bare `except:` (which also caught SystemExit etc.).
        log.exception('Uncaught exception while refreshing stream status (Stage 2)')
def refresh_video_url_stage1(self):
    """Periodic entry point: delegate to fetch_video_url_stage1 (worker thread)."""
    self.fetch_video_url_stage1()
def refresh_video_url_stage2(self, data):
    """Attach the broadcast's VOD URL to the current stream chunk (main thread).

    If the chunk has no video URL yet, store it; if the API now reports a
    *different* VOD for the same broadcast id, close the current chunk and
    open a new one bound to the new VOD.

    :param data: raw Twitch 'videos' API payload, parsed by
                 fetch_video_url_stage2.
    """
    if self.online is False:
        return
    if self.current_stream_chunk is None or self.current_stream is None:
        return
    log.info('Attempting to fetch video url for broadcast {0}'.format(self.current_stream_chunk.broadcast_id))
    # NOTE: the original also computed an unused local here
    # (stream_chunk = self.current_stream_chunk if ... else None); removed as dead code.
    video_url, video_preview_image_url, video_recorded_at = self.fetch_video_url_stage2(data)
    if video_url is not None:
        log.info('Successfully fetched a video url: {0}'.format(video_url))
        # current_stream_chunk is guaranteed non-None by the guard above.
        if self.current_stream_chunk.video_url is None:
            # First time we see a VOD for this chunk: just store it.
            with DBManager.create_session_scope(expire_on_commit=False) as db_session:
                self.current_stream_chunk.video_url = video_url
                self.current_stream_chunk.video_preview_image_url = video_preview_image_url
                db_session.add(self.current_stream_chunk)
                db_session.commit()
                db_session.expunge_all()
            log.info('Successfully commited video url data.')
        elif self.current_stream_chunk.video_url != video_url:
            # The VOD changed: end the current stream chunk and start a new
            # one that points at the new video.
            self.current_stream_chunk.chunk_end = datetime.datetime.now()
            DBManager.session_add_expunge(self.current_stream_chunk)
            with DBManager.create_session_scope(expire_on_commit=False) as db_session:
                stream_chunk = StreamChunk(self.current_stream, self.current_stream_chunk.broadcast_id, video_recorded_at)
                self.current_stream_chunk = stream_chunk
                self.current_stream_chunk.video_url = video_url
                self.current_stream_chunk.video_preview_image_url = video_preview_image_url
                db_session.add(self.current_stream_chunk)
                db_session.commit()
                db_session.expunge_all()
            log.info('Successfully commited video url data in a new chunk.')
    else:
        # Fixed garbled log message (was: 'Not video for broadcast found').
        log.info('No video for broadcast found')
def create_highlight(self, **options):
    """
    Create a highlight on the current stream chunk.

    Returns an error message (string) if something went wrong, otherwise
    returns True.

    :param options: forwarded to StreamChunkHighlight (offset, description,
                    override_link, created_by, ...).
    """
    if self.online is False or self.current_stream_chunk is None:
        return 'The stream is not online'
    if self.current_stream_chunk.video_url is None:
        return 'No video URL fetched for this chunk yet, try in 5 minutes'
    try:
        highlight = StreamChunkHighlight(self.current_stream_chunk, **options)
        with DBManager.create_session_scope(expire_on_commit=False) as db_session:
            db_session.add(highlight)
            db_session.add(self.current_stream_chunk)
    except Exception:
        # Narrowed from a bare `except:` which would also swallow
        # SystemExit/KeyboardInterrupt.
        log.exception('uncaught exception in create_highlight')
        return 'Unknown reason, ask pajlada'
    return True
def parse_highlight_arguments(self, message):
    """Parse highlight options (--offset, --id, --link/--url/--overridelink,
    --no-link) out of a chat message.

    :param message: raw chat message text.
    :return: (options, response) where options is a dict of explicitly-set
             flags (with --no-link normalized to override_link=None) and
             response is the remaining free text; (False, False) on a parse
             error.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--offset', dest='offset', type=int)
    parser.add_argument('--id', dest='id', type=int)
    parser.add_argument('--link', dest='override_link')
    parser.add_argument('--url', dest='override_link')
    parser.add_argument('--overridelink', dest='override_link')
    parser.add_argument('--no-link', dest='override_link', action='store_false')
    try:
        args, unknown = parser.parse_known_args(message.split())
    except SystemExit:
        # argparse calls sys.exit() on invalid arguments.
        return False, False
    except Exception:
        # Narrowed from a bare `except:`.
        log.exception('Unhandled exception in add_highlight')
        return False, False
    # Strip options of any values that are set as None (i.e. flags not given).
    options = {k: v for k, v in vars(args).items() if v is not None}
    response = ' '.join(unknown)
    if 'override_link' in options and options['override_link'] is False:
        # --no-link was passed: represent "explicitly clear the link" as None.
        options['override_link'] = None
    return options, response
def update_highlight(self, id, **options):
    """
    Update columns of an existing highlight.

    Returns True if a highlight was modified, otherwise return False.

    :param id: primary key of the StreamChunkHighlight row.
    :param options: column values to set; the public 'offset' option is
                    mapped to the 'highlight_offset' column.
    """
    if 'offset' in options:
        # Public option name -> database column name.
        options['highlight_offset'] = options.pop('offset')
    num_rows = 0
    try:
        with DBManager.create_session_scope() as db_session:
            num_rows = db_session.query(StreamChunkHighlight).filter_by(id=id).update(options)
    except Exception:
        # Was a bare `except:` with the placeholder message 'AAAAAAAAAA FIXME'.
        log.exception('Uncaught exception in update_highlight')
    return (num_rows == 1)
def remove_highlight(self, id):
    """Delete the highlight with the given primary key.

    :return: True if exactly one highlight row was removed, otherwise False.
    """
    with DBManager.create_session_scope() as db_session:
        deleted_count = db_session.query(StreamChunkHighlight).filter(StreamChunkHighlight.id == id).delete()
    return deleted_count == 1
def get_stream_value(self, key, extra={}):
    """Look up *key* as an attribute on the stream manager itself.

    *extra* is accepted for interface parity with the other get_*_value
    helpers but is unused here. Returns None for unknown keys.
    """
    return getattr(self, key, None)
def get_current_stream_value(self, key, extra={}):
    """Look up *key* on the currently active stream.

    Returns None when offline (no current stream) or when the attribute is
    missing. *extra* is unused; kept for interface parity.
    """
    stream = self.current_stream
    if stream is None:
        return None
    return getattr(stream, key, None)
def get_last_stream_value(self, key, extra={}):
    """Look up *key* on the most recently ended stream.

    Returns None when no previous stream is known or the attribute is
    missing. *extra* is unused; kept for interface parity.
    """
    stream = self.last_stream
    if stream is None:
        return None
    return getattr(stream, key, None)
| 38.731794 | 166 | 0.623957 | import argparse
import collections
import datetime
import json
import logging
import math
import urllib
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.dialects.mysql import BIGINT
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import reconstructor
from sqlalchemy.orm import relationship
from pajbot.managers.db import Base
from pajbot.managers.db import DBManager
from pajbot.managers.handler import HandlerManager
from pajbot.managers.redis import RedisManager
log = logging.getLogger('pajbot')
def parse_twitch_datetime(datetime_str):
    """Convert a Twitch API timestamp ('2016-01-01T12:00:00Z') to a datetime."""
    twitch_format = '%Y-%m-%dT%H:%M:%SZ'
    return datetime.datetime.strptime(datetime_str, twitch_format)
class Stream(Base):
__tablename__ = 'tb_stream'
id = Column(Integer, primary_key=True)
title = Column(String(256), nullable=False)
stream_start = Column(DateTime, nullable=False)
stream_end = Column(DateTime, nullable=True)
ended = Column(Boolean, nullable=False, default=False)
stream_chunks = relationship('StreamChunk',
backref='stream',
cascade='save-update, merge, expunge',
lazy='joined')
def __init__(self, created_at, **options):
self.id = None
self.title = options.get('title', 'NO TITLE')
self.stream_start = parse_twitch_datetime(created_at)
self.stream_end = None
self.ended = False
@property
def uptime(self):
if self.ended is False:
return datetime.datetime.now() - self.stream_start
else:
return self.stream_end - self.stream_start
class StreamChunk(Base):
__tablename__ = 'tb_stream_chunk'
id = Column(Integer, primary_key=True)
stream_id = Column(Integer, ForeignKey('tb_stream.id'), nullable=False)
broadcast_id = Column(BIGINT(unsigned=True), nullable=False)
video_url = Column(String(128), nullable=True)
video_preview_image_url = Column(String(256), nullable=True)
chunk_start = Column(DateTime, nullable=False)
chunk_end = Column(DateTime, nullable=True)
highlights = relationship('StreamChunkHighlight',
backref='stream_chunk',
cascade='save-update, merge, expunge',
lazy='joined')
def __init__(self, stream, broadcast_id, created_at, **options):
self.id = None
self.stream_id = stream.id
self.broadcast_id = broadcast_id
self.video_url = None
self.video_preview_image_url = None
self.chunk_start = parse_twitch_datetime(created_at)
self.chunk_end = None
self.stream = stream
self.highlights = []
class StreamChunkHighlight(Base):
__tablename__ = 'tb_stream_chunk_highlight'
id = Column(Integer, primary_key=True)
stream_chunk_id = Column(Integer, ForeignKey('tb_stream_chunk.id'), nullable=False)
created_by = Column(Integer, nullable=True)
last_edited_by = Column(Integer, nullable=True)
created_at = Column(DateTime, nullable=False)
highlight_offset = Column(Integer, nullable=False)
description = Column(String(128), nullable=True)
override_link = Column(String(256), nullable=True)
thumbnail = Column(Boolean, nullable=True, default=None)
video_url = None
created_by_user = relationship('User',
lazy='noload',
primaryjoin='User.id==StreamChunkHighlight.created_by',
foreign_keys='StreamChunkHighlight.created_by',
cascade='save-update, merge, expunge',
uselist=False)
last_edited_by_user = relationship('User',
lazy='noload',
primaryjoin='User.id==StreamChunkHighlight.last_edited_by',
foreign_keys='StreamChunkHighlight.last_edited_by',
cascade='save-update, merge, expunge',
uselist=False)
DEFAULT_OFFSET = 0
def __init__(self, stream_chunk, **options):
self.stream_chunk_id = stream_chunk.id
self.created_at = datetime.datetime.now()
self.highlight_offset = options.get('offset', self.DEFAULT_OFFSET)
self.description = options.get('description', None)
self.override_link = options.get('override_link', None)
self.thumbnail = None
self.created_by = options.get('created_by', None)
self.last_edited_by = options.get('last_edited_by', None)
self.stream_chunk = stream_chunk
self.refresh_video_url()
stream_chunk.highlights.append(self)
@reconstructor
def on_load(self):
self.refresh_video_url()
@hybrid_property
def created_at_with_offset(self):
return self.created_at - self.highlight_offset
def refresh_video_url(self):
if self.override_link is not None:
self.video_url = self.override_link
elif self.stream_chunk.video_url is None:
self.video_url = None
else:
date_diff = self.created_at - self.stream_chunk.chunk_start
total_seconds = date_diff.total_seconds()
total_seconds -= abs(self.highlight_offset)
timedata = collections.OrderedDict()
timedata['h'] = math.trunc(total_seconds / 3600)
timedata['m'] = math.trunc(total_seconds / 60 % 60)
timedata['s'] = math.trunc(total_seconds % 60)
pretimedata = {
'h': 0,
'm': timedata['h'],
's': timedata['h'] + timedata['m']
}
timestamp = ''.join(['{value:02d}{key}'.format(value=value, key=key) for key, value in timedata.items() if value > 0 or pretimedata[key] > 0])
self.video_url = '{stream_chunk.video_url}?t={timestamp}'.format(stream_chunk=self.stream_chunk, timestamp=timestamp)
class StreamManager:
NUM_OFFLINES_REQUIRED = 10
STATUS_CHECK_INTERVAL = 20
VIDEO_URL_CHECK_INTERVAL = 60 * 5
def fetch_video_url_stage1(self):
if self.online is False:
return
try:
data = self.bot.twitchapi.get(['channels', self.bot.streamer, 'videos'], parameters={'broadcasts': 'true'}, base='https://api.twitch.tv/kraken/')
self.bot.mainthread_queue.add(self.refresh_video_url_stage2,
args=[data])
except urllib.error.HTTPError as e:
raw_data = e.read().decode('utf-8')
log.exception('OMGScoots')
log.info(raw_data)
except:
log.exception('Uncaught exception in fetch_video_url')
def fetch_video_url_stage2(self, data):
stream_chunk = self.current_stream_chunk if self.current_stream_chunk.video_url is None else None
try:
for video in data['videos']:
if video['broadcast_type'] == 'archive':
recorded_at = parse_twitch_datetime(video['recorded_at'])
if stream_chunk is not None:
time_diff = stream_chunk.chunk_start - recorded_at
if abs(time_diff.total_seconds()) < 60 * 5:
return video['url'], video['preview'], video['recorded_at']
else:
if video['status'] == 'recording':
return video['url'], video['preview'], video['recorded_at']
except urllib.error.HTTPError as e:
raw_data = e.read().decode('utf-8')
log.exception('OMGScoots')
log.info(raw_data)
except:
log.exception('Uncaught exception in fetch_video_url')
return None, None, None
def __init__(self, bot):
self.bot = bot
self.current_stream_chunk = None
self.num_offlines = 0
self.first_offline = None
self.num_viewers = 0
self.game = 'Loading...'
self.title = 'Loading...'
self.bot.execute_every(self.STATUS_CHECK_INTERVAL,
self.bot.action_queue.add,
(self.refresh_stream_status_stage1, ))
self.bot.execute_every(self.VIDEO_URL_CHECK_INTERVAL,
self.bot.action_queue.add,
(self.refresh_video_url_stage1, ))
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
self.current_stream = db_session.query(Stream).filter_by(ended=False).order_by(Stream.stream_start.desc()).first()
self.last_stream = db_session.query(Stream).filter_by(ended=True).order_by(Stream.stream_end.desc()).first()
if self.current_stream:
self.current_stream_chunk = db_session.query(StreamChunk).filter_by(stream_id=self.current_stream.id).order_by(StreamChunk.chunk_start.desc()).first()
log.info('Set current stream chunk here to {0}'.format(self.current_stream_chunk))
db_session.expunge_all()
def get_viewer_data(self, redis=None):
if self.offline:
return False
if not redis:
redis = RedisManager.get()
data = redis.hget(
'{streamer}:viewer_data'.format(streamer=self.bot.streamer),
self.current_stream.id)
if data is None:
data = {}
else:
data = json.loads(data)
return data
def update_chatters(self, chatters, minutes):
if self.offline:
return False
redis = RedisManager.get()
data = self.get_viewer_data(redis=redis)
for chatter in chatters:
if chatter in data:
data[chatter] += minutes
else:
data[chatter] = minutes
redis.hset(
'{streamer}:viewer_data'.format(streamer=self.bot.streamer),
self.current_stream.id,
json.dumps(data, separators=(',', ':')))
@property
def online(self):
return self.current_stream is not None
@property
def offline(self):
return self.current_stream is None
def commit(self):
log.info('commiting something?')
def create_stream_chunk(self, status):
if self.current_stream_chunk is not None:
self.current_stream_chunk.chunk_end = datetime.datetime.now()
DBManager.session_add_expunge(self.current_stream_chunk)
stream_chunk = None
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
stream_chunk = db_session.query(StreamChunk).filter_by(broadcast_id=status['broadcast_id']).one_or_none()
if stream_chunk is None:
log.info('Creating stream chunk, from create_stream_chunk')
stream_chunk = StreamChunk(self.current_stream, status['broadcast_id'], status['created_at'])
self.current_stream_chunk = stream_chunk
db_session.add(stream_chunk)
db_session.commit()
else:
log.info('We already have a stream chunk!')
self.current_stream_chunk = stream_chunk
stream_chunk = None
db_session.expunge_all()
if stream_chunk:
self.current_stream.stream_chunks.append(stream_chunk)
def create_stream(self, status):
log.info('Attempting to create a stream!')
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
stream_chunk = db_session.query(StreamChunk).filter_by(broadcast_id=status['broadcast_id']).one_or_none()
new_stream = False
if stream_chunk is not None:
stream = stream_chunk.stream
else:
log.info('checking if there is an active stream already')
stream = db_session.query(Stream).filter_by(ended=False).order_by(Stream.stream_start.desc()).first()
new_stream = stream is None
if new_stream:
log.info('No active stream, create new!')
stream = Stream(status['created_at'],
title=status['title'])
db_session.add(stream)
db_session.commit()
log.info('Successfully added stream!')
stream_chunk = StreamChunk(stream, status['broadcast_id'], status['created_at'])
db_session.add(stream_chunk)
db_session.commit()
stream.stream_chunks.append(stream_chunk)
log.info('Created stream chunk')
self.current_stream = stream
self.current_stream_chunk = stream_chunk
db_session.expunge_all()
if new_stream:
HandlerManager.trigger('on_stream_start', stop_on_false=False)
log.info('Successfully created a stream')
def go_offline(self):
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
self.current_stream.ended = True
self.current_stream.stream_end = self.first_offline
self.current_stream_chunk.chunk_end = self.first_offline
db_session.add(self.current_stream)
db_session.add(self.current_stream_chunk)
db_session.commit()
db_session.expunge_all()
self.last_stream = self.current_stream
self.current_stream = None
self.current_stream_chunk = None
HandlerManager.trigger('on_stream_stop', stop_on_false=False)
def refresh_stream_status_stage1(self):
try:
status = self.bot.twitchapi.get_status(self.bot.streamer)
if status['error'] is True:
# log.error('An error occured while fetching stream status')
# I'll comment this out since all errors are posted live anyway
return
self.bot.mainthread_queue.add(self.refresh_stream_status_stage2,
args=[status])
except:
log.exception('Uncaught exception while refreshing stream status (Stage 1)')
def refresh_stream_status_stage2(self, status):
try:
redis = RedisManager.get()
redis.hmset('stream_data', {
'{streamer}:online'.format(streamer=self.bot.streamer): status['online'],
'{streamer}:viewers'.format(streamer=self.bot.streamer): status['viewers'],
'{streamer}:game'.format(streamer=self.bot.streamer): status['game'],
})
self.num_viewers = status['viewers']
self.game = status['game']
self.title = status['title']
if status['online']:
if self.current_stream is None:
self.create_stream(status)
if self.current_stream_chunk is None:
self.create_stream_chunk(status)
if self.current_stream_chunk.broadcast_id != status['broadcast_id']:
log.debug('Detected a new chunk!')
self.create_stream_chunk(status)
self.num_offlines = 0
self.first_offline = None
else:
if self.online is True:
log.info('Offline. {0}'.format(self.num_offlines))
if self.first_offline is None:
self.first_offline = datetime.datetime.now()
if self.num_offlines >= 10:
log.info('Switching to offline state!')
self.go_offline()
self.num_offlines += 1
except:
log.exception('Uncaught exception while refreshing stream status (Stage 2)')
def refresh_video_url_stage1(self):
self.fetch_video_url_stage1()
def refresh_video_url_stage2(self, data):
if self.online is False:
return
if self.current_stream_chunk is None or self.current_stream is None:
return
log.info('Attempting to fetch video url for broadcast {0}'.format(self.current_stream_chunk.broadcast_id))
stream_chunk = self.current_stream_chunk if self.current_stream_chunk.video_url is None else None
video_url, video_preview_image_url, video_recorded_at = self.fetch_video_url_stage2(data)
if video_url is not None:
log.info('Successfully fetched a video url: {0}'.format(video_url))
if self.current_stream_chunk is None or self.current_stream_chunk.video_url is None:
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
self.current_stream_chunk.video_url = video_url
self.current_stream_chunk.video_preview_image_url = video_preview_image_url
db_session.add(self.current_stream_chunk)
db_session.commit()
db_session.expunge_all()
log.info('Successfully commited video url data.')
elif self.current_stream_chunk.video_url != video_url:
self.current_stream_chunk.chunk_end = datetime.datetime.now()
DBManager.session_add_expunge(self.current_stream_chunk)
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
stream_chunk = StreamChunk(self.current_stream, self.current_stream_chunk.broadcast_id, video_recorded_at)
self.current_stream_chunk = stream_chunk
self.current_stream_chunk.video_url = video_url
self.current_stream_chunk.video_preview_image_url = video_preview_image_url
db_session.add(self.current_stream_chunk)
db_session.commit()
db_session.expunge_all()
log.info('Successfully commited video url data in a new chunk.')
else:
log.info('Not video for broadcast found')
def create_highlight(self, **options):
if self.online is False or self.current_stream_chunk is None:
return 'The stream is not online'
if self.current_stream_chunk.video_url is None:
return 'No video URL fetched for this chunk yet, try in 5 minutes'
try:
highlight = StreamChunkHighlight(self.current_stream_chunk, **options)
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
db_session.add(highlight)
db_session.add(self.current_stream_chunk)
except:
log.exception('uncaught exception in create_highlight')
return 'Unknown reason, ask pajlada'
return True
def parse_highlight_arguments(self, message):
parser = argparse.ArgumentParser()
parser.add_argument('--offset', dest='offset', type=int)
parser.add_argument('--id', dest='id', type=int)
parser.add_argument('--link', dest='override_link')
parser.add_argument('--url', dest='override_link')
parser.add_argument('--overridelink', dest='override_link')
parser.add_argument('--no-link', dest='override_link', action='store_false')
try:
args, unknown = parser.parse_known_args(message.split())
except SystemExit:
return False, False
except:
log.exception('Unhandled exception in add_highlight')
return False, False
options = {k: v for k, v in vars(args).items() if v is not None}
response = ' '.join(unknown)
if 'override_link' in options and options['override_link'] is False:
options['override_link'] = None
return options, response
def update_highlight(self, id, **options):
if 'offset' in options:
options['highlight_offset'] = options.pop('offset')
num_rows = 0
try:
with DBManager.create_session_scope() as db_session:
num_rows = db_session.query(StreamChunkHighlight).filter_by(id=id).update(options)
except:
log.exception('AAAAAAAAAA FIXME')
return (num_rows == 1)
def remove_highlight(self, id):
with DBManager.create_session_scope() as db_session:
num_rows = db_session.query(StreamChunkHighlight).filter(StreamChunkHighlight.id == id).delete()
return (num_rows == 1)
def get_stream_value(self, key, extra={}):
return getattr(self, key, None)
def get_current_stream_value(self, key, extra={}):
if self.current_stream is not None:
return getattr(self.current_stream, key, None)
else:
return None
def get_last_stream_value(self, key, extra={}):
if self.last_stream is not None:
return getattr(self.last_stream, key, None)
else:
return None
| true | true |
f72e169be186cbba5f4a9ef17348cd703e1b3cf9 | 455 | py | Python | shop/urls.py | rahulbiswas24680/Django-Ecommerce | 89c0cbc12241b89bd88ce46b6c2f2e918b5719a7 | [
"MIT"
] | null | null | null | shop/urls.py | rahulbiswas24680/Django-Ecommerce | 89c0cbc12241b89bd88ce46b6c2f2e918b5719a7 | [
"MIT"
] | null | null | null | shop/urls.py | rahulbiswas24680/Django-Ecommerce | 89c0cbc12241b89bd88ce46b6c2f2e918b5719a7 | [
"MIT"
] | null | null | null | from django.urls import path
from shop.views import HomeListView, ProductListView, ProductDetailView, contact, about
# URL routes for the shop app. Order matters: Django matches patterns top to
# bottom, so the static 'contact/' and 'about/' routes must come before the
# catch-all '<slug:category_slug>/' pattern or it would swallow them.
urlpatterns = [
    path('', HomeListView.as_view(), name='home'),
    path('contact/', contact, name='contact'),
    path('about/', about, name='about'),
    path('<slug:category_slug>/', ProductListView.as_view(), name='category-products'),
    path('<int:id>/<slug:slug>/', ProductDetailView.as_view(), name='product-detail'),
]
| 37.916667 | 87 | 0.69011 | from django.urls import path
from shop.views import HomeListView, ProductListView, ProductDetailView, contact, about
urlpatterns = [
path('', HomeListView.as_view(), name='home'),
path('contact/', contact, name='contact'),
path('about/', about, name='about'),
path('<slug:category_slug>/', ProductListView.as_view(), name='category-products'),
path('<int:id>/<slug:slug>/', ProductDetailView.as_view(), name='product-detail'),
]
| true | true |
f72e16b1768e507ae7dadbc2a35886081b34ddce | 187 | py | Python | testutils/settings.py | kolyat/walletdjango | 492bb0ec903eec697892c6eac7e6ebba077acacd | [
"MIT"
] | null | null | null | testutils/settings.py | kolyat/walletdjango | 492bb0ec903eec697892c6eac7e6ebba077acacd | [
"MIT"
] | null | null | null | testutils/settings.py | kolyat/walletdjango | 492bb0ec903eec697892c6eac7e6ebba077acacd | [
"MIT"
] | null | null | null | import logging
# Shared logging configuration for the test utilities.
# NOTE(review): the keys match logging.basicConfig(**LOG_OPTIONS) keyword
# arguments (filemode/format/level) -- confirm at the call site.
LOG_OPTIONS = {
    'filemode': 'a',  # append to the existing log file instead of truncating
    'format': '%(asctime)s [%(module)20s] %(levelname)7s - %(funcName)s'
              ' - %(message)s',
    'level': logging.INFO
}
| 18.7 | 72 | 0.534759 | import logging
LOG_OPTIONS = {
'filemode': 'a',
'format': '%(asctime)s [%(module)20s] %(levelname)7s - %(funcName)s'
' - %(message)s',
'level': logging.INFO
}
| true | true |
f72e16dd4f099ffedb6a0cc233fa8a9ccd6fc1d8 | 3,024 | py | Python | run.py | ChristopherKchilton/Kepler-Exoplanet-App | b4c7586bf5780d22745178221e39ae97b6aa8448 | [
"MIT"
] | 1 | 2021-09-23T22:04:24.000Z | 2021-09-23T22:04:24.000Z | run.py | ChristopherKchilton/Kepler-Exoplanet-App | b4c7586bf5780d22745178221e39ae97b6aa8448 | [
"MIT"
] | null | null | null | run.py | ChristopherKchilton/Kepler-Exoplanet-App | b4c7586bf5780d22745178221e39ae97b6aa8448 | [
"MIT"
] | null | null | null | # Imports from 3rd party libraries
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
# Imports from this application
from app import app, server
from pages import index, predictions, insights, process, newpage
# Navbar docs: https://dash-bootstrap-components.opensource.faculty.ai/l/components/navbar
# Top navigation bar shown on every page; links are client-side dcc.Links
# handled by the display_page URL-routing callback.
navbar = dbc.NavbarSimple(
    brand='Exoplanet Canidates',  # NOTE(review): likely a typo for 'Candidates' -- user-facing text, left unchanged
    brand_href='/',
    children=[
        dbc.NavItem(dcc.Link('Predictions', href='/predictions', className='nav-link')),
        dbc.NavItem(dcc.Link('Insights', href='/insights', className='nav-link')),
        dbc.NavItem(dcc.Link('Process', href='/process', className='nav-link')),
    ],
    sticky='top',
    color='dark',
    light=False,
    dark=True
)
# Footer docs:
# dbc.Container, dbc.Row, dbc.Col: https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
# html.P: https://dash.plot.ly/dash-html-components
# fa (font awesome) : https://fontawesome.com/icons/github-square?style=brands
# mr (margin right) : https://getbootstrap.com/docs/4.3/utilities/spacing/
# className='lead' : https://getbootstrap.com/docs/4.3/content/typography/#lead
# Page footer with the author's contact links, rendered below every page.
footer = dbc.Container(
    dbc.Row(
        dbc.Col(
            html.P(
                [
                    html.Span('Christopher Chilton', className='mr-2'),
                    # 'fas'/'fab' icon classes come from the Font Awesome stylesheet.
                    html.A(html.I(className='fas fa-envelope-square mr-1'), href='mailto:chris.kchilton@gmail.com'),
                    html.A(html.I(className='fab fa-github-square mr-1'), href='https://github.com/ChristopherKchilton/Kepler-Exoplanet-App'),
                    html.A(html.I(className='fab fa-linkedin mr-1'), href='https://www.linkedin.com/in/christopher-chilton-a15aa492/'),
                    # html.A(html.I(className='fab fa-twitter-square mr-1'), href='https://twitter.com/<you>'),
                ],
                className='lead'
            )
        )
    )
)
# Layout docs:
# html.Div: https://dash.plot.ly/getting-started
# dcc.Location: https://dash.plot.ly/dash-core-components/location
# dbc.Container: https://dash-bootstrap-components.opensource.faculty.ai/l/components/layout
# Single-page-app shell: dcc.Location tracks the browser URL and the
# 'page-content' container is swapped in by the display_page callback.
app.layout = html.Div([
    dcc.Location(id='url', refresh=False),
    navbar,
    dbc.Container(id='page-content', className='mt-4'),
    html.Hr(),
    footer
])
# URL Routing for Multi-Page Apps: https://dash.plot.ly/urls
@app.callback(Output('page-content', 'children'),
              [Input('url', 'pathname')])
def display_page(pathname):
    """Route the current URL path to the matching page layout."""
    pages = {
        '/': index,
        '/predictions': predictions,
        '/insights': insights,
        '/process': process,
    }
    page = pages.get(pathname)
    if page is not None:
        return page.layout
    return dcc.Markdown('## Page not found')
# Run app server: https://dash.plot.ly/getting-started
if __name__ == '__main__':
    # debug=True enables Dash hot-reloading; intended for local development.
    app.run_server(debug=True)
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from app import app, server
from pages import index, predictions, insights, process, newpage
navbar = dbc.NavbarSimple(
brand='Exoplanet Canidates',
brand_href='/',
children=[
dbc.NavItem(dcc.Link('Predictions', href='/predictions', className='nav-link')),
dbc.NavItem(dcc.Link('Insights', href='/insights', className='nav-link')),
dbc.NavItem(dcc.Link('Process', href='/process', className='nav-link')),
],
sticky='top',
color='dark',
light=False,
dark=True
)
er = dbc.Container(
dbc.Row(
dbc.Col(
html.P(
[
html.Span('Christopher Chilton', className='mr-2'),
html.A(html.I(className='fas fa-envelope-square mr-1'), href='mailto:chris.kchilton@gmail.com'),
html.A(html.I(className='fab fa-github-square mr-1'), href='https://github.com/ChristopherKchilton/Kepler-Exoplanet-App'),
html.A(html.I(className='fab fa-linkedin mr-1'), href='https://www.linkedin.com/in/christopher-chilton-a15aa492/'),
],
className='lead'
)
)
)
)
app.layout = html.Div([
dcc.Location(id='url', refresh=False),
navbar,
dbc.Container(id='page-content', className='mt-4'),
html.Hr(),
footer
])
@app.callback(Output('page-content', 'children'),
[Input('url', 'pathname')])
def display_page(pathname):
if pathname == '/':
return index.layout
elif pathname == '/predictions':
return predictions.layout
elif pathname == '/insights':
return insights.layout
elif pathname == '/process':
return process.layout
else:
return dcc.Markdown('## Page not found')
if __name__ == '__main__':
app.run_server(debug=True) | true | true |
f72e181408e8c012d638f6c4cfc87f3ae2a44420 | 431 | py | Python | python/0205. isIsomorphic.py | whtahy/leetcode | a2955123d203b155455ceefe38514fd0077d6db9 | [
"CC0-1.0"
] | 1 | 2017-12-09T05:37:51.000Z | 2017-12-09T05:37:51.000Z | python/0205. isIsomorphic.py | whtahy/leetcode | a2955123d203b155455ceefe38514fd0077d6db9 | [
"CC0-1.0"
] | null | null | null | python/0205. isIsomorphic.py | whtahy/leetcode | a2955123d203b155455ceefe38514fd0077d6db9 | [
"CC0-1.0"
] | null | null | null | # pigeonhole
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        """True iff a one-to-one character mapping turns s into t.

        Pigeonhole argument: the mapping is a bijection exactly when the
        number of distinct (s_char, t_char) pairs equals the number of
        distinct characters in s AND in t.
        """
        pair_count = len(set(zip(s, t)))
        return pair_count == len(set(s)) and pair_count == len(set(t))
# two dict
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        """True iff a one-to-one character mapping turns s into t.

        Maintains forward (s->t) and backward (t->s) mappings in a single
        pass; any conflict with an earlier pairing means not isomorphic.
        """
        forward, backward = {}, {}
        for a, b in zip(s, t):
            if forward.setdefault(a, b) != b or backward.setdefault(b, a) != a:
                return False
        return True
| 28.733333 | 68 | 0.484919 |
class Solution:
def isIsomorphic(self, s: str, t: str) -> bool:
return len(set(zip(s, t))) == len(set(s)) == len(set(t))
class Solution:
def isIsomorphic(self, s: str, t: str) -> bool:
dx, dy = {}, {}
for x, y in zip(s, t):
if (x in dx and dx[x] != y) or (y in dy and dy[y] != x):
return False
dx[x], dy[y] = y, x
return True
| true | true |
f72e188591ab127798133a1c55f21b9ce1703fc6 | 2,380 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/ixnet/ixnet.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 20 | 2019-05-07T01:59:14.000Z | 2022-02-11T05:24:47.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/ixnet/ixnet.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 60 | 2019-04-03T18:59:35.000Z | 2022-02-22T12:05:05.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/ixnet/ixnet.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 13 | 2019-05-20T10:48:31.000Z | 2021-10-06T07:45:44.000Z | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class Ixnet(Base):
    """Tracks remote clients connected using the ixNet API Service over websockets.
    The Ixnet class encapsulates a required ixnet resource which will be retrieved from the server every time the property is accessed.
    """
    __slots__ = ()
    _SDM_NAME = 'ixnet'
    # Maps each Python property name to the server-side attribute name that
    # the inherited _get_attribute() queries.
    _SDM_ATT_MAP = {
        'ConnectedClients': 'connectedClients',
        'IsActive': 'isActive',
    }
    # No enum-valued attributes on this resource.
    _SDM_ENUM_MAP = {
    }
    def __init__(self, parent, list_op=False):
        super(Ixnet, self).__init__(parent, list_op)
    @property
    def ConnectedClients(self):
        # type: () -> List[str]
        """
        Returns
        -------
        - list(str): Returns the remote address and remote port for each of the currently connected ixNet clients.
        """
        return self._get_attribute(self._SDM_ATT_MAP['ConnectedClients'])
    @property
    def IsActive(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: Returns true if any remote clients are connected, false if no remote clients are connected.
        """
        return self._get_attribute(self._SDM_ATT_MAP['IsActive'])
| 37.777778 | 135 | 0.702941 |
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class Ixnet(Base):
    """Tracks remote clients connected using the ixNet API service over websockets.

    Encapsulates a required ``ixnet`` resource; each property is fetched
    from the server on every access via the inherited _get_attribute().
    """
    __slots__ = ()
    _SDM_NAME = 'ixnet'
    # Python property name -> server-side attribute name.
    _SDM_ATT_MAP = {
        'ConnectedClients': 'connectedClients',
        'IsActive': 'isActive',
    }
    # No enum-valued attributes on this resource.
    _SDM_ENUM_MAP = {
    }
    def __init__(self, parent, list_op=False):
        super(Ixnet, self).__init__(parent, list_op)
    @property
    def ConnectedClients(self):
        """list(str): remote address and port of each currently connected ixNet client."""
        return self._get_attribute(self._SDM_ATT_MAP['ConnectedClients'])
    @property
    def IsActive(self):
        """bool: True if any remote client is connected, False otherwise."""
        return self._get_attribute(self._SDM_ATT_MAP['IsActive'])
| true | true |
f72e1890c2d8b98f2632b1de86850ffdce51ad17 | 3,875 | py | Python | tests/test_laplacian_matrices.py | jkxing/pytorch3d | 71dbebe8010a0dac3e56be464778aa48fbd3bcd3 | [
"BSD-3-Clause"
] | 6,041 | 2020-01-23T23:29:41.000Z | 2022-03-31T21:35:13.000Z | tests/test_laplacian_matrices.py | jkxing/pytorch3d | 71dbebe8010a0dac3e56be464778aa48fbd3bcd3 | [
"BSD-3-Clause"
] | 1,054 | 2020-01-24T15:23:15.000Z | 2022-03-31T15:31:59.000Z | tests/test_laplacian_matrices.py | jkxing/pytorch3d | 71dbebe8010a0dac3e56be464778aa48fbd3bcd3 | [
"BSD-3-Clause"
] | 943 | 2020-01-24T00:10:30.000Z | 2022-03-31T04:02:35.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import unittest
import torch
from common_testing import TestCaseMixin, get_random_cuda_device
from pytorch3d.ops import cot_laplacian, laplacian, norm_laplacian
from pytorch3d.structures.meshes import Meshes
class TestLaplacianMatrices(TestCaseMixin, unittest.TestCase):
    """Compare the sparse Laplacian builders in pytorch3d.ops against
    dense, naively assembled reference matrices."""
    def setUp(self) -> None:
        super().setUp()
        # fixed seed so the random mesh is reproducible
        torch.manual_seed(1)
    def init_mesh(self) -> Meshes:
        """Build one random mesh with V=32 vertices and F=64 triangular faces."""
        V, F = 32, 64
        device = get_random_cuda_device()
        # random vertices
        verts = torch.rand((V, 3), dtype=torch.float32, device=device)
        # random valid faces (no self circles, e.g. (v0, v0, v1))
        faces = torch.stack([torch.randperm(V) for f in range(F)], dim=0)[:, :3]
        faces = faces.to(device=device)
        return Meshes(verts=[verts], faces=[faces])
    def test_laplacian(self):
        """laplacian() must equal the dense uniform graph Laplacian:
        L[i, j] = 1/deg(i) for edges, -1 on the diagonal."""
        mesh = self.init_mesh()
        verts = mesh.verts_packed()
        edges = mesh.edges_packed()
        V, E = verts.shape[0], edges.shape[0]
        L = laplacian(verts, edges)
        # dense reference: adjacency matrix first
        Lnaive = torch.zeros((V, V), dtype=torch.float32, device=verts.device)
        for e in range(E):
            e0, e1 = edges[e]
            Lnaive[e0, e1] = 1
            # symmetric entry
            Lnaive[e1, e0] = 1
        # normalize each row by the vertex degree
        deg = Lnaive.sum(1).view(-1, 1)
        deg[deg > 0] = 1.0 / deg[deg > 0]
        Lnaive = Lnaive * deg
        diag = torch.eye(V, dtype=torch.float32, device=mesh.device)
        Lnaive.masked_fill_(diag > 0, -1)
        self.assertClose(L.to_dense(), Lnaive)
    def test_cot_laplacian(self):
        """cot_laplacian() must match per-face accumulated cotangent weights
        and the reciprocal of the summed adjacent face areas."""
        mesh = self.init_mesh()
        verts = mesh.verts_packed()
        faces = mesh.faces_packed()
        V = verts.shape[0]
        eps = 1e-12
        L, inv_areas = cot_laplacian(verts, faces, eps=eps)
        Lnaive = torch.zeros((V, V), dtype=torch.float32, device=verts.device)
        inv_areas_naive = torch.zeros((V, 1), dtype=torch.float32, device=verts.device)
        for f in faces:
            v0 = verts[f[0], :]
            v1 = verts[f[1], :]
            v2 = verts[f[2], :]
            # edge lengths opposite each vertex
            A = (v1 - v2).norm()
            B = (v0 - v2).norm()
            C = (v0 - v1).norm()
            # Heron's formula (clamped for numerical safety)
            s = 0.5 * (A + B + C)
            face_area = (s * (s - A) * (s - B) * (s - C)).clamp_(min=1e-12).sqrt()
            # accumulate adjacent face areas per vertex; inverted below
            inv_areas_naive[f[0]] += face_area
            inv_areas_naive[f[1]] += face_area
            inv_areas_naive[f[2]] += face_area
            # cotangent weights via the law of cosines: cot = (b^2+c^2-a^2)/(4*area)
            A2, B2, C2 = A * A, B * B, C * C
            cota = (B2 + C2 - A2) / face_area / 4.0
            cotb = (A2 + C2 - B2) / face_area / 4.0
            cotc = (A2 + B2 - C2) / face_area / 4.0
            Lnaive[f[1], f[2]] += cota
            Lnaive[f[2], f[0]] += cotb
            Lnaive[f[0], f[1]] += cotc
            # symmetric entries
            Lnaive[f[2], f[1]] += cota
            Lnaive[f[0], f[2]] += cotb
            Lnaive[f[1], f[0]] += cotc
        idx = inv_areas_naive > 0
        inv_areas_naive[idx] = 1.0 / inv_areas_naive[idx]
        self.assertClose(inv_areas, inv_areas_naive)
        self.assertClose(L.to_dense(), Lnaive)
    def test_norm_laplacian(self):
        """norm_laplacian() must match edge weights 1 / (edge length + eps)."""
        mesh = self.init_mesh()
        verts = mesh.verts_packed()
        edges = mesh.edges_packed()
        V, E = verts.shape[0], edges.shape[0]
        eps = 1e-12
        L = norm_laplacian(verts, edges, eps=eps)
        Lnaive = torch.zeros((V, V), dtype=torch.float32, device=verts.device)
        for e in range(E):
            e0, e1 = edges[e]
            v0 = verts[e0]
            v1 = verts[e1]
            w01 = 1.0 / ((v0 - v1).norm() + eps)
            Lnaive[e0, e1] += w01
            Lnaive[e1, e0] += w01
        self.assertClose(L.to_dense(), Lnaive)
| 32.291667 | 87 | 0.544516 |
import unittest
import torch
from common_testing import TestCaseMixin, get_random_cuda_device
from pytorch3d.ops import cot_laplacian, laplacian, norm_laplacian
from pytorch3d.structures.meshes import Meshes
class TestLaplacianMatrices(TestCaseMixin, unittest.TestCase):
    """Compare the sparse Laplacian builders in pytorch3d.ops against
    dense, naively assembled reference matrices."""
    def setUp(self) -> None:
        super().setUp()
        # fixed seed so the random mesh is reproducible
        torch.manual_seed(1)
    def init_mesh(self) -> Meshes:
        """Build one random mesh with V=32 vertices and F=64 triangular faces."""
        V, F = 32, 64
        device = get_random_cuda_device()
        verts = torch.rand((V, 3), dtype=torch.float32, device=device)
        # faces drawn from permutations so no face repeats a vertex
        faces = torch.stack([torch.randperm(V) for f in range(F)], dim=0)[:, :3]
        faces = faces.to(device=device)
        return Meshes(verts=[verts], faces=[faces])
    def test_laplacian(self):
        """laplacian() must equal the dense uniform graph Laplacian:
        L[i, j] = 1/deg(i) for edges, -1 on the diagonal."""
        mesh = self.init_mesh()
        verts = mesh.verts_packed()
        edges = mesh.edges_packed()
        V, E = verts.shape[0], edges.shape[0]
        L = laplacian(verts, edges)
        # dense reference: symmetric adjacency, row-normalized by degree
        Lnaive = torch.zeros((V, V), dtype=torch.float32, device=verts.device)
        for e in range(E):
            e0, e1 = edges[e]
            Lnaive[e0, e1] = 1
            Lnaive[e1, e0] = 1
        deg = Lnaive.sum(1).view(-1, 1)
        deg[deg > 0] = 1.0 / deg[deg > 0]
        Lnaive = Lnaive * deg
        diag = torch.eye(V, dtype=torch.float32, device=mesh.device)
        Lnaive.masked_fill_(diag > 0, -1)
        self.assertClose(L.to_dense(), Lnaive)
    def test_cot_laplacian(self):
        """cot_laplacian() must match per-face accumulated cotangent weights
        and the reciprocal of the summed adjacent face areas."""
        mesh = self.init_mesh()
        verts = mesh.verts_packed()
        faces = mesh.faces_packed()
        V = verts.shape[0]
        eps = 1e-12
        L, inv_areas = cot_laplacian(verts, faces, eps=eps)
        Lnaive = torch.zeros((V, V), dtype=torch.float32, device=verts.device)
        inv_areas_naive = torch.zeros((V, 1), dtype=torch.float32, device=verts.device)
        for f in faces:
            v0 = verts[f[0], :]
            v1 = verts[f[1], :]
            v2 = verts[f[2], :]
            # edge lengths opposite each vertex
            A = (v1 - v2).norm()
            B = (v0 - v2).norm()
            C = (v0 - v1).norm()
            # Heron's formula (clamped for numerical safety)
            s = 0.5 * (A + B + C)
            face_area = (s * (s - A) * (s - B) * (s - C)).clamp_(min=1e-12).sqrt()
            # accumulate adjacent face areas per vertex; inverted below
            inv_areas_naive[f[0]] += face_area
            inv_areas_naive[f[1]] += face_area
            inv_areas_naive[f[2]] += face_area
            # cotangent weights via the law of cosines: cot = (b^2+c^2-a^2)/(4*area)
            A2, B2, C2 = A * A, B * B, C * C
            cota = (B2 + C2 - A2) / face_area / 4.0
            cotb = (A2 + C2 - B2) / face_area / 4.0
            cotc = (A2 + B2 - C2) / face_area / 4.0
            Lnaive[f[1], f[2]] += cota
            Lnaive[f[2], f[0]] += cotb
            Lnaive[f[0], f[1]] += cotc
            # symmetric entries
            Lnaive[f[2], f[1]] += cota
            Lnaive[f[0], f[2]] += cotb
            Lnaive[f[1], f[0]] += cotc
        idx = inv_areas_naive > 0
        inv_areas_naive[idx] = 1.0 / inv_areas_naive[idx]
        self.assertClose(inv_areas, inv_areas_naive)
        self.assertClose(L.to_dense(), Lnaive)
    def test_norm_laplacian(self):
        """norm_laplacian() must match edge weights 1 / (edge length + eps)."""
        mesh = self.init_mesh()
        verts = mesh.verts_packed()
        edges = mesh.edges_packed()
        V, E = verts.shape[0], edges.shape[0]
        eps = 1e-12
        L = norm_laplacian(verts, edges, eps=eps)
        Lnaive = torch.zeros((V, V), dtype=torch.float32, device=verts.device)
        for e in range(E):
            e0, e1 = edges[e]
            v0 = verts[e0]
            v1 = verts[e1]
            w01 = 1.0 / ((v0 - v1).norm() + eps)
            Lnaive[e0, e1] += w01
            Lnaive[e1, e0] += w01
        self.assertClose(L.to_dense(), Lnaive)
| true | true |
f72e18e481d5ce31a4a2a149cd3a756c57128ad2 | 301 | py | Python | apps/courses/migrations/0008_delete_bannercourse.py | goodbad3/MoocOnline | 6d2007aff2a553ca71d956cf8173b87c939983f8 | [
"MIT"
] | null | null | null | apps/courses/migrations/0008_delete_bannercourse.py | goodbad3/MoocOnline | 6d2007aff2a553ca71d956cf8173b87c939983f8 | [
"MIT"
] | null | null | null | apps/courses/migrations/0008_delete_bannercourse.py | goodbad3/MoocOnline | 6d2007aff2a553ca71d956cf8173b87c939983f8 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.7 on 2019-05-24 00:52
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration that drops the ``BannerCourse`` model."""
    dependencies = [
        ('courses', '0007_auto_20190520_0046'),
    ]
    operations = [
        migrations.DeleteModel(
            name='BannerCourse',
        ),
    ]
| 17.705882 | 47 | 0.611296 |
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration that drops the ``BannerCourse`` model."""
    dependencies = [
        ('courses', '0007_auto_20190520_0046'),
    ]
    operations = [
        migrations.DeleteModel(
            name='BannerCourse',
        ),
    ]
| true | true |
f72e19a8d03ed28ae5f6c10ae78398cc29a948af | 3,521 | py | Python | application/classify_tissue_border.py | ryanmdavis/classifyHistology | 563687250f6d81a7e2596607587238354e7279e5 | [
"MIT"
] | null | null | null | application/classify_tissue_border.py | ryanmdavis/classifyHistology | 563687250f6d81a7e2596607587238354e7279e5 | [
"MIT"
] | null | null | null | application/classify_tissue_border.py | ryanmdavis/classifyHistology | 563687250f6d81a7e2596607587238354e7279e5 | [
"MIT"
] | null | null | null | from classifyHistology.train_net import vars_phs_consts_metrics as vars
from classifyHistology.train_net import functions as func
from classifyHistology.extract_images import rw_images as extract
from classifyHistology.application import net_plot as netplot
from classifyHistology.application import classify_tissue as ct
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import scipy.signal as sig
# ah: hyperparameters for extracting/augmenting image patches along the tissue
# border (passed to extract.rwImages and ct.standardizeImages below).
ah={
    'border_step':20, # number of pixels to step along tissue border before capturing the next image
    'train_image_size_rc':[48,192],
    'rotate_deg':[0],
    'translate_pix_aug_col':[0],
    'translate_pix_aug_row':[0],
    'reflect_horiz':0,
    'mov_avg_win':100,
    'save_root_dir':'/home/ryan/Documents/Datasets/classify_histology/augmented3',
    'image_fill_factor':3/4, # must be <1, >0
    'im_downscale_factor':3,
    'test_dataset_size':0.4, # fraction of the data that goes into the test dataset (here 40%)
    'norm_vec_len_px':100,
    'threshold_blue':200,
    'strel_size':10
    }
# training hyperparameters
# th: network/training hyperparameters handed to ct.classify.
# NOTE(review): 'n_input' is presumably [rows, cols, channels] of the net
# input — confirm against the convNet3 definition.
th = {
    'training_iters': 2,
    'learning_rate': 0.001,
    'batch_size': 128,
    'n_input': [16,64,3],
    'n_classes': 2,
    'net':'convNet3',
    'dropout_keep_prob': 0.5}
# dp: display/annotation parameters consumed by netplot.displayAnnotated.
dp = {
    'annotation_offset1_px': 30,
    'annotation_offset2_px': 70,
    'mov_med_filt_width': 5}
# load the model path
model_path=['/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output11-39-49AM-January-06-2019/model/model.ckpt']
# #model_path=['/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output09-43-53PM-December-17-2018/model/model.ckpt','/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output12-22-08AM-December-18-2018/model/model.ckpt','/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output02-58-28AM-December-18-2018/model/model.ckpt'] #EOD 12/17
# #model_path=['/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output10-05-07PM-December-19-2018/model/model.ckpt','/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output07-56-55AM-December-20-2018/model/model.ckpt'
# load the images to classify
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/patient180-tumor1-tr-3-test'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient18-normal4-tl-1-'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/large_dataset/Patient001'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient18-tumor5-br-2-'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient18-tumor5-bl-1-'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient101-normal-1-' # this is the patient where I get the large dataset from
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient101-tumor-boundry-1-'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient101-tumor-boundry-1-4'
# image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient101-tumor-1-'
image_location='/media/ryan/002E-0232/nanozoomer_images/Application_Data/Patient18-normal3-tr-4-'
# Extract border patches (kept in memory), standardize them, classify with the
# saved model, then render the annotated probabilities along the border.
normal_angle_rad_list,image_pos_rc_list,images_non_standardized,f_path=extract.rwImages(image_location,ah,to_mem=True,show_steps=False)
images_to_classify=ct.standardizeImages(images_non_standardized,ah['save_root_dir'])
probs,is_cancer=ct.classify(model_path,images_to_classify,th)
netplot.displayAnnotated(f_path,normal_angle_rad_list,image_pos_rc_list,probs,f_path,dp)
| 53.348485 | 347 | 0.789548 | from classifyHistology.train_net import vars_phs_consts_metrics as vars
from classifyHistology.train_net import functions as func
from classifyHistology.extract_images import rw_images as extract
from classifyHistology.application import net_plot as netplot
from classifyHistology.application import classify_tissue as ct
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import scipy.signal as sig
# ah: hyperparameters for extracting/augmenting image patches along the tissue border.
ah={
    'border_step':20, # pixels to step along the border between captured images
    'train_image_size_rc':[48,192],
    'rotate_deg':[0],
    'translate_pix_aug_col':[0],
    'translate_pix_aug_row':[0],
    'reflect_horiz':0,
    'mov_avg_win':100,
    'save_root_dir':'/home/ryan/Documents/Datasets/classify_histology/augmented3',
    'image_fill_factor':3/4, # must be <1, >0
    'im_downscale_factor':3,
    'test_dataset_size':0.4, # fraction of data held out for testing
    'norm_vec_len_px':100,
    'threshold_blue':200,
    'strel_size':10
    }
# th: network/training hyperparameters handed to ct.classify.
th = {
    'training_iters': 2,
    'learning_rate': 0.001,
    'batch_size': 128,
    'n_input': [16,64,3],
    'n_classes': 2,
    'net':'convNet3',
    'dropout_keep_prob': 0.5}
# dp: display/annotation parameters for rendering classification results.
dp = {
    'annotation_offset1_px': 30,
    'annotation_offset2_px': 70,
    'mov_med_filt_width': 5}
model_path=['/home/ryan/Dropbox/Code/classifyHistology/TensorBoard/Output11-39-49AM-January-06-2019/model/model.ckpt']
| true | true |
f72e1a0c4c0092fffb1e5ccec630614657eabff5 | 25,564 | py | Python | pyscf/gto/moleintor.py | nmardirossian/pyscf | 57c8912dcfcc1157a822feede63df54ed1067115 | [
"BSD-2-Clause"
] | 1 | 2018-05-02T19:55:30.000Z | 2018-05-02T19:55:30.000Z | pyscf/gto/moleintor.py | nmardirossian/pyscf | 57c8912dcfcc1157a822feede63df54ed1067115 | [
"BSD-2-Clause"
] | null | null | null | pyscf/gto/moleintor.py | nmardirossian/pyscf | 57c8912dcfcc1157a822feede63df54ed1067115 | [
"BSD-2-Clause"
] | 1 | 2018-12-06T03:10:50.000Z | 2018-12-06T03:10:50.000Z | #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import ctypes
import numpy
from pyscf import lib
libcgto = lib.load_library('libcgto')  # C library providing the integral drivers

# Column indices into the int32 ``bas`` array (one row per shell), following
# the libcint convention.
ANG_OF = 1      # angular momentum l of the shell
NPRIM_OF = 2    # number of primitive GTOs (per libcint; unused in this chunk)
NCTR_OF = 3     # number of contracted functions in the shell
KAPPA_OF = 4    # relativistic kappa quantum number (spinor basis)
PTR_EXP = 5     # offset of the exponents in ``env`` (per libcint convention)
PTR_COEFF = 6   # offset of the contraction coefficients in ``env`` (per libcint convention)
BAS_SLOTS = 8   # row width of the ``bas`` array
def getints(intor_name, atm, bas, env, shls_slice=None, comp=1, hermi=0,
            aosym='s1', ao_loc=None, cintopt=None, out=None):
    r'''General driver for 1e, 2e, 2-center-2e and 3-center-2e AO integrals.

    ``intor_name`` follows the libcint naming convention: a family prefix
    (``int1e_``, ``int2e_``, ``int2c2e_``, ``int3c2e_``, ``int4c1e_`` or
    ``ECP...``), an operator tag (e.g. ``ovlp``, ``nuc``, ``kin``, ``ip...``
    for derivative integrals) and a representation suffix (``_sph``,
    ``_cart`` or ``_spinor``; a missing suffix implies ``_spinor``).  See
    the libcint manual for the full catalogue of integral names.

    Args:
        intor_name : str
            Name of the integral, e.g. 'int1e_ovlp_sph', 'int2e_sph'.
        atm : int32 ndarray
            libcint integral function argument
        bas : int32 ndarray
            libcint integral function argument
        env : float64 ndarray
            libcint integral function argument

    Kwargs:
        shls_slice : 8-element list
            (ish_start, ish_end, jsh_start, jsh_end, ksh_start, ksh_end, lsh_start, lsh_end)
        comp : int
            Components of the integrals, e.g. int1e_ipovlp has 3 components.
        hermi : int (1e integral only)
            Symmetry of the 1e integrals
            | 0 : no symmetry assumed (default)
            | 1 : hermitian
            | 2 : anti-hermitian
        aosym : str (2e integral only)
            Symmetry of the 2e integrals
            | 4 or '4' or 's4': 4-fold symmetry
            | '2ij' or 's2ij' : symmetry between i, j in (ij|kl)
            | '2kl' or 's2kl' : symmetry between k, l in (ij|kl)
            | 1 or '1' or 's1': no symmetry (default)
        out : ndarray (2e integral only)
            array to store the 2e AO integrals

    Returns:
        ndarray of integrals; the leading ``comp`` axis is kept only when
        comp > 1.

    Examples:

    >>> mol.build(atom='H 0 0 0; H 0 0 1.1', basis='sto-3g')
    >>> gto.getints('int1e_ipnuc_sph', mol._atm, mol._bas, mol._env, comp=3) # <nabla i | V_nuc | j>
    [[[ 0.          0.        ]
      [ 0.          0.        ]]
     [[ 0.          0.        ]
      [ 0.          0.        ]]
     [[ 0.10289944  0.48176097]
      [-0.48176097 -0.10289944]]]
    '''
    intor_name = ascint3(intor_name)
    # Dispatch on the integral family encoded in the name prefix.
    if intor_name.startswith(('int1e', 'ECP', 'int2c2e')):
        # one-electron and 2-center integrals
        return getints2c(intor_name, atm, bas, env, shls_slice, comp,
                         hermi, ao_loc, cintopt, out)
    if intor_name.startswith(('int2e', 'int4c1e')):
        # 4-center two-electron integrals
        return getints4c(intor_name, atm, bas, env, shls_slice, comp,
                         aosym, ao_loc, cintopt, out)
    if intor_name.startswith('int3c'):
        # 3-center two-electron integrals
        return getints3c(intor_name, atm, bas, env, shls_slice, comp,
                         aosym, ao_loc, cintopt, out)
    raise RuntimeError('Unknown intor %s' % intor_name)
def getints2c(intor_name, atm, bas, env, shls_slice=None, comp=1, hermi=0,
              ao_loc=None, cintopt=None, out=None):
    '''Evaluate one-electron / two-center integrals.

    Returns a (naoi, naoj) array, or (comp, naoi, naoj) when comp > 1.
    Spherical/cartesian integrals are real (float64); spinor integrals
    are complex (complex128).
    '''
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = atm.shape[0]
    nbas = bas.shape[0]
    if shls_slice is None:
        shls_slice = (0, nbas, 0, nbas)
    else:
        assert(shls_slice[1] <= nbas and shls_slice[3] <= nbas)
    if ao_loc is None:
        ao_loc = make_loc(bas, intor_name)

    i0, i1, j0, j1 = shls_slice[:4]
    naoi = ao_loc[i1] - ao_loc[i0]
    naoj = ao_loc[j1] - ao_loc[j0]
    if intor_name.endswith('_cart') or intor_name.endswith('_sph'):
        mat = numpy.ndarray((naoi,naoj,comp), numpy.double, out, order='F')
        drv_name = 'GTOint2c'
    else:
        # numpy.complex (an alias of the builtin complex) was removed in
        # NumPy 1.24; complex128 is the dtype it always resolved to.
        mat = numpy.ndarray((naoi,naoj,comp), numpy.complex128, out, order='F')
        if '2c2e' in intor_name:
            # hermitian packing is not supported for complex 2c2e spinor integrals
            assert(hermi != lib.HERMITIAN and
                   hermi != lib.ANTIHERMI)
        drv_name = 'GTOint2c_spinor'

    if cintopt is None:
        cintopt = make_cintopt(atm, bas, env, intor_name)

    fn = getattr(libcgto, drv_name)
    fn(getattr(libcgto, intor_name), mat.ctypes.data_as(ctypes.c_void_p),
       ctypes.c_int(comp), ctypes.c_int(hermi),
       (ctypes.c_int*4)(*(shls_slice[:4])),
       ao_loc.ctypes.data_as(ctypes.c_void_p), cintopt,
       atm.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(natm),
       bas.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(nbas),
       env.ctypes.data_as(ctypes.c_void_p))

    # Fortran-ordered (naoi, naoj, comp) buffer -> (comp, naoi, naoj)
    mat = mat.transpose(2,0,1)
    if comp == 1:
        mat = mat[0]
    return mat
def getints3c(intor_name, atm, bas, env, shls_slice=None, comp=1,
              aosym='s1', ao_loc=None, cintopt=None, out=None):
    '''Evaluate 3-center-2e integrals (ij|k).

    With aosym='s1' the result has shape (naoi, naoj, naok); any other
    value is treated as 's2ij' and packs the i,j pair triangularly into
    (nij, naok).  A leading ``comp`` axis is kept only when comp > 1.
    '''
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = atm.shape[0]
    nbas = bas.shape[0]
    if shls_slice is None:
        shls_slice = (0, nbas, 0, nbas, 0, nbas)
    else:
        assert(shls_slice[1] <= nbas and
               shls_slice[3] <= nbas and
               shls_slice[5] <= nbas)

    i0, i1, j0, j1, k0, k1 = shls_slice[:6]
    if ao_loc is None:
        ao_loc = make_loc(bas, intor_name)
        if k0 > j1 and k0 > i1:
            # The auxiliary (k) shells can use a different representation
            # than the bra/ket shells, so rebuild their offsets.
            # BUGFIX: the slice must start at k0 (not k0-1): ao_loc has
            # nbas+1 entries, so ao_loc[k0:] and make_loc(bas[k0:], ...)
            # have matching lengths and ao_loc[k0] itself is preserved.
            # With k0-1 the assignment raised a broadcast ValueError.
            if 'ssc' in intor_name:
                ao_loc[k0:] = ao_loc[k0] + make_loc(bas[k0:], 'cart')
            elif 'spinor' in intor_name:
                # NOTE(review): recomputing with the same intor_name key is a
                # no-op; presumably the aux basis should use a real-spherical
                # ('sph') layout here -- confirm against upstream.
                ao_loc[k0:] = ao_loc[k0] + make_loc(bas[k0:], intor_name)
    naok = ao_loc[k1] - ao_loc[k0]

    if aosym in ('s1',):
        naoi = ao_loc[i1] - ao_loc[i0]
        naoj = ao_loc[j1] - ao_loc[j0]
        shape = (naoi, naoj, naok, comp)
    else:
        aosym = 's2ij'
        nij = ao_loc[i1]*(ao_loc[i1]+1)//2 - ao_loc[i0]*(ao_loc[i0]+1)//2
        shape = (nij, naok, comp)

    if 'spinor' in intor_name:
        # numpy.complex alias was removed in NumPy 1.24; use complex128.
        mat = numpy.ndarray(shape, numpy.complex128, out, order='F')
        drv = libcgto.GTOr3c_drv
        fill = getattr(libcgto, 'GTOr3c_fill_'+aosym)
    else:
        mat = numpy.ndarray(shape, numpy.double, out, order='F')
        drv = libcgto.GTOnr3c_drv
        fill = getattr(libcgto, 'GTOnr3c_fill_'+aosym)

    if cintopt is None:
        cintopt = make_cintopt(atm, bas, env, intor_name)
    drv(getattr(libcgto, intor_name), fill,
        mat.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(comp),
        (ctypes.c_int*6)(*(shls_slice[:6])),
        ao_loc.ctypes.data_as(ctypes.c_void_p), cintopt,
        atm.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(natm),
        bas.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(nbas),
        env.ctypes.data_as(ctypes.c_void_p))

    # move the comp axis to the front, then drop it for comp == 1
    mat = numpy.rollaxis(mat, -1, 0)
    if comp == 1:
        mat = mat[0]
    return mat
def getints4c(intor_name, atm, bas, env, shls_slice=None, comp=1,
              aosym='s1', ao_loc=None, cintopt=None, out=None):
    '''Evaluate 4-center 2-electron integrals (ij|kl).

    ``aosym`` selects the permutation symmetry of the output storage
    ('s8', 's4', 's2ij', 's2kl' or 's1').  The s8 path returns the fully
    packed 1D array; the other paths return (nij, nkl), with a leading
    ``comp`` axis when comp > 1.
    '''
    aosym = _stand_sym_code(aosym)
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    c_atm = atm.ctypes.data_as(ctypes.c_void_p)
    c_bas = bas.ctypes.data_as(ctypes.c_void_p)
    c_env = env.ctypes.data_as(ctypes.c_void_p)
    natm = atm.shape[0]
    nbas = bas.shape[0]
    # NOTE(review): the ao_loc argument is ignored and recomputed here --
    # confirm whether callers are expected to be able to override it.
    ao_loc = make_loc(bas, intor_name)

    if cintopt is None:
        cintopt = make_cintopt(atm, bas, env, intor_name)

    if aosym == 's8':
        # 8-fold symmetry: real integrals over the full shell range only.
        assert('_spinor' not in intor_name)
        assert(shls_slice is None)
        from pyscf.scf import _vhf
        nao = ao_loc[-1]
        nao_pair = nao*(nao+1)//2
        # packed lower-triangular storage of the (nao_pair, nao_pair) matrix
        out = numpy.ndarray((nao_pair*(nao_pair+1)//2), buffer=out)
        drv = _vhf.libcvhf.GTO2e_cart_or_sph
        drv(getattr(libcgto, intor_name), cintopt,
            out.ctypes.data_as(ctypes.c_void_p),
            ao_loc.ctypes.data_as(ctypes.c_void_p),
            c_atm, ctypes.c_int(natm), c_bas, ctypes.c_int(nbas), c_env)
        return out

    else:
        if shls_slice is None:
            shls_slice = (0, nbas, 0, nbas, 0, nbas, 0, nbas)
        elif len(shls_slice) == 4:
            # bra-only slice: integrate over all ket shells
            shls_slice = shls_slice + (0, nbas, 0, nbas)
        else:
            assert(shls_slice[1] <= nbas and shls_slice[3] <= nbas and
                   shls_slice[5] <= nbas and shls_slice[7] <= nbas)
        i0, i1, j0, j1, k0, k1, l0, l1 = shls_slice
        naoi = ao_loc[i1] - ao_loc[i0]
        naoj = ao_loc[j1] - ao_loc[j0]
        naok = ao_loc[k1] - ao_loc[k0]
        naol = ao_loc[l1] - ao_loc[l0]
        if aosym in ('s4', 's2ij'):
            # triangular i,j packing requires identical i and j AO ranges
            nij = naoi * (naoi + 1) // 2
            assert(numpy.all(ao_loc[i0:i1]-ao_loc[i0] == ao_loc[j0:j1]-ao_loc[j0]))
        else:
            nij = naoi * naoj
        if aosym in ('s4', 's2kl'):
            # triangular k,l packing requires identical k and l AO ranges
            nkl = naok * (naok + 1) // 2
            assert(numpy.all(ao_loc[k0:k1]-ao_loc[k0] == ao_loc[l0:l1]-ao_loc[l0]))
        else:
            nkl = naok * naol
        if comp == 1:
            out = numpy.ndarray((nij,nkl), buffer=out)
        else:
            out = numpy.ndarray((comp,nij,nkl), buffer=out)

        # no integral prescreening is installed for this generic driver
        prescreen = lib.c_null_ptr()
        drv = libcgto.GTOnr2e_fill_drv
        drv(getattr(libcgto, intor_name),
            getattr(libcgto, 'GTOnr2e_fill_'+aosym), prescreen,
            out.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(comp),
            (ctypes.c_int*8)(*shls_slice),
            ao_loc.ctypes.data_as(ctypes.c_void_p), cintopt,
            c_atm, ctypes.c_int(natm), c_bas, ctypes.c_int(nbas), c_env)
        return out
def getints_by_shell(intor_name, shls, atm, bas, env, comp=1):
    r'''For given 2, 3 or 4 shells, interface for libcint to get 1e, 2e,
    2-center-2e or 3-center-2e integrals

    Args:
        intor_name : str
            See also :func:`getints` for the supported intor_name
        shls : list of int
            The AO shell-ids of the integrals
        atm : int32 ndarray
            libcint integral function argument
        bas : int32 ndarray
            libcint integral function argument
        env : float64 ndarray
            libcint integral function argument

    Kwargs:
        comp : int
            Components of the integrals, e.g. int1e_ipovlp has 3 components.

    Returns:
        ndarray of 2-dim to 5-dim, depending on the integral type (1e,
        2e, 3c-2e, 2c2e) and the value of comp

    Examples:
        The gradients of the spherical 2e integrals

    >>> mol.build(atom='H 0 0 0; H 0 0 1.1', basis='sto-3g')
    >>> gto.getints_by_shell('int2e_ip1_sph', (0,1,0,1), mol._atm, mol._bas, mol._env, comp=3)
    [[[[[-0.        ]]]]
     [[[[-0.        ]]]]
     [[[[-0.08760462]]]]]
    '''
    intor_name = ascint3(intor_name)
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = ctypes.c_int(atm.shape[0])
    nbas = ctypes.c_int(bas.shape[0])
    # num_cgto_of returns the number of AO functions in a shell for the
    # representation implied by the intor suffix.
    if intor_name.endswith('_cart'):
        dtype = numpy.double
        def num_cgto_of(basid):
            l = bas[basid,ANG_OF]
            return (l+1)*(l+2)//2 * bas[basid,NCTR_OF]
    elif intor_name.endswith('_sph'):
        dtype = numpy.double
        def num_cgto_of(basid):
            l = bas[basid,ANG_OF]
            return (l*2+1) * bas[basid,NCTR_OF]
    else:
        from pyscf.gto.mole import len_spinor
        # spinor integrals are complex; numpy.complex (alias of builtin
        # complex) was removed in NumPy 1.24, complex128 is its dtype.
        dtype = numpy.complex128
        def num_cgto_of(basid):
            l = bas[basid,ANG_OF]
            k = bas[basid,KAPPA_OF]
            return len_spinor(l,k) * bas[basid,NCTR_OF]

    null = lib.c_null_ptr()
    if intor_name.startswith('int3c'):
        assert(len(shls) == 3)
        di = num_cgto_of(shls[0])
        dj = num_cgto_of(shls[1])
        l = bas[shls[2],ANG_OF]
        if intor_name.endswith('_ssc'): # mixed spherical-cartesian
            dk = (l+1)*(l+2)//2 * bas[shls[2],NCTR_OF]
        else:
            dk = (l*2+1) * bas[shls[2],NCTR_OF]
        buf = numpy.empty((di,dj,dk,comp), dtype, order='F')
        fintor = getattr(libcgto, intor_name)
        fintor(buf.ctypes.data_as(ctypes.c_void_p),
               null, (ctypes.c_int*3)(*shls),
               atm.ctypes.data_as(ctypes.c_void_p), natm,
               bas.ctypes.data_as(ctypes.c_void_p), nbas,
               env.ctypes.data_as(ctypes.c_void_p), null, null)
        if comp == 1:
            return buf.reshape(di,dj,dk)
        else:
            return buf.transpose(3,0,1,2)
    elif intor_name.startswith('int2e') or intor_name.startswith('int4c'):
        assert(len(shls) == 4)
        di, dj, dk, dl = [num_cgto_of(x) for x in shls]
        buf = numpy.empty((di,dj,dk,dl,comp), dtype, order='F')
        fintor = getattr(libcgto, intor_name)
        fintor(buf.ctypes.data_as(ctypes.c_void_p),
               null, (ctypes.c_int*4)(*shls),
               atm.ctypes.data_as(ctypes.c_void_p), natm,
               bas.ctypes.data_as(ctypes.c_void_p), nbas,
               env.ctypes.data_as(ctypes.c_void_p), null, null)
        if comp == 1:
            return buf.reshape(di,dj,dk,dl)
        else:
            return buf.transpose(4,0,1,2,3)
    elif (intor_name.startswith('int2c') or '1e' in intor_name or
          'ECP' in intor_name):
        assert(len(shls) == 2)
        di = num_cgto_of(shls[0])
        dj = num_cgto_of(shls[1])
        buf = numpy.empty((di,dj,comp), dtype, order='F')
        fintor = getattr(libcgto, intor_name)
        fintor(buf.ctypes.data_as(ctypes.c_void_p),
               null, (ctypes.c_int*2)(*shls),
               atm.ctypes.data_as(ctypes.c_void_p), natm,
               bas.ctypes.data_as(ctypes.c_void_p), nbas,
               env.ctypes.data_as(ctypes.c_void_p), null, null)
        if comp == 1:
            return buf.reshape(di,dj)
        else:
            return buf.transpose(2,0,1)
    else:
        raise RuntimeError('Unknown intor %s' % intor_name)
def make_loc(bas, key):
    """Cumulative AO offsets per shell: ao_loc[i] is the index of the first
    function of shell i, and ao_loc[-1] the total function count."""
    ang = bas[:,ANG_OF]
    nctr = bas[:,NCTR_OF]
    if 'cart' in key:
        dims = (ang+1)*(ang+2)//2 * nctr
    elif 'sph' in key:
        dims = (2*ang+1) * nctr
    else:  # spinor: per-shell size depends on the kappa quantum number
        kappa = bas[:,KAPPA_OF]
        dims = (4*ang+2) * nctr
        neg = kappa < 0
        pos = kappa > 0
        dims[neg] = (2*ang[neg] + 2) * nctr[neg]
        dims[pos] = (2*ang[pos]) * nctr[pos]
    ao_loc = numpy.empty(len(dims)+1, dtype=numpy.int32)
    ao_loc[0] = 0
    dims.cumsum(dtype=numpy.int32, out=ao_loc[1:])
    return ao_loc
def make_cintopt(atm, bas, env, intor):
    """Create a libcint optimizer object for the given integral name.

    The '_sph'/'_cart'/'_spinor' suffix is stripped because the optimizer
    entry points in libcgto are named after the bare integral.
    """
    intor = intor.replace('_sph','').replace('_cart','').replace('_spinor','')
    # libcint expects C-contiguous int32 atm/bas and float64 env arrays.
    c_atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    c_bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    c_env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = c_atm.shape[0]
    nbas = c_bas.shape[0]
    cintopt = lib.c_null_ptr()
    foptinit = getattr(libcgto, intor+'_optimizer')
    foptinit(ctypes.byref(cintopt),
             c_atm.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(natm),
             c_bas.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(nbas),
             c_env.ctypes.data_as(ctypes.c_void_p))
    # Cast to _cintoptHandler so the C optimizer is freed on garbage collection.
    return ctypes.cast(cintopt, _cintoptHandler)
class _cintoptHandler(ctypes.c_void_p):
    """Owning pointer to a libcint optimizer; frees it when collected."""
    def __del__(self):
        # Release the C-side optimizer allocated by *_optimizer().
        libcgto.CINTdel_optimizer(ctypes.byref(self))
def _stand_sym_code(sym):
if isinstance(sym, int):
return 's%d' % sym
elif sym[0] in 'sS':
return sym.lower()
else:
return 's' + sym.lower()
def ascint3(intor_name):
    """Convert a cint2-style function name to the cint3 naming convention."""
    name = intor_name
    if name.startswith('cint'):
        name = name[1:]  # cint2 names carry a leading 'c'
    # Names without an explicit representation suffix default to spinor.
    if not name.endswith(('_cart', '_sph', '_spinor')):
        name += '_spinor'
    return name
if __name__ == '__main__':
    # Smoke test: evaluate the 'int1e_prinvxp_sph' integrals (comp=3) for
    # every shell pair of an H2 molecule in a cc-pVDZ basis.
    from pyscf import gto
    mol = gto.Mole()
    mol.verbose = 0
    mol.output = None
    mol.atom.extend([
        ["H", (0, 0, 0 )],
        ["H", (0, 0, 1 )],
    ])
    mol.basis = {"H": 'cc-pvdz'}
    mol.build()
    # rinv operator is centered on the first atom.
    mol.set_rinv_origin(mol.atom_coord(0))
    for i in range(mol.nbas):
        for j in range(mol.nbas):
            print(i, j, getints_by_shell('int1e_prinvxp_sph', (i,j),
                                         mol._atm, mol._bas, mol._env, 3))
| 44.692308 | 112 | 0.521319 |
import ctypes
import numpy
from pyscf import lib
libcgto = lib.load_library('libcgto')
ANG_OF = 1
NPRIM_OF = 2
NCTR_OF = 3
KAPPA_OF = 4
PTR_EXP = 5
PTR_COEFF = 6
BAS_SLOTS = 8
def getints(intor_name, atm, bas, env, shls_slice=None, comp=1, hermi=0,
            aosym='s1', ao_loc=None, cintopt=None, out=None):
    """Dispatch an integral evaluation to the 2-, 3- or 4-center driver
    based on the (cint3-normalized) integral name prefix."""
    name = ascint3(intor_name)
    if name.startswith(('int1e', 'ECP', 'int2c2e')):
        return getints2c(name, atm, bas, env, shls_slice, comp,
                         hermi, ao_loc, cintopt, out)
    if name.startswith(('int2e', 'int4c1e')):
        return getints4c(name, atm, bas, env, shls_slice, comp,
                         aosym, ao_loc, cintopt, out)
    if name.startswith('int3c'):
        return getints3c(name, atm, bas, env, shls_slice, comp,
                         aosym, ao_loc, cintopt, out)
    raise RuntimeError('Unknown intor %s' % name)
def getints2c(intor_name, atm, bas, env, shls_slice=None, comp=1, hermi=0,
              ao_loc=None, cintopt=None, out=None):
    """Evaluate a 2-center integral matrix over a range of shells.

    Returns a (naoi, naoj) array (comp==1) or (comp, naoi, naoj) array;
    real for _cart/_sph integrals, complex for spinor integrals.
    """
    # libcint expects C-contiguous int32 atm/bas and float64 env arrays.
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = atm.shape[0]
    nbas = bas.shape[0]
    if shls_slice is None:
        shls_slice = (0, nbas, 0, nbas)
    else:
        assert(shls_slice[1] <= nbas and shls_slice[3] <= nbas)
    if ao_loc is None:
        ao_loc = make_loc(bas, intor_name)
    i0, i1, j0, j1 = shls_slice[:4]
    naoi = ao_loc[i1] - ao_loc[i0]
    naoj = ao_loc[j1] - ao_loc[j0]
    if intor_name.endswith('_cart') or intor_name.endswith('_sph'):
        mat = numpy.ndarray((naoi,naoj,comp), numpy.double, out, order='F')
        drv_name = 'GTOint2c'
    else:
        # FIX: numpy.complex was a deprecated alias for the builtin `complex`
        # and was removed in NumPy 1.24; use the explicit complex128 dtype.
        mat = numpy.ndarray((naoi,naoj,comp), numpy.complex128, out, order='F')
        if '2c2e' in intor_name:
            assert(hermi != lib.HERMITIAN and
                   hermi != lib.ANTIHERMI)
        drv_name = 'GTOint2c_spinor'
    if cintopt is None:
        cintopt = make_cintopt(atm, bas, env, intor_name)
    fn = getattr(libcgto, drv_name)
    fn(getattr(libcgto, intor_name), mat.ctypes.data_as(ctypes.c_void_p),
       ctypes.c_int(comp), ctypes.c_int(hermi),
       (ctypes.c_int*4)(*(shls_slice[:4])),
       ao_loc.ctypes.data_as(ctypes.c_void_p), cintopt,
       atm.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(natm),
       bas.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(nbas),
       env.ctypes.data_as(ctypes.c_void_p))
    # Move the component axis first, then drop it for single-component ints.
    mat = mat.transpose(2,0,1)
    if comp == 1:
        mat = mat[0]
    return mat
def getints3c(intor_name, atm, bas, env, shls_slice=None, comp=1,
              aosym='s1', ao_loc=None, cintopt=None, out=None):
    """Evaluate a 3-center integral tensor over a range of shells.

    aosym 's1' gives a (naoi, naoj, naok) block; any other value is treated
    as 's2ij' (bra/ket pair-symmetric, flattened to nij x naok).
    """
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = atm.shape[0]
    nbas = bas.shape[0]
    if shls_slice is None:
        shls_slice = (0, nbas, 0, nbas, 0, nbas)
    else:
        assert(shls_slice[1] <= nbas and
               shls_slice[3] <= nbas and
               shls_slice[5] <= nbas)
    i0, i1, j0, j1, k0, k1 = shls_slice[:6]
    if ao_loc is None:
        ao_loc = make_loc(bas, intor_name)
        # When the k (auxiliary) shells are disjoint from the bra/ket shells,
        # their offsets may follow a different representation.
        if k0 > j1 and k0 > i1:
            if 'ssc' in intor_name:
                # FIX: was ao_loc[k0-1:], whose length exceeds the RHS by one
                # (make_loc(bas[k0:], ...) returns len(bas)-k0+1 offsets), so
                # the assignment raised a broadcast ValueError.  The auxiliary
                # offsets start at index k0.
                ao_loc[k0:] = ao_loc[k0] + make_loc(bas[k0:], 'cart')
            elif 'spinor' in intor_name:
                ao_loc[k0:] = ao_loc[k0] + make_loc(bas[k0:], intor_name)
    naok = ao_loc[k1] - ao_loc[k0]
    if aosym in ('s1',):
        naoi = ao_loc[i1] - ao_loc[i0]
        naoj = ao_loc[j1] - ao_loc[j0]
        shape = (naoi, naoj, naok, comp)
    else:
        aosym = 's2ij'
        # Number of unique (i,j) pairs in the lower-triangular packing.
        nij = ao_loc[i1]*(ao_loc[i1]+1)//2 - ao_loc[i0]*(ao_loc[i0]+1)//2
        shape = (nij, naok, comp)
    if 'spinor' in intor_name:
        # FIX: numpy.complex was removed in NumPy 1.24; use complex128.
        mat = numpy.ndarray(shape, numpy.complex128, out, order='F')
        drv = libcgto.GTOr3c_drv
        fill = getattr(libcgto, 'GTOr3c_fill_'+aosym)
    else:
        mat = numpy.ndarray(shape, numpy.double, out, order='F')
        drv = libcgto.GTOnr3c_drv
        fill = getattr(libcgto, 'GTOnr3c_fill_'+aosym)
    if cintopt is None:
        cintopt = make_cintopt(atm, bas, env, intor_name)
    drv(getattr(libcgto, intor_name), fill,
        mat.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(comp),
        (ctypes.c_int*6)(*(shls_slice[:6])),
        ao_loc.ctypes.data_as(ctypes.c_void_p), cintopt,
        atm.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(natm),
        bas.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(nbas),
        env.ctypes.data_as(ctypes.c_void_p))
    # Bring the component axis to the front; drop it when comp == 1.
    mat = numpy.rollaxis(mat, -1, 0)
    if comp == 1:
        mat = mat[0]
    return mat
def getints4c(intor_name, atm, bas, env, shls_slice=None, comp=1,
              aosym='s1', ao_loc=None, cintopt=None, out=None):
    """Evaluate 4-center (two-electron) integrals.

    aosym selects the permutation-symmetry packing: 's8' uses the fully
    packed _vhf driver over the whole basis; otherwise the result is a
    (nij, nkl) (or (comp, nij, nkl)) array where ij/kl may be pair-packed.
    """
    aosym = _stand_sym_code(aosym)
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    c_atm = atm.ctypes.data_as(ctypes.c_void_p)
    c_bas = bas.ctypes.data_as(ctypes.c_void_p)
    c_env = env.ctypes.data_as(ctypes.c_void_p)
    natm = atm.shape[0]
    nbas = bas.shape[0]
    ao_loc = make_loc(bas, intor_name)
    if cintopt is None:
        cintopt = make_cintopt(atm, bas, env, intor_name)
    if aosym == 's8':
        # 8-fold symmetry: whole basis only, real integrals only.
        assert('_spinor' not in intor_name)
        assert(shls_slice is None)
        from pyscf.scf import _vhf
        nao = ao_loc[-1]
        nao_pair = nao*(nao+1)//2
        out = numpy.ndarray((nao_pair*(nao_pair+1)//2), buffer=out)
        drv = _vhf.libcvhf.GTO2e_cart_or_sph
        drv(getattr(libcgto, intor_name), cintopt,
            out.ctypes.data_as(ctypes.c_void_p),
            ao_loc.ctypes.data_as(ctypes.c_void_p),
            c_atm, ctypes.c_int(natm), c_bas, ctypes.c_int(nbas), c_env)
        return out
    else:
        # Normalize shls_slice to 8 entries (i0,i1,j0,j1,k0,k1,l0,l1).
        if shls_slice is None:
            shls_slice = (0, nbas, 0, nbas, 0, nbas, 0, nbas)
        elif len(shls_slice) == 4:
            shls_slice = shls_slice + (0, nbas, 0, nbas)
        else:
            assert(shls_slice[1] <= nbas and shls_slice[3] <= nbas and
                   shls_slice[5] <= nbas and shls_slice[7] <= nbas)
        i0, i1, j0, j1, k0, k1, l0, l1 = shls_slice
        naoi = ao_loc[i1] - ao_loc[i0]
        naoj = ao_loc[j1] - ao_loc[j0]
        naok = ao_loc[k1] - ao_loc[k0]
        naol = ao_loc[l1] - ao_loc[l0]
        # Pair-packing requires the i and j (resp. k and l) ranges to cover
        # identically-sized shells.
        if aosym in ('s4', 's2ij'):
            nij = naoi * (naoi + 1) // 2
            assert(numpy.all(ao_loc[i0:i1]-ao_loc[i0] == ao_loc[j0:j1]-ao_loc[j0]))
        else:
            nij = naoi * naoj
        if aosym in ('s4', 's2kl'):
            nkl = naok * (naok + 1) // 2
            assert(numpy.all(ao_loc[k0:k1]-ao_loc[k0] == ao_loc[l0:l1]-ao_loc[l0]))
        else:
            nkl = naok * naol
        if comp == 1:
            out = numpy.ndarray((nij,nkl), buffer=out)
        else:
            out = numpy.ndarray((comp,nij,nkl), buffer=out)
        prescreen = lib.c_null_ptr()
        drv = libcgto.GTOnr2e_fill_drv
        drv(getattr(libcgto, intor_name),
            getattr(libcgto, 'GTOnr2e_fill_'+aosym), prescreen,
            out.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(comp),
            (ctypes.c_int*8)(*shls_slice),
            ao_loc.ctypes.data_as(ctypes.c_void_p), cintopt,
            c_atm, ctypes.c_int(natm), c_bas, ctypes.c_int(nbas), c_env)
        return out
def getints_by_shell(intor_name, shls, atm, bas, env, comp=1):
    """Evaluate one integral block for the given shell indices.

    shls holds 2, 3 or 4 shell indices depending on the integral class.
    Returns an ndarray whose leading axes are the shell dimensions, with a
    leading component axis when comp > 1; spinor integrals are complex.
    """
    intor_name = ascint3(intor_name)
    atm = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env = numpy.asarray(env, dtype=numpy.double, order='C')
    natm = ctypes.c_int(atm.shape[0])
    nbas = ctypes.c_int(bas.shape[0])
    # Per-representation number of contracted functions for one shell.
    if intor_name.endswith('_cart'):
        dtype = numpy.double
        def num_cgto_of(basid):
            l = bas[basid,ANG_OF]
            return (l+1)*(l+2)//2 * bas[basid,NCTR_OF]
    elif intor_name.endswith('_sph'):
        dtype = numpy.double
        def num_cgto_of(basid):
            l = bas[basid,ANG_OF]
            return (l*2+1) * bas[basid,NCTR_OF]
    else:
        from pyscf.gto.mole import len_spinor
        # FIX: numpy.complex was a deprecated alias removed in NumPy 1.24;
        # use the explicit complex128 dtype.
        dtype = numpy.complex128
        def num_cgto_of(basid):
            l = bas[basid,ANG_OF]
            k = bas[basid,KAPPA_OF]
            return len_spinor(l,k) * bas[basid,NCTR_OF]
    null = lib.c_null_ptr()
    if intor_name.startswith('int3c'):
        assert(len(shls) == 3)
        di = num_cgto_of(shls[0])
        dj = num_cgto_of(shls[1])
        l = bas[shls[2],ANG_OF]
        if intor_name.endswith('_ssc'):  # mixed spherical-cartesian
            dk = (l+1)*(l+2)//2 * bas[shls[2],NCTR_OF]
        else:
            dk = (l*2+1) * bas[shls[2],NCTR_OF]
        buf = numpy.empty((di,dj,dk,comp), dtype, order='F')
        fintor = getattr(libcgto, intor_name)
        fintor(buf.ctypes.data_as(ctypes.c_void_p),
               null, (ctypes.c_int*3)(*shls),
               atm.ctypes.data_as(ctypes.c_void_p), natm,
               bas.ctypes.data_as(ctypes.c_void_p), nbas,
               env.ctypes.data_as(ctypes.c_void_p), null, null)
        if comp == 1:
            return buf.reshape(di,dj,dk)
        else:
            return buf.transpose(3,0,1,2)
    elif intor_name.startswith('int2e') or intor_name.startswith('int4c'):
        assert(len(shls) == 4)
        di, dj, dk, dl = [num_cgto_of(x) for x in shls]
        buf = numpy.empty((di,dj,dk,dl,comp), dtype, order='F')
        fintor = getattr(libcgto, intor_name)
        fintor(buf.ctypes.data_as(ctypes.c_void_p),
               null, (ctypes.c_int*4)(*shls),
               atm.ctypes.data_as(ctypes.c_void_p), natm,
               bas.ctypes.data_as(ctypes.c_void_p), nbas,
               env.ctypes.data_as(ctypes.c_void_p), null, null)
        if comp == 1:
            return buf.reshape(di,dj,dk,dl)
        else:
            return buf.transpose(4,0,1,2,3)
    elif (intor_name.startswith('int2c') or '1e' in intor_name or
          'ECP' in intor_name):
        assert(len(shls) == 2)
        di = num_cgto_of(shls[0])
        dj = num_cgto_of(shls[1])
        buf = numpy.empty((di,dj,comp), dtype, order='F')
        fintor = getattr(libcgto, intor_name)
        fintor(buf.ctypes.data_as(ctypes.c_void_p),
               null, (ctypes.c_int*2)(*shls),
               atm.ctypes.data_as(ctypes.c_void_p), natm,
               bas.ctypes.data_as(ctypes.c_void_p), nbas,
               env.ctypes.data_as(ctypes.c_void_p), null, null)
        if comp == 1:
            return buf.reshape(di,dj)
        else:
            return buf.transpose(2,0,1)
    else:
        raise RuntimeError('Unknown intor %s' % intor_name)
def make_loc(bas, key):
    """Build the AO offset array: ao_loc[i] is the index of the first basis
    function of shell i; ao_loc[-1] is the total number of functions."""
    if 'cart' in key:
        l = bas[:,ANG_OF]
        dims = (l+1)*(l+2)//2 * bas[:,NCTR_OF]
    elif 'sph' in key:
        dims = (bas[:,ANG_OF]*2+1) * bas[:,NCTR_OF]
    else:
        # spinor representation: shell size depends on kappa; the k == 0
        # default below is overwritten for k < 0 and k > 0 shells.
        l = bas[:,ANG_OF]
        k = bas[:,KAPPA_OF]
        dims = (l*4+2) * bas[:,NCTR_OF]
        dims[k<0] = (l[k<0] * 2 + 2) * bas[k<0,NCTR_OF]
        dims[k>0] = (l[k>0] * 2 ) * bas[k>0,NCTR_OF]
    ao_loc = numpy.empty(len(dims)+1, dtype=numpy.int32)
    ao_loc[0] = 0
    dims.cumsum(dtype=numpy.int32, out=ao_loc[1:])
    return ao_loc
def make_cintopt(atm, bas, env, intor):
    """Create (and take ownership of) a libcint optimizer for *intor*."""
    # The optimizer entry points are named after the bare integral name.
    base_name = intor.replace('_sph','').replace('_cart','').replace('_spinor','')
    atm_arr = numpy.asarray(atm, dtype=numpy.int32, order='C')
    bas_arr = numpy.asarray(bas, dtype=numpy.int32, order='C')
    env_arr = numpy.asarray(env, dtype=numpy.double, order='C')
    n_atoms = atm_arr.shape[0]
    n_shells = bas_arr.shape[0]
    opt_ptr = lib.c_null_ptr()
    init_fn = getattr(libcgto, base_name + '_optimizer')
    init_fn(ctypes.byref(opt_ptr),
            atm_arr.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(n_atoms),
            bas_arr.ctypes.data_as(ctypes.c_void_p), ctypes.c_int(n_shells),
            env_arr.ctypes.data_as(ctypes.c_void_p))
    # _cintoptHandler frees the C optimizer when garbage-collected.
    return ctypes.cast(opt_ptr, _cintoptHandler)
class _cintoptHandler(ctypes.c_void_p):
    """void* subclass that frees its libcint optimizer on finalization."""
    def __del__(self):
        libcgto.CINTdel_optimizer(ctypes.byref(self))
def _stand_sym_code(sym):
if isinstance(sym, int):
return 's%d' % sym
elif sym[0] in 'sS':
return sym.lower()
else:
return 's' + sym.lower()
def ascint3(intor_name):
    """Convert a cint2-style integral name to the cint3 naming convention:
    drop the leading 'c' and default the representation suffix to _spinor."""
    if intor_name.startswith('cint'):
        intor_name = intor_name[1:]
    if not (intor_name.endswith('_cart') or
            intor_name.endswith('_sph') or
            intor_name.endswith('_spinor')):
        intor_name = intor_name + '_spinor'
    return intor_name
if __name__ == '__main__':
    # Smoke test (comment-stripped duplicate): print 'int1e_prinvxp_sph'
    # integral blocks (comp=3) for all shell pairs of H2/cc-pVDZ.
    from pyscf import gto
    mol = gto.Mole()
    mol.verbose = 0
    mol.output = None
    mol.atom.extend([
        ["H", (0, 0, 0 )],
        ["H", (0, 0, 1 )],
    ])
    mol.basis = {"H": 'cc-pvdz'}
    mol.build()
    mol.set_rinv_origin(mol.atom_coord(0))
    for i in range(mol.nbas):
        for j in range(mol.nbas):
            print(i, j, getints_by_shell('int1e_prinvxp_sph', (i,j),
                                         mol._atm, mol._bas, mol._env, 3))
| true | true |
f72e1a89e2c0ed5dc395019217f028511c27787e | 466 | py | Python | examples/docs_snippets/docs_snippets/getting_started/hello_world.py | withshubh/dagster | ff4a0db53e126f44097a337eecef54988cc718ef | [
"Apache-2.0"
] | null | null | null | examples/docs_snippets/docs_snippets/getting_started/hello_world.py | withshubh/dagster | ff4a0db53e126f44097a337eecef54988cc718ef | [
"Apache-2.0"
] | null | null | null | examples/docs_snippets/docs_snippets/getting_started/hello_world.py | withshubh/dagster | ff4a0db53e126f44097a337eecef54988cc718ef | [
"Apache-2.0"
] | null | null | null | """isort:skip_file"""
# start_pipeline_marker
from dagster import pipeline, solid
@solid
def get_name(_):
    """Solid that returns the constant string "dagster" (context unused)."""
    return "dagster"
@solid
def hello(context, name: str):
    """Solid that logs a greeting for *name* via the solid context logger."""
    context.log.info("Hello, {name}!".format(name=name))
@pipeline
def hello_pipeline():
    """Pipeline wiring: get_name's output feeds hello's `name` input."""
    hello(get_name())
# end_pipeline_marker
# start_execute_marker
from dagster import execute_pipeline
if __name__ == "__main__":
    # Execute the pipeline in-process when run as a script.
    result = execute_pipeline(hello_pipeline)
# end_execute_marker
| 14.121212 | 56 | 0.73176 |
from dagster import pipeline, solid
@solid
def get_name(_):
    """Emit the constant name "dagster"; the context argument is ignored."""
    return "dagster"
@solid
def hello(context, name: str):
    """Log "Hello, <name>!" using the solid's context logger."""
    context.log.info("Hello, {name}!".format(name=name))
@pipeline
def hello_pipeline():
    """Two-solid pipeline: get_name() -> hello()."""
    hello(get_name())
from dagster import execute_pipeline
if __name__ == "__main__":
    # Run the pipeline synchronously when invoked as a script.
    result = execute_pipeline(hello_pipeline)
| true | true |
f72e1ac178437c28c22c6de700ed7a60dbbd12cd | 295 | py | Python | 11_if_elif.py | SourceLastBenchCoder/Python_Programming | 4c4281252ab657cbb781f98fe5c945738a2c618e | [
"MIT"
] | null | null | null | 11_if_elif.py | SourceLastBenchCoder/Python_Programming | 4c4281252ab657cbb781f98fe5c945738a2c618e | [
"MIT"
] | null | null | null | 11_if_elif.py | SourceLastBenchCoder/Python_Programming | 4c4281252ab657cbb781f98fe5c945738a2c618e | [
"MIT"
] | null | null | null | # Based on condition we can execute some statements
# if elif help us to do
def is_leap(year):
    """Gregorian leap-year test: century years must be divisible by 400,
    other years by 4."""
    return (year % 100 == 0 and year % 400 == 0) or (year % 100 != 0 and year % 4 == 0)


def _main():
    """Prompt for a year and print whether it is a leap year."""
    print("Welcome To Leap Year Check")
    year = int(input("Enter year you need to check : "))
    if is_leap(year):
        print("Leap Year")
    else:
        print("Not Leap Year")


# Guarding the interactive part makes the module import-safe and lets
# is_leap() be reused/tested; script behavior is unchanged.
if __name__ == '__main__':
    _main()
| 26.818182 | 71 | 0.654237 |
# Interactive leap-year checker (comment-stripped duplicate copy).
print("Welcome To Leap Year Check")
year = int(input("Enter year you need to check : "))
# Gregorian rule: century years need 400 | year; other years need 4 | year.
if (year % 100==0 and year % 400==0) or (year%100!=0 and year % 4 ==0):
    print("Leap Year")
else:
    print("Not Leap Year")
| true | true |
f72e1bad2c6091aacd8048c0c01e469eb65f3ad5 | 389 | py | Python | venv/Scripts/pip3.7-script.py | nazar1ous/web_map | 178d82ec4d2eb9f64cd17e8d7bc16f9280b34ce6 | [
"MIT"
] | null | null | null | venv/Scripts/pip3.7-script.py | nazar1ous/web_map | 178d82ec4d2eb9f64cd17e8d7bc16f9280b34ce6 | [
"MIT"
] | null | null | null | venv/Scripts/pip3.7-script.py | nazar1ous/web_map | 178d82ec4d2eb9f64cd17e8d7bc16f9280b34ce6 | [
"MIT"
] | null | null | null | #!D:\web_map\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
# NOTE: auto-generated setuptools console-script wrapper -- do not hand-edit.
if __name__ == '__main__':
    # Strip the "-script.py"/".exe" wrapper suffix so argv[0] shows 'pip3.7'.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
    )
| 29.923077 | 70 | 0.652956 |
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
# NOTE: generated setuptools entry-point wrapper; regenerate rather than edit.
if __name__ == '__main__':
    # Normalize argv[0] by removing the script/exe suffix, then run pip's
    # console_scripts entry point and propagate its exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
    )
| true | true |
f72e1bd75c24985218086f542fda4a69a65ae9c3 | 1,211 | py | Python | utils.py | leimao/Logistic_Regression_Python | a64ed85d0bea8010d85e9c1e056a3af09b2e43c4 | [
"MIT"
] | 1 | 2019-07-03T19:39:22.000Z | 2019-07-03T19:39:22.000Z | utils.py | leimao/Logistic_Regression_Python | a64ed85d0bea8010d85e9c1e056a3af09b2e43c4 | [
"MIT"
] | null | null | null | utils.py | leimao/Logistic_Regression_Python | a64ed85d0bea8010d85e9c1e056a3af09b2e43c4 | [
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
def train_test_splitter(X, y, ratio = 0.8, random_seed = 0):
    """Shuffle row indices and split X/y into train/test partitions.

    *ratio* is the train fraction. Seeds numpy's global RNG, so the split
    is deterministic for a given random_seed (note the global side effect).
    """
    assert(len(X) == len(y)), "The number of points in feature matrix and target vector should be the same."
    np.random.seed(random_seed)
    num_points = len(y)
    order = np.arange(num_points)
    np.random.shuffle(order)
    cut = int(num_points * ratio)
    train_rows, test_rows = order[:cut], order[cut:]
    return X[train_rows,:], X[test_rows,:], y[train_rows], y[test_rows]
def error_rate(y, y_predicted):
    """Fraction of positions where the prediction differs from the target."""
    assert len(y) == len(y_predicted), "The number of targets and predictions should be the same."
    assert len(y) != 0, "The number of targets and predictions should not be zero."
    mismatches = np.array(y) != np.array(y_predicted)
    return np.count_nonzero(mismatches) / len(y)
def plot_losses(losses, savefig = False, showfig = False, filename = 'loss.png'):
    """Plot the loss curve over iterations; optionally save and/or display it."""
    fig = plt.figure(figsize=(12, 8))
    iterations = np.arange(len(losses))
    plt.plot(iterations, losses, color='r', marker='o', label='Loss')
    plt.legend()
    plt.ylabel('Loss')
    plt.xlabel('Number of Iterations')
    if savefig:
        fig.savefig(filename, format='png', dpi=600, bbox_inches='tight')
    if showfig:
        plt.show()
    plt.close()
import numpy as np
import matplotlib.pyplot as plt
def train_test_splitter(X, y, ratio = 0.8, random_seed = 0):
    """Randomly split X/y into train/test parts; *ratio* is the train
    fraction. Seeds numpy's global RNG (global side effect), so the split
    is reproducible for a fixed random_seed."""
    assert(len(X) == len(y)), "The number of points in feature matrix and target vector should be the same."
    np.random.seed(random_seed)
    n = len(y)
    idx = np.arange(n)
    np.random.shuffle(idx)
    train_idx = idx[:int(n * ratio)]
    test_idx = idx[int(n * ratio):]
    return X[train_idx,:], X[test_idx,:], y[train_idx], y[test_idx]
def error_rate(y, y_predicted):
    """Return the fraction of positions where y_predicted differs from y."""
    assert len(y) == len(y_predicted), "The number of targets and predictions should be the same."
    assert len(y) != 0, "The number of targets and predictions should not be zero."
    return np.sum(np.array(y) != np.array(y_predicted)) / len(y)
def plot_losses(losses, savefig = False, showfig = False, filename = 'loss.png'):
    """Draw the per-iteration loss curve, optionally saving to *filename*
    and/or showing it interactively; always closes the figure."""
    figure = plt.figure(figsize=(12, 8))
    xs = np.arange(len(losses))
    plt.plot(xs, losses, color='r', marker='o', label='Loss')
    plt.legend()
    plt.ylabel('Loss')
    plt.xlabel('Number of Iterations')
    if savefig:
        figure.savefig(filename, format='png', dpi=600, bbox_inches='tight')
    if showfig:
        plt.show()
    plt.close()
f72e1d8baa7253d199d596179a9f8d93d67099f1 | 904 | py | Python | rainy/prelude.py | alexmlamb/blocks_rl_gru_setup | fe462f79518d14f828e2c7cbf210cd105ff982f4 | [
"Apache-2.0"
] | null | null | null | rainy/prelude.py | alexmlamb/blocks_rl_gru_setup | fe462f79518d14f828e2c7cbf210cd105ff982f4 | [
"Apache-2.0"
] | null | null | null | rainy/prelude.py | alexmlamb/blocks_rl_gru_setup | fe462f79518d14f828e2c7cbf210cd105ff982f4 | [
"Apache-2.0"
] | null | null | null | from torch import nn, Tensor
from typing import Any, Callable, Iterable, Sequence, Tuple, TypeVar, Union
from .utils.device import Device
try:
from typing import GenericMeta, NamedTupleMeta # type: ignore
class GenericNamedMeta(NamedTupleMeta, GenericMeta):
pass
except ImportError:
from typing import NamedTupleMeta # type: ignore
GenericNamedMeta = NamedTupleMeta # type: ignore
T = TypeVar('T')
Self = Any
class Array(Sequence[T]):
    """Structural stub used in type annotations for array-like values.

    Only the members this package's annotations rely on are declared; all
    bodies are `...` stubs. Presumably stands in for numpy.ndarray /
    torch.Tensor -- confirm against call sites.
    """
    @property
    def shape(self) -> tuple:
        ...
    def squeeze(self) -> Self:
        ...
    def transpose(self, *args) -> Self:
        ...
    def __rsub__(self, value: Any) -> Self:
        ...
    def __truediv__(self, rvalue: Any) -> Self:
        ...
Action = TypeVar('Action', int, Array)
State = TypeVar('State')
NetFn = Callable[[Tuple[int, ...], int, Device], nn.Module]
Params = Iterable[Union[Tensor, dict]]
| 22.04878 | 75 | 0.643805 | from torch import nn, Tensor
from typing import Any, Callable, Iterable, Sequence, Tuple, TypeVar, Union
from .utils.device import Device
try:
from typing import GenericMeta, NamedTupleMeta
class GenericNamedMeta(NamedTupleMeta, GenericMeta):
pass
except ImportError:
from typing import NamedTupleMeta
GenericNamedMeta = NamedTupleMeta
T = TypeVar('T')
Self = Any
class Array(Sequence[T]):
    """Typing-only stub for array-like objects (all method bodies are `...`).

    NOTE(review): appears intended to cover ndarray/Tensor duck types --
    verify against the annotations that use it.
    """
    @property
    def shape(self) -> tuple:
        ...
    def squeeze(self) -> Self:
        ...
    def transpose(self, *args) -> Self:
        ...
    def __rsub__(self, value: Any) -> Self:
        ...
    def __truediv__(self, rvalue: Any) -> Self:
        ...
Action = TypeVar('Action', int, Array)
State = TypeVar('State')
NetFn = Callable[[Tuple[int, ...], int, Device], nn.Module]
Params = Iterable[Union[Tensor, dict]]
| true | true |
f72e1db093ed93bef84afb8b9b2990ff90634668 | 953 | py | Python | type/cell/semantic_cell_type.py | abhinav-kumar-thakur/table-understanding | 5448e02dec87ea3974bfc118ebeace86e8918285 | [
"MIT"
] | null | null | null | type/cell/semantic_cell_type.py | abhinav-kumar-thakur/table-understanding | 5448e02dec87ea3974bfc118ebeace86e8918285 | [
"MIT"
] | null | null | null | type/cell/semantic_cell_type.py | abhinav-kumar-thakur/table-understanding | 5448e02dec87ea3974bfc118ebeace86e8918285 | [
"MIT"
] | null | null | null | from type.cell.cell_type import CellType
class SemanticCellType:
    """Registry of semantic cell types with id <-> name <-> object lookups."""
    EMPTY = CellType("empty", 0)
    CARDINAL = CellType("cardinal", 1)
    STRING = CellType("string", 2)
    DATETIME = CellType("datetime", 3)
    LOCATION = CellType("location", 4)
    ORG = CellType("organization", 5)
    ORDINAL = CellType("ordinal", 6)
    NOMINAL = CellType("nominal", 7)
    PERSON = CellType("person", 8)
    # EVENT (id 9) is commented out here and in inverse_dict -- presumably
    # not yet supported; keep both in sync if it is enabled.
    #EVENT = CellType("event", 9)
    # Name -> CellType lookup; drives the derived id2str/id2obj maps below.
    inverse_dict = {
        "ordinal": ORDINAL,
        "cardinal": CARDINAL,
        "nominal": NOMINAL,
        "location": LOCATION,
        "person": PERSON,
        "organization": ORG,
        #"event": EVENT,
        "datetime": DATETIME,
        "empty": EMPTY,
        "string": STRING
    }
    # Derived lookups: numeric id -> name string / CellType object.
    id2str = {_.id(): _.str() for _ in inverse_dict.values()}
    id2obj = {_.id(): _ for _ in inverse_dict.values()}
    @staticmethod
    def cell_type_count():
        # Number of registered (active) cell types.
        return len(SemanticCellType.inverse_dict)
| 25.756757 | 61 | 0.593914 | from type.cell.cell_type import CellType
class SemanticCellType:
    """Semantic cell-type registry (comment-stripped duplicate copy)."""
    EMPTY = CellType("empty", 0)
    CARDINAL = CellType("cardinal", 1)
    STRING = CellType("string", 2)
    DATETIME = CellType("datetime", 3)
    LOCATION = CellType("location", 4)
    ORG = CellType("organization", 5)
    ORDINAL = CellType("ordinal", 6)
    NOMINAL = CellType("nominal", 7)
    PERSON = CellType("person", 8)
    # Canonical name -> CellType mapping; source of the derived maps below.
    inverse_dict = {
        "ordinal": ORDINAL,
        "cardinal": CARDINAL,
        "nominal": NOMINAL,
        "location": LOCATION,
        "person": PERSON,
        "organization": ORG,
        "datetime": DATETIME,
        "empty": EMPTY,
        "string": STRING
    }
    # id -> name string and id -> CellType object lookups.
    id2str = {_.id(): _.str() for _ in inverse_dict.values()}
    id2obj = {_.id(): _ for _ in inverse_dict.values()}
    @staticmethod
    def cell_type_count():
        """Return the number of registered cell types."""
        return len(SemanticCellType.inverse_dict)
| true | true |
f72e1df6602968e74951389c310cdbd1d8165481 | 713 | py | Python | utils/ldap_client.py | bsquizz/qontract-reconcile | 39cecc4a3ca601931a9c37639d5e03f87b9d4266 | [
"Apache-2.0"
] | null | null | null | utils/ldap_client.py | bsquizz/qontract-reconcile | 39cecc4a3ca601931a9c37639d5e03f87b9d4266 | [
"Apache-2.0"
] | null | null | null | utils/ldap_client.py | bsquizz/qontract-reconcile | 39cecc4a3ca601931a9c37639d5e03f87b9d4266 | [
"Apache-2.0"
] | null | null | null | from ldap3 import Server, Connection, ALL
from utils.config import get_config
_client = None
_base_dn = None
def init(serverUrl):
    """Lazily create and cache the module-level ldap3 Connection.

    The connection is anonymous (user/password None) and bound immediately
    via auto_bind.
    """
    global _client
    if _client is None:
        server = Server(serverUrl, get_info=ALL)
        _client = Connection(server, None, None, auto_bind=True)
    return _client
def init_from_config():
    """Read the LDAP server URL and base DN from app config, then init()."""
    global _base_dn
    config = get_config()
    serverUrl = config['ldap']['server']
    _base_dn = config['ldap']['base_dn']
    return init(serverUrl)
def user_exists(username):
    """Return whether a person entry exists at uid=<username>,<base_dn>.

    NOTE(review): *username* is interpolated into the DN without escaping --
    LDAP-injection risk for untrusted input; consider ldap3's DN escaping.
    """
    global _client
    global _base_dn
    init_from_config()
    search_filter = "uid={},{}".format(username, _base_dn)
    # ldap3 Connection.search(search_base, search_filter): the formatted DN
    # is passed as the search base, '(objectclass=person)' as the filter.
    return _client.search(search_filter, '(objectclass=person)')
| 18.763158 | 64 | 0.68864 | from ldap3 import Server, Connection, ALL
from utils.config import get_config
_client = None
_base_dn = None
def init(serverUrl):
    """Create the cached module-level Connection on first use (anonymous bind)."""
    global _client
    if _client is None:
        server = Server(serverUrl, get_info=ALL)
        _client = Connection(server, None, None, auto_bind=True)
    return _client
def init_from_config():
    """Load ldap.server / ldap.base_dn from config and initialize the client."""
    global _base_dn
    config = get_config()
    serverUrl = config['ldap']['server']
    _base_dn = config['ldap']['base_dn']
    return init(serverUrl)
def user_exists(username):
    """Check for a person entry at uid=<username>,<base_dn>.

    NOTE(review): unescaped username in the DN -- potential LDAP injection.
    """
    global _client
    global _base_dn
    init_from_config()
    search_filter = "uid={},{}".format(username, _base_dn)
    return _client.search(search_filter, '(objectclass=person)')
| true | true |
f72e1fe51499ee40b28d7a5f72bab175f6ef679c | 29,831 | py | Python | lib/streamlit/server/server.py | imjuangarcia/streamlit | 7a6ddf9490f34c674c3a952469ec95091ba8ac79 | [
"Apache-2.0"
] | null | null | null | lib/streamlit/server/server.py | imjuangarcia/streamlit | 7a6ddf9490f34c674c3a952469ec95091ba8ac79 | [
"Apache-2.0"
] | 35 | 2021-10-12T04:41:39.000Z | 2022-03-28T04:50:45.000Z | lib/streamlit/server/server.py | imjuangarcia/streamlit | 7a6ddf9490f34c674c3a952469ec95091ba8ac79 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import base64
import binascii
import logging
import os
import socket
import sys
import errno
import json
import time
import traceback
import click
from enum import Enum
from typing import (
Any,
Dict,
Optional,
Tuple,
Callable,
Awaitable,
Generator,
List,
Set,
)
import tornado.concurrent
import tornado.gen
import tornado.ioloop
import tornado.locks
import tornado.netutil
import tornado.web
import tornado.websocket
from tornado.websocket import WebSocketHandler
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from streamlit import config
from streamlit import file_util
from streamlit import source_util
from streamlit import util
from streamlit.caching import get_memo_stats_provider, get_singleton_stats_provider
from streamlit.config_option import ConfigOption
from streamlit.forward_msg_cache import ForwardMsgCache
from streamlit.forward_msg_cache import create_reference_msg
from streamlit.forward_msg_cache import populate_hash_if_needed
from streamlit.in_memory_file_manager import in_memory_file_manager
from streamlit.legacy_caching.caching import _mem_caches
from streamlit.app_session import AppSession
from streamlit.stats import StatsHandler, StatsManager
from streamlit.uploaded_file_manager import UploadedFileManager
from streamlit.logger import get_logger
from streamlit.components.v1.components import ComponentRegistry
from streamlit.components.v1.components import ComponentRequestHandler
from streamlit.proto.BackMsg_pb2 import BackMsg
from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
from streamlit.server.upload_file_request_handler import (
UploadFileRequestHandler,
UPLOAD_FILE_ROUTE,
)
from streamlit.session_data import SessionData
from streamlit.state import (
SCRIPT_RUN_WITHOUT_ERRORS_KEY,
SessionStateStatProvider,
)
from streamlit.server.routes import AddSlashHandler
from streamlit.server.routes import AssetsFileHandler
from streamlit.server.routes import DebugHandler
from streamlit.server.routes import HealthHandler
from streamlit.server.routes import MediaFileHandler
from streamlit.server.routes import MessageCacheHandler
from streamlit.server.routes import StaticFileHandler
from streamlit.server.server_util import is_cacheable_msg
from streamlit.server.server_util import is_url_from_allowed_origins
from streamlit.server.server_util import make_url_path_regex
from streamlit.server.server_util import serialize_forward_msg
from streamlit.server.server_util import get_max_message_size_bytes
from streamlit.watcher import LocalSourcesWatcher
LOGGER = get_logger(__name__)
TORNADO_SETTINGS = {
# Gzip HTTP responses.
"compress_response": True,
# Ping every 1s to keep WS alive.
# 2021.06.22: this value was previously 20s, and was causing
# connection instability for a small number of users. This smaller
# ping_interval fixes that instability.
# https://github.com/streamlit/streamlit/issues/3196
"websocket_ping_interval": 1,
# If we don't get a ping response within 30s, the connection
# is timed out.
"websocket_ping_timeout": 30,
}
# When server.port is not available it will look for the next available port
# up to MAX_PORT_SEARCH_RETRIES.
MAX_PORT_SEARCH_RETRIES = 100
# When server.address starts with this prefix, the server will bind
# to an unix socket.
UNIX_SOCKET_PREFIX = "unix://"
# Wait for the script run result for 60s and if no result is available give up
SCRIPT_RUN_CHECK_TIMEOUT = 60
class SessionInfo:
    """Type stored in our _session_info_by_id dict.
    For each AppSession, the server tracks that session's
    script_run_count. This is used to track the age of messages in
    the ForwardMsgCache.
    """
    def __init__(self, ws: WebSocketHandler, session: AppSession):
        """Initialize a SessionInfo instance.
        Parameters
        ----------
        ws : _BrowserWebSocketHandler
            The websocket corresponding to this session.
        session : AppSession
            The AppSession object.
        """
        self.session = session
        self.ws = ws
        # Count of script runs for this session; used to age ForwardMsgCache
        # entries (see class docstring).
        self.script_run_count = 0
    def __repr__(self) -> str:
        # Generic repr helper from streamlit.util.
        return util.repr_(self)
class State(Enum):
    """Coarse lifecycle states for the Server (values mirror member names)."""
    INITIAL = "INITIAL"
    WAITING_FOR_FIRST_BROWSER = "WAITING_FOR_FIRST_BROWSER"
    ONE_OR_MORE_BROWSERS_CONNECTED = "ONE_OR_MORE_BROWSERS_CONNECTED"
    NO_BROWSERS_CONNECTED = "NO_BROWSERS_CONNECTED"
    STOPPING = "STOPPING"
    STOPPED = "STOPPED"
class RetriesExceeded(Exception):
    """Raised by start_listening_tcp_socket when no free port is found
    within MAX_PORT_SEARCH_RETRIES attempts."""
    pass
def server_port_is_manually_set() -> bool:
    """True if the user explicitly configured server.port."""
    return config.is_manually_set("server.port")
def server_address_is_unix_socket() -> bool:
    """True when server.address is configured as a "unix://" socket path."""
    configured = config.get_option("server.address")
    if configured is None:
        return False
    return configured.startswith(UNIX_SOCKET_PREFIX)
def start_listening(app: tornado.web.Application) -> None:
    """Makes the server start listening at the configured port.

    In case the port is already taken it tries listening to the next available
    port. It will error after MAX_PORT_SEARCH_RETRIES attempts.
    """
    # server.maxUploadSize is configured in MB; HTTPServer wants bytes.
    http_server = HTTPServer(
        app, max_buffer_size=config.get_option("server.maxUploadSize") * 1024 * 1024
    )
    # A "unix://"-prefixed server.address selects a unix domain socket.
    if server_address_is_unix_socket():
        start_listening_unix_socket(http_server)
    else:
        start_listening_tcp_socket(http_server)
def start_listening_unix_socket(http_server: HTTPServer) -> None:
    """Bind *http_server* to the unix domain socket named by server.address."""
    configured = config.get_option("server.address")
    # Strip the "unix://" prefix and expand "~" in the remaining path.
    socket_path = os.path.expanduser(configured[len(UNIX_SOCKET_PREFIX) :])
    http_server.add_socket(tornado.netutil.bind_unix_socket(socket_path))
def start_listening_tcp_socket(http_server: HTTPServer) -> None:
    """Bind to the configured TCP address/port, probing forward on conflicts.

    When ``server.port`` was set manually by the user and is occupied, we
    exit the process immediately. Otherwise the port is bumped (skipping
    3000, which the frontend dev server uses) and the bind is retried, for
    at most MAX_PORT_SEARCH_RETRIES attempts.

    Raises
    ------
    RetriesExceeded
        If no free port was found within the retry budget.
    """
    attempts = 0
    port = None
    while attempts < MAX_PORT_SEARCH_RETRIES:
        address = config.get_option("server.address")
        port = config.get_option("server.port")
        try:
            http_server.listen(port, address)
            return  # Bound successfully; nothing left to do.
        except (OSError, socket.error) as exc:
            # Anything other than "address in use" is unexpected: propagate.
            if exc.errno != errno.EADDRINUSE:
                raise
            if server_port_is_manually_set():
                LOGGER.error("Port %s is already in use", port)
                sys.exit(1)
            LOGGER.debug(
                "Port %s already in use, trying to use the next one.", port
            )
            port += 1
            # Save port 3000 because it is used for the development
            # server in the front end.
            if port == 3000:
                port += 1
            config.set_option(
                "server.port", port, ConfigOption.STREAMLIT_DEFINITION
            )
            attempts += 1
    raise RetriesExceeded(
        f"Cannot start Streamlit server. Port {port} is already in use, and "
        f"Streamlit was unable to find a free port after {MAX_PORT_SEARCH_RETRIES} attempts.",
    )
class Server:
    """Singleton Tornado server that owns every browser AppSession.

    ``start()`` builds the web app, binds the listening socket, and spawns
    ``_loop_coroutine`` on the ioloop; that loop flushes queued ForwardMsgs
    from each AppSession to its websocket until ``stop()`` is called.
    """
    # Process-wide singleton; assigned once in __init__, read via get_current().
    _singleton: Optional["Server"] = None
    @classmethod
    def get_current(cls) -> "Server":
        """
        Returns
        -------
        Server
            The singleton Server object.

        Raises
        ------
        RuntimeError
            If no Server has been created yet.
        """
        if Server._singleton is None:
            raise RuntimeError("Server has not been initialized yet")
        return Server._singleton
    def __init__(
        self, ioloop: IOLoop, main_script_path: str, command_line: Optional[str]
    ):
        """Create the server. It won't be started yet.

        Parameters
        ----------
        ioloop : IOLoop
            The Tornado event loop that all server callbacks are scheduled on.
        main_script_path : str
            Path of the script each new AppSession will run.
        command_line : Optional[str]
            The command line the app was launched with (stored as "" if None).

        Raises
        ------
        RuntimeError
            If a Server instance already exists in this process.
        """
        if Server._singleton is not None:
            raise RuntimeError("Server already initialized. Use .get_current() instead")
        Server._singleton = self
        _set_tornado_log_levels()
        self._ioloop = ioloop
        self._main_script_path = main_script_path
        self._command_line = command_line if command_line is not None else ""
        # Mapping of AppSession.id -> SessionInfo.
        self._session_info_by_id: Dict[str, SessionInfo] = {}
        # Event set by stop() to make _loop_coroutine exit.
        self._must_stop = tornado.locks.Event()
        self._state = State.INITIAL
        self._message_cache = ForwardMsgCache()
        self._uploaded_file_mgr = UploadedFileManager()
        self._uploaded_file_mgr.on_files_updated.connect(self.on_files_updated)
        self._session_data: Optional[SessionData] = None
        # Notified whenever a browser connects, to wake the run loop.
        self._has_connection = tornado.locks.Condition()
        # Event set when a session has enqueued messages that need flushing.
        self._need_send_data = tornado.locks.Event()
        # StatsManager: aggregates cache/session statistics for /st-metrics.
        self._stats_mgr = StatsManager()
        self._stats_mgr.register_provider(get_memo_stats_provider())
        self._stats_mgr.register_provider(get_singleton_stats_provider())
        self._stats_mgr.register_provider(_mem_caches)
        self._stats_mgr.register_provider(self._message_cache)
        self._stats_mgr.register_provider(in_memory_file_manager)
        self._stats_mgr.register_provider(self._uploaded_file_mgr)
        self._stats_mgr.register_provider(
            SessionStateStatProvider(self._session_info_by_id)
        )
    def __repr__(self) -> str:
        return util.repr_(self)
    @property
    def main_script_path(self) -> str:
        """Path of the Streamlit script this server runs."""
        return self._main_script_path
    def get_session_by_id(self, session_id: str) -> Optional[AppSession]:
        """Return the AppSession corresponding to the given id, or None if
        no such session exists."""
        session_info = self._get_session_info(session_id)
        if session_info is None:
            return None
        return session_info.session
    def on_files_updated(self, session_id: str) -> None:
        """Event handler for UploadedFileManager.on_file_added.
        Ensures that uploaded files from stale sessions get deleted.
        """
        session_info = self._get_session_info(session_id)
        if session_info is None:
            # If an uploaded file doesn't belong to an existing session,
            # remove it so it doesn't stick around forever.
            self._uploaded_file_mgr.remove_session_files(session_id)
    def _get_session_info(self, session_id: str) -> Optional[SessionInfo]:
        """Return the SessionInfo with the given id, or None if no such
        session exists.
        """
        return self._session_info_by_id.get(session_id, None)
    def start(self, on_started: Callable[["Server"], Any]) -> None:
        """Start the server.
        Parameters
        ----------
        on_started : callable
            A callback that will be called when the server's run-loop
            has started, and the server is ready to begin receiving clients.

        Raises
        ------
        RuntimeError
            If the server has already been started.
        """
        if self._state != State.INITIAL:
            raise RuntimeError("Server has already been started")
        LOGGER.debug("Starting server...")
        app = self._create_app()
        start_listening(app)
        port = config.get_option("server.port")
        LOGGER.debug("Server started on port %s", port)
        # The run loop executes on the ioloop; this call returns immediately.
        self._ioloop.spawn_callback(self._loop_coroutine, on_started)
    def _create_app(self) -> tornado.web.Application:
        """Create our tornado web app.

        Routes: websocket stream, health checks, debug/stats endpoints, the
        message cache, file upload, static assets, media, and custom
        components — all prefixed with the configured base URL path.
        """
        base = config.get_option("server.baseUrlPath")
        routes: List[Any] = [
            (
                make_url_path_regex(base, "stream"),
                _BrowserWebSocketHandler,
                dict(server=self),
            ),
            (
                make_url_path_regex(base, "healthz"),
                HealthHandler,
                dict(callback=lambda: self.is_ready_for_browser_connection),
            ),
            (make_url_path_regex(base, "debugz"), DebugHandler, dict(server=self)),
            (
                make_url_path_regex(base, "message"),
                MessageCacheHandler,
                dict(cache=self._message_cache),
            ),
            (
                make_url_path_regex(base, "st-metrics"),
                StatsHandler,
                dict(stats_manager=self._stats_mgr),
            ),
            (
                make_url_path_regex(
                    base,
                    UPLOAD_FILE_ROUTE,
                ),
                UploadFileRequestHandler,
                dict(
                    file_mgr=self._uploaded_file_mgr,
                    get_session_info=self._get_session_info,
                ),
            ),
            (
                make_url_path_regex(base, "assets/(.*)"),
                AssetsFileHandler,
                {"path": "%s/" % file_util.get_assets_dir()},
            ),
            (make_url_path_regex(base, "media/(.*)"), MediaFileHandler, {"path": ""}),
            (
                make_url_path_regex(base, "component/(.*)"),
                ComponentRequestHandler,
                dict(registry=ComponentRegistry.instance()),
            ),
        ]
        if config.get_option("server.scriptHealthCheckEnabled"):
            routes.extend(
                [
                    (
                        make_url_path_regex(base, "script-health-check"),
                        HealthHandler,
                        dict(callback=lambda: self.does_script_run_without_error()),
                    )
                ]
            )
        if config.get_option("global.developmentMode"):
            LOGGER.debug("Serving static content from the Node dev server")
        else:
            static_path = file_util.get_static_dir()
            LOGGER.debug("Serving static content from %s", static_path)
            routes.extend(
                [
                    (
                        make_url_path_regex(base, "(.*)"),
                        StaticFileHandler,
                        {
                            "path": "%s/" % static_path,
                            "default_filename": "index.html",
                            "get_pages": lambda: set(
                                [
                                    page_info["page_name"]
                                    for page_info in source_util.get_pages(
                                        self.main_script_path
                                    ).values()
                                ]
                            ),
                        },
                    ),
                    (make_url_path_regex(base, trailing_slash=False), AddSlashHandler),
                ]
            )
        return tornado.web.Application(
            routes,
            cookie_secret=config.get_option("server.cookieSecret"),
            xsrf_cookies=config.get_option("server.enableXsrfProtection"),
            # Set the websocket message size. The default value is too low.
            websocket_max_message_size=get_max_message_size_bytes(),
            **TORNADO_SETTINGS,  # type: ignore[arg-type]
        )
    def _set_state(self, new_state: State) -> None:
        """Record and log a server state transition."""
        LOGGER.debug("Server state: %s -> %s" % (self._state, new_state))
        self._state = new_state
    @property
    async def is_ready_for_browser_connection(self) -> Tuple[bool, str]:
        """Health-check result: (True, "ok") once the server is running."""
        if self._state not in (State.INITIAL, State.STOPPING, State.STOPPED):
            return True, "ok"
        return False, "unavailable"
    async def does_script_run_without_error(self) -> Tuple[bool, str]:
        """Load and execute the app's script to verify it runs without an error.
        Runs the script in a throwaway AppSession that is always shut down,
        polling up to SCRIPT_RUN_CHECK_TIMEOUT seconds for a result.
        Returns
        -------
        (True, "ok") if the script completes without error, or (False, err_msg)
        if the script raises an exception.
        """
        session_data = SessionData(self._main_script_path, self._command_line)
        local_sources_watcher = LocalSourcesWatcher(session_data)
        session = AppSession(
            ioloop=self._ioloop,
            session_data=session_data,
            uploaded_file_manager=self._uploaded_file_mgr,
            message_enqueued_callback=self._enqueued_some_message,
            local_sources_watcher=local_sources_watcher,
            user_info={"email": "test@test.com"},
        )
        try:
            session.request_rerun(None)
            now = time.perf_counter()
            # Poll until the script reports a result or the timeout elapses.
            while (
                SCRIPT_RUN_WITHOUT_ERRORS_KEY not in session.session_state
                and (time.perf_counter() - now) < SCRIPT_RUN_CHECK_TIMEOUT
            ):
                await tornado.gen.sleep(0.1)
            if SCRIPT_RUN_WITHOUT_ERRORS_KEY not in session.session_state:
                return False, "timeout"
            ok = session.session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY]
            msg = "ok" if ok else "error"
            return ok, msg
        finally:
            session.shutdown()
    @property
    def browser_is_connected(self) -> bool:
        """True while at least one browser websocket is connected."""
        return self._state == State.ONE_OR_MORE_BROWSERS_CONNECTED
    @property
    def is_running_hello(self) -> bool:
        """True when the main script is the built-in "Hello" demo app."""
        from streamlit.hello import Hello
        return self._main_script_path == Hello.__file__
    @tornado.gen.coroutine
    def _loop_coroutine(
        self, on_started: Optional[Callable[["Server"], Any]] = None
    ) -> Generator[Any, None, None]:
        """The server's main run loop.

        Waits for browser connections, flushes each session's browser queue
        to its websocket while at least one browser is connected, and shuts
        all AppSessions down once _must_stop is set. Always finishes by
        calling _on_stopped().
        """
        try:
            if self._state == State.INITIAL:
                self._set_state(State.WAITING_FOR_FIRST_BROWSER)
            elif self._state == State.ONE_OR_MORE_BROWSERS_CONNECTED:
                pass
            else:
                raise RuntimeError("Bad server state at start: %s" % self._state)
            if on_started is not None:
                on_started(self)
            while not self._must_stop.is_set():
                if self._state == State.WAITING_FOR_FIRST_BROWSER:
                    # Sleep until either a browser connects or we're stopped.
                    yield tornado.gen.convert_yielded(
                        asyncio.wait(
                            [self._must_stop.wait(), self._has_connection.wait()],
                            return_when=asyncio.FIRST_COMPLETED,
                        )
                    )
                elif self._state == State.ONE_OR_MORE_BROWSERS_CONNECTED:
                    self._need_send_data.clear()
                    # Shallow-clone our sessions into a list, so we can iterate
                    # over it and not worry about whether it's being changed
                    # outside this coroutine.
                    session_infos = list(self._session_info_by_id.values())
                    for session_info in session_infos:
                        msg_list = session_info.session.flush_browser_queue()
                        for msg in msg_list:
                            try:
                                self._send_message(session_info, msg)
                            except tornado.websocket.WebSocketClosedError:
                                self._close_app_session(session_info.session.id)
                            yield
                        yield
                    yield tornado.gen.sleep(0.01)
                elif self._state == State.NO_BROWSERS_CONNECTED:
                    yield tornado.gen.convert_yielded(
                        asyncio.wait(
                            [self._must_stop.wait(), self._has_connection.wait()],
                            return_when=asyncio.FIRST_COMPLETED,
                        )
                    )
                else:
                    # Break out of the thread loop if we encounter any other state.
                    break
                yield tornado.gen.convert_yielded(
                    asyncio.wait(
                        [self._must_stop.wait(), self._need_send_data.wait()],
                        return_when=asyncio.FIRST_COMPLETED,
                    )
                )
            # Shut down all AppSessions
            for session_info in list(self._session_info_by_id.values()):
                session_info.session.shutdown()
            self._set_state(State.STOPPED)
        except Exception:
            # Can't just re-raise here because co-routines use Tornado
            # exceptions for control flow, which appears to swallow the reraised
            # exception.
            traceback.print_exc()
            LOGGER.info(
                """
Please report this bug at https://github.com/streamlit/streamlit/issues.
"""
            )
        finally:
            self._on_stopped()
    def _send_message(self, session_info: SessionInfo, msg: ForwardMsg) -> None:
        """Send a message to a client.
        If the client is likely to have already cached the message, we may
        instead send a "reference" message that contains only the hash of the
        message.
        Parameters
        ----------
        session_info : SessionInfo
            The SessionInfo associated with websocket
        msg : ForwardMsg
            The message to send to the client
        """
        msg.metadata.cacheable = is_cacheable_msg(msg)
        msg_to_send = msg
        if msg.metadata.cacheable:
            populate_hash_if_needed(msg)
            if self._message_cache.has_message_reference(
                msg, session_info.session, session_info.script_run_count
            ):
                # This session has probably cached this message. Send
                # a reference instead.
                LOGGER.debug("Sending cached message ref (hash=%s)" % msg.hash)
                msg_to_send = create_reference_msg(msg)
            # Cache the message so it can be referenced in the future.
            # If the message is already cached, this will reset its
            # age.
            LOGGER.debug("Caching message (hash=%s)" % msg.hash)
            self._message_cache.add_message(
                msg, session_info.session, session_info.script_run_count
            )
        # If this was a `script_finished` message, we increment the
        # script_run_count for this session, and update the cache
        if (
            msg.WhichOneof("type") == "script_finished"
            and msg.script_finished == ForwardMsg.FINISHED_SUCCESSFULLY
        ):
            LOGGER.debug(
                "Script run finished successfully; "
                "removing expired entries from MessageCache "
                "(max_age=%s)",
                config.get_option("global.maxCachedMessageAge"),
            )
            session_info.script_run_count += 1
            self._message_cache.remove_expired_session_entries(
                session_info.session, session_info.script_run_count
            )
        # Ship it off!
        session_info.ws.write_message(serialize_forward_msg(msg_to_send), binary=True)
    def _enqueued_some_message(self) -> None:
        """Callback invoked when a session enqueues a ForwardMsg; schedules
        a run-loop wakeup on the ioloop."""
        self._ioloop.add_callback(self._need_send_data.set)
    def stop(self, from_signal: bool = False) -> None:
        """Request an orderly shutdown of the run loop.

        Parameters
        ----------
        from_signal : bool
            True when called from a signal handler, which requires the
            signal-safe add_callback_from_signal instead of add_callback.
        """
        click.secho(" Stopping...", fg="blue")
        self._set_state(State.STOPPING)
        if from_signal:
            self._ioloop.add_callback_from_signal(self._must_stop.set)
        else:
            self._ioloop.add_callback(self._must_stop.set)
    def _on_stopped(self) -> None:
        """Called when our runloop is exiting, to shut down the ioloop.
        This will end our process.
        (Tests can patch this method out, to prevent the test's ioloop
        from being shutdown.)
        """
        self._ioloop.stop()
    def _create_app_session(self, ws: WebSocketHandler) -> AppSession:
        """Register a connected browser with the server.
        Parameters
        ----------
        ws : _BrowserWebSocketHandler
            The newly-connected websocket handler.
        Returns
        -------
        AppSession
            The newly-created AppSession for this browser connection.
        """
        session_data = SessionData(self._main_script_path, self._command_line)
        local_sources_watcher = LocalSourcesWatcher(session_data)
        is_public_cloud_app = False
        try:
            # Browser identity, if provided, arrives as base64-encoded JSON
            # in the X-Streamlit-User header; fall back to a placeholder
            # e-mail when absent or malformed.
            header_content = ws.request.headers["X-Streamlit-User"]
            payload = base64.b64decode(header_content)
            user_obj = json.loads(payload)
            email = user_obj["email"]
            is_public_cloud_app = user_obj["isPublicCloudApp"]
        except (KeyError, binascii.Error, json.decoder.JSONDecodeError):
            email = "test@localhost.com"
        user_info: Dict[str, Optional[str]] = dict()
        if is_public_cloud_app:
            # Public cloud apps don't expose the viewer's e-mail.
            user_info["email"] = None
        else:
            user_info["email"] = email
        session = AppSession(
            ioloop=self._ioloop,
            session_data=session_data,
            uploaded_file_manager=self._uploaded_file_mgr,
            message_enqueued_callback=self._enqueued_some_message,
            local_sources_watcher=local_sources_watcher,
            user_info=user_info,
        )
        LOGGER.debug(
            "Created new session for ws %s. Session ID: %s", id(ws), session.id
        )
        assert (
            session.id not in self._session_info_by_id
        ), f"session.id '{session.id}' registered multiple times!"
        self._session_info_by_id[session.id] = SessionInfo(ws, session)
        self._set_state(State.ONE_OR_MORE_BROWSERS_CONNECTED)
        self._has_connection.notify_all()
        return session
    def _close_app_session(self, session_id: str) -> None:
        """Shutdown and remove a AppSession.
        This function may be called multiple times for the same session,
        which is not an error. (Subsequent calls just no-op.)
        Parameters
        ----------
        session_id : str
            The AppSession's id string.
        """
        if session_id in self._session_info_by_id:
            session_info = self._session_info_by_id[session_id]
            del self._session_info_by_id[session_id]
            session_info.session.shutdown()
        if len(self._session_info_by_id) == 0:
            self._set_state(State.NO_BROWSERS_CONNECTED)
class _BrowserWebSocketHandler(WebSocketHandler):
    """Handles a WebSocket connection from the browser"""
    def initialize(self, server: Server) -> None:
        """Tornado hook: stash the owning Server and prime the XSRF cookie."""
        self._server = server
        self._session: Optional[AppSession] = None
        # The XSRF cookie is normally set when xsrf_form_html is used, but in a pure-Javascript application
        # that does not use any regular forms we just need to read the self.xsrf_token manually to set the
        # cookie as a side effect.
        # See https://www.tornadoweb.org/en/stable/guide/security.html#cross-site-request-forgery-protection
        # for more details.
        if config.get_option("server.enableXsrfProtection"):
            _ = self.xsrf_token
    def check_origin(self, origin: str) -> bool:
        """Set up CORS."""
        return super().check_origin(origin) or is_url_from_allowed_origins(origin)
    def open(self, *args, **kwargs) -> Optional[Awaitable[None]]:
        """Register a new AppSession for this browser connection."""
        self._session = self._server._create_app_session(self)
        return None
    def on_close(self) -> None:
        """Tear down this connection's session (no-op if open() never ran)."""
        if not self._session:
            return
        self._server._close_app_session(self._session.id)
        self._session = None
    def get_compression_options(self) -> Optional[Dict[Any, Any]]:
        """Enable WebSocket compression.
        Returning an empty dict enables websocket compression. Returning
        None disables it.
        (See the docstring in the parent class.)
        """
        if config.get_option("server.enableWebsocketCompression"):
            return {}
        return None
    @tornado.gen.coroutine
    def on_message(self, payload: bytes) -> None:
        """Parse an incoming BackMsg protobuf and dispatch it by type to the
        AppSession's handler methods."""
        if not self._session:
            return
        msg = BackMsg()
        try:
            msg.ParseFromString(payload)
            msg_type = msg.WhichOneof("type")
            LOGGER.debug("Received the following back message:\n%s", msg)
            if msg_type == "rerun_script":
                self._session.handle_rerun_script_request(msg.rerun_script)
            elif msg_type == "load_git_info":
                self._session.handle_git_information_request()
            elif msg_type == "clear_cache":
                self._session.handle_clear_cache_request()
            elif msg_type == "set_run_on_save":
                self._session.handle_set_run_on_save_request(msg.set_run_on_save)
            elif msg_type == "stop_script":
                self._session.handle_stop_script_request()
            elif msg_type == "close_connection":
                # Only honored in development mode: stops the whole server.
                if config.get_option("global.developmentMode"):
                    Server.get_current().stop()
                else:
                    LOGGER.warning(
                        "Client tried to close connection when "
                        "not in development mode"
                    )
            else:
                LOGGER.warning('No handler for "%s"', msg_type)
        except BaseException as e:
            # NOTE(review): BaseException also catches KeyboardInterrupt and
            # SystemExit — presumably intentional so every BackMsg failure is
            # forwarded to the session; confirm before narrowing to Exception.
            LOGGER.error(e)
            self._session.handle_backmsg_exception(e)
def _set_tornado_log_levels() -> None:
    """Silence noisy Tornado loggers outside of development mode.

    Hides logs unless they're super important — e.g. 404s for .js.map
    files are not worth reporting.
    """
    if config.get_option("global.developmentMode"):
        return
    for logger_name in ("tornado.access", "tornado.application", "tornado.general"):
        logging.getLogger(logger_name).setLevel(logging.ERROR)
| 35.940964 | 108 | 0.61902 |
import asyncio
import base64
import binascii
import logging
import os
import socket
import sys
import errno
import json
import time
import traceback
import click
from enum import Enum
from typing import (
Any,
Dict,
Optional,
Tuple,
Callable,
Awaitable,
Generator,
List,
Set,
)
import tornado.concurrent
import tornado.gen
import tornado.ioloop
import tornado.locks
import tornado.netutil
import tornado.web
import tornado.websocket
from tornado.websocket import WebSocketHandler
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from streamlit import config
from streamlit import file_util
from streamlit import source_util
from streamlit import util
from streamlit.caching import get_memo_stats_provider, get_singleton_stats_provider
from streamlit.config_option import ConfigOption
from streamlit.forward_msg_cache import ForwardMsgCache
from streamlit.forward_msg_cache import create_reference_msg
from streamlit.forward_msg_cache import populate_hash_if_needed
from streamlit.in_memory_file_manager import in_memory_file_manager
from streamlit.legacy_caching.caching import _mem_caches
from streamlit.app_session import AppSession
from streamlit.stats import StatsHandler, StatsManager
from streamlit.uploaded_file_manager import UploadedFileManager
from streamlit.logger import get_logger
from streamlit.components.v1.components import ComponentRegistry
from streamlit.components.v1.components import ComponentRequestHandler
from streamlit.proto.BackMsg_pb2 import BackMsg
from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
from streamlit.server.upload_file_request_handler import (
UploadFileRequestHandler,
UPLOAD_FILE_ROUTE,
)
from streamlit.session_data import SessionData
from streamlit.state import (
SCRIPT_RUN_WITHOUT_ERRORS_KEY,
SessionStateStatProvider,
)
from streamlit.server.routes import AddSlashHandler
from streamlit.server.routes import AssetsFileHandler
from streamlit.server.routes import DebugHandler
from streamlit.server.routes import HealthHandler
from streamlit.server.routes import MediaFileHandler
from streamlit.server.routes import MessageCacheHandler
from streamlit.server.routes import StaticFileHandler
from streamlit.server.server_util import is_cacheable_msg
from streamlit.server.server_util import is_url_from_allowed_origins
from streamlit.server.server_util import make_url_path_regex
from streamlit.server.server_util import serialize_forward_msg
from streamlit.server.server_util import get_max_message_size_bytes
from streamlit.watcher import LocalSourcesWatcher
LOGGER = get_logger(__name__)
TORNADO_SETTINGS = {
"compress_response": True,
"websocket_ping_interval": 1,
# is timed out.
"websocket_ping_timeout": 30,
}
# When server.port is not available it will look for the next available port
# up to MAX_PORT_SEARCH_RETRIES.
MAX_PORT_SEARCH_RETRIES = 100
# When server.address starts with this prefix, the server will bind
# to an unix socket.
UNIX_SOCKET_PREFIX = "unix://"
# Wait for the script run result for 60s and if no result is available give up
SCRIPT_RUN_CHECK_TIMEOUT = 60
class SessionInfo:
def __init__(self, ws: WebSocketHandler, session: AppSession):
self.session = session
self.ws = ws
self.script_run_count = 0
def __repr__(self) -> str:
return util.repr_(self)
class State(Enum):
INITIAL = "INITIAL"
WAITING_FOR_FIRST_BROWSER = "WAITING_FOR_FIRST_BROWSER"
ONE_OR_MORE_BROWSERS_CONNECTED = "ONE_OR_MORE_BROWSERS_CONNECTED"
NO_BROWSERS_CONNECTED = "NO_BROWSERS_CONNECTED"
STOPPING = "STOPPING"
STOPPED = "STOPPED"
class RetriesExceeded(Exception):
pass
def server_port_is_manually_set() -> bool:
return config.is_manually_set("server.port")
def server_address_is_unix_socket() -> bool:
address = config.get_option("server.address")
return address is not None and address.startswith(UNIX_SOCKET_PREFIX)
def start_listening(app: tornado.web.Application) -> None:
http_server = HTTPServer(
app, max_buffer_size=config.get_option("server.maxUploadSize") * 1024 * 1024
)
if server_address_is_unix_socket():
start_listening_unix_socket(http_server)
else:
start_listening_tcp_socket(http_server)
def start_listening_unix_socket(http_server: HTTPServer) -> None:
address = config.get_option("server.address")
file_name = os.path.expanduser(address[len(UNIX_SOCKET_PREFIX) :])
unix_socket = tornado.netutil.bind_unix_socket(file_name)
http_server.add_socket(unix_socket)
def start_listening_tcp_socket(http_server: HTTPServer) -> None:
call_count = 0
port = None
while call_count < MAX_PORT_SEARCH_RETRIES:
address = config.get_option("server.address")
port = config.get_option("server.port")
try:
http_server.listen(port, address)
break # It worked! So let's break out of the loop.
except (OSError, socket.error) as e:
if e.errno == errno.EADDRINUSE:
if server_port_is_manually_set():
LOGGER.error("Port %s is already in use", port)
sys.exit(1)
else:
LOGGER.debug(
"Port %s already in use, trying to use the next one.", port
)
port += 1
if port == 3000:
port += 1
config.set_option(
"server.port", port, ConfigOption.STREAMLIT_DEFINITION
)
call_count += 1
else:
raise
if call_count >= MAX_PORT_SEARCH_RETRIES:
raise RetriesExceeded(
f"Cannot start Streamlit server. Port {port} is already in use, and "
f"Streamlit was unable to find a free port after {MAX_PORT_SEARCH_RETRIES} attempts.",
)
class Server:
_singleton: Optional["Server"] = None
@classmethod
def get_current(cls) -> "Server":
if Server._singleton is None:
raise RuntimeError("Server has not been initialized yet")
return Server._singleton
def __init__(
self, ioloop: IOLoop, main_script_path: str, command_line: Optional[str]
):
if Server._singleton is not None:
raise RuntimeError("Server already initialized. Use .get_current() instead")
Server._singleton = self
_set_tornado_log_levels()
self._ioloop = ioloop
self._main_script_path = main_script_path
self._command_line = command_line if command_line is not None else ""
self._session_info_by_id: Dict[str, SessionInfo] = {}
self._must_stop = tornado.locks.Event()
self._state = State.INITIAL
self._message_cache = ForwardMsgCache()
self._uploaded_file_mgr = UploadedFileManager()
self._uploaded_file_mgr.on_files_updated.connect(self.on_files_updated)
self._session_data: Optional[SessionData] = None
self._has_connection = tornado.locks.Condition()
self._need_send_data = tornado.locks.Event()
self._stats_mgr = StatsManager()
self._stats_mgr.register_provider(get_memo_stats_provider())
self._stats_mgr.register_provider(get_singleton_stats_provider())
self._stats_mgr.register_provider(_mem_caches)
self._stats_mgr.register_provider(self._message_cache)
self._stats_mgr.register_provider(in_memory_file_manager)
self._stats_mgr.register_provider(self._uploaded_file_mgr)
self._stats_mgr.register_provider(
SessionStateStatProvider(self._session_info_by_id)
)
def __repr__(self) -> str:
return util.repr_(self)
@property
def main_script_path(self) -> str:
return self._main_script_path
def get_session_by_id(self, session_id: str) -> Optional[AppSession]:
session_info = self._get_session_info(session_id)
if session_info is None:
return None
return session_info.session
def on_files_updated(self, session_id: str) -> None:
session_info = self._get_session_info(session_id)
if session_info is None:
# remove it so it doesn't stick around forever.
self._uploaded_file_mgr.remove_session_files(session_id)
def _get_session_info(self, session_id: str) -> Optional[SessionInfo]:
return self._session_info_by_id.get(session_id, None)
def start(self, on_started: Callable[["Server"], Any]) -> None:
if self._state != State.INITIAL:
raise RuntimeError("Server has already been started")
LOGGER.debug("Starting server...")
app = self._create_app()
start_listening(app)
port = config.get_option("server.port")
LOGGER.debug("Server started on port %s", port)
self._ioloop.spawn_callback(self._loop_coroutine, on_started)
def _create_app(self) -> tornado.web.Application:
base = config.get_option("server.baseUrlPath")
routes: List[Any] = [
(
make_url_path_regex(base, "stream"),
_BrowserWebSocketHandler,
dict(server=self),
),
(
make_url_path_regex(base, "healthz"),
HealthHandler,
dict(callback=lambda: self.is_ready_for_browser_connection),
),
(make_url_path_regex(base, "debugz"), DebugHandler, dict(server=self)),
(
make_url_path_regex(base, "message"),
MessageCacheHandler,
dict(cache=self._message_cache),
),
(
make_url_path_regex(base, "st-metrics"),
StatsHandler,
dict(stats_manager=self._stats_mgr),
),
(
make_url_path_regex(
base,
UPLOAD_FILE_ROUTE,
),
UploadFileRequestHandler,
dict(
file_mgr=self._uploaded_file_mgr,
get_session_info=self._get_session_info,
),
),
(
make_url_path_regex(base, "assets/(.*)"),
AssetsFileHandler,
{"path": "%s/" % file_util.get_assets_dir()},
),
(make_url_path_regex(base, "media/(.*)"), MediaFileHandler, {"path": ""}),
(
make_url_path_regex(base, "component/(.*)"),
ComponentRequestHandler,
dict(registry=ComponentRegistry.instance()),
),
]
if config.get_option("server.scriptHealthCheckEnabled"):
routes.extend(
[
(
make_url_path_regex(base, "script-health-check"),
HealthHandler,
dict(callback=lambda: self.does_script_run_without_error()),
)
]
)
if config.get_option("global.developmentMode"):
LOGGER.debug("Serving static content from the Node dev server")
else:
static_path = file_util.get_static_dir()
LOGGER.debug("Serving static content from %s", static_path)
routes.extend(
[
(
make_url_path_regex(base, "(.*)"),
StaticFileHandler,
{
"path": "%s/" % static_path,
"default_filename": "index.html",
"get_pages": lambda: set(
[
page_info["page_name"]
for page_info in source_util.get_pages(
self.main_script_path
).values()
]
),
},
),
(make_url_path_regex(base, trailing_slash=False), AddSlashHandler),
]
)
return tornado.web.Application(
routes,
cookie_secret=config.get_option("server.cookieSecret"),
xsrf_cookies=config.get_option("server.enableXsrfProtection"),
websocket_max_message_size=get_max_message_size_bytes(),
**TORNADO_SETTINGS,
)
def _set_state(self, new_state: State) -> None:
LOGGER.debug("Server state: %s -> %s" % (self._state, new_state))
self._state = new_state
@property
async def is_ready_for_browser_connection(self) -> Tuple[bool, str]:
if self._state not in (State.INITIAL, State.STOPPING, State.STOPPED):
return True, "ok"
return False, "unavailable"
async def does_script_run_without_error(self) -> Tuple[bool, str]:
session_data = SessionData(self._main_script_path, self._command_line)
local_sources_watcher = LocalSourcesWatcher(session_data)
session = AppSession(
ioloop=self._ioloop,
session_data=session_data,
uploaded_file_manager=self._uploaded_file_mgr,
message_enqueued_callback=self._enqueued_some_message,
local_sources_watcher=local_sources_watcher,
user_info={"email": "test@test.com"},
)
try:
session.request_rerun(None)
now = time.perf_counter()
while (
SCRIPT_RUN_WITHOUT_ERRORS_KEY not in session.session_state
and (time.perf_counter() - now) < SCRIPT_RUN_CHECK_TIMEOUT
):
await tornado.gen.sleep(0.1)
if SCRIPT_RUN_WITHOUT_ERRORS_KEY not in session.session_state:
return False, "timeout"
ok = session.session_state[SCRIPT_RUN_WITHOUT_ERRORS_KEY]
msg = "ok" if ok else "error"
return ok, msg
finally:
session.shutdown()
@property
def browser_is_connected(self) -> bool:
return self._state == State.ONE_OR_MORE_BROWSERS_CONNECTED
@property
def is_running_hello(self) -> bool:
from streamlit.hello import Hello
return self._main_script_path == Hello.__file__
@tornado.gen.coroutine
def _loop_coroutine(
self, on_started: Optional[Callable[["Server"], Any]] = None
) -> Generator[Any, None, None]:
try:
if self._state == State.INITIAL:
self._set_state(State.WAITING_FOR_FIRST_BROWSER)
elif self._state == State.ONE_OR_MORE_BROWSERS_CONNECTED:
pass
else:
raise RuntimeError("Bad server state at start: %s" % self._state)
if on_started is not None:
on_started(self)
while not self._must_stop.is_set():
if self._state == State.WAITING_FOR_FIRST_BROWSER:
yield tornado.gen.convert_yielded(
asyncio.wait(
[self._must_stop.wait(), self._has_connection.wait()],
return_when=asyncio.FIRST_COMPLETED,
)
)
elif self._state == State.ONE_OR_MORE_BROWSERS_CONNECTED:
self._need_send_data.clear()
# outside this coroutine.
session_infos = list(self._session_info_by_id.values())
for session_info in session_infos:
msg_list = session_info.session.flush_browser_queue()
for msg in msg_list:
try:
self._send_message(session_info, msg)
except tornado.websocket.WebSocketClosedError:
self._close_app_session(session_info.session.id)
yield
yield
yield tornado.gen.sleep(0.01)
elif self._state == State.NO_BROWSERS_CONNECTED:
yield tornado.gen.convert_yielded(
asyncio.wait(
[self._must_stop.wait(), self._has_connection.wait()],
return_when=asyncio.FIRST_COMPLETED,
)
)
else:
# Break out of the thread loop if we encounter any other state.
break
yield tornado.gen.convert_yielded(
asyncio.wait(
[self._must_stop.wait(), self._need_send_data.wait()],
return_when=asyncio.FIRST_COMPLETED,
)
)
# Shut down all AppSessions
for session_info in list(self._session_info_by_id.values()):
session_info.session.shutdown()
self._set_state(State.STOPPED)
except Exception:
# Can't just re-raise here because co-routines use Tornado
traceback.print_exc()
LOGGER.info(
"""
Please report this bug at https://github.com/streamlit/streamlit/issues.
"""
)
finally:
self._on_stopped()
def _send_message(self, session_info: SessionInfo, msg: ForwardMsg) -> None:
msg.metadata.cacheable = is_cacheable_msg(msg)
msg_to_send = msg
if msg.metadata.cacheable:
populate_hash_if_needed(msg)
if self._message_cache.has_message_reference(
msg, session_info.session, session_info.script_run_count
):
LOGGER.debug("Sending cached message ref (hash=%s)" % msg.hash)
msg_to_send = create_reference_msg(msg)
LOGGER.debug("Caching message (hash=%s)" % msg.hash)
self._message_cache.add_message(
msg, session_info.session, session_info.script_run_count
)
if (
msg.WhichOneof("type") == "script_finished"
and msg.script_finished == ForwardMsg.FINISHED_SUCCESSFULLY
):
LOGGER.debug(
"Script run finished successfully; "
"removing expired entries from MessageCache "
"(max_age=%s)",
config.get_option("global.maxCachedMessageAge"),
)
session_info.script_run_count += 1
self._message_cache.remove_expired_session_entries(
session_info.session, session_info.script_run_count
)
session_info.ws.write_message(serialize_forward_msg(msg_to_send), binary=True)
def _enqueued_some_message(self) -> None:
self._ioloop.add_callback(self._need_send_data.set)
def stop(self, from_signal=False) -> None:
click.secho(" Stopping...", fg="blue")
self._set_state(State.STOPPING)
if from_signal:
self._ioloop.add_callback_from_signal(self._must_stop.set)
else:
self._ioloop.add_callback(self._must_stop.set)
def _on_stopped(self) -> None:
self._ioloop.stop()
def _create_app_session(self, ws: WebSocketHandler) -> AppSession:
    """Create, register, and return a new AppSession for a websocket.

    User identity is read from the base64-encoded JSON payload of the
    "X-Streamlit-User" request header when present; if the header is
    missing or malformed, a placeholder email is used instead.
    """
    session_data = SessionData(self._main_script_path, self._command_line)
    local_sources_watcher = LocalSourcesWatcher(session_data)
    is_public_cloud_app = False
    try:
        header_content = ws.request.headers["X-Streamlit-User"]
        payload = base64.b64decode(header_content)
        user_obj = json.loads(payload)
        email = user_obj["email"]
        is_public_cloud_app = user_obj["isPublicCloudApp"]
    except (KeyError, binascii.Error, json.decoder.JSONDecodeError):
        # Header absent or unparseable: fall back to a dummy identity.
        email = "test@localhost.com"
    user_info: Dict[str, Optional[str]] = dict()
    if is_public_cloud_app:
        # Public cloud apps must not expose the viewer's email.
        user_info["email"] = None
    else:
        user_info["email"] = email
    session = AppSession(
        ioloop=self._ioloop,
        session_data=session_data,
        uploaded_file_manager=self._uploaded_file_mgr,
        message_enqueued_callback=self._enqueued_some_message,
        local_sources_watcher=local_sources_watcher,
        user_info=user_info,
    )
    LOGGER.debug(
        "Created new session for ws %s. Session ID: %s", id(ws), session.id
    )
    # Session IDs are expected to be globally unique per server.
    assert (
        session.id not in self._session_info_by_id
    ), f"session.id '{session.id}' registered multiple times!"
    self._session_info_by_id[session.id] = SessionInfo(ws, session)
    self._set_state(State.ONE_OR_MORE_BROWSERS_CONNECTED)
    self._has_connection.notify_all()
    return session
def _close_app_session(self, session_id: str) -> None:
    """Deregister the session with the given id and shut it down.

    When the last session disappears, the server state flips back to
    NO_BROWSERS_CONNECTED.
    """
    session_info = self._session_info_by_id.pop(session_id, None)
    if session_info is not None:
        session_info.session.shutdown()
    if not self._session_info_by_id:
        self._set_state(State.NO_BROWSERS_CONNECTED)
class _BrowserWebSocketHandler(WebSocketHandler):
    """Handles a websocket connection from a browser."""

    def initialize(self, server: Server) -> None:
        self._server = server
        self._session: Optional[AppSession] = None
        # Fix: this line had lost its condition and was a bare tuple
        # expression. When XSRF protection is enabled, touching
        # `xsrf_token` here makes tornado set the XSRF cookie on the
        # websocket handshake response.
        if config.get_option("server.enableXsrfProtection"):
            _ = self.xsrf_token

    def check_origin(self, origin: str) -> bool:
        """Accept tornado's default origins plus the configured allow-list."""
        return super().check_origin(origin) or is_url_from_allowed_origins(origin)

    def open(self, *args, **kwargs) -> Optional[Awaitable[None]]:
        # Register a fresh AppSession for this connection.
        self._session = self._server._create_app_session(self)
        return None

    def on_close(self) -> None:
        if not self._session:
            return
        self._server._close_app_session(self._session.id)
        self._session = None

    def get_compression_options(self) -> Optional[Dict[Any, Any]]:
        # None disables compression; {} enables tornado's defaults.
        if config.get_option("server.enableWebsocketCompression"):
            return {}
        return None

    @tornado.gen.coroutine
    def on_message(self, payload: bytes) -> None:
        """Parse an incoming BackMsg and dispatch it to the session."""
        if not self._session:
            return
        msg = BackMsg()
        try:
            msg.ParseFromString(payload)
            msg_type = msg.WhichOneof("type")
            LOGGER.debug("Received the following back message:\n%s", msg)
            if msg_type == "rerun_script":
                self._session.handle_rerun_script_request(msg.rerun_script)
            elif msg_type == "load_git_info":
                self._session.handle_git_information_request()
            elif msg_type == "clear_cache":
                self._session.handle_clear_cache_request()
            elif msg_type == "set_run_on_save":
                self._session.handle_set_run_on_save_request(msg.set_run_on_save)
            elif msg_type == "stop_script":
                self._session.handle_stop_script_request()
            elif msg_type == "close_connection":
                # Only honored in development mode; a production client
                # must not be able to stop the whole server.
                if config.get_option("global.developmentMode"):
                    Server.get_current().stop()
                else:
                    LOGGER.warning(
                        "Client tried to close connection when "
                        "not in development mode"
                    )
            else:
                LOGGER.warning('No handler for "%s"', msg_type)
        except BaseException as e:
            # Report the failure back to the browser instead of dropping it.
            LOGGER.error(e)
            self._session.handle_backmsg_exception(e)
def _set_tornado_log_levels() -> None:
    """Quiet tornado's loggers when not running in development mode."""
    if config.get_option("global.developmentMode"):
        return
    # Example of stuff we don't care about: 404 about .js.map files.
    for channel in ("access", "application", "general"):
        logging.getLogger("tornado.%s" % channel).setLevel(logging.ERROR)
| true | true |
f72e2100a101a3cb6cb26d863be2cfdad32062ec | 729 | py | Python | houseplants/houseplants_api/models.py | galihmelon/houseplants-server | 0c5238e5f5cc66eb2e64f30d28e9ce9c9ad50e32 | [
"MIT"
] | null | null | null | houseplants/houseplants_api/models.py | galihmelon/houseplants-server | 0c5238e5f5cc66eb2e64f30d28e9ce9c9ad50e32 | [
"MIT"
] | null | null | null | houseplants/houseplants_api/models.py | galihmelon/houseplants-server | 0c5238e5f5cc66eb2e64f30d28e9ce9c9ad50e32 | [
"MIT"
] | null | null | null | from datetime import date, timedelta
from django.db import models
class Plant(models.Model):
    """A houseplant that can be tracked by the app."""
    # Display name of the plant.
    name = models.CharField(max_length=100)
    # Link to a picture of the plant.
    image_url = models.URLField(max_length=200)
    # Free-form description / care notes.
    description = models.TextField()
    def __str__(self):
        # Shown in the Django admin and shell.
        return self.name
class WateringLog(models.Model):
    """A record of a plant being watered, plus a suggested next watering date."""
    plant = models.ForeignKey(
        Plant, on_delete=models.CASCADE
    )
    # Stamped automatically when the log entry is created.
    water_date = models.DateField(auto_now_add=True)
    next_suggested_date = models.DateField()
    def __str__(self):
        return f'{self.plant.name} may need water on {self.next_suggested_date}'
    def save(self, *args, **kwargs):
        # next_suggested_date is a required field, but the computation
        # that populated it had been commented out, leaving a save()
        # override that did nothing and making every save that omitted
        # the field fail. Default to one week out while still honoring an
        # explicitly provided date.
        if self.next_suggested_date is None:
            self.next_suggested_date = date.today() + timedelta(days=7)
        super().save(*args, **kwargs)
| 27 | 80 | 0.687243 | from datetime import date, timedelta
from django.db import models
class Plant(models.Model):
name = models.CharField(max_length=100)
image_url = models.URLField(max_length=200)
description = models.TextField()
def __str__(self):
return self.name
class WateringLog(models.Model):
plant = models.ForeignKey(
Plant, on_delete=models.CASCADE
)
water_date = models.DateField(auto_now_add=True)
next_suggested_date = models.DateField()
def __str__(self):
return f'{self.plant.name} may need water on {self.next_suggested_date}'
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
| true | true |
f72e217f80776ae5f3d2089e8ec2364235bc5d0c | 1,833 | py | Python | Scripts/Web_Scrappers/cricketmonthly_articles/main.py | prkhrv/Python_and_the_Web | 6846334c4151ee94107ef393cbb5e8bc8f6a2e4b | [
"MIT"
] | 3 | 2020-10-13T17:41:33.000Z | 2021-06-02T15:01:58.000Z | Scripts/Web_Scrappers/cricketmonthly_articles/main.py | prkhrv/Python_and_the_Web | 6846334c4151ee94107ef393cbb5e8bc8f6a2e4b | [
"MIT"
] | null | null | null | Scripts/Web_Scrappers/cricketmonthly_articles/main.py | prkhrv/Python_and_the_Web | 6846334c4151ee94107ef393cbb5e8bc8f6a2e4b | [
"MIT"
] | null | null | null | import pandas as pd
import re
import requests as rq
from bs4 import BeautifulSoup

# Spoof a desktop browser so the site serves the full markup.
header = {'User-Agent' : 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36'}
r = rq.get("https://www.thecricketmonthly.com/", headers=header)
soup = BeautifulSoup(r.content, 'html.parser')

# The left-hand content column holds the latest articles.
main_sec = soup.find('section', attrs={'class' : re.compile('col-lhs lhs_content')})
article = main_sec.find_all('article', attrs={'class' : re.compile('col-1-1 module')})

about, link, summary = [], [], []
print('Fetching Latest Articles...')
for card in article:
    title_tag = card.find('h1')
    about.append(title_tag.text)
    link.append('https://www.thecricketmonthly.com' + title_tag.a['href'])
    summary.append(card.find('p').text)
print('Done!')

# Sidebar list of featured writers and their latest pieces.
main_sec = soup.find('ul', attrs={'class' : re.compile('writer-ul')})
li = main_sec.find_all('li')

linkauth, auth, headline, subhead = [], [], [], []
print('Fetching articles of top Writers...')
for entry in li:
    linkauth.append(entry.a['href'])
    auth.append(entry.find('span', attrs={'class' : re.compile('wname')}).text)
    headline.append(entry.a.text)
    subhead.append(entry.find('span', attrs={'class' : re.compile('subheadline')}).text)
print('Done!')

print('Processing Data...')
la = {'About' : about, 'Short Summary' : summary, 'Further Reading' : link}
tw = {'Writer' : auth, 'Headline' : headline, 'Sub-headline' : subhead, 'Further Reading' : linkauth}
latest_articles = pd.DataFrame.from_dict(la)
top_writers = pd.DataFrame.from_dict(tw)

print('Publishing csv...')
top_writers.to_csv('Articles from Top Writers.csv', index=False)
latest_articles.to_csv('Latest Articles from Cricket Monthly.csv', index=False)
print("Your output can be found in form of two files 'Articles from Top Writers.csv' and 'Latest Articles from Cricket Monthly.csv'")
| 38.1875 | 133 | 0.695035 | import pandas as pd
import re
import requests as rq
from bs4 import BeautifulSoup
header = {'User-Agent' : 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.97 Safari/537.36'}
r = rq.get("https://www.thecricketmonthly.com/", headers=header)
soup = BeautifulSoup(r.content, 'html.parser')
main_sec = soup.find('section', attrs={'class' : re.compile('col-lhs lhs_content')})
article = main_sec.find_all('article', attrs={'class' : re.compile('col-1-1 module')})
about=[]
link=[]
summary=[]
print('Fetching Latest Articles...')
for a in article:
tag = a.find('h1')
about.append(tag.text)
link.append('https://www.thecricketmonthly.com'+tag.a['href'])
tag = a.find('p')
summary.append(tag.text)
print('Done!')
main_sec = soup.find('ul', attrs={'class' : re.compile('writer-ul')})
li = main_sec.find_all('li')
linkauth=[]
auth=[]
headline=[]
subhead=[]
print('Fetching articles of top Writers...')
for l in li:
linkauth.append(l.a['href'])
spn = l.find('span', attrs={'class' : re.compile('wname')})
auth.append(spn.text)
headline.append(l.a.text)
spn = l.find('span', attrs={'class' : re.compile('subheadline')})
subhead.append(spn.text)
print('Done!')
print('Processing Data...')
la = {'About' : about, 'Short Summary' : summary, 'Further Reading' : link}
tw = {'Writer' : auth, 'Headline' : headline, 'Sub-headline' : subhead, 'Further Reading' : linkauth}
latest_articles = pd.DataFrame.from_dict(la)
top_writers = pd.DataFrame.from_dict(tw)
print('Publishing csv...')
top_writers.to_csv('Articles from Top Writers.csv', index=False)
latest_articles.to_csv('Latest Articles from Cricket Monthly.csv', index=False)
print("Your output can be found in form of two files 'Articles from Top Writers.csv' and 'Latest Articles from Cricket Monthly.csv'")
| true | true |
f72e2233099398bb8b0e343df4ebefcffb04e7f5 | 216 | py | Python | sample/udpServer.py | jettom/JtSpider | 7e2cb32415ca5d439b117c0277a7f7b2b27fa0bf | [
"Apache-2.0"
] | 1 | 2019-01-25T12:54:24.000Z | 2019-01-25T12:54:24.000Z | sample/udpServer.py | jettom/JtSpider | 7e2cb32415ca5d439b117c0277a7f7b2b27fa0bf | [
"Apache-2.0"
] | null | null | null | sample/udpServer.py | jettom/JtSpider | 7e2cb32415ca5d439b117c0277a7f7b2b27fa0bf | [
"Apache-2.0"
] | 1 | 2020-05-16T01:18:25.000Z | 2020-05-16T01:18:25.000Z | import socket
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
s.bind(('127.0.0.1', 50007))
while True:
data, addr = s.recvfrom(1024)
print("data: {}, addr: {}".format(data, addr)) | 30.857143 | 59 | 0.615741 | import socket
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
s.bind(('127.0.0.1', 50007))
while True:
data, addr = s.recvfrom(1024)
print("data: {}, addr: {}".format(data, addr)) | true | true |
f72e224dfaaff9672701dfcf91d70745ac6c6e0f | 832 | py | Python | trel/urls.py | alsoncahyadi/julid-be | 9bb63882cab637329b85d35da874ea1c03180cdd | [
"MIT"
] | null | null | null | trel/urls.py | alsoncahyadi/julid-be | 9bb63882cab637329b85d35da874ea1c03180cdd | [
"MIT"
] | null | null | null | trel/urls.py | alsoncahyadi/julid-be | 9bb63882cab637329b85d35da874ea1c03180cdd | [
"MIT"
] | null | null | null | """trello URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import include, path
from django.contrib import admin
from rest_framework import routers
from . import views as v
import os
# Route incoming Trello webhook callbacks to the Webhook view.
urlpatterns = [
    path('webhook/', v.Webhook.as_view())
]
| 33.28 | 77 | 0.71875 | from django.urls import include, path
from django.contrib import admin
from rest_framework import routers
from . import views as v
import os
urlpatterns = [
path('webhook/', v.Webhook.as_view())
]
| true | true |
f72e22530956476d063278b94d619be46cc94df6 | 3,123 | py | Python | mysql/toolkit/components/operations/export.py | sfneal/mysql-toolkit | 6964f718f4b72eb30f2259adfcfaf3090526c53d | [
"MIT"
] | 4 | 2019-10-07T13:40:29.000Z | 2020-12-29T10:34:07.000Z | mysql/toolkit/components/operations/export.py | sfneal/mysql-toolkit | 6964f718f4b72eb30f2259adfcfaf3090526c53d | [
"MIT"
] | 1 | 2018-10-08T13:57:57.000Z | 2018-10-08T13:57:57.000Z | mysql/toolkit/components/operations/export.py | sfneal/mysql-toolkit | 6964f718f4b72eb30f2259adfcfaf3090526c53d | [
"MIT"
] | 2 | 2018-12-18T09:32:32.000Z | 2019-07-27T11:09:14.000Z | from textwrap import wrap, fill
from tqdm import tqdm
from decimal import Decimal
from json import dumps
from mysql.connector.conversion import MySQLConverterBase
from mysql.toolkit.utils import cols_str, wrap
from mysql.toolkit.commands.dump import write_text
def insert_statement(table, columns, values):
    """Generate an INSERT statement string for dumping to a text file or
    for direct MySQL execution."""
    def render(col):
        # NULLs and numeric values are emitted bare; everything else is
        # converted and quoted.
        if col is None:
            return 'NULL'
        if isinstance(col, (int, float, Decimal)):
            return str(MySQLConverterBase().to_mysql(col))
        string = str(MySQLConverterBase().to_mysql(col))
        # Switch to double quotes when the value contains a single quote.
        return '"' + string + '"' if "'" in string else "'" + string + "'"

    # Normalize scalar rows into single-column rows.
    if not all(isinstance(r, (list, set, tuple)) for r in values):
        values = [[r] for r in values]

    rows = [', '.join(render(col) for col in row) for row in values]
    vals = '(' + '),\n\t('.join(rows) + ')'
    return "INSERT INTO\n\t{0} ({1}) \nVALUES\n\t{2}".format(wrap(table), cols_str(columns), vals)
def sql_file_comment(comment):
    """Wrap a string at 77 columns and prefix each line with the SQL
    comment marker '-- '."""
    return '\n'.join('-- ' + line for line in fill(comment, 77).split('\n'))
class Export:
    """Mixin adding SQL-dump (export) capabilities to a connection class."""

    def dump_table(self, table, drop_statement=True):
        """Return the SQL statements (structure + data) that recreate `table`."""
        parts = ['\n', sql_file_comment('')]
        parts.append(sql_file_comment('Table structure and data dump for {0}'.format(table)))
        parts.append(sql_file_comment(''))
        if drop_statement:
            parts.append('\nDROP TABLE IF EXISTS {0};'.format(wrap(table)))
        parts.append('{0};\n'.format(self.get_table_definition(table)))
        data = self.select_all(table)
        if len(data) > 0:
            parts.append('{0};'.format(insert_statement(table, self.get_columns(table), data)))
        return '\n'.join(parts)

    def dump_database(self, file_path, database=None, tables=None):
        """Dump structure and data for `tables` into a .sql backup file.

        If `database` is given, switch to it first; otherwise the
        currently connected database is the source. When `tables` is
        omitted, every table is dumped. Returns the written file's path.
        """
        if database:
            self.change_db(database)
        tables = tables or self.tables
        progress = tqdm(tables, total=len(tables), desc='Generating dump files')
        statements = [self.dump_table(t) for t in progress]
        dump = 'SET FOREIGN_KEY_CHECKS=0;' + '\n'.join(statements) + '\nSET FOREIGN_KEY_CHECKS=1;'
        if not file_path.endswith('.sql'):
            file_path = file_path + '.sql'
        write_text(dump, file_path)
        return file_path
| 40.038462 | 120 | 0.616074 | from textwrap import wrap, fill
from tqdm import tqdm
from decimal import Decimal
from json import dumps
from mysql.connector.conversion import MySQLConverterBase
from mysql.toolkit.utils import cols_str, wrap
from mysql.toolkit.commands.dump import write_text
def insert_statement(table, columns, values):
if not all(isinstance(r, (list, set, tuple)) for r in values):
values = [[r] for r in values]
rows = []
for row in values:
new_row = []
for col in row:
if col is None:
new_col = 'NULL'
elif isinstance(col, (int, float, Decimal)):
new_col = str(MySQLConverterBase().to_mysql(col))
else:
string = str(MySQLConverterBase().to_mysql(col))
if "'" in string:
new_col = '"' + string + '"'
else:
new_col = "'" + string + "'"
new_row.append(new_col)
rows.append(', '.join(new_row))
vals = '(' + '),\n\t('.join(rows) + ')'
statement = "INSERT INTO\n\t{0} ({1}) \nVALUES\n\t{2}".format(wrap(table), cols_str(columns), vals)
return statement
def sql_file_comment(comment):
return '-- ' + '\n-- '.join(fill(comment, 77).split('\n'))
class Export:
def dump_table(self, table, drop_statement=True):
create_statement = self.get_table_definition(table)
data = self.select_all(table)
statements = ['\n', sql_file_comment(''),
sql_file_comment('Table structure and data dump for {0}'.format(table)), sql_file_comment('')]
if drop_statement:
statements.append('\nDROP TABLE IF EXISTS {0};'.format(wrap(table)))
statements.append('{0};\n'.format(create_statement))
if len(data) > 0:
statements.append('{0};'.format(insert_statement(table, self.get_columns(table), data)))
return '\n'.join(statements)
def dump_database(self, file_path, database=None, tables=None):
# Change database if needed
if database:
self.change_db(database)
# Set table
if not tables:
tables = self.tables
# Retrieve and join dump statements
statements = [self.dump_table(table) for table in tqdm(tables, total=len(tables), desc='Generating dump files')]
dump = 'SET FOREIGN_KEY_CHECKS=0;' + '\n'.join(statements) + '\nSET FOREIGN_KEY_CHECKS=1;'
# Write dump statements to sql file
file_path = file_path if file_path.endswith('.sql') else file_path + '.sql'
write_text(dump, file_path)
return file_path
| true | true |
f72e22e7a8f3b0abb20a4ebfe8ebce13506fddff | 5,645 | py | Python | emojified_tweets_wall_of_fame/migrations/0001_initial.py | mhyeun/emojified-tweets-wall-of-fame | 6869a3763e033da9608ec5db86398120f69f00ce | [
"MIT"
] | 1 | 2021-01-09T08:23:07.000Z | 2021-01-09T08:23:07.000Z | emojified_tweets_wall_of_fame/migrations/0001_initial.py | mhyeun/emojified-tweets-wall-of-fame | 6869a3763e033da9608ec5db86398120f69f00ce | [
"MIT"
] | 3 | 2021-01-09T22:50:29.000Z | 2021-01-10T06:47:33.000Z | emojified_tweets_wall_of_fame/migrations/0001_initial.py | mhyeun/emojified-tweets-wall-of-fame | 6869a3763e033da9608ec5db86398120f69f00ce | [
"MIT"
] | null | null | null | # Generated by Django 3.1.5 on 2021-01-08 20:56
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # First migration for this app: creates the custom user model
    # (mirroring django.contrib.auth's AbstractUser fields) and Tweet.
    initial = True
    dependencies = [("auth", "0012_alter_user_first_name_max_length")]
    operations = [
        # Custom user model, field-for-field equivalent to AbstractUser.
        migrations.CreateModel(
            name="CustomUser",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("password", models.CharField(max_length=128, verbose_name="password")),
                (
                    "last_login",
                    models.DateTimeField(
                        blank=True, null=True, verbose_name="last login"
                    ),
                ),
                (
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
                (
                    "username",
                    models.CharField(
                        error_messages={
                            "unique": "A user with that username already exists."
                        },
                        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
                        max_length=150,
                        unique=True,
                        validators=[
                            django.contrib.auth.validators.UnicodeUsernameValidator()
                        ],
                        verbose_name="username",
                    ),
                ),
                (
                    "first_name",
                    models.CharField(
                        blank=True, max_length=150, verbose_name="first name"
                    ),
                ),
                (
                    "last_name",
                    models.CharField(
                        blank=True, max_length=150, verbose_name="last name"
                    ),
                ),
                (
                    "email",
                    models.EmailField(
                        blank=True, max_length=254, verbose_name="email address"
                    ),
                ),
                (
                    "is_staff",
                    models.BooleanField(
                        default=False,
                        help_text="Designates whether the user can log into this admin site.",
                        verbose_name="staff status",
                    ),
                ),
                (
                    "is_active",
                    models.BooleanField(
                        default=True,
                        help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
                        verbose_name="active",
                    ),
                ),
                (
                    "date_joined",
                    models.DateTimeField(
                        default=django.utils.timezone.now, verbose_name="date joined"
                    ),
                ),
                (
                    "groups",
                    models.ManyToManyField(
                        blank=True,
                        help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.Group",
                        verbose_name="groups",
                    ),
                ),
                (
                    "user_permissions",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Specific permissions for this user.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.Permission",
                        verbose_name="user permissions",
                    ),
                ),
            ],
            options={
                "verbose_name": "user",
                "verbose_name_plural": "users",
                "abstract": False,
            },
            managers=[("objects", django.contrib.auth.models.UserManager())],
        ),
        # Tweet posted by a user; `poster` is nulled out if the user is
        # deleted so the tweet itself survives.
        migrations.CreateModel(
            name="Tweet",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("content", models.CharField(max_length=512)),
                ("votes", models.IntegerField()),
                (
                    "poster",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
    ]
| 36.419355 | 138 | 0.39504 |
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [("auth", "0012_alter_user_first_name_max_length")]
operations = [
migrations.CreateModel(
name="CustomUser",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(
blank=True, null=True, verbose_name="last login"
),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
(
"username",
models.CharField(
error_messages={
"unique": "A user with that username already exists."
},
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
max_length=150,
unique=True,
validators=[
django.contrib.auth.validators.UnicodeUsernameValidator()
],
verbose_name="username",
),
),
(
"first_name",
models.CharField(
blank=True, max_length=150, verbose_name="first name"
),
),
(
"last_name",
models.CharField(
blank=True, max_length=150, verbose_name="last name"
),
),
(
"email",
models.EmailField(
blank=True, max_length=254, verbose_name="email address"
),
),
(
"is_staff",
models.BooleanField(
default=False,
help_text="Designates whether the user can log into this admin site.",
verbose_name="staff status",
),
),
(
"is_active",
models.BooleanField(
default=True,
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
verbose_name="active",
),
),
(
"date_joined",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="date joined"
),
),
(
"groups",
models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
(
"user_permissions",
models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
],
options={
"verbose_name": "user",
"verbose_name_plural": "users",
"abstract": False,
},
managers=[("objects", django.contrib.auth.models.UserManager())],
),
migrations.CreateModel(
name="Tweet",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("content", models.CharField(max_length=512)),
("votes", models.IntegerField()),
(
"poster",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
],
),
]
| true | true |
f72e268ff6db8e715a09e84d55e47f8f37001632 | 1,440 | py | Python | paths.py | backchatio/sublime-ensime | c9eb76c0405fb299f76cecfec958d956fb675892 | [
"MIT"
] | null | null | null | paths.py | backchatio/sublime-ensime | c9eb76c0405fb299f76cecfec958d956fb675892 | [
"MIT"
] | null | null | null | paths.py | backchatio/sublime-ensime | c9eb76c0405fb299f76cecfec958d956fb675892 | [
"MIT"
] | 1 | 2022-03-14T08:40:13.000Z | 2022-03-14T08:40:13.000Z | import os
def encode_path(path):
    """Normalize a filesystem path to forward-slash form.

    On Windows an absolute path like "C:\\dir\\file" becomes
    "/C/dir/file" (drive letter uppercased, colon dropped); a relative
    path just has its backslashes flipped. On every other platform the
    path is returned untouched. Falsy input is returned as-is.
    """
    if not path:
        return path
    if os.name != "nt":
        return path
    if not os.path.isabs(path):
        return path.replace("\\", "/")
    drive, rest = os.path.splitdrive(path)
    return "/" + drive[:-1].upper() + rest.replace("\\", "/")
def decode_path(path):
    """Inverse of encode_path: turn "/C/dir/file" back into "C:\\dir\\file".

    Only meaningful on Windows; every other platform returns the path
    unchanged. Falsy input is returned as-is.
    """
    if not path:
        return path
    if os.name != "nt":
        return path
    if not path.startswith("/"):
        # Relative path: just flip the separators.
        return path.replace("/", "\\")
    drive, sep, rest = path[1:].partition("/")
    # Re-insert the drive colon; `rest` keeps its leading separator.
    return (drive + ":" + ((sep + rest) if sep else "")).replace("/", "\\")
def same_paths(path1, path2):
    """Return True when both paths resolve (symlinks followed, case
    normalized) to the same location; False when either is falsy."""
    if not (path1 and path2):
        return False
    canonical1 = os.path.normcase(os.path.realpath(path1))
    canonical2 = os.path.normcase(os.path.realpath(path2))
    return canonical1 == canonical2
def is_subpath(root, wannabe):
    """Return True if `wannabe` lies at or below `root` in the filesystem.

    Both paths are canonicalized (realpath + normcase) first. Returns
    False when either argument is falsy.

    Fix: the previous implementation used a raw string `startswith`
    check, so "/a/bc" was wrongly treated as a subpath of "/a/b". The
    match is now anchored at a path-component boundary.
    """
    if not root or not wannabe:
        return False
    root = os.path.normcase(os.path.realpath(root))
    wannabe = os.path.normcase(os.path.realpath(wannabe))
    if wannabe == root:
        return True
    # rstrip handles a root that already ends in a separator (e.g. "/").
    return wannabe.startswith(root.rstrip(os.sep) + os.sep)
def relative_path(root, wannabe):
    """Return `wannabe` expressed relative to `root`, or None when
    `wannabe` is not under `root` (per is_subpath) or either argument is
    falsy. Returns "" when both canonicalize to the same path.

    Fix: the previous `wannabe[len(root) + 1:]` slice assumed root never
    ends in a separator, so a root of "/" (or any trailing-separator
    root) silently dropped an extra leading character from the result.
    """
    if not root or not wannabe:
        return None
    if not is_subpath(root, wannabe):
        return None
    root = os.path.normcase(os.path.realpath(root))
    wannabe = os.path.normcase(os.path.realpath(wannabe))
    # Strip the root prefix, then any separator(s) left at the front.
    return wannabe[len(root):].lstrip(os.sep)
| 24.827586 | 63 | 0.620833 | import os
def encode_path(path):
if not path:
return path
if os.name == "nt":
if os.path.isabs(path):
drive, rest = os.path.splitdrive(path)
return "/" + drive[:-1].upper() + rest.replace("\\", "/")
else:
return path.replace("\\", "/")
else:
return path
def decode_path(path):
if not path:
return path
if os.name == "nt":
if path.startswith("/"):
path = path[1:]
iof = path.find("/")
if iof == -1:
drive = path
rest = ""
else:
drive = path[:iof]
rest = path[iof:]
return (drive + ":" + rest).replace("/", "\\")
else:
return path.replace("/", "\\")
else:
return path
def same_paths(path1, path2):
if not path1 or not path2:
return False
path1_normalized = os.path.normcase(os.path.realpath(path1))
path2_normalized = os.path.normcase(os.path.realpath(path2))
return path1_normalized == path2_normalized
def is_subpath(root, wannabe):
if not root or not wannabe:
return False
root = os.path.normcase(os.path.realpath(root))
wannabe = os.path.normcase(os.path.realpath(wannabe))
return wannabe.startswith(root)
def relative_path(root, wannabe):
if not root or not wannabe:
return None
if not is_subpath(root, wannabe):
return None
root = os.path.normcase(os.path.realpath(root))
wannabe = os.path.normcase(os.path.realpath(wannabe))
return wannabe[len(root) + 1:]
| true | true |
f72e26f013abc1a321dd9381c2450e3e7068c489 | 3,295 | py | Python | pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/ping/verify.py | wilbeacham85/genielibs | 519da71e3956b86d4211d8649667c0d931dd2715 | [
"Apache-2.0"
] | null | null | null | pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/ping/verify.py | wilbeacham85/genielibs | 519da71e3956b86d4211d8649667c0d931dd2715 | [
"Apache-2.0"
] | null | null | null | pkgs/sdk-pkg/src/genie/libs/sdk/apis/junos/ping/verify.py | wilbeacham85/genielibs | 519da71e3956b86d4211d8649667c0d931dd2715 | [
"Apache-2.0"
] | null | null | null | """Common verification functions for ping"""
# Python
import logging
# Genie
from genie.utils.timeout import Timeout
from genie.metaparser.util.exceptions import SchemaEmptyParserError
# pyATS
from genie.utils import Dq
log = logging.getLogger(__name__)
def verify_ping(device,
                address=None,
                ttl=None,
                wait=None,
                mpls_rsvp=None,
                loss_rate=0,
                count=None,
                max_time=30,
                check_interval=10):
    """ Verify ping loss rate on ip address provided

        Args:
            device ('obj'): Device object
            address ('str'): Address value
            ttl ('int'): ttl value passed in command
            wait ('int'): wait value passed in command
            mpls_rsvp ('str'): MPLS RSVP value
            loss_rate ('int'): Expected loss rate value
            count ('int'): Count value for ping command
            max_time (`int`): Max time, default: 30
            check_interval (`int`): Check interval, default: 10
        Returns:
            Boolean
        Raises:
            None
    """
    timeout = Timeout(max_time, check_interval)
    while timeout.iterate():
        # Build the ping command from whichever argument combination was
        # supplied; bail out early when no usable target is given.
        if address and count and not ttl and not wait:
            cmd = 'ping {address} count {count}'.format(address=address,
                                                        count=count)
        elif address and count and ttl and wait:
            cmd = 'ping {address} ttl {ttl} count {count} wait {wait}'.format(
                address=address,
                ttl=ttl,
                count=count,
                wait=wait)
        elif not address and mpls_rsvp:
            cmd = 'ping mpls rsvp {rsvp}'.format(rsvp=mpls_rsvp)
        elif address:
            cmd = 'ping {address}'.format(address=address)
        else:
            log.info('Need to pass address as argument')
            return False
        try:
            out = device.parse(cmd)
        except SchemaEmptyParserError:
            # No parsable output yet; retry until the timeout expires.
            # (The unused `as e` binding was dropped.)
            timeout.sleep()
            continue
        # The parsed output nests the observed loss under
        # ping -> statistics -> "loss-rate"; Dq extracts it by key.
        loss_rate_found = Dq(out).get_values("loss-rate", 0)
        if loss_rate_found == loss_rate:
            return True
        timeout.sleep()
    return False
| 33.622449 | 78 | 0.427921 |
import logging
from genie.utils.timeout import Timeout
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.utils import Dq
log = logging.getLogger(__name__)
def verify_ping(device,
address=None,
ttl=None,
wait=None,
mpls_rsvp=None,
loss_rate=0,
count=None,
max_time=30,
check_interval=10):
timeout = Timeout(max_time, check_interval)
while timeout.iterate():
if address and count and not ttl and not wait:
cmd = 'ping {address} count {count}'.format(address=address,
count=count)
elif address and count and ttl and wait:
cmd = 'ping {address} ttl {ttl} count {count} wait {wait}'.format(
address=address,
ttl=ttl,
count=count,
wait=wait)
elif not address and mpls_rsvp:
cmd = 'ping mpls rsvp {rsvp}'.format(rsvp=mpls_rsvp)
elif address:
cmd = 'ping {address}'.format(address=address)
else:
log.info('Need to pass address as argument')
return False
try:
out = device.parse(cmd)
except SchemaEmptyParserError as e:
timeout.sleep()
continue
loss_rate_found = Dq(out).get_values("loss-rate", 0)
if loss_rate_found == loss_rate:
return True
timeout.sleep()
return False
| true | true |
f72e270d2bd48ac9a4c45111b9f76e859c404851 | 2,820 | py | Python | core/utils.py | apauna/RASSH | f564a4582a071bfc197a90f8bb0abe99d078c525 | [
"BSD-3-Clause"
] | 3 | 2019-08-03T08:35:57.000Z | 2022-02-03T14:45:31.000Z | core/utils.py | apauna/RASSH | f564a4582a071bfc197a90f8bb0abe99d078c525 | [
"BSD-3-Clause"
] | null | null | null | core/utils.py | apauna/RASSH | f564a4582a071bfc197a90f8bb0abe99d078c525 | [
"BSD-3-Clause"
] | 5 | 2019-08-03T09:11:34.000Z | 2021-04-24T07:20:05.000Z | # Copyright (c) 2010 Upi Tamminen <desaster@gmail.com>
# See the COPYRIGHT file for more information
import time, anydbm
from rassh.core.config import config
def addToLastlog(message):
    """Append a single line to the honeypot's lastlog.txt.

    Fix: the original used the `file()` builtin (removed in Python 3)
    and closed the handle manually; `open()` plus a `with` block is
    equivalent on Python 2.6+ and guarantees the handle is closed even
    if the write raises.
    """
    path = '%s/lastlog.txt' % config().get('honeypot', 'data_path')
    with open(path, 'a') as f:
        f.write('%s\n' % (message,))
def durationHuman(seconds):
    """Render a duration in seconds as a compact human-readable string.

    Examples: "00:00", "01:01:01", "1 day 01:01:01", "2 years ".

    Fixes: `long()` (Python-2 only; a NameError on Python 3) is replaced
    with `int()`, and the year divmod — whose float divisor made `days`
    and `years` floats, producing output like "1.0 day" — now converts
    its results back to ints.
    """
    seconds = int(round(seconds))
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    # 365.242199 approximates the mean tropical year; divmod with a
    # float divisor yields floats, so convert back to ints.
    years, days = divmod(days, 365.242199)
    years = int(years)
    days = int(days)
    sdays = str(days)
    syears = str(years)
    sseconds = str(seconds).rjust(2, '0')
    sminutes = str(minutes).rjust(2, '0')
    shours = str(hours).rjust(2, '0')
    duration = []
    if years > 0:
        duration.append('%s year' % syears + 's' * (years != 1) + ' ')
    else:
        if days > 0:
            duration.append('%s day' % sdays + 's' * (days != 1) + ' ')
        if hours > 0:
            duration.append('%s:' % shours)
        if minutes >= 0:
            duration.append('%s:' % sminutes)
        if seconds >= 0:
            duration.append('%s' % sseconds)
    return ''.join(duration)
# From http://stackoverflow.com/questions/136168/get-last-n-lines-of-a-file-with-python-similar-to-tail
def tail(the_file, lines_2find=20):
    """Return (at most) the last *lines_2find* lines of an open file.

    Scans backwards from the end in 1 KiB blocks so large files are not
    read in full. The file must be seekable; end-relative seeks require it
    to be opened in binary mode on Python 3, so newlines are counted as
    ``b'\\n'`` (identical to ``'\\n'`` on Python 2).
    """
    the_file.seek(0, 2)  # go to end of file
    bytes_in_file = the_file.tell()
    lines_found, total_bytes_scanned = 0, 0
    # Scan until lines_2find+1 newlines were seen (the +1 ensures we do not
    # stop on a partial first line) or the whole file has been covered.
    while lines_2find + 1 > lines_found and bytes_in_file > total_bytes_scanned:
        byte_block = min(1024, bytes_in_file - total_bytes_scanned)
        the_file.seek(-(byte_block + total_bytes_scanned), 2)
        total_bytes_scanned += byte_block
        # Read exactly the new block; a fixed read(1024) would run past the
        # block boundary on the final short block at the start of the file
        # and recount newlines from already-scanned data.
        lines_found += the_file.read(byte_block).count(b'\n')
    the_file.seek(-total_bytes_scanned, 2)
    line_list = list(the_file.readlines())
    return line_list[-lines_2find:]
# Render a duration the way the real `uptime` command does, e.g.
# "14 days,  3:53" or "11 min".
# Based on http://thesmithfam.org/blog/2005/11/19/python-uptime-script/
def uptime(total_seconds):
    """Format *total_seconds* like the shell ``uptime`` command output."""
    total_seconds = float(total_seconds)
    MINUTE = 60
    HOUR = 60 * MINUTE
    DAY = 24 * HOUR

    days = int(total_seconds / DAY)
    hours = int((total_seconds % DAY) / HOUR)
    minutes = int((total_seconds % HOUR) / MINUTE)

    parts = []
    if days > 0:
        parts.append('%d %s, ' % (days, 'day' if days == 1 else 'days'))
    if parts or hours > 0:
        # Hours are space-padded, minutes zero-padded, as `uptime` prints.
        parts.append('%s:%s' % (str(hours).rjust(2), str(minutes).rjust(2, '0')))
    else:
        parts.append('%d min' % minutes)
    return ''.join(parts)
# vim: set sw=4 et:
| 33.176471 | 103 | 0.601773 |
import time, anydbm
from rassh.core.config import config
def addToLastlog(message):
    """Append *message* as one line to the honeypot's lastlog.txt.

    The log lives under the configured ``data_path`` directory.
    """
    path = '%s/lastlog.txt' % config().get('honeypot', 'data_path')
    # `with` guarantees the handle is closed even if the write raises;
    # `open` replaces the legacy Python-2-only `file` builtin.
    with open(path, 'a') as f:
        f.write('%s\n' % (message,))
def durationHuman(seconds):
    """Return *seconds* as a compact human-readable duration string.

    Examples: ``'00:45'`` (45 s), ``'01:01:01'`` (1 h 1 m 1 s),
    ``'1 day 00:00'`` (one day), ``'2 years '`` (years suppress the rest,
    matching the original output format).
    """
    seconds = int(round(seconds))
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    # divmod() with a float divisor (the tropical-year length) returns
    # floats, which used to render as e.g. '4.757801 days' / '2.0 years'.
    # Compute quotient and remainder explicitly and truncate back to ints.
    years = int(days // 365.242199)
    days = int(days - years * 365.242199)

    duration = []
    if years > 0:
        duration.append('%d year%s ' % (years, 's' * (years != 1)))
    else:
        if days > 0:
            duration.append('%d day%s ' % (days, 's' * (days != 1)))
        if hours > 0:
            duration.append('%s:' % str(hours).rjust(2, '0'))
        if minutes >= 0:
            duration.append('%s:' % str(minutes).rjust(2, '0'))
        if seconds >= 0:
            duration.append('%s' % str(seconds).rjust(2, '0'))
    return ''.join(duration)
def tail(the_file, lines_2find=20):
    """Return (at most) the last *lines_2find* lines of an open file.

    Scans backwards from the end in 1 KiB blocks so large files are not
    read in full. The file must be seekable; end-relative seeks require it
    to be opened in binary mode on Python 3, so newlines are counted as
    ``b'\\n'`` (identical to ``'\\n'`` on Python 2).
    """
    the_file.seek(0, 2)  # go to end of file
    bytes_in_file = the_file.tell()
    lines_found, total_bytes_scanned = 0, 0
    # Scan until lines_2find+1 newlines were seen (the +1 ensures we do not
    # stop on a partial first line) or the whole file has been covered.
    while lines_2find + 1 > lines_found and bytes_in_file > total_bytes_scanned:
        byte_block = min(1024, bytes_in_file - total_bytes_scanned)
        the_file.seek(-(byte_block + total_bytes_scanned), 2)
        total_bytes_scanned += byte_block
        # Read exactly the new block; a fixed read(1024) would run past the
        # block boundary on the final short block at the start of the file
        # and recount newlines from already-scanned data.
        lines_found += the_file.read(byte_block).count(b'\n')
    the_file.seek(-total_bytes_scanned, 2)
    line_list = list(the_file.readlines())
    return line_list[-lines_2find:]
# Render a duration the way the real `uptime` command does, e.g.
# "14 days,  3:53" or "11 min".
# Based on http://thesmithfam.org/blog/2005/11/19/python-uptime-script/
def uptime(total_seconds):
    """Format *total_seconds* like the shell ``uptime`` command output."""
    total_seconds = float(total_seconds)
    MINUTE = 60
    HOUR = 60 * MINUTE
    DAY = 24 * HOUR

    days = int(total_seconds / DAY)
    hours = int((total_seconds % DAY) / HOUR)
    minutes = int((total_seconds % HOUR) / MINUTE)

    parts = []
    if days > 0:
        parts.append('%d %s, ' % (days, 'day' if days == 1 else 'days'))
    if parts or hours > 0:
        # Hours are space-padded, minutes zero-padded, as `uptime` prints.
        parts.append('%s:%s' % (str(hours).rjust(2), str(minutes).rjust(2, '0')))
    else:
        parts.append('%d min' % minutes)
    return ''.join(parts)
# vim: set sw=4 et:
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.