after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
async def stop_notify(self, _uuid: str) -> None:
"""Deactivate notification/indication on a specified characteristic.
Args:
_uuid: The characteristic to stop notifying/indicating on.
"""
characteristic = self.services.get_characteristic(str(_uuid))
await self._bus.callRemote(
characteristic.path,
"StopNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
self._notification_callbacks.pop(characteristic.path, None)
self._subscriptions.remove(_uuid)
|
async def stop_notify(self, _uuid: str) -> None:
"""Deactivate notification/indication on a specified characteristic.
Args:
_uuid: The characteristic to stop notifying/indicating on.
"""
characteristic = self.services.get_characteristic(str(_uuid))
await self._bus.callRemote(
characteristic.path,
"StopNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
self._notification_callbacks.pop(characteristic.path, None)
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
def _properties_changed_callback(self, message):
"""Notification handler.
In the BlueZ DBus API, notifications come as
PropertiesChanged callbacks on the GATT Characteristic interface
that StartNotify has been called on.
Args:
message (): The PropertiesChanged DBus signal message relaying
the new data on the GATT Characteristic.
"""
logger.debug(
"DBUS: path: {}, domain: {}, body: {}".format(
message.path, message.body[0], message.body[1]
)
)
if message.body[0] == defs.GATT_CHARACTERISTIC_INTERFACE:
if message.path in self._notification_callbacks:
logger.info(
"GATT Char Properties Changed: {0} | {1}".format(
message.path, message.body[1:]
)
)
self._notification_callbacks[message.path](message.path, message.body[1])
elif message.body[0] == defs.DEVICE_INTERFACE:
device_path = "/org/bluez/%s/dev_%s" % (
self.device,
self.address.replace(":", "_"),
)
if message.path == device_path:
message_body_map = message.body[1]
if "Connected" in message_body_map and not message_body_map["Connected"]:
logger.debug("Device {} disconnected.".format(self.address))
self.loop.create_task(self._cleanup())
if self._disconnected_callback is not None:
self._disconnected_callback(self)
|
def _properties_changed_callback(self, message):
"""Notification handler.
In the BlueZ DBus API, notifications come as
PropertiesChanged callbacks on the GATT Characteristic interface
that StartNotify has been called on.
Args:
message (): The PropertiesChanged DBus signal message relaying
the new data on the GATT Characteristic.
"""
if message.body[0] == defs.GATT_CHARACTERISTIC_INTERFACE:
if message.path in self._notification_callbacks:
logger.info(
"GATT Char Properties Changed: {0} | {1}".format(
message.path, message.body[1:]
)
)
self._notification_callbacks[message.path](message.path, message.body[1])
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
|
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
def __init__(self):
self.main_loop = asyncio.get_event_loop()
self.main_loop.create_task(self._handle_nsrunloop())
self.main_loop.create_task(self._central_manager_delegate_ready())
self.nsrunloop = NSRunLoop.currentRunLoop()
self.central_manager_delegate = CentralManagerDelegate.alloc().init()
|
def __init__(self, address, hci_device="hci0"):
raise NotImplementedError("BleakClientCoreBluetooth not implemented yet.")
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
def rssi(self):
"""Get the signal strength in dBm"""
if isinstance(self.details, dict) and "props" in self.details:
rssi = self.details["props"].get("RSSI", 0) # Should not be set to 0...
elif hasattr(self.details, "RawSignalStrengthInDBm"):
rssi = self.details.RawSignalStrengthInDBm
elif hasattr(self.details, "Properties"):
rssi = {p.Key: p.Value for p in self.details.Properties}[
"System.Devices.Aep.SignalStrength"
]
else:
rssi = None
return int(rssi) if rssi is not None else None
|
def rssi(self):
"""Get the signal strength in dBm"""
if isinstance(self.details, dict) and "props" in self.details:
rssi = self.details["props"].get("RSSI", 0) # Should not be set to 0...
elif hasattr(self.details, "RawSignalStrengthInDBm"):
rssi = self.details.RawSignalStrengthInDBm
else:
rssi = None
return int(rssi) if rssi is not None else None
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
# Try to find the desired device.
devices = await discover(timeout=kwargs.get("timeout", 2.0), loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info.BluetoothAddress)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
# Try to find the desired device.
devices = await discover(timeout=kwargs.get("timeout", 2.0), loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info.BluetoothAddress)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def stop_notify(self, _uuid: str) -> None:
"""Deactivate notification/indication on a specified characteristic.
Args:
_uuid: The characteristic to stop notifying/indicating on.
"""
characteristic = self.services.get_characteristic(str(_uuid))
status = await wrap_IAsyncOperation(
IAsyncOperation[GattCommunicationStatus](
characteristic.obj.WriteClientCharacteristicConfigurationDescriptorAsync(
getattr(GattClientCharacteristicConfigurationDescriptorValue, "None")
)
),
return_type=GattCommunicationStatus,
loop=self.loop,
)
if status != GattCommunicationStatus.Success:
raise BleakError(
"Could not stop notify on {0}: {1}".format(characteristic.uuid, status)
)
else:
callback = self._callbacks.pop(characteristic.uuid)
self._bridge.RemoveValueChangedCallback(characteristic.obj, callback)
|
async def stop_notify(self, _uuid: str) -> None:
"""Deactivate notification/indication on a specified characteristic.
Args:
_uuid: The characteristic to stop notifying/indicating on.
"""
characteristic = self.services.get_characteristic(str(_uuid))
status = await wrap_IAsyncOperation(
IAsyncOperation[GattCommunicationStatus](
characteristic.obj.WriteClientCharacteristicConfigurationDescriptorAsync(
getattr(GattClientCharacteristicConfigurationDescriptorValue, "None")
)
),
return_type=GattCommunicationStatus,
loop=self.loop,
)
if status != GattCommunicationStatus.Success:
raise BleakError(
"Could not start notify on {0}: {1}".format(characteristic.uuid, status)
)
else:
callback = self._callbacks.pop(characteristic.uuid)
self._bridge.RemoveValueChangedCallback(characteristic.obj, callback)
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def discover(
timeout: float = 5.0, loop: AbstractEventLoop = None, **kwargs
) -> List[BLEDevice]:
"""Perform a Bluetooth LE Scan using Windows.Devices.Bluetooth.Advertisement
Args:
timeout (float): Time to scan for.
loop (Event Loop): The event loop to use.
Keyword Args:
string_output (bool): If set to false, ``discover`` returns .NET
device objects instead.
Returns:
List of strings or objects found.
"""
loop = loop if loop else asyncio.get_event_loop()
watcher = BluetoothLEAdvertisementWatcher()
devices = {}
def _format_bdaddr(a):
return ":".join("{:02X}".format(x) for x in a.to_bytes(6, byteorder="big"))
def _format_event_args(e):
try:
return "{0}: {1}".format(
_format_bdaddr(e.BluetoothAddress),
e.Advertisement.LocalName or "Unknown",
)
except Exception:
return e.BluetoothAddress
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
def AdvertisementWatcher_Stopped(sender, e):
if sender == watcher:
logger.debug(
"{0} devices found. Watcher status: {1}.".format(
len(devices), watcher.Status
)
)
watcher.Received += AdvertisementWatcher_Received
watcher.Stopped += AdvertisementWatcher_Stopped
# Watcher works outside of the Python process.
watcher.Start()
await asyncio.sleep(timeout, loop=loop)
watcher.Stop()
try:
watcher.Received -= AdvertisementWatcher_Received
watcher.Stopped -= AdvertisementWatcher_Stopped
except Exception as e:
logger.debug("Could not remove event handlers: {0}...".format(e))
found = []
for d in devices.values():
bdaddr = _format_bdaddr(d.BluetoothAddress)
uuids = []
for u in d.Advertisement.ServiceUuids:
uuids.append(u.ToString())
data = {}
for m in d.Advertisement.ManufacturerData:
md = IBuffer(m.Data)
b = Array.CreateInstance(Byte, md.Length)
reader = DataReader.FromBuffer(md)
reader.ReadBytes(b)
data[m.CompanyId] = bytes(b)
found.append(
BLEDevice(
bdaddr,
d.Advertisement.LocalName,
d,
uuids=uuids,
manufacturer_data=data,
)
)
return found
|
async def discover(
timeout: float = 5.0, loop: AbstractEventLoop = None, **kwargs
) -> List[BLEDevice]:
"""Perform a Bluetooth LE Scan.
Args:
timeout (float): Time to scan for.
loop (Event Loop): The event loop to use.
Keyword Args:
string_output (bool): If set to false, ``discover`` returns .NET
device objects instead.
Returns:
List of strings or objects found.
"""
loop = loop if loop else asyncio.get_event_loop()
watcher = BluetoothLEAdvertisementWatcher()
devices = {}
def _format_bdaddr(a):
return ":".join("{:02X}".format(x) for x in a.to_bytes(6, byteorder="big"))
def _format_event_args(e):
try:
return "{0}: {1}".format(
_format_bdaddr(e.BluetoothAddress),
e.Advertisement.LocalName or "Unknown",
)
except Exception:
return e.BluetoothAddress
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
def AdvertisementWatcher_Stopped(sender, e):
if sender == watcher:
logger.debug(
"{0} devices found. Watcher status: {1}.".format(
len(devices), watcher.Status
)
)
watcher.Received += AdvertisementWatcher_Received
watcher.Stopped += AdvertisementWatcher_Stopped
# Watcher works outside of the Python process.
watcher.Start()
await asyncio.sleep(timeout, loop=loop)
watcher.Stop()
try:
watcher.Received -= AdvertisementWatcher_Received
watcher.Stopped -= AdvertisementWatcher_Stopped
except Exception as e:
logger.debug("Could not remove event handlers: {0}...".format(e))
found = []
for d in devices.values():
bdaddr = _format_bdaddr(d.BluetoothAddress)
uuids = []
for u in d.Advertisement.ServiceUuids:
uuids.append(u.ToString())
data = {}
for m in d.Advertisement.ManufacturerData:
md = IBuffer(m.Data)
b = Array.CreateInstance(Byte, md.Length)
reader = DataReader.FromBuffer(md)
reader.ReadBytes(b)
data[m.CompanyId] = bytes(b)
found.append(
BLEDevice(
bdaddr,
d.Advertisement.LocalName,
d,
uuids=uuids,
manufacturer_data=data,
)
)
return found
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def run(address, loop, debug=False):
if debug:
import sys
loop.set_debug(True)
l = logging.getLogger("asyncio")
l.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
l.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
logger.info("Connected: {0}".format(x))
system_id = await client.read_gatt_char(SYSTEM_ID_UUID)
print(
"System ID: {0}".format(
":".join(["{:02x}".format(x) for x in system_id[::-1]])
)
)
model_number = await client.read_gatt_char(MODEL_NBR_UUID)
print("Model Number: {0}".format("".join(map(chr, model_number))))
manufacturer_name = await client.read_gatt_char(MANUFACTURER_NAME_UUID)
print("Manufacturer Name: {0}".format("".join(map(chr, manufacturer_name))))
firmware_revision = await client.read_gatt_char(FIRMWARE_REV_UUID)
print("Firmware Revision: {0}".format("".join(map(chr, firmware_revision))))
hardware_revision = await client.read_gatt_char(HARDWARE_REV_UUID)
print("Hardware Revision: {0}".format("".join(map(chr, hardware_revision))))
software_revision = await client.read_gatt_char(SOFTWARE_REV_UUID)
print("Software Revision: {0}".format("".join(map(chr, software_revision))))
battery_level = await client.read_gatt_char(BATTERY_LEVEL_UUID)
print("Battery Level: {0}%".format(int(battery_level[0])))
def keypress_handler(sender, data):
print("{0}: {1}".format(sender, data))
write_value = bytearray([0xA0])
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Pre-Write Value: {0}".format(value))
await client.write_gatt_char(IO_DATA_CHAR_UUID, write_value)
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Post-Write Value: {0}".format(value))
assert value == write_value
await client.start_notify(KEY_PRESS_UUID, keypress_handler)
await asyncio.sleep(5.0, loop=loop)
await client.stop_notify(KEY_PRESS_UUID)
|
async def run(address, loop, debug=False):
if debug:
import sys
# loop.set_debug(True)
l = logging.getLogger("asyncio")
l.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
l.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
logger.info("Connected: {0}".format(x))
system_id = await client.read_gatt_char(SYSTEM_ID_UUID)
print(
"System ID: {0}".format(
":".join(["{:02x}".format(x) for x in system_id[::-1]])
)
)
model_number = await client.read_gatt_char(MODEL_NBR_UUID)
print("Model Number: {0}".format("".join(map(chr, model_number))))
manufacturer_name = await client.read_gatt_char(MANUFACTURER_NAME_UUID)
print("Manufacturer Name: {0}".format("".join(map(chr, manufacturer_name))))
firmware_revision = await client.read_gatt_char(FIRMWARE_REV_UUID)
print("Firmware Revision: {0}".format("".join(map(chr, firmware_revision))))
hardware_revision = await client.read_gatt_char(HARDWARE_REV_UUID)
print("Hardware Revision: {0}".format("".join(map(chr, hardware_revision))))
software_revision = await client.read_gatt_char(SOFTWARE_REV_UUID)
print("Software Revision: {0}".format("".join(map(chr, software_revision))))
battery_level = await client.read_gatt_char(BATTERY_LEVEL_UUID)
print("Battery Level: {0}%".format(int(battery_level[0])))
def keypress_handler(sender, data):
print("{0}: {1}".format(sender, data))
write_value = bytearray([0xA0])
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Pre-Write Value: {0}".format(value))
await client.write_gatt_char(IO_DATA_CHAR_UUID, write_value)
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Post-Write Value: {0}".format(value))
assert value == write_value
await client.start_notify(KEY_PRESS_UUID, keypress_handler)
await asyncio.sleep(5.0, loop=loop)
await client.stop_notify(KEY_PRESS_UUID)
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def run(address, loop, debug=False):
log = logging.getLogger(__name__)
if debug:
import sys
loop.set_debug(True)
log.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
log.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
log.info("Connected: {0}".format(x))
for service in client.services:
log.info("[Service] {0}: {1}".format(service.uuid, service.description))
for char in service.characteristics:
if "read" in char.properties:
try:
value = bytes(await client.read_gatt_char(char.uuid))
except Exception as e:
value = str(e).encode()
else:
value = None
log.info(
"\t[Characteristic] {0}: ({1}) | Name: {2}, Value: {3} ".format(
char.uuid, ",".join(char.properties), char.description, value
)
)
for descriptor in char.descriptors:
value = await client.read_gatt_descriptor(descriptor.handle)
log.info(
"\t\t[Descriptor] {0}: (Handle: {1}) | Value: {2} ".format(
descriptor.uuid, descriptor.handle, bytes(value)
)
)
|
async def run(address, loop, debug=False):
log = logging.getLogger(__name__)
if debug:
import sys
# loop.set_debug(True)
log.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
log.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
log.info("Connected: {0}".format(x))
for service in client.services:
log.info("[Service] {0}: {1}".format(service.uuid, service.description))
for char in service.characteristics:
if "read" in char.properties:
try:
value = bytes(await client.read_gatt_char(char.uuid))
except Exception as e:
value = str(e).encode()
else:
value = None
log.info(
"\t[Characteristic] {0}: ({1}) | Name: {2}, Value: {3} ".format(
char.uuid, ",".join(char.properties), char.description, value
)
)
for descriptor in char.descriptors:
value = await client.read_gatt_descriptor(descriptor.handle)
log.info(
"\t\t[Descriptor] {0}: (Handle: {1}) | Value: {2} ".format(
descriptor.uuid, descriptor.handle, bytes(value)
)
)
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
cached_devices = {}
devices = {}
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
bus = await client.connect(reactor, "system").asFuture(loop)
# Add signal listeners
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
# Find the HCI device to use for scanning and get cached device properties
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
cached_devices = dict(_filter_on_device(objects))
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"SetDiscoveryFilter",
interface="org.bluez.Adapter1",
destination="org.bluez",
signature="a{sv}",
body=[{"Transport": "le"}],
).asFuture(loop)
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
if not props:
logger.debug(
"Disregarding %s since no properties could be obtained." % path
)
continue
name, address, _, path = _device_info(path, props)
uuids = props.get("UUIDs", [])
manufacturer_data = props.get("ManufacturerData", {})
discovered_devices.append(
BLEDevice(
address, name, path, uuids=uuids, manufacturer_data=manufacturer_data
)
)
return discovered_devices
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
cached_devices = {}
devices = {}
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
bus = await client.connect(reactor, "system").asFuture(loop)
# Add signal listeners
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
# Find the HCI device to use for scanning and get cached device properties
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
cached_devices = dict(_filter_on_device(objects))
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"SetDiscoveryFilter",
interface="org.bluez.Adapter1",
destination="org.bluez",
signature="a{sv}",
body=[{"Transport": "le"}],
).asFuture(loop)
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
name, address, _, path = _device_info(path, props)
uuids = props.get("UUIDs", [])
manufacturer_data = props.get("ManufacturerData", {})
discovered_devices.append(
BLEDevice(
address, name, path, uuids=uuids, manufacturer_data=manufacturer_data
)
)
return discovered_devices
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 0.1.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(
timeout=kwargs.get("timeout", 0.1), device=self.device, loop=self.loop
)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
async def connect(self) -> bool:
"""Connect to the specified GATT server.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(timeout=0.1, loop=self.loop)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services_resolved:
return self.services
while True:
properties = await self._get_device_properties()
services_resolved = properties.get("ServicesResolved", False)
if services_resolved:
break
await asyncio.sleep(0.02, loop=self.loop)
logger.debug("Get Services...")
objs = await get_managed_objects(
self._bus, self.loop, self._device_path + "/service"
)
# There is no guarantee that services are listed before characteristics
# Managed Objects dict.
# Need multiple iterations to construct the Service Collection
_chars, _descs = [], []
for object_path, interfaces in objs.items():
logger.debug(utils.format_GATT_object(object_path, interfaces))
if defs.GATT_SERVICE_INTERFACE in interfaces:
service = interfaces.get(defs.GATT_SERVICE_INTERFACE)
self.services.add_service(BleakGATTServiceBlueZDBus(service, object_path))
elif defs.GATT_CHARACTERISTIC_INTERFACE in interfaces:
char = interfaces.get(defs.GATT_CHARACTERISTIC_INTERFACE)
_chars.append([char, object_path])
elif defs.GATT_DESCRIPTOR_INTERFACE in interfaces:
desc = interfaces.get(defs.GATT_DESCRIPTOR_INTERFACE)
_descs.append([desc, object_path])
for char, object_path in _chars:
_service = list(filter(lambda x: x.path == char["Service"], self.services))
self.services.add_characteristic(
BleakGATTCharacteristicBlueZDBus(char, object_path, _service[0].uuid)
)
self._char_path_to_uuid[object_path] = char.get("UUID")
for desc, object_path in _descs:
_characteristic = list(
filter(
lambda x: x.path == desc["Characteristic"],
self.services.characteristics.values(),
)
)
self.services.add_descriptor(
BleakGATTDescriptorBlueZDBus(desc, object_path, _characteristic[0].uuid)
)
self._services_resolved = True
return self.services
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services_resolved:
return self.services
while True:
properties = await self._get_device_properties()
services_resolved = properties.get("ServicesResolved", False)
if services_resolved:
break
await asyncio.sleep(0.02, loop=self.loop)
logger.debug("Get Services...")
objs = await get_managed_objects(
self._bus, self.loop, self._device_path + "/service"
)
for object_path, interfaces in objs.items():
logger.debug(utils.format_GATT_object(object_path, interfaces))
if defs.GATT_SERVICE_INTERFACE in interfaces:
service = interfaces.get(defs.GATT_SERVICE_INTERFACE)
self.services.add_service(BleakGATTServiceBlueZDBus(service, object_path))
elif defs.GATT_CHARACTERISTIC_INTERFACE in interfaces:
char = interfaces.get(defs.GATT_CHARACTERISTIC_INTERFACE)
_service = list(filter(lambda x: x.path == char["Service"], self.services))
self.services.add_characteristic(
BleakGATTCharacteristicBlueZDBus(char, object_path, _service[0].uuid)
)
self._char_path_to_uuid[object_path] = char.get("UUID")
elif defs.GATT_DESCRIPTOR_INTERFACE in interfaces:
desc = interfaces.get(defs.GATT_DESCRIPTOR_INTERFACE)
_characteristic = list(
filter(
lambda x: x.path == desc["Characteristic"],
self.services.characteristics.values(),
)
)
self.services.add_descriptor(
BleakGATTDescriptorBlueZDBus(desc, object_path, _characteristic[0].uuid)
)
self._services_resolved = True
return self.services
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> None:
"""Perform a write operation on the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write-with-response operation should be done. Defaults to `False`.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if (
"write" not in characteristic.properties
and "write-without-response" not in characteristic.properties
):
raise BleakError(
"Characteristic %s does not support write operations!" % str(_uuid)
)
if not response and "write-without-response" not in characteristic.properties:
response = True
# Force response here, since the device only supports that.
if (
response
and "write" not in characteristic.properties
and "write-without-response" in characteristic.properties
):
response = False
logger.warning(
"Characteristic %s does not support Write with response. Trying without..."
% str(_uuid)
)
if response or (self._bluez_version[0] == 5 and self._bluez_version[1] > 50):
# TODO: Add OnValueUpdated handler for response=True?
await self._bus.callRemote(
characteristic.path,
"WriteValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="aya{sv}",
body=[data, {"type": "request" if response else "command"}],
returnSignature="",
).asFuture(self.loop)
else:
# Older versions of BlueZ don't have the "type" option, so we have
# to write the hard way. This isn't the most efficient way of doing
# things, but it works. Also, watch out for txdbus bug that causes
# returned fd to be None. https://github.com/cocagne/txdbus/pull/81
fd, _ = await self._bus.callRemote(
characteristic.path,
"AcquireWrite",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="hq",
).asFuture(self.loop)
os.write(fd, data)
os.close(fd)
logger.debug(
"Write Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, data)
)
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> None:
"""Perform a write operation on the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write-with-response operation should be done. Defaults to `False`.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if response or (self._bluez_version[0] == 5 and self._bluez_version[1] > 50):
# TODO: Add OnValueUpdated handler for response=True?
await self._bus.callRemote(
characteristic.path,
"WriteValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="aya{sv}",
body=[data, {"type": "request" if response else "command"}],
returnSignature="",
).asFuture(self.loop)
else:
# Older versions of BlueZ don't have the "type" option, so we have
# to write the hard way. This isn't the most efficient way of doing
# things, but it works. Also, watch out for txdbus bug that causes
# returned fd to be None. https://github.com/cocagne/txdbus/pull/81
fd, _ = await self._bus.callRemote(
characteristic.path,
"AcquireWrite",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="hq",
).asFuture(self.loop)
os.write(fd, data)
os.close(fd)
logger.debug(
"Write Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, data)
)
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
def _device_info(path, props):
try:
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
address = props.get("Address", None)
if address is None:
try:
address = path[-17:].replace("_", ":")
if not validate_mac_address(address):
address = None
except Exception:
address = None
rssi = props.get("RSSI", "?")
return name, address, rssi, path
except Exception as e:
logger.exception(e, exc_info=True)
return None, None, None, None
|
def _device_info(path, props):
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
address = props.get("Address", None)
if address is None:
try:
address = path[-17:].replace("_", ":")
if not validate_mac_address(address):
address = None
except Exception:
address = None
rssi = props.get("RSSI", "?")
return name, address, rssi, path
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
|
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Returns:
Boolean representing connection status.
"""
raise NotImplementedError()
|
async def connect(self) -> bool:
"""Connect to the specified GATT server.
Returns:
Boolean representing connection status.
"""
raise NotImplementedError()
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
def __init__(self, address: str, loop: AbstractEventLoop = None, **kwargs):
super(BleakClientDotNet, self).__init__(address, loop, **kwargs)
# Backend specific. Python.NET objects.
self._device_info = None
self._requester = None
self._bridge = Bridge()
self._callbacks = {}
self._address_type = (
kwargs["address_type"]
if "address_type" in kwargs and kwargs["address_type"] in ("public", "random")
else None
)
|
def __init__(self, address: str, loop: AbstractEventLoop = None, **kwargs):
super(BleakClientDotNet, self).__init__(address, loop, **kwargs)
# Backend specific. Python.NET objects.
self._device_info = None
self._requester = None
self._bridge = Bridge()
self._callbacks = {}
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
# Try to find the desired device.
devices = await discover(timeout=kwargs.get("timeout", 2.0), loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info.BluetoothAddress)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
async def connect(self) -> bool:
"""Connect to the specified GATT server.
Returns:
Boolean representing connection status.
"""
# Try to find the desired device.
devices = await discover(2.0, loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(
UInt64(self._device_info.BluetoothAddress)
)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
await self.get_services()
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> None:
"""Perform a write operation of the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write-with-response operation should be done. Defaults to `False`.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
raise BleakError("Characteristic {0} was not found!".format(_uuid))
writer = DataWriter()
writer.WriteBytes(Array[Byte](data))
response = (
GattWriteOption.WriteWithResponse
if response
else GattWriteOption.WriteWithoutResponse
)
write_result = await wrap_IAsyncOperation(
IAsyncOperation[GattWriteResult](
characteristic.obj.WriteValueWithResultAsync(
writer.DetachBuffer(), response
)
),
return_type=GattWriteResult,
loop=self.loop,
)
if write_result.Status == GattCommunicationStatus.Success:
logger.debug("Write Characteristic {0} : {1}".format(_uuid, data))
else:
raise BleakError(
"Could not write value {0} to characteristic {1}: {2}".format(
data, characteristic.uuid, write_result.Status
)
)
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> None:
"""Perform a write operation of the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write-with-response operation should be done. Defaults to `False`.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
raise BleakError("Characteristic {0} was not found!".format(_uuid))
writer = DataWriter()
writer.WriteBytes(Array[Byte](data))
response = (
GattWriteOption.WriteWithResponse
if response
else GattWriteOption.WriteWithoutResponse
)
write_result = await wrap_IAsyncOperation(
IAsyncOperation[GattWriteResult](
characteristic.obj.WriteValueWithResultAsync(
writer.DetachBuffer(), response
)
),
return_type=GattWriteResult,
loop=self.loop,
)
if write_result.Status == GattCommunicationStatus.Success:
logger.debug("Write Characteristic {0} : {1}".format(_uuid, data))
else:
raise BleakError(
"Could not write value {0} to characteristic {1}: {2}".format(
data, characteristic.uuid, write_result.Status
)
)
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def wrap_IAsyncOperation(op, return_type, loop):
"""Enables await on .NET Task using asyncio.Event and a lambda callback.
Args:
task (System.Threading.Tasks.Task): .NET async task object to await.
loop (Event Loop): The event loop to await on the Task in.
Returns:
The results of the the .NET Task.
"""
done = asyncio.Event()
# Register AsyncOperationCompletedHandler callback that triggers the above asyncio.Event.
op.Completed = AsyncOperationCompletedHandler[return_type](
lambda x, y: loop.call_soon_threadsafe(done.set)
)
# Wait for callback.
await done.wait()
if op.Status == AsyncStatus.Completed:
return op.GetResults()
elif op.Status == AsyncStatus.Error:
# Exception occurred. Wrap it in BleakDotNetTaskError
# to make it catchable.
raise BleakDotNetTaskError(op.ErrorCode.ToString())
else:
# TODO: Handle IsCancelled.
raise BleakDotNetTaskError("IAsyncOperation Status: {0}".format(op.Status))
|
async def wrap_IAsyncOperation(op, return_type, loop):
"""Enables await on .NET Task using asyncio.Event and a lambda callback.
Args:
task (System.Threading.Tasks.Task): .NET async task object
to await upon.
loop (Event Loop): The event loop to await on the Task in.
Returns:
The results of the the .NET Task.
"""
done = asyncio.Event()
# Register AsyncOperationCompletedHandler callback that triggers the above asyncio.Event.
op.Completed = AsyncOperationCompletedHandler[return_type](
lambda x, y: loop.call_soon_threadsafe(done.set)
)
# Wait for callback.
await done.wait()
if op.Status == AsyncStatus.Completed:
return op.GetResults()
elif op.Status == AsyncStatus.Error:
# Exception occurred. Wrap it in BleakDotNetTaskError
# to make it catchable.
raise BleakDotNetTaskError(op.ErrorCode.ToString())
else:
# TODO: Handle IsCancelled.
raise BleakDotNetTaskError("IAsyncOperation Status: {0}".format(op.Status))
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def run(address, loop, debug=False):
log = logging.getLogger(__name__)
if debug:
import sys
# loop.set_debug(True)
log.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
log.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
log.info("Connected: {0}".format(x))
for service in client.services:
log.info("[Service] {0}: {1}".format(service.uuid, service.description))
for char in service.characteristics:
if "read" in char.properties:
try:
value = bytes(await client.read_gatt_char(char.uuid))
except Exception as e:
value = str(e).encode()
else:
value = None
log.info(
"\t[Characteristic] {0}: ({1}) | Name: {2}, Value: {3} ".format(
char.uuid, ",".join(char.properties), char.description, value
)
)
for descriptor in char.descriptors:
value = await client.read_gatt_descriptor(descriptor.handle)
log.info(
"\t\t[Descriptor] {0}: (Handle: {1}) | Value: {2} ".format(
descriptor.uuid, descriptor.handle, bytes(value)
)
)
|
async def run(address, loop, debug=False):
log = logging.getLogger(__name__)
if debug:
import sys
# loop.set_debug(True)
log.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
log.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
log.info("Connected: {0}".format(x))
for service in client.services:
# service.obj is instance of 'Windows.Devices.Bluetooth.GenericAttributeProfile.GattDeviceService'
log.info("[Service] {0}: {1}".format(service.uuid, service.description))
for char in service.characteristics:
# char.obj is instance of 'Windows.Devices.Bluetooth.GenericAttributeProfile.GattCharacteristic'
if "read" in char.properties:
value = bytes(await client.read_gatt_char(char.uuid))
else:
value = None
log.info(
"\t[Characteristic] {0}: ({1}) | Name: {2}, Value: {3} ".format(
char.uuid, ",".join(char.properties), char.description, value
)
)
for descriptor in char.descriptors:
# descriptor.obj is instance of 'Windows.Devices.Bluetooth.GenericAttributeProfile.GattDescriptor
value = await client.read_gatt_descriptor(descriptor.handle)
log.info(
"\t\t[Descriptor] {0}: (Handle: {1}) | Value: {2} ".format(
descriptor.uuid, descriptor.handle, bytes(value)
)
)
|
https://github.com/hbldh/bleak/issues/69
|
Unhandled Error
Traceback (most recent call last):
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 141, in dataReceived
self.rawDBusMessageReceived(raw_msg)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/protocol.py", line 262, in rawDBusMessageReceived
self.signalReceived(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/client.py", line 643, in signalReceived
self.router.routeMessage(msig)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 128, in routeMessage
r.match(m)
--- <exception caught here> ---
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/txdbus/router.py", line 67, in match
self.callback(m)
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 109, in parse_msg
*_device_info(msg_path, devices.get(msg_path))
File "/home/usr/dir/virtualEnv/lib/python3.6/site-packages/bleak/backends/bluezdbus/discovery.py", line 39, in _device_info
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
builtins.AttributeError: 'NoneType' object has no attribute 'get'
|
builtins.AttributeError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 0.1.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(
timeout=kwargs.get("timeout", 0.1), device=self.device, loop=self.loop
)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 0.1.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(timeout=kwargs.get("timeout", 0.1), loop=self.loop)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
https://github.com/hbldh/bleak/issues/76
|
Traceback (most recent call last):
File "bug.py", line 25, in <module>
sys.exit(loop.run_until_complete(main(loop)))
File "/usr/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "bug.py", line 19, in main
async with MyInternalStuff(ble_addr="12:34:56:78:90:ab", adapter='hci1', loop=loop):
File "bug.py", line 12, in __aenter__
await self._dev.__aenter__()
File "/home/yggdrasil/.virtualenvs/svhsystemtests/lib/python3.7/site-packages/bleak/backends/client.py", line 41, in __aenter__
await self.connect()
File "/home/yggdrasil/.virtualenvs/svhsystemtests/lib/python3.7/site-packages/bleak/backends/bluezdbus/client.py", line 65, in connect
await discover(timeout=0.1, loop=self.loop)
File "/home/yggdrasil/.virtualenvs/svhsystemtests/lib/python3.7/site-packages/bleak/backends/bluezdbus/discovery.py", line 143, in discover
adapter_path, interface = _filter_on_adapter(objects, device)
File "/home/yggdrasil/.virtualenvs/svhsystemtests/lib/python3.7/site-packages/bleak/backends/bluezdbus/discovery.py", line 26, in _filter_on_adapter
raise Exception("Bluetooth adapter not found")
Exception: Bluetooth adapter not found
|
Exception
|
def __init__(self, address, loop=None, **kwargs):
super(BleakClientBlueZDBus, self).__init__(address, loop, **kwargs)
self.device = kwargs.get("device") if kwargs.get("device") else "hci0"
self.address = address
# Backend specific, TXDBus objects and data
self._device_path = None
self._bus = None
self._rules = {}
self._char_path_to_uuid = {}
# We need to know BlueZ version since battery level characteristic
# are stored in a separate DBus interface in the BlueZ >= 5.48.
p = subprocess.Popen(["bluetoothctl", "--version"], stdout=subprocess.PIPE)
out, _ = p.communicate()
s = re.search(b"(\\d+).(\\d+)", out.strip(b"'"))
self._bluez_version = tuple(map(int, s.groups()))
|
def __init__(self, address, loop=None, **kwargs):
super(BleakClientBlueZDBus, self).__init__(address, loop, **kwargs)
self.device = kwargs.get("device") if kwargs.get("device") else "hci0"
self.address = address
# Backend specific, TXDBus objects and data
self._device_path = None
self._bus = None
self._descriptors = {}
self._rules = {}
self._char_path_to_uuid = {}
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def connect(self) -> bool:
"""Connect to the specified GATT server.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(timeout=0.1, loop=self.loop)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
async def connect(self) -> bool:
"""Connect to the specified GATT server.
Returns:
Boolean representing connection status.
"""
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services_resolved:
return self.services
while True:
properties = await self._get_device_properties()
services_resolved = properties.get("ServicesResolved", False)
if services_resolved:
break
await asyncio.sleep(0.02, loop=self.loop)
logger.debug("Get Services...")
objs = await get_managed_objects(
self._bus, self.loop, self._device_path + "/service"
)
for object_path, interfaces in objs.items():
logger.debug(utils.format_GATT_object(object_path, interfaces))
if defs.GATT_SERVICE_INTERFACE in interfaces:
service = interfaces.get(defs.GATT_SERVICE_INTERFACE)
self.services.add_service(BleakGATTServiceBlueZDBus(service, object_path))
elif defs.GATT_CHARACTERISTIC_INTERFACE in interfaces:
char = interfaces.get(defs.GATT_CHARACTERISTIC_INTERFACE)
_service = list(filter(lambda x: x.path == char["Service"], self.services))
self.services.add_characteristic(
BleakGATTCharacteristicBlueZDBus(char, object_path, _service[0].uuid)
)
self._char_path_to_uuid[object_path] = char.get("UUID")
elif defs.GATT_DESCRIPTOR_INTERFACE in interfaces:
desc = interfaces.get(defs.GATT_DESCRIPTOR_INTERFACE)
_characteristic = list(
filter(
lambda x: x.path == desc["Characteristic"],
self.services.characteristics.values(),
)
)
self.services.add_descriptor(
BleakGATTDescriptorBlueZDBus(desc, object_path, _characteristic[0].uuid)
)
self._services_resolved = True
return self.services
|
async def get_services(self) -> dict:
"""Get all services registered for this GATT server.
Returns:
Dictionary of all service UUIDs as keys and
service object's properties as values.
"""
if self.services:
return self.services
while True:
properties = await self._get_device_properties()
services_resolved = properties.get("ServicesResolved", False)
if services_resolved:
break
await asyncio.sleep(0.02, loop=self.loop)
logger.debug("Get Services...")
objs = await get_managed_objects(
self._bus, self.loop, self._device_path + "/service"
)
self.services = {}
self.characteristics = {}
self._descriptors = {}
for object_path, interfaces in objs.items():
logger.debug(utils.format_GATT_object(object_path, interfaces))
if defs.GATT_SERVICE_INTERFACE in interfaces:
service = interfaces.get(defs.GATT_SERVICE_INTERFACE)
self.services[service.get("UUID")] = service
self.services[service.get("UUID")]["Path"] = object_path
elif defs.GATT_CHARACTERISTIC_INTERFACE in interfaces:
char = interfaces.get(defs.GATT_CHARACTERISTIC_INTERFACE)
self.characteristics[char.get("UUID")] = char
self.characteristics[char.get("UUID")]["Path"] = object_path
self._char_path_to_uuid[object_path] = char.get("UUID")
elif defs.GATT_DESCRIPTOR_INTERFACE in interfaces:
desc = interfaces.get(defs.GATT_DESCRIPTOR_INTERFACE)
self._descriptors[desc.get("UUID")] = desc
self._descriptors[desc.get("UUID")]["Path"] = object_path
self._services_resolved = True
return self.services
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def read_gatt_char(self, _uuid: str) -> bytearray:
"""Read the data on a GATT characteristic.
Args:
_uuid (str or uuid.UUID): UUID for the characteristic to read from.
Returns:
Byte array of data.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
# Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:)
# has been moved to interface org.bluez.Battery1 instead of as a regular service.
if _uuid == "00002a19-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
props = await self._get_device_properties(interface=defs.BATTERY_INTERFACE)
# Simulate regular characteristics read to be consistent over all platforms.
value = bytearray([props.get("Percentage", "")])
logger.debug(
"Read Battery Level {0} | {1}: {2}".format(
_uuid, self._device_path, value
)
)
return value
raise BleakError(
"Characteristic with UUID {0} could not be found!".format(_uuid)
)
value = bytearray(
await self._bus.callRemote(
characteristic.path,
"ReadValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="ay",
).asFuture(self.loop)
)
logger.debug(
"Read Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, value)
)
return value
|
async def read_gatt_char(self, _uuid: str) -> bytearray:
"""Read the data on a GATT characteristic.
Args:
_uuid (str or uuid.UUID): UUID for the characteristic to read from.
Returns:
Byte array of data.
"""
char_props = self.characteristics.get(str(_uuid))
if not char_props:
# TODO: Raise error instead?
return None
value = bytearray(
await self._bus.callRemote(
char_props.get("Path"),
"ReadValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="ay",
).asFuture(self.loop)
)
logger.debug(
"Read Characteristic {0} | {1}: {2}".format(
_uuid, char_props.get("Path"), value
)
)
return value
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> Any:
"""Write data to a GATT characteristic.
Args:
_uuid (str or uuid.UUID): The UUID of the GATT characteristic to write to.
data (bytearray): The data to write.
response (bool): If write with response should be done.
Returns:
None if not `response=True`, in which case a bytearray is returned.
"""
characteristic = self.services.get_characteristic(str(_uuid))
await self._bus.callRemote(
characteristic.path,
"WriteValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="aya{sv}",
body=[data, {}],
returnSignature="",
).asFuture(self.loop)
logger.debug(
"Write Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, data)
)
if response:
return await self.read_gatt_char(_uuid)
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> Any:
"""Write data to a GATT characteristic.
Args:
_uuid (str or uuid.UUID): The UUID of the GATT
characteristic to write to.
data (bytearray):
response (bool): Write with response.
Returns:
None if not `response=True`, in which case a bytearray is returned.
"""
char_props = self.characteristics.get(str(_uuid))
await self._bus.callRemote(
char_props.get("Path"),
"WriteValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="aya{sv}",
body=[data, {}],
returnSignature="",
).asFuture(self.loop)
logger.debug(
"Write Characteristic {0} | {1}: {2}".format(
_uuid, char_props.get("Path"), data
)
)
if response:
return await self.read_gatt_char(_uuid)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Starts a notification session from a characteristic.
Args:
_uuid (str or uuid.UUID): The UUID of the GATT
characteristic to start subscribing to notifications from.
callback (Callable): A function that will be called on notification.
Keyword Args:
notification_wrapper (bool): Set to `False` to avoid parsing of
notification to bytearray.
"""
_wrap = kwargs.get("notification_wrapper", True)
characteristic = self.services.get_characteristic(str(_uuid))
await self._bus.callRemote(
characteristic.path,
"StartNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
if _wrap:
self._notification_callbacks[characteristic.path] = _data_notification_wrapper(
callback, self._char_path_to_uuid
) # noqa | E123 error in flake8...
else:
self._notification_callbacks[characteristic.path] = (
_regular_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Starts a notification session from a characteristic.
Args:
_uuid (str or uuid.UUID): The UUID of the GATT
characteristic to start subscribing to notifications from.
callback (Callable): A function that will be called on notification.
Keyword Args:
notification_wrapper (bool): Set to `False` to avoid parsing of
notification to bytearray.
"""
_wrap = kwargs.get("notification_wrapper", True)
char_props = self.characteristics.get(_uuid)
await self._bus.callRemote(
char_props.get("Path"),
"StartNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
if _wrap:
self._notification_callbacks[char_props.get("Path")] = (
_data_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
else:
self._notification_callbacks[char_props.get("Path")] = (
_regular_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def stop_notify(self, _uuid: str) -> None:
"""Stops a notification session from a characteristic.
Args:
_uuid (str or uuid.UUID): The UUID of the characteristic to stop
subscribing to notifications from.
"""
characteristic = self.services.get_characteristic(str(_uuid))
await self._bus.callRemote(
characteristic.path,
"StopNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
self._notification_callbacks.pop(characteristic.path, None)
|
async def stop_notify(self, _uuid: str) -> None:
"""Stops a notification session from a characteristic.
Args:
_uuid (str or uuid.UUID): The UUID of the characteristic to stop
subscribing to notifications from.
"""
char_props = self.characteristics.get(_uuid)
await self._bus.callRemote(
char_props.get("Path"),
"StopNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
self._notification_callbacks.pop(char_props.get("Path"), None)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def get_all_for_characteristic(self, _uuid):
characteristic = self.services.get_characteristic(str(_uuid))
out = await self._bus.callRemote(
characteristic.path,
"GetAll",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="s",
body=[defs.GATT_CHARACTERISTIC_INTERFACE],
returnSignature="a{sv}",
).asFuture(self.loop)
return out
|
async def get_all_for_characteristic(self, _uuid):
char_props = self.characteristics.get(str(_uuid))
out = await self._bus.callRemote(
char_props.get("Path"),
"GetAll",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="s",
body=[defs.GATT_CHARACTERISTIC_INTERFACE],
returnSignature="a{sv}",
).asFuture(self.loop)
return out
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def _get_device_properties(self, interface=defs.DEVICE_INTERFACE):
return await self._bus.callRemote(
self._device_path,
"GetAll",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="s",
body=[interface],
returnSignature="a{sv}",
).asFuture(self.loop)
|
async def _get_device_properties(self):
return await self._bus.callRemote(
self._device_path,
"GetAll",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="s",
body=[defs.DEVICE_INTERFACE],
returnSignature="a{sv}",
).asFuture(self.loop)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def _properties_changed_callback(self, message):
"""Notification handler.
In the BlueZ DBus API, notifications come as
PropertiesChanged callbacks on the GATT Characteristic interface
that StartNotify has been called on.
Args:
message (): The PropertiesChanged DBus signal message relaying
the new data on the GATT Characteristic.
"""
if message.body[0] == defs.GATT_CHARACTERISTIC_INTERFACE:
if message.path in self._notification_callbacks:
logger.info(
"GATT Char Properties Changed: {0} | {1}".format(
message.path, message.body[1:]
)
)
self._notification_callbacks[message.path](message.path, message.body[1])
|
def _properties_changed_callback(self, message):
"""Notification handler.
If the BlueZ DBus API, notifications come as
PropertiesChanged callbacks on the GATT Characteristic interface
that StartNotify has been called on.
Args:
message (): The PropertiesChanged DBus signal message relaying
the new data on the GATT Characteristic.
"""
if message.body[0] == defs.GATT_CHARACTERISTIC_INTERFACE:
if message.path in self._notification_callbacks:
logger.info(
"GATT Char Properties Changed: {0} | {1}".format(
message.path, message.body[1:]
)
)
self._notification_callbacks[message.path](message.path, message.body[1])
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
devices = {}
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
# Find the HCI device to use for scanning.
bus = await client.connect(reactor, "system").asFuture(loop)
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
# Add signal listeners
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
await bus.addMatch(
parse_msg, interface="org.bluez.Adapter1", member="PropertyChanged"
).asFuture(loop)
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
name, address, _, path = _device_info(path, props)
discovered_devices.append(BLEDevice(address, name, path))
return discovered_devices
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
devices = {}
def parse_msg(message):
if message.member in ("InterfacesAdded", "InterfacesRemoved"):
msg_path = message.body[0]
device_interface = message.body[1].get("org.bluez.Device1", {})
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
# Find the HCI device to use for scanning.
bus = await client.connect(reactor, "system").asFuture(loop)
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
# Add signal listeners
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
await bus.addMatch(
parse_msg, interface="org.bluez.Adapter1", member="PropertyChanged"
).asFuture(loop)
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
name, address, _, path = _device_info(path, props)
discovered_devices.append(BLEDevice(address, name, path))
return discovered_devices
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
|
def parse_msg(message):
if message.member in ("InterfacesAdded", "InterfacesRemoved"):
msg_path = message.body[0]
device_interface = message.body[1].get("org.bluez.Device1", {})
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def __init__(self, address, loop=None, **kwargs):
self.address = address
self.loop = loop if loop else asyncio.get_event_loop()
self.services = BleakGATTServiceCollection()
self._services_resolved = False
self._notification_callbacks = {}
|
def __init__(self, address, loop=None, **kwargs):
self.address = address
self.loop = loop if loop else asyncio.get_event_loop()
self.services = {}
self.characteristics = {}
self._services_resolved = False
self._notification_callbacks = {}
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def get_services(self) -> BleakGATTServiceCollection:
raise NotImplementedError()
|
async def get_services(self):
raise NotImplementedError()
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def __init__(self, address: str, loop: AbstractEventLoop = None, **kwargs):
super(BleakClientDotNet, self).__init__(address, loop, **kwargs)
# Backend specific. Python.NET objects.
self._device_info = None
self._requester = None
self._bridge = Bridge()
self._callbacks = {}
|
def __init__(self, address: str, loop: AbstractEventLoop = None, **kwargs):
super(BleakClientDotNet, self).__init__(address, loop, **kwargs)
# Backend specific. Python.NET objects.
self._device_info = None
self._requester = None
self._bridge = Bridge()
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def connect(self) -> bool:
"""Connect the BleakClient to the BLE device.
Returns:
Boolean from :meth:`~is_connected`.
"""
# Try to find the desired device.
devices = await discover(2.0, loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromIdAsync(self._device_info.Id)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
await self.get_services()
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
async def connect(self) -> bool:
"""Connect the BleakClient to the BLE device.
Returns:
Boolean from :meth:`~is_connected`.
"""
# Try to find the desired device.
devices = await discover(2.0, loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
self._requester = await wrap_Task(
self._bridge.BluetoothLEDeviceFromIdAsync(self._device_info.Id),
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
await self.get_services()
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def disconnect(self) -> bool:
logger.debug("Disconnecting from BLE device...")
# Remove notifications
# TODO: Make sure all notifications are removed prior to Dispose.
# Dispose all components that we have requested and created.
for service in self.services:
# for characteristic in service.characteristics:
# for descriptor in characteristic.descriptors:
# descriptor.obj.Dispose()
# characteristic.obj.Dispose()
service.obj.Dispose()
self.services = BleakGATTServiceCollection()
self._requester.Dispose()
self._requester = None
return not await self.is_connected()
|
async def disconnect(self) -> bool:
logger.debug("Disconnecting from BLE device...")
# Remove notifications
# TODO: Make sure all notifications are removed prior to Dispose.
# Dispose all components that we have requested and created.
for service_uuid, service in self.services.items():
service.Dispose()
self.services = None
self._requester.Dispose()
self._requester = None
return not await self.is_connected()
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def get_services(self) -> BleakGATTServiceCollection:
# Return the Service Collection.
if self._services_resolved:
return self.services
else:
logger.debug("Get Services...")
services_result = await wrap_IAsyncOperation(
IAsyncOperation[GattDeviceServicesResult](
self._requester.GetGattServicesAsync()
),
return_type=GattDeviceServicesResult,
loop=self.loop,
)
if services_result.Status != GattCommunicationStatus.Success:
raise BleakDotNetTaskError("Could not get GATT services.")
# TODO: Check if fetching yeilds failures...
for service in services_result.Services:
characteristics_result = await wrap_IAsyncOperation(
IAsyncOperation[GattCharacteristicsResult](
service.GetCharacteristicsAsync()
),
return_type=GattCharacteristicsResult,
loop=self.loop,
)
self.services.add_service(BleakGATTServiceDotNet(service))
if characteristics_result.Status != GattCommunicationStatus.Success:
raise BleakDotNetTaskError(
"Could not get GATT characteristics for {0}.".format(service)
)
for characteristic in characteristics_result.Characteristics:
descriptors_result = await wrap_IAsyncOperation(
IAsyncOperation[GattDescriptorsResult](
characteristic.GetDescriptorsAsync()
),
return_type=GattDescriptorsResult,
loop=self.loop,
)
self.services.add_characteristic(
BleakGATTCharacteristicDotNet(characteristic)
)
if descriptors_result.Status != GattCommunicationStatus.Success:
raise BleakDotNetTaskError(
"Could not get GATT descriptors for {0}.".format(characteristic)
)
for descriptor in list(descriptors_result.Descriptors):
self.services.add_descriptor(
BleakGATTDescriptorDotNet(
descriptor, characteristic.Uuid.ToString()
)
)
self._services_resolved = True
return self.services
|
async def get_services(self) -> dict:
# Return a list of all services for the device.
if self.services:
return self.services
else:
logger.debug("Get Services...")
services = await wrap_Task(
self._bridge.GetGattServicesAsync(self._requester), loop=self.loop
)
if services.Status == GattCommunicationStatus.Success:
self.services = {s.Uuid.ToString(): s for s in services.Services}
else:
raise BleakDotNetTaskError("Could not get GATT services.")
# TODO: Could this be sped up?
await asyncio.gather(
*[
asyncio.ensure_future(self._get_chars(service), loop=self.loop)
for service_uuid, service in self.services.items()
]
)
self._services_resolved = True
return self.services
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def read_gatt_char(self, _uuid: str) -> bytearray:
"""Perform read operation on the specified characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to read from.
Returns:
(bytearray) The read data.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
raise BleakError("Characteristic {0} was not found!".format(_uuid))
read_result = await wrap_IAsyncOperation(
IAsyncOperation[GattReadResult](
characteristic.obj.ReadValueAsync(BluetoothCacheMode.Uncached)
),
return_type=GattReadResult,
loop=self.loop,
)
if read_result.Status == GattCommunicationStatus.Success:
reader = DataReader.FromBuffer(IBuffer(read_result.Value))
# TODO: Find better way of initializing this...
output = Array[Byte]([0] * reader.UnconsumedBufferLength)
reader.ReadBytes(output)
value = bytearray(output)
logger.debug("Read Characteristic {0} : {1}".format(_uuid, value))
else:
raise BleakError(
"Could not read characteristic value for {0}: {1}".format(
characteristic.uuid, read_result.Status
)
)
return value
|
async def read_gatt_char(self, _uuid: str) -> bytearray:
"""Perform read operation on the specified characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
Returns:
(bytearray) The read data.
"""
characteristic = self.characteristics.get(str(_uuid))
if not characteristic:
raise BleakError("Characteristic {0} was not found!".format(_uuid))
read_results = await wrap_Task(
self._bridge.ReadCharacteristicValueAsync(characteristic), loop=self.loop
)
status, value = read_results.Item1, bytearray(read_results.Item2)
if status == GattCommunicationStatus.Success:
logger.debug("Read Characteristic {0} : {1}".format(_uuid, value))
else:
raise BleakError(
"Could not read characteristic value for {0}: {1}",
characteristic.Uuid.ToString(),
status,
)
return value
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> Any:
"""Perform a write operation of the specified characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write response is desired.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
raise BleakError("Characteristic {0} was not found!".format(_uuid))
writer = DataWriter()
writer.WriteBytes(Array[Byte](data))
if response:
write_result = await wrap_IAsyncOperation(
IAsyncOperation[GattWriteResult](
characteristic.obj.WriteValueWithResultAsync(writer.DetachBuffer())
),
return_type=GattWriteResult,
loop=self.loop,
)
status = write_result.Status
else:
write_result = await wrap_IAsyncOperation(
IAsyncOperation[GattCommunicationStatus](
characteristic.obj.WriteValueAsync(writer.DetachBuffer())
),
return_type=GattCommunicationStatus,
loop=self.loop,
)
status = write_result
if status == GattCommunicationStatus.Success:
logger.debug("Write Characteristic {0} : {1}".format(_uuid, data))
else:
raise BleakError(
"Could not write value {0} to characteristic {1}: {2}".format(
data, characteristic.uuid, write_result.Status
)
)
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> Any:
"""Perform a write operation of the specified characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
data (bytes or bytearray): The data to send.
response (bool): If write response is desired.
"""
characteristic = self.characteristics.get(str(_uuid))
if not characteristic:
raise BleakError("Characteristic {0} was not found!".format(_uuid))
write_results = await wrap_Task(
self._bridge.WriteCharacteristicValueAsync(characteristic, data, response),
loop=self.loop,
)
if write_results == GattCommunicationStatus.Success:
logger.debug("Write Characteristic {0} : {1}".format(_uuid, data))
else:
raise BleakError(
"Could not write value {0} to characteristic {1}: {2}",
data,
characteristic.Uuid.ToString(),
write_results,
)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Activate notifications on a characteristic.
Callbacks must accept two inputs. The first will be a uuid string
object and the second will be a bytearray.
.. code-block:: python
def callback(sender, data):
print(f"{sender}: {data}")
client.start_notify(char_uuid, callback)
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
callback (function): The function to be called on notification.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if self._notification_callbacks.get(str(_uuid)):
await self.stop_notify(_uuid)
status = await self._start_notify(characteristic.obj, callback)
if status != GattCommunicationStatus.Success:
raise BleakError(
"Could not start notify on {0}: {1}".format(characteristic.uuid, status)
)
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Activate notifications on a characteristic.
Callbacks must accept two inputs. The first will be a uuid string
object and the second will be a bytearray.
.. code-block:: python
def callback(sender, data):
print(f"{sender}: {data}")
client.start_notify(char_uuid, callback)
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
callback (function): The function to be called on notification.
"""
characteristic = self.characteristics.get(str(_uuid))
if self._notification_callbacks.get(str(_uuid)):
await self.stop_notify(_uuid)
dotnet_callback = TypedEventHandler[GattCharacteristic, Array[Byte]](
_notification_wrapper(callback)
)
status = await wrap_Task(
self._bridge.StartNotify(characteristic, dotnet_callback), loop=self.loop
)
if status != GattCommunicationStatus.Success:
raise BleakError(
"Could not start notify on {0}: {1}",
characteristic.Uuid.ToString(),
status,
)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def stop_notify(self, _uuid: str) -> None:
"""Deactivate notification on a specified characteristic.
Args:
_uuid: The characteristic to stop notifying on.
"""
characteristic = self.services.get_characteristic(str(_uuid))
status = await wrap_IAsyncOperation(
IAsyncOperation[GattCommunicationStatus](
characteristic.obj.WriteClientCharacteristicConfigurationDescriptorAsync(
getattr(GattClientCharacteristicConfigurationDescriptorValue, "None")
)
),
return_type=GattCommunicationStatus,
loop=self.loop,
)
if status != GattCommunicationStatus.Success:
raise BleakError(
"Could not start notify on {0}: {1}".format(characteristic.uuid, status)
)
else:
callback = self._callbacks.pop(characteristic.uuid)
self._bridge.RemoveValueChangedCallback(characteristic.obj, callback)
|
async def stop_notify(self, _uuid: str) -> None:
"""Deactivate notification on a specified characteristic.
Args:
_uuid: The characteristic to stop notifying on.
"""
characteristic = self.characteristics.get(str(_uuid))
status = await wrap_Task(self._bridge.StopNotify(characteristic), loop=self.loop)
if status != GattCommunicationStatus.Success:
raise BleakError(
"Could not start notify on {0}: {1}",
characteristic.Uuid.ToString(),
status,
)
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def _notification_wrapper(func: Callable):
@wraps(func)
def dotnet_notification_parser(sender: Any, args: Any):
# Return only the UUID string representation as sender.
# Also do a conversion from System.Bytes[] to bytearray.
reader = DataReader.FromBuffer(args.CharacteristicValue)
output = Array[Byte]([0] * reader.UnconsumedBufferLength)
reader.ReadBytes(output)
return func(sender.Uuid.ToString(), bytearray(output))
return dotnet_notification_parser
|
def _notification_wrapper(func: Callable):
@wraps(func)
def dotnet_notification_parser(sender: Any, data: Any):
# Return only the UUID string representation as sender.
# Also do a conversion from System.Bytes[] to bytearray.
return func(sender.Uuid.ToString(), bytearray(data))
return dotnet_notification_parser
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def dotnet_notification_parser(sender: Any, args: Any):
# Return only the UUID string representation as sender.
# Also do a conversion from System.Bytes[] to bytearray.
reader = DataReader.FromBuffer(args.CharacteristicValue)
output = Array[Byte]([0] * reader.UnconsumedBufferLength)
reader.ReadBytes(output)
return func(sender.Uuid.ToString(), bytearray(output))
|
def dotnet_notification_parser(sender: Any, data: Any):
# Return only the UUID string representation as sender.
# Also do a conversion from System.Bytes[] to bytearray.
return func(sender.Uuid.ToString(), bytearray(data))
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def wrap_Task(task, loop):
"""Enables await on .NET Task using asyncio.Event and a lambda callback.
Args:
task (System.Threading.Tasks.Task): .NET async task object
to await upon.
loop (Event Loop): The event loop to await on the Task in.
Returns:
The results of the the .NET Task.
"""
done = asyncio.Event()
# Register Action<Task> callback that triggers the above asyncio.Event.
task.ContinueWith(Action[Task]())
# Wait for callback.
await done.wait()
# TODO: Handle IsCancelled.
if task.IsFaulted:
# Exception occurred. Wrap it in BleakDotNetTaskError
# to make it catchable.
raise BleakDotNetTaskError(task.Exception.ToString())
return task.Result
|
async def wrap_Task(task, loop):
"""Enables await on .NET Task using asyncio.Event and a lambda callback.
Args:
task (System.Threading.Tasks.Task): .NET async task object
to await upon.
loop (Event Loop): The event loop to await on the Task in.
Returns:
The results of the the .NET Task.
"""
done = asyncio.Event()
# Register Action<Task> callback that triggers the above asyncio.Event.
task.ContinueWith(Action[Task](lambda x: loop.call_soon_threadsafe(done.set)))
# Wait for callback.
await done.wait()
# TODO: Handle IsCancelled.
if task.IsFaulted:
# Exception occurred. Wrap it in BleakDotNetTaskError
# to make it catchable.
raise BleakDotNetTaskError(task.Exception.ToString())
return task.Result
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def __init__(self, operation, return_type, loop):
self.operation = IAsyncOperation[return_type](operation)
self.done = asyncio.Event()
self.return_type = return_type
self._loop = loop
|
def __init__(self, task, loop):
self._loop = loop
self.task = task
self.done = asyncio.Event()
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def __await__(self):
# Register AsyncOperationCompletedHandler callback that triggers the above asyncio.Event.
self.operation.Completed = AsyncOperationCompletedHandler[self.return_type](
lambda x, y: self._loop.call_soon_threadsafe(self.done.set)
)
yield from self.done.wait()
return self
|
def __await__(self):
def callback(task):
self._loop.call_soon_threadsafe(self.done.set)
self.task.ContinueWith(Action[Task](callback))
yield from self.done.wait()
return self
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def result(self):
if self.operation.Status == AsyncStatus.Completed:
return self.operation.GetResults()
elif self.operation.Status == AsyncStatus.Error:
# Exception occurred. Wrap it in BleakDotNetTaskError
# to make it catchable.
raise BleakDotNetTaskError(self.operation.ErrorCode.ToString())
else:
# TODO: Handle IsCancelled.
raise BleakDotNetTaskError(
"IAsyncOperation Status: {0}".format(self.operation.Status)
)
|
def result(self):
# TODO: Handle IsCancelled.
if self.task.IsFaulted:
# Exception occurred. Wrap it in BleakDotNetTaskError
# to make it catchable.
raise BleakDotNetTaskError(self.task.Exception.ToString())
return self.task.Result
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Activate notifications/indications on a characteristic.
Callbacks must accept two inputs. The first will be a uuid string
object and the second will be a bytearray.
.. code-block:: python
def callback(sender, data):
print(f"{sender}: {data}")
client.start_notify(char_uuid, callback)
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
callback (function): The function to be called on notification.
Keyword Args:
notification_wrapper (bool): Set to `False` to avoid parsing of
notification to bytearray.
"""
_wrap = kwargs.get("notification_wrapper", True)
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
# Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:)
# has been moved to interface org.bluez.Battery1 instead of as a regular service.
# The org.bluez.Battery1 on the other hand does not provide a notification method, so here we cannot
# provide this functionality...
# See https://kernel.googlesource.com/pub/scm/bluetooth/bluez/+/refs/tags/5.48/doc/battery-api.txt
if _uuid == "00002a19-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
raise BleakError(
"Notifications on Battery Level Char ({0}) is not "
"possible in BlueZ >= 5.48. Use regular read instead.".format(_uuid)
)
raise BleakError(
"Characteristic with UUID {0} could not be found!".format(_uuid)
)
await self._bus.callRemote(
characteristic.path,
"StartNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
if _wrap:
self._notification_callbacks[characteristic.path] = _data_notification_wrapper(
callback, self._char_path_to_uuid
) # noqa | E123 error in flake8...
else:
self._notification_callbacks[characteristic.path] = (
_regular_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Activate notifications/indications on a characteristic.
Callbacks must accept two inputs. The first will be a uuid string
object and the second will be a bytearray.
.. code-block:: python
def callback(sender, data):
print(f"{sender}: {data}")
client.start_notify(char_uuid, callback)
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
callback (function): The function to be called on notification.
Keyword Args:
notification_wrapper (bool): Set to `False` to avoid parsing of
notification to bytearray.
"""
_wrap = kwargs.get("notification_wrapper", True)
characteristic = self.services.get_characteristic(str(_uuid))
await self._bus.callRemote(
characteristic.path,
"StartNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
if _wrap:
self._notification_callbacks[characteristic.path] = _data_notification_wrapper(
callback, self._char_path_to_uuid
) # noqa | E123 error in flake8...
else:
self._notification_callbacks[characteristic.path] = (
_regular_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
|
https://github.com/hbldh/bleak/issues/55
|
Traceback (most recent call last):
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 572, in <module>
loop.run_until_complete(connect_device_bleak(address, loop))
File "/usr/lib/python3.6/asyncio/base_events.py", line 473, in run_until_complete
return future.result()
File "/home/jonas/Schreibtisch/TMRTFlib.py", line 552, in connect_device_bleak
async with BleakClient(address, loop=loop) as client:
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/client.py", line 35, in __aenter__
await self.connect()
File "/home/jonas/.local/lib/python3.6/site-packages/bleak/backends/bluezdbus/client.py", line 76, in connect
raise BleakError(str(e))
bleak.exc.BleakError: org.freedesktop.DBus.Error.UnknownObject: Method "Connect" with signature "" on interface "org.bluez.Device1" doesn't exist
|
bleak.exc.BleakError
|
def _on_message(body, message):
try:
branch = body["payload"]["data"]["repo_url"].split("/")[-1]
rev = body["payload"]["data"]["heads"][0]
if branch in ["autoland", "try"]:
user = body["payload"]["data"]["pushlog_pushes"][0]["user"]
if user in ("reviewbot", "wptsync@mozilla.com"):
return
url = "{}/push/{}/{}/schedules".format(BUGBUG_HTTP_SERVER, branch, rev)
response = requests.get(url, headers={"X-Api-Key": "pulse_listener"})
if response.status_code == 202:
logger.info("Successfully requested {}/{}".format(branch, rev))
else:
logger.warning(
"We got status: {} for: {}".format(response.status_code, url)
)
except Exception:
traceback.print_exc()
finally:
message.ack()
|
def _on_message(body, message):
try:
branch = body["payload"]["data"]["repo_url"].split("/")[-1]
rev = body["payload"]["data"]["heads"][0]
if branch in ["autoland", "try"]:
user = body["payload"]["data"]["pushlog_pushes"][0]["user"]
if user in ("reviewbot", "wptsync@mozilla.com"):
return
url = "{}/push/{}/{}/schedules".format(BUGBUG_HTTP_SERVER, branch, rev)
response = requests.get(url, headers={"X-Api-Key": "pulse_listener"})
if response.status_code == 202:
logger.info("Successfully requested {}/{}".format(branch, rev))
else:
logger.warning(
"We got status: {} for: {}".format(response.status_code, url)
)
except Exception as e:
traceback.print_tb(e)
finally:
message.ack()
|
https://github.com/mozilla/bugbug/issues/1738
|
Jul 29 09:37:20 bugbug app/web.1: Traceback (most recent call last):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py", line 69, in _on_message
Jul 29 09:37:20 bugbug app/web.1: user = body["payload"]["data"]["pushlog_pushes"][0]["user"]
Jul 29 09:37:20 bugbug app/web.1: IndexError: list index out of range
Jul 29 09:37:20 bugbug app/web.1: During handling of the above exception, another exception occurred:
Jul 29 09:37:20 bugbug app/web.1: Traceback (most recent call last):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/bin/bugbug-http-pulse-listener", line 8, in <module>
Jul 29 09:37:20 bugbug app/web.1: sys.exit(main())
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py", line 94, in main
Jul 29 09:37:20 bugbug app/web.1: consumer.run()
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/mixins.py", line 175, in run
Jul 29 09:37:20 bugbug app/web.1: for _ in self.consume(limit=None, **kwargs):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/mixins.py", line 197, in consume
Jul 29 09:37:20 bugbug app/web.1: conn.drain_events(timeout=safety_interval)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/connection.py", line 324, in drain_events
Jul 29 09:37:20 bugbug app/web.1: return self.transport.drain_events(self.connection, **kwargs)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/transport/pyamqp.py", line 103, in drain_events
Jul 29 09:37:20 bugbug app/web.1: return connection.drain_events(**kwargs)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/connection.py", line 508, in drain_events
Jul 29 09:37:20 bugbug app/web.1: while not self.blocking_read(timeout):
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/connection.py", line 514, in blocking_read
Jul 29 09:37:20 bugbug app/web.1: return self.on_inbound_frame(frame)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/method_framing.py", line 79, in on_frame
Jul 29 09:37:20 bugbug app/web.1: callback(channel, msg.frame_method, msg.frame_args, msg)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/connection.py", line 520, in on_inbound_method
Jul 29 09:37:20 bugbug app/web.1: return self.channels[channel_id].dispatch_method(
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/abstract_channel.py", line 145, in dispatch_method
Jul 29 09:37:20 bugbug app/web.1: listener(*args)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/amqp/channel.py", line 1615, in _on_basic_deliver
Jul 29 09:37:20 bugbug app/web.1: fun(msg)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 624, in _receive_callback
Jul 29 09:37:20 bugbug app/web.1: return on_m(message) if on_m else self.receive(decoded, message)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 590, in receive
Jul 29 09:37:20 bugbug app/web.1: [callback(body, message) for callback in callbacks]
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 590, in <listcomp>
Jul 29 09:37:20 bugbug app/web.1: [callback(body, message) for callback in callbacks]
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/site-packages/bugbug_http/listener.py", line 82, in _on_message
Jul 29 09:37:20 bugbug app/web.1: traceback.print_tb(e)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 53, in print_tb
Jul 29 09:37:20 bugbug app/web.1: print_list(extract_tb(tb, limit=limit), file=file)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 72, in extract_tb
Jul 29 09:37:20 bugbug app/web.1: return StackSummary.extract(walk_tb(tb), limit=limit)
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 347, in extract
Jul 29 09:37:20 bugbug app/web.1: for f, lineno in frame_gen:
Jul 29 09:37:20 bugbug app/web.1: File "/usr/local/lib/python3.8/traceback.py", line 312, in walk_tb
Jul 29 09:37:20 bugbug app/web.1: yield tb.tb_frame, tb.tb_lineno
Jul 29 09:37:20 bugbug app/web.1: AttributeError: 'IndexError' object has no attribute 'tb_frame'
|
IndexError
|
def main():
parser = argparse.ArgumentParser(description="Spawn tasks for bugbug data pipeline")
parser.add_argument("data_pipeline_json")
args = parser.parse_args()
decision_task_id = os.environ.get("TASK_ID")
options = get_taskcluster_options()
add_self = False
if decision_task_id:
add_self = True
task_group_id = decision_task_id
else:
task_group_id = taskcluster.utils.slugId()
keys = {"taskGroupId": task_group_id}
id_mapping = {}
# First pass, do the template rendering and dependencies resolution
tasks = []
with open(args.data_pipeline_json) as pipeline_file:
raw_tasks = yaml.safe_load(pipeline_file.read())
version = os.getenv("TAG", "latest")
context = {"version": version}
rendered = jsone.render(raw_tasks, context)
for task in rendered["tasks"]:
# We need to generate new unique task ids for taskcluster to be happy
# but need to identify dependencies across tasks. So we create a
# mapping between an internal ID and the generate ID
task_id = taskcluster.utils.slugId()
task_internal_id = task["ID"]
if task_internal_id in id_mapping:
raise ValueError(f"Conflicting IDs {task_internal_id}")
# Store each task ID in the id_mapping dictionary before processing dependencies.
# This way, tasks can be defined in any order.
id_mapping[task_internal_id] = task_id
for task in rendered["tasks"]:
task_internal_id = task.pop("ID")
task_id = id_mapping[task_internal_id]
for key, value in keys.items():
task[key] = value
task_payload = task["payload"]
if "env" in task_payload and task_payload["env"]:
task_payload["env"]["TAG"] = version
else:
task_payload["env"] = {
"TAG": version,
}
# Process the dependencies
new_dependencies = []
for dependency in task.get("dependencies", []):
new_dependencies.append(id_mapping[dependency])
if add_self:
new_dependencies.append(decision_task_id)
task["dependencies"] = new_dependencies
tasks.append((task_id, task))
# Now sends them
queue = taskcluster.Queue(options)
try:
for task_id, task_payload in tasks:
queue.createTask(task_id, task_payload)
print(f"https://community-tc.services.mozilla.com/tasks/groups/{task_group_id}")
except taskcluster.exceptions.TaskclusterAuthFailure as e:
print(f"TaskclusterAuthFailure: {e.body}", file=sys.stderr)
raise
|
def main():
parser = argparse.ArgumentParser(description="Spawn tasks for bugbug data pipeline")
parser.add_argument("data_pipeline_json")
args = parser.parse_args()
decision_task_id = os.environ.get("TASK_ID")
options = get_taskcluster_options()
add_self = False
if decision_task_id:
add_self = True
task_group_id = decision_task_id
else:
task_group_id = taskcluster.utils.slugId()
keys = {"taskGroupId": task_group_id}
id_mapping = {}
# First pass, do the template rendering and dependencies resolution
tasks = []
with open(args.data_pipeline_json) as pipeline_file:
raw_tasks = yaml.safe_load(pipeline_file.read())
version = os.getenv("TAG", "latest")
context = {"version": version}
rendered = jsone.render(raw_tasks, context)
for task in rendered["tasks"]:
# We need to generate new unique task ids for taskcluster to be happy
# but need to identify dependencies across tasks. So we create a
# mapping between an internal ID and the generate ID
task_id = taskcluster.utils.slugId()
task_internal_id = task.pop("ID")
if task_internal_id in id_mapping:
raise ValueError(f"Conflicting IDs {task_internal_id}")
id_mapping[task_internal_id] = task_id
for key, value in keys.items():
task[key] = value
task_payload = task["payload"]
if "env" in task_payload and task_payload["env"]:
task_payload["env"]["TAG"] = version
else:
task_payload["env"] = {
"TAG": version,
}
# Process the dependencies
new_dependencies = []
for dependency in task.get("dependencies", []):
new_dependencies.append(id_mapping[dependency])
if add_self:
new_dependencies.append(decision_task_id)
task["dependencies"] = new_dependencies
tasks.append((task_id, task))
# Now sends them
queue = taskcluster.Queue(options)
try:
for task_id, task_payload in tasks:
queue.createTask(task_id, task_payload)
print(f"https://community-tc.services.mozilla.com/tasks/groups/{task_group_id}")
except taskcluster.exceptions.TaskclusterAuthFailure as e:
print(f"TaskclusterAuthFailure: {e.body}", file=sys.stderr)
raise
|
https://github.com/mozilla/bugbug/issues/1282
|
Traceback (most recent call last):
File "/code/spawn_pipeline.py", line 132, in <module>
main()
File "/code/spawn_pipeline.py", line 110, in main
new_dependencies.append(id_mapping[dependency])
KeyError: 'regressor-finder'
|
KeyError
|
def contracted_edge(G, edge, self_loops=True):
"""Returns the graph that results from contracting the specified edge.
Edge contraction identifies the two endpoints of the edge as a single node
incident to any edge that was incident to the original two nodes. A graph
that results from edge contraction is called a *minor* of the original
graph.
Parameters
----------
G : NetworkX graph
The graph whose edge will be contracted.
edge : tuple
Must be a pair of nodes in `G`.
self_loops : Boolean
If this is True, any edges (including `edge`) joining the
endpoints of `edge` in `G` become self-loops on the new node in the
returned graph.
Returns
-------
Networkx graph
A new graph object of the same type as `G` (leaving `G` unmodified)
with endpoints of `edge` identified in a single node. The right node
of `edge` will be merged into the left one, so only the left one will
appear in the returned graph.
Raises
------
ValueError
If `edge` is not an edge in `G`.
Examples
--------
Attempting to contract two nonadjacent nodes yields an error::
>>> G = nx.cycle_graph(4)
>>> nx.contracted_edge(G, (1, 3))
Traceback (most recent call last):
...
ValueError: Edge (1, 3) does not exist in graph G; cannot contract it
Contracting two adjacent nodes in the cycle graph on *n* nodes yields the
cycle graph on *n - 1* nodes::
>>> C5 = nx.cycle_graph(5)
>>> C4 = nx.cycle_graph(4)
>>> M = nx.contracted_edge(C5, (0, 1), self_loops=False)
>>> nx.is_isomorphic(M, C4)
True
See also
--------
contracted_nodes
quotient_graph
"""
u, v = edge[:2]
if not G.has_edge(u, v):
raise ValueError(f"Edge {edge} does not exist in graph G; cannot contract it")
return contracted_nodes(G, u, v, self_loops=self_loops)
|
def contracted_edge(G, edge, self_loops=True):
"""Returns the graph that results from contracting the specified edge.
Edge contraction identifies the two endpoints of the edge as a single node
incident to any edge that was incident to the original two nodes. A graph
that results from edge contraction is called a *minor* of the original
graph.
Parameters
----------
G : NetworkX graph
The graph whose edge will be contracted.
edge : tuple
Must be a pair of nodes in `G`.
self_loops : Boolean
If this is True, any edges (including `edge`) joining the
endpoints of `edge` in `G` become self-loops on the new node in the
returned graph.
Returns
-------
Networkx graph
A new graph object of the same type as `G` (leaving `G` unmodified)
with endpoints of `edge` identified in a single node. The right node
of `edge` will be merged into the left one, so only the left one will
appear in the returned graph.
Raises
------
ValueError
If `edge` is not an edge in `G`.
Examples
--------
Attempting to contract two nonadjacent nodes yields an error::
>>> G = nx.cycle_graph(4)
>>> nx.contracted_edge(G, (1, 3))
Traceback (most recent call last):
...
ValueError: Edge (1, 3) does not exist in graph G; cannot contract it
Contracting two adjacent nodes in the cycle graph on *n* nodes yields the
cycle graph on *n - 1* nodes::
>>> C5 = nx.cycle_graph(5)
>>> C4 = nx.cycle_graph(4)
>>> M = nx.contracted_edge(C5, (0, 1), self_loops=False)
>>> nx.is_isomorphic(M, C4)
True
See also
--------
contracted_nodes
quotient_graph
"""
if not G.has_edge(*edge):
raise ValueError(f"Edge {edge} does not exist in graph G; cannot contract it")
return contracted_nodes(G, *edge, self_loops=self_loops)
|
https://github.com/networkx/networkx/issues/3139
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-220-465f568dfaa5> in <module>()
1 g = nx.complete_graph(5, create_using=nx.MultiGraph())
----> 2 nx.contracted_edge(g, random.choice(list(g.edges)))
~/.virtualenvs/bernoulli/lib/python3.6/site-packages/networkx/algorithms/minors.py in contracted_edge(G, edge, self_loops)
451 raise ValueError('Edge {0} does not exist in graph G; cannot contract'
452 ' it'.format(edge))
--> 453 return contracted_nodes(G, *edge, self_loops=self_loops)
TypeError: contracted_nodes() got multiple values for argument 'self_loops'
|
TypeError
|
def read_shp(path, simplify=True, geom_attrs=True, strict=True):
"""Generates a networkx.DiGraph from shapefiles. Point geometries are
translated into nodes, lines into edges. Coordinate tuples are used as
keys. Attributes are preserved, line geometries are simplified into start
and end coordinates. Accepts a single shapefile or directory of many
shapefiles.
"The Esri Shapefile or simply a shapefile is a popular geospatial vector
data format for geographic information systems software [1]_."
Parameters
----------
path : file or string
File, directory, or filename to read.
simplify: bool
If True, simplify line geometries to start and end coordinates.
If False, and line feature geometry has multiple segments, the
non-geometric attributes for that feature will be repeated for each
edge comprising that feature.
geom_attrs: bool
If True, include the Wkb, Wkt and Json geometry attributes with
each edge.
NOTE: if these attributes are available, write_shp will use them
to write the geometry. If nodes store the underlying coordinates for
the edge geometry as well (as they do when they are read via
this method) and they change, your geomety will be out of sync.
strict: bool
If True, raise NetworkXError when feature geometry is missing or
GeometryType is not supported.
If False, silently ignore missing or unsupported geometry in features.
Returns
-------
G : NetworkX graph
Raises
------
ImportError
If ogr module is not available.
RuntimeError
If file cannot be open or read.
NetworkXError
If strict=True and feature is missing geometry or GeometryType is
not supported.
Examples
--------
>>> G=nx.read_shp('test.shp') # doctest: +SKIP
References
----------
.. [1] https://en.wikipedia.org/wiki/Shapefile
"""
try:
from osgeo import ogr
except ImportError:
raise ImportError("read_shp requires OGR: http://www.gdal.org/")
if not isinstance(path, str):
return
net = nx.DiGraph()
shp = ogr.Open(path)
if shp is None:
raise RuntimeError("Unable to open {}".format(path))
for lyr in shp:
fields = [x.GetName() for x in lyr.schema]
for f in lyr:
g = f.geometry()
if g is None:
if strict:
raise nx.NetworkXError("Bad data: feature missing geometry")
else:
continue
flddata = [f.GetField(f.GetFieldIndex(x)) for x in fields]
attributes = dict(zip(fields, flddata))
attributes["ShpName"] = lyr.GetName()
# Note: Using layer level geometry type
if g.GetGeometryType() == ogr.wkbPoint:
net.add_node((g.GetPoint_2D(0)), **attributes)
elif g.GetGeometryType() in (ogr.wkbLineString, ogr.wkbMultiLineString):
for edge in edges_from_line(g, attributes, simplify, geom_attrs):
e1, e2, attr = edge
net.add_edge(e1, e2)
net[e1][e2].update(attr)
else:
if strict:
raise nx.NetworkXError(
"GeometryType {} not supported".format(g.GetGeometryType())
)
return net
|
def read_shp(path, simplify=True, geom_attrs=True):
"""Generates a networkx.DiGraph from shapefiles. Point geometries are
translated into nodes, lines into edges. Coordinate tuples are used as
keys. Attributes are preserved, line geometries are simplified into start
and end coordinates. Accepts a single shapefile or directory of many
shapefiles.
"The Esri Shapefile or simply a shapefile is a popular geospatial vector
data format for geographic information systems software [1]_."
Parameters
----------
path : file or string
File, directory, or filename to read.
simplify: bool
If True, simplify line geometries to start and end coordinates.
If False, and line feature geometry has multiple segments, the
non-geometric attributes for that feature will be repeated for each
edge comprising that feature.
geom_attrs: bool
If True, include the Wkb, Wkt and Json geometry attributes with
each edge.
NOTE: if these attributes are available, write_shp will use them
to write the geometry. If nodes store the underlying coordinates for
the edge geometry as well (as they do when they are read via
this method) and they change, your geomety will be out of sync.
Returns
-------
G : NetworkX graph
Examples
--------
>>> G=nx.read_shp('test.shp') # doctest: +SKIP
References
----------
.. [1] https://en.wikipedia.org/wiki/Shapefile
"""
try:
from osgeo import ogr
except ImportError:
raise ImportError("read_shp requires OGR: http://www.gdal.org/")
if not isinstance(path, str):
return
net = nx.DiGraph()
shp = ogr.Open(path)
for lyr in shp:
fields = [x.GetName() for x in lyr.schema]
for f in lyr:
flddata = [f.GetField(f.GetFieldIndex(x)) for x in fields]
g = f.geometry()
attributes = dict(zip(fields, flddata))
attributes["ShpName"] = lyr.GetName()
# Note: Using layer level geometry type
if g.GetGeometryType() == ogr.wkbPoint:
net.add_node((g.GetPoint_2D(0)), **attributes)
elif g.GetGeometryType() in (ogr.wkbLineString, ogr.wkbMultiLineString):
for edge in edges_from_line(g, attributes, simplify, geom_attrs):
e1, e2, attr = edge
net.add_edge(e1, e2)
net[e1][e2].update(attr)
else:
raise ImportError(
"GeometryType {} not supported".format(g.GetGeometryType())
)
return net
|
https://github.com/networkx/networkx/issues/2451
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-33-2307d2480ccb> in <module>()
1 # load shapefile into a digraph
----> 2 g = nx.read_shp(shapefile, simplify=False)
/usr/local/anaconda/lib/python3.5/site-packages/networkx/readwrite/nx_shp.py in read_shp(path, simplify)
74 attributes = dict(zip(fields, flddata))
75 attributes["ShpName"] = lyr.GetName()
---> 76 if g.GetGeometryType() == 1: # point
77 net.add_node((g.GetPoint_2D(0)), attributes)
78 if g.GetGeometryType() == 2: # linestring
AttributeError: 'NoneType' object has no attribute 'GetGeometryType'
|
AttributeError
|
def write_shp(G, outdir):
"""Writes a networkx.DiGraph to two shapefiles, edges and nodes.
Nodes and edges are expected to have a Well Known Binary (Wkb) or
Well Known Text (Wkt) key in order to generate geometries. Also
acceptable are nodes with a numeric tuple key (x,y).
"The Esri Shapefile or simply a shapefile is a popular geospatial vector
data format for geographic information systems software [1]_."
Parameters
----------
outdir : directory path
Output directory for the two shapefiles.
Returns
-------
None
Examples
--------
nx.write_shp(digraph, '/shapefiles') # doctest +SKIP
References
----------
.. [1] https://en.wikipedia.org/wiki/Shapefile
"""
try:
from osgeo import ogr
except ImportError:
raise ImportError("write_shp requires OGR: http://www.gdal.org/")
# easier to debug in python if ogr throws exceptions
ogr.UseExceptions()
def netgeometry(key, data):
if "Wkb" in data:
geom = ogr.CreateGeometryFromWkb(data["Wkb"])
elif "Wkt" in data:
geom = ogr.CreateGeometryFromWkt(data["Wkt"])
elif type(key[0]).__name__ == "tuple": # edge keys are packed tuples
geom = ogr.Geometry(ogr.wkbLineString)
_from, _to = key[0], key[1]
try:
geom.SetPoint(0, *_from)
geom.SetPoint(1, *_to)
except TypeError:
# assume user used tuple of int and choked ogr
_ffrom = [float(x) for x in _from]
_fto = [float(x) for x in _to]
geom.SetPoint(0, *_ffrom)
geom.SetPoint(1, *_fto)
else:
geom = ogr.Geometry(ogr.wkbPoint)
try:
geom.SetPoint(0, *key)
except TypeError:
# assume user used tuple of int and choked ogr
fkey = [float(x) for x in key]
geom.SetPoint(0, *fkey)
return geom
# Create_feature with new optional attributes arg (should be dict type)
def create_feature(geometry, lyr, attributes=None):
feature = ogr.Feature(lyr.GetLayerDefn())
feature.SetGeometry(g)
if attributes is not None:
# Loop through attributes, assigning data to each field
for field, data in attributes.items():
feature.SetField(field, data)
lyr.CreateFeature(feature)
feature.Destroy()
drv = ogr.GetDriverByName("ESRI Shapefile")
shpdir = drv.CreateDataSource(outdir)
# delete pre-existing output first otherwise ogr chokes
try:
shpdir.DeleteLayer("nodes")
except:
pass
nodes = shpdir.CreateLayer("nodes", None, ogr.wkbPoint)
for n in G:
data = G.nodes[n]
g = netgeometry(n, data)
create_feature(g, nodes)
try:
shpdir.DeleteLayer("edges")
except:
pass
edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString)
# New edge attribute write support merged into edge loop
fields = {} # storage for field names and their data types
# Conversion dict between python and ogr types
OGRTypes = {int: ogr.OFTInteger, str: ogr.OFTString, float: ogr.OFTReal}
# Edge loop
for e in G.edges(data=True):
attributes = {} # storage for attribute data (indexed by field names)
data = G.get_edge_data(*e)
g = netgeometry(e, data)
# Loop through attribute data in edges
for key, data in e[2].items():
# Reject spatial data not required for attribute table
if key != "Json" and key != "Wkt" and key != "Wkb" and key != "ShpName":
# For all edges check/add field and data type to fields dict
if key not in fields:
# Field not in previous edges so add to dict
if type(data) in OGRTypes:
fields[key] = OGRTypes[type(data)]
else:
# Data type not supported, default to string (char 80)
fields[key] = ogr.OFTString
# Create the new field
newfield = ogr.FieldDefn(key, fields[key])
edges.CreateField(newfield)
# Store the data from new field to dict for CreateLayer()
attributes[key] = data
else:
# Field already exists, add data to dict for CreateLayer()
attributes[key] = data
# Create the feature with, passing new attribute data
create_feature(g, edges, attributes)
nodes, edges = None, None
|
def write_shp(G, outdir):
"""Writes a networkx.DiGraph to two shapefiles, edges and nodes.
Nodes and edges are expected to have a Well Known Binary (Wkb) or
Well Known Text (Wkt) key in order to generate geometries. Also
acceptable are nodes with a numeric tuple key (x,y).
"The Esri Shapefile or simply a shapefile is a popular geospatial vector
data format for geographic information systems software [1]_."
Parameters
----------
outdir : directory path
Output directory for the two shapefiles.
Returns
-------
None
Examples
--------
nx.write_shp(digraph, '/shapefiles') # doctest +SKIP
References
----------
.. [1] https://en.wikipedia.org/wiki/Shapefile
"""
try:
from osgeo import ogr
except ImportError:
raise ImportError("write_shp requires OGR: http://www.gdal.org/")
# easier to debug in python if ogr throws exceptions
ogr.UseExceptions()
def netgeometry(key, data):
if "Wkb" in data:
geom = ogr.CreateGeometryFromWkb(data["Wkb"])
elif "Wkt" in data:
geom = ogr.CreateGeometryFromWkt(data["Wkt"])
elif type(key[0]).__name__ == "tuple": # edge keys are packed tuples
geom = ogr.Geometry(ogr.wkbLineString)
_from, _to = key[0], key[1]
try:
geom.SetPoint(0, *_from)
geom.SetPoint(1, *_to)
except TypeError:
# assume user used tuple of int and choked ogr
_ffrom = [float(x) for x in _from]
_fto = [float(x) for x in _to]
geom.SetPoint(0, *_ffrom)
geom.SetPoint(1, *_fto)
else:
geom = ogr.Geometry(ogr.wkbPoint)
try:
geom.SetPoint(0, *key)
except TypeError:
# assume user used tuple of int and choked ogr
fkey = [float(x) for x in key]
geom.SetPoint(0, *fkey)
return geom
# Create_feature with new optional attributes arg (should be dict type)
def create_feature(geometry, lyr, attributes=None):
feature = ogr.Feature(lyr.GetLayerDefn())
feature.SetGeometry(g)
if attributes is not None:
# Loop through attributes, assigning data to each field
for field, data in attributes.items():
feature.SetField(field, data)
lyr.CreateFeature(feature)
feature.Destroy()
drv = ogr.GetDriverByName("ESRI Shapefile")
shpdir = drv.CreateDataSource(outdir)
# delete pre-existing output first otherwise ogr chokes
try:
shpdir.DeleteLayer("nodes")
except:
pass
nodes = shpdir.CreateLayer("nodes", None, ogr.wkbPoint)
for n in G:
data = G.nodes[n]
g = netgeometry(n, data)
create_feature(g, nodes)
try:
shpdir.DeleteLayer("edges")
except:
pass
edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString)
# New edge attribute write support merged into edge loop
fields = {} # storage for field names and their data types
attributes = {} # storage for attribute data (indexed by field names)
# Conversion dict between python and ogr types
OGRTypes = {int: ogr.OFTInteger, str: ogr.OFTString, float: ogr.OFTReal}
# Edge loop
for e in G.edges(data=True):
data = G.get_edge_data(*e)
g = netgeometry(e, data)
# Loop through attribute data in edges
for key, data in e[2].items():
# Reject spatial data not required for attribute table
if key != "Json" and key != "Wkt" and key != "Wkb" and key != "ShpName":
# For all edges check/add field and data type to fields dict
if key not in fields:
# Field not in previous edges so add to dict
if type(data) in OGRTypes:
fields[key] = OGRTypes[type(data)]
else:
# Data type not supported, default to string (char 80)
fields[key] = ogr.OFTString
# Create the new field
newfield = ogr.FieldDefn(key, fields[key])
edges.CreateField(newfield)
# Store the data from new field to dict for CreateLayer()
attributes[key] = data
else:
# Field already exists, add data to dict for CreateLayer()
attributes[key] = data
# Create the feature with, passing new attribute data
create_feature(g, edges, attributes)
nodes, edges = None, None
|
https://github.com/networkx/networkx/issues/2451
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-33-2307d2480ccb> in <module>()
1 # load shapefile into a digraph
----> 2 g = nx.read_shp(shapefile, simplify=False)
/usr/local/anaconda/lib/python3.5/site-packages/networkx/readwrite/nx_shp.py in read_shp(path, simplify)
74 attributes = dict(zip(fields, flddata))
75 attributes["ShpName"] = lyr.GetName()
---> 76 if g.GetGeometryType() == 1: # point
77 net.add_node((g.GetPoint_2D(0)), attributes)
78 if g.GetGeometryType() == 2: # linestring
AttributeError: 'NoneType' object has no attribute 'GetGeometryType'
|
AttributeError
|
def literal_stringizer(value):
"""Convert a `value` to a Python literal in GML representation.
Parameters
----------
value : object
The `value` to be converted to GML representation.
Returns
-------
rep : string
A double-quoted Python literal representing value. Unprintable
characters are replaced by XML character references.
Raises
------
ValueError
If `value` cannot be converted to GML.
Notes
-----
`literal_stringizer` is largely the same as `repr` in terms of
functionality but attempts prefix `unicode` and `bytes` literals with
`u` and `b` to provide better interoperability of data generated by
Python 2 and Python 3.
The original value can be recovered using the
:func:`networkx.readwrite.gml.literal_destringizer` function.
"""
def stringize(value):
if isinstance(value, (int, long, bool)) or value is None:
if value is True: # GML uses 1/0 for boolean values.
buf.write(str(1))
elif value is False:
buf.write(str(0))
else:
buf.write(str(value))
elif isinstance(value, unicode):
text = repr(value)
if text[0] != "u":
try:
value.encode("latin1")
except UnicodeEncodeError:
text = "u" + text
buf.write(text)
elif isinstance(value, (float, complex, str, bytes)):
buf.write(repr(value))
elif isinstance(value, list):
buf.write("[")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("]")
elif isinstance(value, tuple):
if len(value) > 1:
buf.write("(")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write(")")
elif value:
buf.write("(")
stringize(value[0])
buf.write(",)")
else:
buf.write("()")
elif isinstance(value, dict):
buf.write("{")
first = True
for key, value in value.items():
if not first:
buf.write(",")
else:
first = False
stringize(key)
buf.write(":")
stringize(value)
buf.write("}")
elif isinstance(value, set):
buf.write("{")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("}")
else:
raise ValueError("%r cannot be converted into a Python literal" % (value,))
buf = StringIO()
stringize(value)
return buf.getvalue()
|
def literal_stringizer(value):
"""Convert a `value` to a Python literal in GML representation.
Parameters
----------
value : object
The `value` to be converted to GML representation.
Returns
-------
rep : string
A double-quoted Python literal representing value. Unprintable
characters are replaced by XML character references.
Raises
------
ValueError
If `value` cannot be converted to GML.
Notes
-----
`literal_stringizer` is largely the same as `repr` in terms of
functionality but attempts prefix `unicode` and `bytes` literals with
`u` and `b` to provide better interoperability of data generated by
Python 2 and Python 3.
The original value can be recovered using the
:func:`networkx.readwrite.gml.literal_destringizer` function.
"""
def stringize(value):
if isinstance(value, (int, long, bool)) or value is None:
buf.write(str(value))
elif isinstance(value, unicode):
text = repr(value)
if text[0] != "u":
try:
value.encode("latin1")
except UnicodeEncodeError:
text = "u" + text
buf.write(text)
elif isinstance(value, (float, complex, str, bytes)):
buf.write(repr(value))
elif isinstance(value, list):
buf.write("[")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("]")
elif isinstance(value, tuple):
if len(value) > 1:
buf.write("(")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write(")")
elif value:
buf.write("(")
stringize(value[0])
buf.write(",)")
else:
buf.write("()")
elif isinstance(value, dict):
buf.write("{")
first = True
for key, value in value.items():
if not first:
buf.write(",")
else:
first = False
stringize(key)
buf.write(":")
stringize(value)
buf.write("}")
elif isinstance(value, set):
buf.write("{")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("}")
else:
raise ValueError("%r cannot be converted into a Python literal" % (value,))
buf = StringIO()
stringize(value)
return buf.getvalue()
|
https://github.com/networkx/networkx/issues/2118
|
Traceback (most recent call last):
File "test.py", line 8, in <module>
H = nx.read_gml('foo.gml')
File "<decorator-gen-198>", line 2, in read_gml
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/utils/decorators.py", line 220, in _open_file
result = func(*new_args, **kwargs)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 210, in read_gml
G = parse_gml_lines(filter_lines(path), label, destringizer)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 383, in parse_gml_lines
graph = parse_graph()
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 372, in parse_graph
curr_token, dct = parse_kv(next(tokens))
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 343, in parse_kv
curr_token = next(tokens)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 323, in tokenize
(line[pos:], lineno + 1, pos + 1))
networkx.exception.NetworkXError: cannot tokenize u'True' at (6, 9)
|
networkx.exception.NetworkXError
|
def stringize(value):
if isinstance(value, (int, long, bool)) or value is None:
if value is True: # GML uses 1/0 for boolean values.
buf.write(str(1))
elif value is False:
buf.write(str(0))
else:
buf.write(str(value))
elif isinstance(value, unicode):
text = repr(value)
if text[0] != "u":
try:
value.encode("latin1")
except UnicodeEncodeError:
text = "u" + text
buf.write(text)
elif isinstance(value, (float, complex, str, bytes)):
buf.write(repr(value))
elif isinstance(value, list):
buf.write("[")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("]")
elif isinstance(value, tuple):
if len(value) > 1:
buf.write("(")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write(")")
elif value:
buf.write("(")
stringize(value[0])
buf.write(",)")
else:
buf.write("()")
elif isinstance(value, dict):
buf.write("{")
first = True
for key, value in value.items():
if not first:
buf.write(",")
else:
first = False
stringize(key)
buf.write(":")
stringize(value)
buf.write("}")
elif isinstance(value, set):
buf.write("{")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("}")
else:
raise ValueError("%r cannot be converted into a Python literal" % (value,))
|
def stringize(value):
if isinstance(value, (int, long, bool)) or value is None:
buf.write(str(value))
elif isinstance(value, unicode):
text = repr(value)
if text[0] != "u":
try:
value.encode("latin1")
except UnicodeEncodeError:
text = "u" + text
buf.write(text)
elif isinstance(value, (float, complex, str, bytes)):
buf.write(repr(value))
elif isinstance(value, list):
buf.write("[")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("]")
elif isinstance(value, tuple):
if len(value) > 1:
buf.write("(")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write(")")
elif value:
buf.write("(")
stringize(value[0])
buf.write(",)")
else:
buf.write("()")
elif isinstance(value, dict):
buf.write("{")
first = True
for key, value in value.items():
if not first:
buf.write(",")
else:
first = False
stringize(key)
buf.write(":")
stringize(value)
buf.write("}")
elif isinstance(value, set):
buf.write("{")
first = True
for item in value:
if not first:
buf.write(",")
else:
first = False
stringize(item)
buf.write("}")
else:
raise ValueError("%r cannot be converted into a Python literal" % (value,))
|
https://github.com/networkx/networkx/issues/2118
|
Traceback (most recent call last):
File "test.py", line 8, in <module>
H = nx.read_gml('foo.gml')
File "<decorator-gen-198>", line 2, in read_gml
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/utils/decorators.py", line 220, in _open_file
result = func(*new_args, **kwargs)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 210, in read_gml
G = parse_gml_lines(filter_lines(path), label, destringizer)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 383, in parse_gml_lines
graph = parse_graph()
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 372, in parse_graph
curr_token, dct = parse_kv(next(tokens))
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 343, in parse_kv
curr_token = next(tokens)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 323, in tokenize
(line[pos:], lineno + 1, pos + 1))
networkx.exception.NetworkXError: cannot tokenize u'True' at (6, 9)
|
networkx.exception.NetworkXError
|
def generate_gml(G, stringizer=None):
r"""Generate a single entry of the graph `G` in GML format.
Parameters
----------
G : NetworkX graph
The graph to be converted to GML.
stringizer : callable, optional
A `stringizer` which converts non-int/non-float/non-dict values into
strings. If it cannot convert a value into a string, it should raise a
`ValueError` to indicate that. Default value: None.
Returns
-------
lines: generator of strings
Lines of GML data. Newlines are not appended.
Raises
------
NetworkXError
If `stringizer` cannot convert a value into a string, or the value to
convert is not a string while `stringizer` is None.
See Also
--------
literal_stringizer
Notes
-----
Graph attributes named 'directed', 'multigraph', 'node' or
'edge', node attributes named 'id' or 'label', edge attributes
named 'source' or 'target' (or 'key' if `G` is a multigraph)
are ignored because these attribute names are used to encode the graph
structure.
GML files are stored using a 7-bit ASCII encoding with any extended
ASCII characters (iso8859-1) appearing as HTML character entities.
Without specifying a `stringizer`/`destringizer`, the code is capable of
handling `int`/`float`/`str`/`dict`/`list` data as required by the GML
specification. For other data types, you need to explicitly supply a
`stringizer`/`destringizer`.
For additional documentation on the GML file format, please see the
`GML website <http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html>`_.
See the module docstring :mod:`networkx.readwrite.gml` for additional details.
Examples
--------
>>> G = nx.Graph()
>>> G.add_node("1")
>>> print("\n".join(nx.generate_gml(G)))
graph [
node [
id 0
label "1"
]
]
>>> G = nx.OrderedMultiGraph([("a", "b"), ("a", "b")])
>>> print("\n".join(nx.generate_gml(G)))
graph [
multigraph 1
node [
id 0
label "a"
]
node [
id 1
label "b"
]
edge [
source 0
target 1
key 0
]
edge [
source 0
target 1
key 1
]
]
"""
valid_keys = re.compile("^[A-Za-z][0-9A-Za-z]*$")
def stringize(key, value, ignored_keys, indent, in_list=False):
if not isinstance(key, (str, unicode)):
raise NetworkXError("%r is not a string" % (key,))
if not valid_keys.match(key):
raise NetworkXError("%r is not a valid key" % (key,))
if not isinstance(key, str):
key = str(key)
if key not in ignored_keys:
if isinstance(value, (int, long, bool)):
if key == "label":
yield indent + key + ' "' + str(value) + '"'
elif value is True:
# python bool is an instance of int
yield indent + key + " 1"
elif value is False:
yield indent + key + " 0"
else:
yield indent + key + " " + str(value)
elif isinstance(value, float):
text = repr(value).upper()
# GML requires that a real literal contain a decimal point, but
# repr may not output a decimal point when the mantissa is
# integral and hence needs fixing.
epos = text.rfind("E")
if epos != -1 and text.find(".", 0, epos) == -1:
text = text[:epos] + "." + text[epos:]
if key == "label":
yield indent + key + ' "' + test + '"'
else:
yield indent + key + " " + text
elif isinstance(value, dict):
yield indent + key + " ["
next_indent = indent + " "
for key, value in value.items():
for line in stringize(key, value, (), next_indent):
yield line
yield indent + "]"
elif (
isinstance(value, (list, tuple))
and key != "label"
and value
and not in_list
):
next_indent = indent + " "
for val in value:
for line in stringize(key, val, (), next_indent, True):
yield line
else:
if stringizer:
try:
value = stringizer(value)
except ValueError:
raise NetworkXError(
"%r cannot be converted into a string" % (value,)
)
if not isinstance(value, (str, unicode)):
raise NetworkXError("%r is not a string" % (value,))
yield indent + key + ' "' + escape(value) + '"'
multigraph = G.is_multigraph()
yield "graph ["
# Output graph attributes
if G.is_directed():
yield " directed 1"
if multigraph:
yield " multigraph 1"
ignored_keys = {"directed", "multigraph", "node", "edge"}
for attr, value in G.graph.items():
for line in stringize(attr, value, ignored_keys, " "):
yield line
# Output node data
node_id = dict(zip(G, range(len(G))))
ignored_keys = {"id", "label"}
for node, attrs in G.node.items():
yield " node ["
yield " id " + str(node_id[node])
for line in stringize("label", node, (), " "):
yield line
for attr, value in attrs.items():
for line in stringize(attr, value, ignored_keys, " "):
yield line
yield " ]"
# Output edge data
ignored_keys = {"source", "target"}
kwargs = {"data": True}
if multigraph:
ignored_keys.add("key")
kwargs["keys"] = True
for e in G.edges(**kwargs):
yield " edge ["
yield " source " + str(node_id[e[0]])
yield " target " + str(node_id[e[1]])
if multigraph:
for line in stringize("key", e[2], (), " "):
yield line
for attr, value in e[-1].items():
for line in stringize(attr, value, ignored_keys, " "):
yield line
yield " ]"
yield "]"
|
def generate_gml(G, stringizer=None):
r"""Generate a single entry of the graph `G` in GML format.
Parameters
----------
G : NetworkX graph
The graph to be converted to GML.
stringizer : callable, optional
A `stringizer` which converts non-int/non-float/non-dict values into
strings. If it cannot convert a value into a string, it should raise a
`ValueError` to indicate that. Default value: None.
Returns
-------
lines: generator of strings
Lines of GML data. Newlines are not appended.
Raises
------
NetworkXError
If `stringizer` cannot convert a value into a string, or the value to
convert is not a string while `stringizer` is None.
See Also
--------
literal_stringizer
Notes
-----
Graph attributes named 'directed', 'multigraph', 'node' or
'edge', node attributes named 'id' or 'label', edge attributes
named 'source' or 'target' (or 'key' if `G` is a multigraph)
are ignored because these attribute names are used to encode the graph
structure.
GML files are stored using a 7-bit ASCII encoding with any extended
ASCII characters (iso8859-1) appearing as HTML character entities.
Without specifying a `stringizer`/`destringizer`, the code is capable of
handling `int`/`float`/`str`/`dict`/`list` data as required by the GML
specification. For other data types, you need to explicitly supply a
`stringizer`/`destringizer`.
For additional documentation on the GML file format, please see the
`GML website <http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html>`_.
See the module docstring :mod:`networkx.readwrite.gml` for additional details.
Examples
--------
>>> G = nx.Graph()
>>> G.add_node("1")
>>> print("\n".join(nx.generate_gml(G)))
graph [
node [
id 0
label "1"
]
]
>>> G = nx.OrderedMultiGraph([("a", "b"), ("a", "b")])
>>> print("\n".join(nx.generate_gml(G)))
graph [
multigraph 1
node [
id 0
label "a"
]
node [
id 1
label "b"
]
edge [
source 0
target 1
key 0
]
edge [
source 0
target 1
key 1
]
]
"""
valid_keys = re.compile("^[A-Za-z][0-9A-Za-z]*$")
def stringize(key, value, ignored_keys, indent, in_list=False):
if not isinstance(key, (str, unicode)):
raise NetworkXError("%r is not a string" % (key,))
if not valid_keys.match(key):
raise NetworkXError("%r is not a valid key" % (key,))
if not isinstance(key, str):
key = str(key)
if key not in ignored_keys:
if isinstance(value, (int, long)):
if key == "label":
yield indent + key + ' "' + str(value) + '"'
else:
yield indent + key + " " + str(value)
elif isinstance(value, float):
text = repr(value).upper()
# GML requires that a real literal contain a decimal point, but
# repr may not output a decimal point when the mantissa is
# integral and hence needs fixing.
epos = text.rfind("E")
if epos != -1 and text.find(".", 0, epos) == -1:
text = text[:epos] + "." + text[epos:]
if key == "label":
yield indent + key + ' "' + test + '"'
else:
yield indent + key + " " + text
elif isinstance(value, dict):
yield indent + key + " ["
next_indent = indent + " "
for key, value in value.items():
for line in stringize(key, value, (), next_indent):
yield line
yield indent + "]"
elif isinstance(value, list) and value and not in_list:
next_indent = indent + " "
for value in value:
for line in stringize(key, value, (), next_indent, True):
yield line
else:
if stringizer:
try:
value = stringizer(value)
except ValueError:
raise NetworkXError(
"%r cannot be converted into a string" % (value,)
)
if not isinstance(value, (str, unicode)):
raise NetworkXError("%r is not a string" % (value,))
yield indent + key + ' "' + escape(value) + '"'
multigraph = G.is_multigraph()
yield "graph ["
# Output graph attributes
if G.is_directed():
yield " directed 1"
if multigraph:
yield " multigraph 1"
ignored_keys = {"directed", "multigraph", "node", "edge"}
for attr, value in G.graph.items():
for line in stringize(attr, value, ignored_keys, " "):
yield line
# Output node data
node_id = dict(zip(G, range(len(G))))
ignored_keys = {"id", "label"}
for node, attrs in G.node.items():
yield " node ["
yield " id " + str(node_id[node])
for line in stringize("label", node, (), " "):
yield line
for attr, value in attrs.items():
for line in stringize(attr, value, ignored_keys, " "):
yield line
yield " ]"
# Output edge data
ignored_keys = {"source", "target"}
kwargs = {"data": True}
if multigraph:
ignored_keys.add("key")
kwargs["keys"] = True
for e in G.edges(**kwargs):
yield " edge ["
yield " source " + str(node_id[e[0]])
yield " target " + str(node_id[e[1]])
if multigraph:
for line in stringize("key", e[2], (), " "):
yield line
for attr, value in e[-1].items():
for line in stringize(attr, value, ignored_keys, " "):
yield line
yield " ]"
yield "]"
|
https://github.com/networkx/networkx/issues/2118
|
Traceback (most recent call last):
File "test.py", line 8, in <module>
H = nx.read_gml('foo.gml')
File "<decorator-gen-198>", line 2, in read_gml
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/utils/decorators.py", line 220, in _open_file
result = func(*new_args, **kwargs)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 210, in read_gml
G = parse_gml_lines(filter_lines(path), label, destringizer)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 383, in parse_gml_lines
graph = parse_graph()
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 372, in parse_graph
curr_token, dct = parse_kv(next(tokens))
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 343, in parse_kv
curr_token = next(tokens)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 323, in tokenize
(line[pos:], lineno + 1, pos + 1))
networkx.exception.NetworkXError: cannot tokenize u'True' at (6, 9)
|
networkx.exception.NetworkXError
|
def stringize(key, value, ignored_keys, indent, in_list=False):
if not isinstance(key, (str, unicode)):
raise NetworkXError("%r is not a string" % (key,))
if not valid_keys.match(key):
raise NetworkXError("%r is not a valid key" % (key,))
if not isinstance(key, str):
key = str(key)
if key not in ignored_keys:
if isinstance(value, (int, long, bool)):
if key == "label":
yield indent + key + ' "' + str(value) + '"'
elif value is True:
# python bool is an instance of int
yield indent + key + " 1"
elif value is False:
yield indent + key + " 0"
else:
yield indent + key + " " + str(value)
elif isinstance(value, float):
text = repr(value).upper()
# GML requires that a real literal contain a decimal point, but
# repr may not output a decimal point when the mantissa is
# integral and hence needs fixing.
epos = text.rfind("E")
if epos != -1 and text.find(".", 0, epos) == -1:
text = text[:epos] + "." + text[epos:]
if key == "label":
yield indent + key + ' "' + test + '"'
else:
yield indent + key + " " + text
elif isinstance(value, dict):
yield indent + key + " ["
next_indent = indent + " "
for key, value in value.items():
for line in stringize(key, value, (), next_indent):
yield line
yield indent + "]"
elif (
isinstance(value, (list, tuple))
and key != "label"
and value
and not in_list
):
next_indent = indent + " "
for val in value:
for line in stringize(key, val, (), next_indent, True):
yield line
else:
if stringizer:
try:
value = stringizer(value)
except ValueError:
raise NetworkXError(
"%r cannot be converted into a string" % (value,)
)
if not isinstance(value, (str, unicode)):
raise NetworkXError("%r is not a string" % (value,))
yield indent + key + ' "' + escape(value) + '"'
|
def stringize(key, value, ignored_keys, indent, in_list=False):
if not isinstance(key, (str, unicode)):
raise NetworkXError("%r is not a string" % (key,))
if not valid_keys.match(key):
raise NetworkXError("%r is not a valid key" % (key,))
if not isinstance(key, str):
key = str(key)
if key not in ignored_keys:
if isinstance(value, (int, long)):
if key == "label":
yield indent + key + ' "' + str(value) + '"'
else:
yield indent + key + " " + str(value)
elif isinstance(value, float):
text = repr(value).upper()
# GML requires that a real literal contain a decimal point, but
# repr may not output a decimal point when the mantissa is
# integral and hence needs fixing.
epos = text.rfind("E")
if epos != -1 and text.find(".", 0, epos) == -1:
text = text[:epos] + "." + text[epos:]
if key == "label":
yield indent + key + ' "' + test + '"'
else:
yield indent + key + " " + text
elif isinstance(value, dict):
yield indent + key + " ["
next_indent = indent + " "
for key, value in value.items():
for line in stringize(key, value, (), next_indent):
yield line
yield indent + "]"
elif isinstance(value, list) and value and not in_list:
next_indent = indent + " "
for value in value:
for line in stringize(key, value, (), next_indent, True):
yield line
else:
if stringizer:
try:
value = stringizer(value)
except ValueError:
raise NetworkXError(
"%r cannot be converted into a string" % (value,)
)
if not isinstance(value, (str, unicode)):
raise NetworkXError("%r is not a string" % (value,))
yield indent + key + ' "' + escape(value) + '"'
|
https://github.com/networkx/networkx/issues/2118
|
Traceback (most recent call last):
File "test.py", line 8, in <module>
H = nx.read_gml('foo.gml')
File "<decorator-gen-198>", line 2, in read_gml
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/utils/decorators.py", line 220, in _open_file
result = func(*new_args, **kwargs)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 210, in read_gml
G = parse_gml_lines(filter_lines(path), label, destringizer)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 383, in parse_gml_lines
graph = parse_graph()
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 372, in parse_graph
curr_token, dct = parse_kv(next(tokens))
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 357, in parse_kv
curr_token, value = parse_dict(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 367, in parse_dict
curr_token, dct = parse_kv(curr_token)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 343, in parse_kv
curr_token = next(tokens)
File "/Users/domoritz/.virtualenvs/ipython/lib/python2.7/site-packages/networkx/readwrite/gml.py", line 323, in tokenize
(line[pos:], lineno + 1, pos + 1))
networkx.exception.NetworkXError: cannot tokenize u'True' at (6, 9)
|
networkx.exception.NetworkXError
|
def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
"""Returns True if and only if the vertex `v` is connected to one of
the target vertices by an alternating path in `G`.
An *alternating path* is a path in which every other edge is in the
specified maximum matching (and the remaining edges in the path are not in
the matching). An alternating path may have matched edges in the even
positions or in the odd positions, as long as the edges alternate between
'matched' and 'unmatched'.
`G` is an undirected bipartite NetworkX graph.
`v` is a vertex in `G`.
`matched_edges` is a set of edges present in a maximum matching in `G`.
`unmatched_edges` is a set of edges not present in a maximum
matching in `G`.
`targets` is a set of vertices.
"""
def _alternating_dfs(u, along_matched=True):
"""Returns True if and only if `u` is connected to one of the
targets by an alternating path.
`u` is a vertex in the graph `G`.
If `along_matched` is True, this step of the depth-first search
will continue only through edges in the given matching. Otherwise, it
will continue only through edges *not* in the given matching.
"""
if along_matched:
edges = itertools.cycle([matched_edges, unmatched_edges])
else:
edges = itertools.cycle([unmatched_edges, matched_edges])
visited = set()
stack = [(u, iter(G[u]), next(edges))]
while stack:
parent, children, valid_edges = stack[-1]
try:
child = next(children)
if child not in visited:
if (parent, child) in valid_edges or (child, parent) in valid_edges:
if child in targets:
return True
visited.add(child)
stack.append((child, iter(G[child]), next(edges)))
except StopIteration:
stack.pop()
return False
# Check for alternating paths starting with edges in the matching, then
# check for alternating paths starting with edges not in the
# matching.
return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
v, along_matched=False
)
|
def _is_connected_by_alternating_path(G, v, matching, targets):
"""Returns True if and only if the vertex `v` is connected to one of
the target vertices by an alternating path in `G`.
An *alternating path* is a path in which every other edge is in the
specified maximum matching (and the remaining edges in the path are not in
the matching). An alternating path may have matched edges in the even
positions or in the odd positions, as long as the edges alternate between
'matched' and 'unmatched'.
`G` is an undirected bipartite NetworkX graph.
`v` is a vertex in `G`.
`matching` is a dictionary representing a maximum matching in `G`, as
returned by, for example, :func:`maximum_matching`.
`targets` is a set of vertices.
"""
# Get the set of matched edges and the set of unmatched edges. Only include
# one version of each undirected edge (for example, include edge (1, 2) but
# not edge (2, 1)).
matched_edges = {(u, v) for u, v in matching.items() if u <= v}
unmatched_edges = set(G.edges()) - matched_edges
def _alternating_dfs(u, depth, along_matched=True):
"""Returns True if and only if `u` is connected to one of the
targets by an alternating path.
`u` is a vertex in the graph `G`.
`depth` specifies the maximum recursion depth of the depth-first
search.
If `along_matched` is True, this step of the depth-first search
will continue only through edges in the given matching. Otherwise, it
will continue only through edges *not* in the given matching.
"""
# Base case 1: u is one of the target vertices. `u` is connected to one
# of the target vertices by an alternating path of length zero.
if u in targets:
return True
# Base case 2: we have exceeded are allowed depth. In this case, we
# have looked at a path of length `n`, so looking any further won't
# help.
if depth < 0:
return False
# Determine which set of edges to look across.
valid_edges = matched_edges if along_matched else unmatched_edges
for v in G[u]:
# Consider only those neighbors connected via a valid edge.
if (u, v) in valid_edges or (v, u) in valid_edges:
# Recursively perform a depth-first search starting from the
# neighbor. Decrement the depth limit and switch which set of
# vertices will be valid for next time.
return _alternating_dfs(v, depth - 1, not along_matched)
# If there are no more vertices to look through and we haven't yet
# found a target vertex, simply say that no path exists.
return False
# Check for alternating paths starting with edges in the matching, then
# check for alternating paths starting with edges not in the
# matching. Initiate the depth-first search with the current depth equal to
# the number of nodes in the graph.
return _alternating_dfs(v, len(G), along_matched=True) or _alternating_dfs(
v, len(G), along_matched=False
)
|
https://github.com/networkx/networkx/issues/2384
|
$ python2.7 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (1, 3), (1, 4), (2, 3)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: set([0, 1])
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
$ python3.6 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (3, 1), (3, 2), (1, 4)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: {0, 1}
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
|
AssertionError
|
def _alternating_dfs(u, along_matched=True):
"""Returns True if and only if `u` is connected to one of the
targets by an alternating path.
`u` is a vertex in the graph `G`.
If `along_matched` is True, this step of the depth-first search
will continue only through edges in the given matching. Otherwise, it
will continue only through edges *not* in the given matching.
"""
if along_matched:
edges = itertools.cycle([matched_edges, unmatched_edges])
else:
edges = itertools.cycle([unmatched_edges, matched_edges])
visited = set()
stack = [(u, iter(G[u]), next(edges))]
while stack:
parent, children, valid_edges = stack[-1]
try:
child = next(children)
if child not in visited:
if (parent, child) in valid_edges or (child, parent) in valid_edges:
if child in targets:
return True
visited.add(child)
stack.append((child, iter(G[child]), next(edges)))
except StopIteration:
stack.pop()
return False
|
def _alternating_dfs(u, depth, along_matched=True):
"""Returns True if and only if `u` is connected to one of the
targets by an alternating path.
`u` is a vertex in the graph `G`.
`depth` specifies the maximum recursion depth of the depth-first
search.
If `along_matched` is True, this step of the depth-first search
will continue only through edges in the given matching. Otherwise, it
will continue only through edges *not* in the given matching.
"""
# Base case 1: u is one of the target vertices. `u` is connected to one
# of the target vertices by an alternating path of length zero.
if u in targets:
return True
# Base case 2: we have exceeded are allowed depth. In this case, we
# have looked at a path of length `n`, so looking any further won't
# help.
if depth < 0:
return False
# Determine which set of edges to look across.
valid_edges = matched_edges if along_matched else unmatched_edges
for v in G[u]:
# Consider only those neighbors connected via a valid edge.
if (u, v) in valid_edges or (v, u) in valid_edges:
# Recursively perform a depth-first search starting from the
# neighbor. Decrement the depth limit and switch which set of
# vertices will be valid for next time.
return _alternating_dfs(v, depth - 1, not along_matched)
# If there are no more vertices to look through and we haven't yet
# found a target vertex, simply say that no path exists.
return False
|
https://github.com/networkx/networkx/issues/2384
|
$ python2.7 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (1, 3), (1, 4), (2, 3)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: set([0, 1])
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
$ python3.6 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (3, 1), (3, 2), (1, 4)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: {0, 1}
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
|
AssertionError
|
def _connected_by_alternating_paths(G, matching, targets):
"""Returns the set of vertices that are connected to one of the target
vertices by an alternating path in `G` or are themselves a target.
An *alternating path* is a path in which every other edge is in the
specified maximum matching (and the remaining edges in the path are not in
the matching). An alternating path may have matched edges in the even
positions or in the odd positions, as long as the edges alternate between
'matched' and 'unmatched'.
`G` is an undirected bipartite NetworkX graph.
`matching` is a dictionary representing a maximum matching in `G`, as
returned by, for example, :func:`maximum_matching`.
`targets` is a set of vertices.
"""
# Get the set of matched edges and the set of unmatched edges. Only include
# one version of each undirected edge (for example, include edge (1, 2) but
# not edge (2, 1)). Using frozensets as an intermediary step we do not
# require nodes to be orderable.
edge_sets = {frozenset((u, v)) for u, v in matching.items()}
matched_edges = {tuple(edge) for edge in edge_sets}
unmatched_edges = {
(u, v) for (u, v) in G.edges() if frozenset((u, v)) not in edge_sets
}
return {
v
for v in G
if v in targets
or _is_connected_by_alternating_path(
G, v, matched_edges, unmatched_edges, targets
)
}
|
def _connected_by_alternating_paths(G, matching, targets):
"""Returns the set of vertices that are connected to one of the target
vertices by an alternating path in `G`.
An *alternating path* is a path in which every other edge is in the
specified maximum matching (and the remaining edges in the path are not in
the matching). An alternating path may have matched edges in the even
positions or in the odd positions, as long as the edges alternate between
'matched' and 'unmatched'.
`G` is an undirected bipartite NetworkX graph.
`matching` is a dictionary representing a maximum matching in `G`, as
returned by, for example, :func:`maximum_matching`.
`targets` is a set of vertices.
"""
# TODO This can be parallelized.
return {v for v in G if _is_connected_by_alternating_path(G, v, matching, targets)}
|
https://github.com/networkx/networkx/issues/2384
|
$ python2.7 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (1, 3), (1, 4), (2, 3)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: set([0, 1])
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
$ python3.6 vertex_cover.py
Using version 1.11
Edges: [(0, 3), (3, 1), (3, 2), (1, 4)]
Matching: {0: 3, 1: 4, 3: 0, 4: 1}
Vertex cover: {0, 1}
Traceback (most recent call last):
File "vertex_cover.py", line 16, in <module>
assert u in vertex_cover or v in vertex_cover
AssertionError
|
AssertionError
|
def girvan_newman(G, most_valuable_edge=None):
"""Finds communities in a graph using the Girvan–Newman method.
Parameters
----------
G : NetworkX graph
most_valuable_edge : function
Function that takes a graph as input and outputs an edge. The
edge returned by this function will be recomputed and removed at
each iteration of the algorithm.
If not specified, the edge with the highest
:func:`networkx.edge_betweenness_centrality` will be used.
Returns
-------
iterator
Iterator over tuples of sets of nodes in `G`. Each set of node
is a community, each tuple is a sequence of communities at a
particular level of the algorithm.
Examples
--------
To get the first pair of communities::
>>> G = nx.path_graph(10)
>>> comp = girvan_newman(G)
>>> tuple(sorted(c) for c in next(comp))
([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])
To get only the first *k* tuples of communities, use
:func:`itertools.islice`::
>>> import itertools
>>> G = nx.path_graph(8)
>>> k = 2
>>> comp = girvan_newman(G)
>>> for communities in itertools.islice(comp, k):
... print(tuple(sorted(c) for c in communities)) # doctest: +SKIP
...
([0, 1, 2, 3], [4, 5, 6, 7])
([0, 1], [2, 3], [4, 5, 6, 7])
To stop getting tuples of communities once the number of communities
is greater than *k*, use :func:`itertools.takewhile`::
>>> import itertools
>>> G = nx.path_graph(8)
>>> k = 4
>>> comp = girvan_newman(G)
>>> limited = itertools.takewhile(lambda c: len(c) <= k, comp)
>>> for communities in limited:
... print(tuple(sorted(c) for c in communities)) # doctest: +SKIP
...
([0, 1, 2, 3], [4, 5, 6, 7])
([0, 1], [2, 3], [4, 5, 6, 7])
([0, 1], [2, 3], [4, 5], [6, 7])
To just choose an edge to remove based on the weight::
>>> from operator import itemgetter
>>> G = nx.path_graph(10)
>>> edges = G.edges()
>>> nx.set_edge_attributes(G, 'weight', {(u, v): v for u, v in edges})
>>> def heaviest(G):
... u, v, w = max(G.edges(data='weight'), key=itemgetter(2))
... return (u, v)
...
>>> comp = girvan_newman(G, most_valuable_edge=heaviest)
>>> tuple(sorted(c) for c in next(comp))
([0, 1, 2, 3, 4, 5, 6, 7, 8], [9])
To utilize edge weights when choosing an edge with, for example, the
highest betweenness centrality::
>>> from networkx import edge_betweenness_centrality as betweenness
>>> def most_central_edge(G):
... centrality = betweenness(G, weight='weight')
... return max(centrality, key=centrality.get)
...
>>> G = nx.path_graph(10)
>>> comp = girvan_newman(G, most_valuable_edge=most_central_edge)
>>> tuple(sorted(c) for c in next(comp))
([0, 1, 2, 3, 4], [5, 6, 7, 8, 9])
To specify a different ranking algorithm for edges, use the
`most_valuable_edge` keyword argument::
>>> from networkx import edge_betweenness_centrality
>>> from random import random
>>> def most_central_edge(G):
... centrality = edge_betweenness_centrality(G)
... max_cent = max(centrality.values())
... # Scale the centrality values so they are between 0 and 1,
... # and add some random noise.
... centrality = {e: c / max_cent for e, c in centrality.items()}
... # Add some random noise.
... centrality = {e: c + random() for e, c in centrality.items()}
... return max(centrality, key=centrality.get)
...
>>> G = nx.path_graph(10)
>>> comp = girvan_newman(G, most_valuable_edge=most_central_edge)
Notes
-----
The Girvan–Newman algorithm detects communities by progressively
removing edges from the original graph. The algorithm removes the
"most valuable" edge, traditionally the edge with the highest
betweenness centrality, at each step. As the graph breaks down into
pieces, the tightly knit community structure is exposed and the
result can be depicted as a dendrogram.
"""
# If the graph is already empty, simply return its connected
# components.
if G.number_of_edges() == 0:
yield tuple(nx.connected_components(G))
return
# If no function is provided for computing the most valuable edge,
# use the edge betweenness centrality.
if most_valuable_edge is None:
def most_valuable_edge(G):
"""Returns the edge with the highest betweenness centrality
in the graph `G`.
"""
# We have guaranteed that the graph is non-empty, so this
# dictionary will never be empty.
betweenness = nx.edge_betweenness_centrality(G)
return max(betweenness, key=betweenness.get)
# The copy of G here must include the edge weight data.
g = G.copy().to_undirected()
# Self-loops must be removed because their removal has no effect on
# the connected components of the graph.
g.remove_edges_from(g.selfloop_edges())
while g.number_of_edges() > 0:
yield _without_most_central_edges(g, most_valuable_edge)
|
def girvan_newman(G, weight=None):
"""Find communities in graph using Girvan–Newman method.
Parameters
----------
G : NetworkX graph
weight : string, optional (default=None)
Edge data key corresponding to the edge weight.
Returns
-------
List of tuples which contains the clusters of nodes.
Examples
--------
>>> G = nx.path_graph(10)
>>> comp = girvan_newman(G)
>>> comp[0]
([0, 1, 2, 3, 4], [8, 9, 5, 6, 7])
Notes
-----
The Girvan–Newman algorithm detects communities by progressively removing
edges from the original graph. Algorithm removes edge with the highest
betweenness centrality at each step. As the graph breaks down into pieces,
the tightly knit community structure is exposed and result can be depicted
as a dendrogram.
"""
# The copy of G here must include the edge weight data.
g = G.copy().to_undirected()
components = []
while g.number_of_edges() > 0:
_remove_max_edge(g, weight)
components.append(tuple(list(H) for H in nx.connected_component_subgraphs(g)))
return components
|
https://github.com/networkx/networkx/issues/1799
|
./community_detection.py ../data/en_Influenza_1.pickle
Traceback (most recent call last):
File "./community_detection.py", line 32, in <module>
communities = nx.girvan_newman(args.graph)
File "[path]/env/lib/python3.4/site-packages/networkx/algorithms/community/centrality.py", line 39, in girvan_newman
_remove_max_edge(g, weight)
File "[path]/env/lib/python3.4/site-packages/networkx/algorithms/community/centrality.py", line 61, in _remove_max_edge
max_value = max(betweenness.values())
ValueError: max() arg is an empty sequence
|
ValueError
|
def reject_nuls():
for key, values in request.values.iterlists():
if "\0" in key or any("\0" in x for x in values):
raise BadRequest("NUL byte found in request data")
|
def reject_nuls():
for key, values in request.args.iterlists():
if "\0" in key or any("\0" in x for x in values):
raise BadRequest("NUL byte found in query data")
|
https://github.com/indico/indico/issues/4159
|
2019-11-16 21:49:23,476 1a8ef9e1a5eb4b69 indico.flask - ERROR errors.py:96 -- A string literal cannot contain NUL (0x00) characters.
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1949, in full_dispatch_request
rv = self.dispatch_request()
File "/usr/local/lib/python2.7/dist-packages/flask/app.py", line 1935, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/usr/local/lib/python2.7/dist-packages/indico/web/flask/util.py", line 84, in wrapper
return obj().process()
File "/usr/local/lib/python2.7/dist-packages/indico/web/rh.py", line 275, in process
res = self._do_process()
File "/usr/local/lib/python2.7/dist-packages/indico/web/rh.py", line 245, in _do_process
rv = self._process()
File "/usr/local/lib/python2.7/dist-packages/indico/modules/auth/controllers.py", line 89, in _process
response = multipass.handle_login_form(provider, form.data)
File "/usr/local/lib/python2.7/dist-packages/flask_multipass/core.py", line 459, in handle_login_form
response = provider.process_local_login(data)
File "/usr/local/lib/python2.7/dist-packages/flask_multipass/providers/sqlalchemy.py", line 55, in process_local_login
type(self).identifier_column == data['identifier']).first()
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 3222, in first
ret = list(self[0:1])
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 3012, in __getitem__
return list(res)
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 3324, in __iter__
return self._execute_and_instances(context)
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 3349, in _execute_and_instances
result = conn.execute(querycontext.statement, self._params)
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 988, in execute
return meth(self, multiparams, params)
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/sql/elements.py", line 287, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1107, in _execute_clauseelement
distilled_params,
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1248, in _execute_context
e, statement, parameters, cursor, context
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1468, in _handle_dbapi_exception
util.reraise(*exc_info)
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1244, in _execute_context
cursor, statement, parameters, context
File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/default.py", line 550, in do_execute
cursor.execute(statement, parameters)
ValueError: A string literal cannot contain NUL (0x00) characters.
{u'data': {u'get': {},
u'headers': {'Accept': u'image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, image/png, */*',
'Accept-Charset': u'iso-8859-1,utf-8;q=0.9,*;q=0.1',
'Accept-Language': u'en',
'Connection': u'Keep-Alive',
'Content-Length': u'171',
'Content-Type': u'application/x-www-form-urlencoded',
'Cookie': u'indico_session=***',
'Host': u'indico-02.***',
'Pragma': u'no-cache',
'User-Agent': u'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)'},
u'json': None,
u'post': {'_provider': u'indico',
'csrf_token': u'00000000-0000-0000-0000-000000000000',
'identifier': u'../../../../../../../../etc/passwd\x00',
'next': u'%2F%23create-event%3Alecture',
'password': u'<8 chars hidden>'},
u'url': {}},
u'endpoint': u'auth.login',
u'id': '1a8ef9e1a5eb4b69',
u'ip': '***',
u'method': 'POST',
u'referrer': None,
u'rh': 'RHLogin',
u'time': '2019-11-16T21:49:23.477402',
u'url': u'https://SERVERNAME/login/',
u'user': None,
u'user_agent': u'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)'}
|
ValueError
|
def _check_version(self, distribution, current_version=None):
try:
response = requests.get(
"https://pypi.python.org/pypi/{}/json".format(distribution)
)
except requests.RequestException as exc:
Logger.get("versioncheck").warning(
"Version check for %s failed: %s", distribution, exc
)
raise NoReportError.wrap_exc(ServiceUnavailable())
try:
data = response.json()
except ValueError:
return None
if current_version is None:
try:
current_version = get_distribution(distribution).version
except DistributionNotFound:
return None
current_version = Version(current_version)
if current_version.is_prerelease:
# if we are on a prerelease, get the latest one even if it's also a prerelease
latest_version = Version(data["info"]["version"])
else:
# if we are stable, get the latest stable version
versions = [v for v in map(Version, data["releases"]) if not v.is_prerelease]
latest_version = max(versions) if versions else None
return {
"current_version": unicode(current_version),
"latest_version": unicode(latest_version) if latest_version else None,
"outdated": (current_version < latest_version) if latest_version else False,
}
|
def _check_version(self, distribution, current_version=None):
response = requests.get("https://pypi.python.org/pypi/{}/json".format(distribution))
try:
data = response.json()
except ValueError:
return None
if current_version is None:
try:
current_version = get_distribution(distribution).version
except DistributionNotFound:
return None
current_version = Version(current_version)
if current_version.is_prerelease:
# if we are on a prerelease, get the latest one even if it's also a prerelease
latest_version = Version(data["info"]["version"])
else:
# if we are stable, get the latest stable version
versions = [v for v in map(Version, data["releases"]) if not v.is_prerelease]
latest_version = max(versions) if versions else None
return {
"current_version": unicode(current_version),
"latest_version": unicode(latest_version) if latest_version else None,
"outdated": (current_version < latest_version) if latest_version else False,
}
|
https://github.com/indico/indico/issues/3209
|
2018-01-18 06:57:43,036 28e04a15777a48cb indico.flask - ERROR errors.py:107 -- HTTPSConnectionPool(host='pypi.python.org', port=443): Max retries exceeded with url: /pypi/indico/json (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0xda97a90>: Failed to establish a new connection: [Errno -2] Name or service not known',))
Traceback (most recent call last):
File "/opt/indico/.venv/lib/python2.7/site-packages/flask/app.py", line 1612, in full_dispatch_request
rv = self.dispatch_request()
File "/opt/indico/.venv/lib/python2.7/site-packages/flask/app.py", line 1598, in dispatch_request
return self.view_functions[rule.endpoint](**req.view_args)
File "/opt/indico/.venv/lib/python2.7/site-packages/indico/web/flask/util.py", line 114, in wrapper
return obj().process()
File "/opt/indico/.venv/lib/python2.7/site-packages/indico/web/rh.py", line 270, in process
res = self._do_process()
File "/opt/indico/.venv/lib/python2.7/site-packages/indico/web/rh.py", line 249, in _do_process
return self._process()
File "/opt/indico/.venv/lib/python2.7/site-packages/indico/modules/core/controllers.py", line 205, in _process
return jsonify(indico=self._check_version('indico', indico.__version__),
File "/opt/indico/.venv/lib/python2.7/site-packages/indico/modules/core/controllers.py", line 182, in _check_version
response = requests.get('https://pypi.python.org/pypi/{}/json'.format(distribution))
File "/opt/indico/.venv/lib/python2.7/site-packages/requests/api.py", line 72, in get
return request('get', url, params=params, **kwargs)
File "/opt/indico/.venv/lib/python2.7/site-packages/requests/api.py", line 58, in request
return session.request(method=method, url=url, **kwargs)
File "/opt/indico/.venv/lib/python2.7/site-packages/requests/sessions.py", line 508, in request
resp = self.send(prep, **send_kwargs)
File "/opt/indico/.venv/lib/python2.7/site-packages/requests/sessions.py", line 618, in send
r = adapter.send(request, **kwargs)
File "/opt/indico/.venv/lib/python2.7/site-packages/requests/adapters.py", line 508, in send
raise ConnectionError(e, request=request)
ConnectionError: HTTPSConnectionPool(host='pypi.python.org', port=443): Max retries exceeded with url: /pypi/indico/json (Caused by NewConnectionError('<urllib3.connection.VerifiedHTTPSConnection object at 0xda97a90>: Failed to establish a new connection: [Errno -2] Name or service not known',))
{u'data': {u'get': {'_': u'1516255062639'},
u'headers': {'Accept': u'*/*',
'Accept-Encoding': u'gzip, deflate, br',
'Accept-Language': u'en-US,en;q=0.5',
'Content-Length': u'',
'Content-Type': u'',
'Cookie': u'indico_session=***; ajs_user_id=null; ajs_group_id=null; ajs_anonymous_id=%2200000000000000000000000000%22',
'Host': u'indico.openbrain.sk',
'Referer': u'https://indico.openbrain.sk/admin/settings/',
'User-Agent': u'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0',
'X-Csrf-Token': u'***',
'X-Requested-With': u'XMLHttpRequest'},
u'json': None,
u'post': {},
u'url': {}},
u'endpoint': u'core.version_check',
u'id': '28e04a15777a48cb',
u'ip': '46.193.138.1',
u'method': 'GET',
u'referrer': 'https://indico.openbrain.sk/admin/settings/',
u'rh': 'RHVersionCheck',
u'time': '2018-01-18T06:57:43.111483',
u'url': u'https://indico.openbrain.sk/admin/version-check?_=1516255062639',
u'user': {u'email': u'mvala@saske.sk', u'id': 1, u'name': u'Martin Vala'},
u'user_agent': u'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0'}
|
ConnectionError
|
def __init__(self, *args, **kwargs):
self.application = kwargs.pop("application", None)
super(ApplicationForm, self).__init__(*args, **kwargs)
if self.application is not None:
for field in self.application.system_app_type.enforced_data:
# preserve existing value for disabled fields
self[field].data = self[field].object_data
|
def __init__(self, *args, **kwargs):
self.application = kwargs.pop("application", None)
super(ApplicationForm, self).__init__(*args, **kwargs)
for field in self.application.system_app_type.enforced_data:
# preserve existing value for disabled fields
self[field].data = self[field].object_data
|
https://github.com/indico/indico/issues/3075
|
Traceback (most recent call last):
File "/usr/home/indicoadm/indicov2/.indicov2/lib/python2.7/site-packages/indico/legacy/webinterface/rh/base.py", line 493, in process
profile_name, res = self._do_process(profile)
File "/usr/home/indicoadm/indicov2/.indicov2/lib/python2.7/site-packages/indico/legacy/webinterface/rh/base.py", line 464, in _do_process
res = self._process()
File "/usr/home/indicoadm/indicov2/.indicov2/lib/python2.7/site-packages/indico/modules/oauth/controllers.py", line 132, in _process
form = ApplicationForm(obj=FormDefaults(is_enabled=True))
File "/usr/home/indicoadm/indicov2/.indicov2/lib/python2.7/site-packages/indico/web/forms/base.py", line 70, in __call__
return super(IndicoFormMeta, cls).__call__(*args, **kwargs)
File "/usr/home/indicoadm/indicov2/.indicov2/lib/python2.7/site-packages/wtforms/form.py", line 212, in __call__
return type.__call__(cls, *args, **kwargs)
File "/usr/home/indicoadm/indicov2/.indicov2/lib/python2.7/site-packages/indico/modules/oauth/forms.py", line 63, in __init__
for field in self.application.system_app_type.enforced_data:
AttributeError: 'NoneType' object has no attribute 'system_app_type'
|
AttributeError
|
def get_columns(self, connection, tablename, dbname, owner, schema, **kw):
# Get base columns
columns = ischema.columns
computed_cols = ischema.computed_columns
if owner:
whereclause = sql.and_(
columns.c.table_name == tablename,
columns.c.table_schema == owner,
)
table_fullname = "%s.%s" % (owner, tablename)
full_name = columns.c.table_schema + "." + columns.c.table_name
join_on = computed_cols.c.object_id == func.object_id(full_name)
else:
whereclause = columns.c.table_name == tablename
table_fullname = tablename
join_on = computed_cols.c.object_id == func.object_id(columns.c.table_name)
join_on = sql.and_(join_on, columns.c.column_name == computed_cols.c.name)
join = columns.join(computed_cols, onclause=join_on, isouter=True)
if self._supports_nvarchar_max:
computed_definition = computed_cols.c.definition
else:
# tds_version 4.2 does not support NVARCHAR(MAX)
computed_definition = sql.cast(computed_cols.c.definition, NVARCHAR(4000))
s = sql.select(
[columns, computed_definition, computed_cols.c.is_persisted],
whereclause,
from_obj=join,
order_by=[columns.c.ordinal_position],
)
c = connection.execute(s)
cols = []
while True:
row = c.fetchone()
if row is None:
break
name = row[columns.c.column_name]
type_ = row[columns.c.data_type]
nullable = row[columns.c.is_nullable] == "YES"
charlen = row[columns.c.character_maximum_length]
numericprec = row[columns.c.numeric_precision]
numericscale = row[columns.c.numeric_scale]
default = row[columns.c.column_default]
collation = row[columns.c.collation_name]
definition = row[computed_definition]
is_persisted = row[computed_cols.c.is_persisted]
coltype = self.ischema_names.get(type_, None)
kwargs = {}
if coltype in (
MSString,
MSChar,
MSNVarchar,
MSNChar,
MSText,
MSNText,
MSBinary,
MSVarBinary,
sqltypes.LargeBinary,
):
if charlen == -1:
charlen = None
kwargs["length"] = charlen
if collation:
kwargs["collation"] = collation
if coltype is None:
util.warn("Did not recognize type '%s' of column '%s'" % (type_, name))
coltype = sqltypes.NULLTYPE
else:
if issubclass(coltype, sqltypes.Numeric):
kwargs["precision"] = numericprec
if not issubclass(coltype, sqltypes.Float):
kwargs["scale"] = numericscale
coltype = coltype(**kwargs)
cdict = {
"name": name,
"type": coltype,
"nullable": nullable,
"default": default,
"autoincrement": False,
}
if definition is not None and is_persisted is not None:
cdict["computed"] = {
"sqltext": definition,
"persisted": is_persisted,
}
cols.append(cdict)
# autoincrement and identity
colmap = {}
for col in cols:
colmap[col["name"]] = col
# We also run an sp_columns to check for identity columns:
cursor = connection.execute(
sql.text(
"EXEC sp_columns @table_name = :table_name, @table_owner = :table_owner",
),
{"table_name": tablename, "table_owner": owner},
)
ic = None
while True:
row = cursor.fetchone()
if row is None:
break
(col_name, type_name) = row[3], row[5]
if type_name.endswith("identity") and col_name in colmap:
ic = col_name
colmap[col_name]["autoincrement"] = True
colmap[col_name]["dialect_options"] = {
"mssql_identity_start": 1,
"mssql_identity_increment": 1,
}
break
cursor.close()
if ic is not None and self.server_version_info >= MS_2005_VERSION:
table_fullname = "%s.%s" % (owner, tablename)
cursor = connection.execute(
"select ident_seed('%s'), ident_incr('%s')"
% (table_fullname, table_fullname)
)
row = cursor.first()
if row is not None and row[0] is not None:
colmap[ic]["dialect_options"].update(
{
"mssql_identity_start": int(row[0]),
"mssql_identity_increment": int(row[1]),
}
)
return cols
|
def get_columns(self, connection, tablename, dbname, owner, schema, **kw):
# Get base columns
columns = ischema.columns
computed_cols = ischema.computed_columns
if owner:
whereclause = sql.and_(
columns.c.table_name == tablename,
columns.c.table_schema == owner,
)
table_fullname = "%s.%s" % (owner, tablename)
full_name = columns.c.table_schema + "." + columns.c.table_name
join_on = computed_cols.c.object_id == func.object_id(full_name)
else:
whereclause = columns.c.table_name == tablename
table_fullname = tablename
join_on = computed_cols.c.object_id == func.object_id(columns.c.table_name)
join_on = sql.and_(join_on, columns.c.column_name == computed_cols.c.name)
join = columns.join(computed_cols, onclause=join_on, isouter=True)
if self._supports_nvarchar_max:
computed_definition = computed_cols.c.definition
else:
# tds_version 4.2 does not support NVARCHAR(MAX)
computed_definition = sql.cast(computed_cols.c.definition, NVARCHAR(4000))
s = sql.select(
[columns, computed_definition, computed_cols.c.is_persisted],
whereclause,
from_obj=join,
order_by=[columns.c.ordinal_position],
)
c = connection.execute(s)
cols = []
while True:
row = c.fetchone()
if row is None:
break
name = row[columns.c.column_name]
type_ = row[columns.c.data_type]
nullable = row[columns.c.is_nullable] == "YES"
charlen = row[columns.c.character_maximum_length]
numericprec = row[columns.c.numeric_precision]
numericscale = row[columns.c.numeric_scale]
default = row[columns.c.column_default]
collation = row[columns.c.collation_name]
definition = row[computed_definition]
is_persisted = row[computed_cols.c.is_persisted]
coltype = self.ischema_names.get(type_, None)
kwargs = {}
if coltype in (
MSString,
MSChar,
MSNVarchar,
MSNChar,
MSText,
MSNText,
MSBinary,
MSVarBinary,
sqltypes.LargeBinary,
):
if charlen == -1:
charlen = None
kwargs["length"] = charlen
if collation:
kwargs["collation"] = collation
if coltype is None:
util.warn("Did not recognize type '%s' of column '%s'" % (type_, name))
coltype = sqltypes.NULLTYPE
else:
if issubclass(coltype, sqltypes.Numeric):
kwargs["precision"] = numericprec
if not issubclass(coltype, sqltypes.Float):
kwargs["scale"] = numericscale
coltype = coltype(**kwargs)
cdict = {
"name": name,
"type": coltype,
"nullable": nullable,
"default": default,
"autoincrement": False,
}
if definition is not None and is_persisted is not None:
cdict["computed"] = {
"sqltext": definition,
"persisted": is_persisted,
}
cols.append(cdict)
# autoincrement and identity
colmap = {}
for col in cols:
colmap[col["name"]] = col
# We also run an sp_columns to check for identity columns:
cursor = connection.execute(
sql.text(
"sp_columns @table_name = :table_name, @table_owner = :table_owner",
),
{"table_name": tablename, "table_owner": owner},
)
ic = None
while True:
row = cursor.fetchone()
if row is None:
break
(col_name, type_name) = row[3], row[5]
if type_name.endswith("identity") and col_name in colmap:
ic = col_name
colmap[col_name]["autoincrement"] = True
colmap[col_name]["dialect_options"] = {
"mssql_identity_start": 1,
"mssql_identity_increment": 1,
}
break
cursor.close()
if ic is not None and self.server_version_info >= MS_2005_VERSION:
table_fullname = "%s.%s" % (owner, tablename)
cursor = connection.execute(
"select ident_seed('%s'), ident_incr('%s')"
% (table_fullname, table_fullname)
)
row = cursor.first()
if row is not None and row[0] is not None:
colmap[ic]["dialect_options"].update(
{
"mssql_identity_start": int(row[0]),
"mssql_identity_increment": int(row[1]),
}
)
return cols
|
https://github.com/sqlalchemy/sqlalchemy/issues/5921
|
Traceback (most recent call last):
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
pyodbc.ProgrammingError: ('42000', "[42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Incorrect syntax near 'sp_columns'. (102) (SQLExecDirectW); [42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Statement(s) could not be prepared. (8180)")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\reflection.py", line 390, in get_columns
col_defs = self.dialect.get_columns(
File "<string>", line 2, in get_columns
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\reflection.py", line 52, in cache
ret = fn(self, con, *args, **kw)
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\dialects\mssql\base.py", line 2195, in wrap
return _switch_db(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\dialects\mssql\base.py", line 2219, in _switch_db
return fn(*arg, **kw)
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\dialects\mssql\base.py", line 2741, in get_columns
cursor = connection.execute(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 2235, in execute
return connection.execute(statement, *multiparams, **params)
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 1011, in execute
return meth(self, multiparams, params)
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\sql\elements.py", line 298, in _execute_on_connection
return connection._execute_clauseelement(self, multiparams, params)
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 1124, in _execute_clauseelement
ret = self._execute_context(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 1316, in _execute_context
self._handle_dbapi_exception(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 1510, in _handle_dbapi_exception
util.raise_(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\util\compat.py", line 182, in raise_
raise exception
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "C:\somewhere\miniconda3\envs\migration\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
sqlalchemy.exc.ProgrammingError: (pyodbc.ProgrammingError) ('42000', "[42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Incorrect syntax near 'sp_columns'. (102) (SQLExecDirectW); [42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Statement(s) could not be prepared. (8180)")
[SQL: sp_columns @table_name = ?, @table_owner = ?]
[parameters: ('XXX', 'YYY')]
(Background on this error at: http://sqlalche.me/e/13/f405)
|
pyodbc.ProgrammingError
|
def _is_event_name(name):
# _sa_event prefix is special to support internal-only event names.
# most event names are just plain method names that aren't
# underscored.
return (not name.startswith("_") and name != "dispatch") or name.startswith(
"_sa_event"
)
|
def _is_event_name(name):
return not name.startswith("_") and name != "dispatch"
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _column_X(self, idx, attrname):
if self._is_fk:
try:
fk = self.const.elements[idx]
except IndexError:
return ""
else:
return getattr(fk.parent, attrname)
else:
cols = list(self.const.columns)
try:
col = cols[idx]
except IndexError:
return ""
else:
return getattr(col, attrname)
|
def _column_X(self, idx):
if self._is_fk:
fk = self.const.elements[idx]
return fk.parent
else:
return list(self.const.columns)[idx]
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _key_column_X_key(self, idx):
# note this method was missing before
# [ticket:3989], meaning tokens like ``%(column_0_key)s`` weren't
# working even though documented.
return self._column_X(idx, "key")
|
def _key_column_X_key(self, idx):
# note this method was missing before
# [ticket:3989], meaning tokens like ``%(column_0_key)s`` weren't
# working even though documented.
return self._column_X(idx).key
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _key_column_X_name(self, idx):
return self._column_X(idx, "name")
|
def _key_column_X_name(self, idx):
return self._column_X(idx).name
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _key_column_X_label(self, idx):
return self._column_X(idx, "_ddl_label")
|
def _key_column_X_label(self, idx):
return self._column_X(idx)._ddl_label
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _constraint_name(const, table):
if isinstance(table, Column):
# this path occurs for a CheckConstraint linked to a Column
# for column-attached constraint, set another event
# to link the column attached to the table as this constraint
# associated with the table.
event.listen(
table,
"after_parent_attach",
lambda col, table: _constraint_name(const, table),
)
elif isinstance(table, Table):
if isinstance(const.name, (conv, _defer_name)):
return
newname = _constraint_name_for_table(const, table)
if newname:
const.name = newname
|
def _constraint_name(const, table):
if isinstance(table, Column):
# for column-attached constraint, set another event
# to link the column attached to the table as this constraint
# associated with the table.
event.listen(
table,
"after_parent_attach",
lambda col, table: _constraint_name(const, table),
)
elif isinstance(table, Table):
if isinstance(const.name, (conv, _defer_name)):
return
newname = _constraint_name_for_table(const, table)
if newname is not None:
const.name = newname
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _set_parent(self, table, allow_replacements=True):
if not self.name:
raise exc.ArgumentError(
"Column must be constructed with a non-blank name or "
"assign a non-blank .name before adding to a Table."
)
self._reset_memoizations()
if self.key is None:
self.key = self.name
existing = getattr(self, "table", None)
if existing is not None and existing is not table:
raise exc.ArgumentError(
"Column object '%s' already assigned to Table '%s'"
% (self.key, existing.description)
)
if self.key in table._columns:
col = table._columns.get(self.key)
if col is not self:
if not allow_replacements:
util.warn_deprecated(
"A column with name '%s' is already present "
"in table '%s'. Please use method "
":meth:`_schema.Table.append_column` with the "
"parameter ``replace_existing=True`` to replace an "
"existing column." % (self.key, table.name),
"1.4",
)
for fk in col.foreign_keys:
table.foreign_keys.remove(fk)
if fk.constraint in table.constraints:
# this might have been removed
# already, if it's a composite constraint
# and more than one col being replaced
table.constraints.remove(fk.constraint)
table._columns.replace(self)
self.table = table
if self.primary_key:
table.primary_key._replace(self)
elif self.key in table.primary_key:
raise exc.ArgumentError(
"Trying to redefine primary-key column '%s' as a "
"non-primary-key column on table '%s'" % (self.key, table.fullname)
)
if self.index:
if isinstance(self.index, util.string_types):
raise exc.ArgumentError(
"The 'index' keyword argument on Column is boolean only. "
"To create indexes with a specific name, create an "
"explicit Index object external to the Table."
)
table.append_constraint(
Index(None, self.key, unique=bool(self.unique), _column_flag=True)
)
elif self.unique:
if isinstance(self.unique, util.string_types):
raise exc.ArgumentError(
"The 'unique' keyword argument on Column is boolean "
"only. To create unique constraints or indexes with a "
"specific name, append an explicit UniqueConstraint to "
"the Table's list of elements, or create an explicit "
"Index object external to the Table."
)
table.append_constraint(UniqueConstraint(self.key, _column_flag=True))
self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table))
if self.identity and (
isinstance(self.default, Sequence) or isinstance(self.onupdate, Sequence)
):
raise exc.ArgumentError("An column cannot specify both Identity and Sequence.")
|
def _set_parent(self, table, allow_replacements=True):
if not self.name:
raise exc.ArgumentError(
"Column must be constructed with a non-blank name or "
"assign a non-blank .name before adding to a Table."
)
self._reset_memoizations()
if self.key is None:
self.key = self.name
existing = getattr(self, "table", None)
if existing is not None and existing is not table:
raise exc.ArgumentError(
"Column object '%s' already assigned to Table '%s'"
% (self.key, existing.description)
)
if self.key in table._columns:
col = table._columns.get(self.key)
if col is not self:
if not allow_replacements:
util.warn_deprecated(
"A column with name '%s' is already present "
"in table '%s'. Please use method "
":meth:`_schema.Table.append_column` with the "
"parameter ``replace_existing=True`` to replace an "
"existing column." % (self.key, table.name),
"1.4",
)
for fk in col.foreign_keys:
table.foreign_keys.remove(fk)
if fk.constraint in table.constraints:
# this might have been removed
# already, if it's a composite constraint
# and more than one col being replaced
table.constraints.remove(fk.constraint)
table._columns.replace(self)
if self.primary_key:
table.primary_key._replace(self)
elif self.key in table.primary_key:
raise exc.ArgumentError(
"Trying to redefine primary-key column '%s' as a "
"non-primary-key column on table '%s'" % (self.key, table.fullname)
)
self.table = table
if self.index:
if isinstance(self.index, util.string_types):
raise exc.ArgumentError(
"The 'index' keyword argument on Column is boolean only. "
"To create indexes with a specific name, create an "
"explicit Index object external to the Table."
)
table.append_constraint(
Index(None, self.key, unique=bool(self.unique), _column_flag=True)
)
elif self.unique:
if isinstance(self.unique, util.string_types):
raise exc.ArgumentError(
"The 'unique' keyword argument on Column is boolean "
"only. To create unique constraints or indexes with a "
"specific name, append an explicit UniqueConstraint to "
"the Table's list of elements, or create an explicit "
"Index object external to the Table."
)
table.append_constraint(UniqueConstraint(self.key, _column_flag=True))
self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table))
if self.identity and (
isinstance(self.default, Sequence) or isinstance(self.onupdate, Sequence)
):
raise exc.ArgumentError("An column cannot specify both Identity and Sequence.")
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _replace(self, col):
PrimaryKeyConstraint._autoincrement_column._reset(self)
self.columns.replace(col)
self.dispatch._sa_event_column_added_to_pk_constraint(self, col)
|
def _replace(self, col):
PrimaryKeyConstraint._autoincrement_column._reset(self)
self.columns.replace(col)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def _set_parent(self, table):
if not self.name:
raise exc.ArgumentError(
"Column must be constructed with a non-blank name or "
"assign a non-blank .name before adding to a Table."
)
if self.key is None:
self.key = self.name
existing = getattr(self, "table", None)
if existing is not None and existing is not table:
raise exc.ArgumentError(
"Column object '%s' already assigned to Table '%s'"
% (self.key, existing.description)
)
if self.key in table._columns:
col = table._columns.get(self.key)
if col is not self:
for fk in col.foreign_keys:
table.foreign_keys.remove(fk)
if fk.constraint in table.constraints:
# this might have been removed
# already, if it's a composite constraint
# and more than one col being replaced
table.constraints.remove(fk.constraint)
table._columns.replace(self)
self.table = table
if self.primary_key:
table.primary_key._replace(self)
elif self.key in table.primary_key:
raise exc.ArgumentError(
"Trying to redefine primary-key column '%s' as a "
"non-primary-key column on table '%s'" % (self.key, table.fullname)
)
if self.index:
if isinstance(self.index, util.string_types):
raise exc.ArgumentError(
"The 'index' keyword argument on Column is boolean only. "
"To create indexes with a specific name, create an "
"explicit Index object external to the Table."
)
table.append_constraint(
Index(None, self.key, unique=bool(self.unique), _column_flag=True)
)
elif self.unique:
if isinstance(self.unique, util.string_types):
raise exc.ArgumentError(
"The 'unique' keyword argument on Column is boolean "
"only. To create unique constraints or indexes with a "
"specific name, append an explicit UniqueConstraint to "
"the Table's list of elements, or create an explicit "
"Index object external to the Table."
)
table.append_constraint(UniqueConstraint(self.key, _column_flag=True))
self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table))
|
def _set_parent(self, table):
if not self.name:
raise exc.ArgumentError(
"Column must be constructed with a non-blank name or "
"assign a non-blank .name before adding to a Table."
)
if self.key is None:
self.key = self.name
existing = getattr(self, "table", None)
if existing is not None and existing is not table:
raise exc.ArgumentError(
"Column object '%s' already assigned to Table '%s'"
% (self.key, existing.description)
)
if self.key in table._columns:
col = table._columns.get(self.key)
if col is not self:
for fk in col.foreign_keys:
table.foreign_keys.remove(fk)
if fk.constraint in table.constraints:
# this might have been removed
# already, if it's a composite constraint
# and more than one col being replaced
table.constraints.remove(fk.constraint)
table._columns.replace(self)
if self.primary_key:
table.primary_key._replace(self)
elif self.key in table.primary_key:
raise exc.ArgumentError(
"Trying to redefine primary-key column '%s' as a "
"non-primary-key column on table '%s'" % (self.key, table.fullname)
)
self.table = table
if self.index:
if isinstance(self.index, util.string_types):
raise exc.ArgumentError(
"The 'index' keyword argument on Column is boolean only. "
"To create indexes with a specific name, create an "
"explicit Index object external to the Table."
)
table.append_constraint(
Index(None, self.key, unique=bool(self.unique), _column_flag=True)
)
elif self.unique:
if isinstance(self.unique, util.string_types):
raise exc.ArgumentError(
"The 'unique' keyword argument on Column is boolean "
"only. To create unique constraints or indexes with a "
"specific name, append an explicit UniqueConstraint to "
"the Table's list of elements, or create an explicit "
"Index object external to the Table."
)
table.append_constraint(UniqueConstraint(self.key, _column_flag=True))
self._setup_on_memoized_fks(lambda fk: fk._set_remote_table(table))
|
https://github.com/sqlalchemy/sqlalchemy/issues/5919
|
Traceback (most recent call last):
File "/home/kshutt/.config/JetBrains/PyCharm2020.3/scratches/scratch_4.py", line 41, in <module>
table.__table__.create(engine)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 928, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 832, in visit_table
include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2961, in visit_create_table
_include_foreign_key_constraints=create.include_foreign_key_constraints, # noqa
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3012, in create_table_constraints
for constraint in constraints
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3009, in <genexpr>
p
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3019, in <genexpr>
or not getattr(constraint, "use_alter", False)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1928, in visit_primary_key_constraint
constraint
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3238, in visit_primary_key_constraint
formatted_name = self.preparer.format_constraint(constraint)
File "<string>", line 1, in <lambda>
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3779, in format_constraint
return self.quote(name)
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3701, in quote
if self._requires_quotes(ident):
File "/home/kshutt/mandate/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 3614, in _requires_quotes
or value[0] in self.illegal_initial_characters
IndexError: string index out of range
|
IndexError
|
def copy(self, target_table=None, **kw):
elements = [
(
schema._copy_expression(expr, self.parent, target_table),
self.operators[expr.name],
)
for expr in self.columns
]
c = self.__class__(
*elements,
name=self.name,
deferrable=self.deferrable,
initially=self.initially,
where=self.where,
using=self.using,
)
c.dispatch._update(self.dispatch)
return c
|
def copy(self, **kw):
elements = [(col, self.operators[col]) for col in self.columns.keys()]
c = self.__class__(
*elements,
name=self.name,
deferrable=self.deferrable,
initially=self.initially,
where=self.where,
using=self.using,
)
c.dispatch._update(self.dispatch)
return c
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def visit_unique_constraint(self, constraint):
text = super(SQLiteDDLCompiler, self).visit_unique_constraint(constraint)
on_conflict_clause = constraint.dialect_options["sqlite"]["on_conflict"]
if on_conflict_clause is None and len(constraint.columns) == 1:
col1 = list(constraint)[0]
if isinstance(col1, schema.SchemaItem):
on_conflict_clause = list(constraint)[0].dialect_options["sqlite"][
"on_conflict_unique"
]
if on_conflict_clause is not None:
text += " ON CONFLICT " + on_conflict_clause
return text
|
def visit_unique_constraint(self, constraint):
text = super(SQLiteDDLCompiler, self).visit_unique_constraint(constraint)
on_conflict_clause = constraint.dialect_options["sqlite"]["on_conflict"]
if on_conflict_clause is None and len(constraint.columns) == 1:
on_conflict_clause = list(constraint)[0].dialect_options["sqlite"][
"on_conflict_unique"
]
if on_conflict_clause is not None:
text += " ON CONFLICT " + on_conflict_clause
return text
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def _copy_expression(expression, source_table, target_table):
if source_table is None or target_table is None:
return expression
def replace(col):
if (
isinstance(col, Column)
and col.table is source_table
and col.key in source_table.c
):
return target_table.c[col.key]
else:
return None
return visitors.replacement_traverse(expression, {}, replace)
|
def _copy_expression(expression, source_table, target_table):
def replace(col):
if (
isinstance(col, Column)
and col.table is source_table
and col.key in source_table.c
):
return target_table.c[col.key]
else:
return None
return visitors.replacement_traverse(expression, {}, replace)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def copy(self, target_table=None, **kw):
# ticket #5276
constraint_kwargs = {}
for dialect_name in self.dialect_options:
dialect_options = self.dialect_options[dialect_name]._non_defaults
for (
dialect_option_key,
dialect_option_value,
) in dialect_options.items():
constraint_kwargs[dialect_name + "_" + dialect_option_key] = (
dialect_option_value
)
c = self.__class__(
name=self.name,
deferrable=self.deferrable,
initially=self.initially,
*[_copy_expression(expr, self.parent, target_table) for expr in self.columns],
**constraint_kwargs,
)
return self._schema_item_copy(c)
|
def copy(self, **kw):
# ticket #5276
constraint_kwargs = {}
for dialect_name in self.dialect_options:
dialect_options = self.dialect_options[dialect_name]._non_defaults
for (
dialect_option_key,
dialect_option_value,
) in dialect_options.items():
constraint_kwargs[dialect_name + "_" + dialect_option_key] = (
dialect_option_value
)
c = self.__class__(
name=self.name,
deferrable=self.deferrable,
initially=self.initially,
*self.columns.keys(),
**constraint_kwargs,
)
return self._schema_item_copy(c)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def copy(self, target_table=None, **kw):
if target_table is not None:
# note that target_table is None for the copy process of
# a column-bound CheckConstraint, so this path is not reached
# in that case.
sqltext = _copy_expression(self.sqltext, self.table, target_table)
else:
sqltext = self.sqltext
c = CheckConstraint(
sqltext,
name=self.name,
initially=self.initially,
deferrable=self.deferrable,
_create_rule=self._create_rule,
table=target_table,
_autoattach=False,
_type_bound=self._type_bound,
)
return self._schema_item_copy(c)
|
def copy(self, target_table=None, **kw):
if target_table is not None:
sqltext = _copy_expression(self.sqltext, self.table, target_table)
else:
sqltext = self.sqltext
c = CheckConstraint(
sqltext,
name=self.name,
initially=self.initially,
deferrable=self.deferrable,
_create_rule=self._create_rule,
table=target_table,
_autoattach=False,
_type_bound=self._type_bound,
)
return self._schema_item_copy(c)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def copy(self, target_table=None, **kw):
sqltext = _copy_expression(
self.sqltext,
self.column.table if self.column is not None else None,
target_table,
)
g = Computed(sqltext, persisted=self.persisted)
return self._schema_item_copy(g)
|
def copy(self, target_table=None, **kw):
if target_table is not None:
sqltext = _copy_expression(self.sqltext, self.table, target_table)
else:
sqltext = self.sqltext
g = Computed(sqltext, persisted=self.persisted)
return self._schema_item_copy(g)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def copy(self, **kw):
i = Identity(
always=self.always,
on_null=self.on_null,
start=self.start,
increment=self.increment,
minvalue=self.minvalue,
maxvalue=self.maxvalue,
nominvalue=self.nominvalue,
nomaxvalue=self.nomaxvalue,
cycle=self.cycle,
cache=self.cache,
order=self.order,
)
return self._schema_item_copy(i)
|
def copy(self, target_table=None, **kw):
i = Identity(
always=self.always,
on_null=self.on_null,
start=self.start,
increment=self.increment,
minvalue=self.minvalue,
maxvalue=self.maxvalue,
nominvalue=self.nominvalue,
nomaxvalue=self.nomaxvalue,
cycle=self.cycle,
cache=self.cache,
order=self.order,
)
return self._schema_item_copy(i)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5850
|
=============================================== FAILURES ================================================
_________________________________ ToMetaDataExcludeConstraint.test_copy _________________________________
Traceback (most recent call last):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/sql/test_metadata.py", line 5238, in test_copy
table_c = table.tometadata(meta2)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 1063, in tometadata
table.append_constraint(
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 867, in append_constraint
constraint._set_parent_with_dispatch(self)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/base.py", line 463, in _set_parent_with_dispatch
self._set_parent(parent)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3065, in _set_parent
ColumnCollectionMixin._set_parent(self, table)
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3022, in _set_parent
for col in self._col_expressions(table):
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3016, in _col_expressions
return [
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/sql/schema.py", line 3017, in <listcomp>
table.c[col] if isinstance(col, util.string_types) else col
File "/home/jeffh/workspace/vortex-workspace/vfinance/subrepos/SQLAlchemy-1.3.18/test/../lib/sqlalchemy/util/_collections.py", line 194, in __getitem__
return self._data[key]
KeyError: "daterange(valid_from_date, valid_thru_date, '[]')"
======================================== short test summary info ========================================
FAILED test/sql/test_metadata.py::ToMetaDataExcludeConstraint::test_copy - KeyError: "daterange(valid_...
|
KeyError
|
def __init__(self, compiler, element_type, message=None):
super(UnsupportedCompilationError, self).__init__(
"Compiler %r can't render element of type %s%s"
% (compiler, element_type, ": %s" % message if message else "")
)
|
def __init__(self, compiler, element_type):
super(UnsupportedCompilationError, self).__init__(
"Compiler %r can't render element of type %s" % (compiler, element_type)
)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5836
|
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 89, in _compiler_dispatch
meth = getter(visitor)
AttributeError: 'StrSQLCompiler' object has no attribute 'visit_column_element'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 425, in _wrap_existing_dispatch
return existing_dispatch(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 93, in _compiler_dispatch
replace_context=err,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy.sql.compiler.StrSQLCompiler object at 0x7f01d84cf8d0> can't render element of type <class 'sqlalchemy.sql.elements.ColumnElement'> (Background on this error at: http://sqlalche.me/e/13/l7de)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "repro.py", line 89, in <module>
main()
File "repro.py", line 85, in main
print(result)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 3538, in __str__
context = self._compile_context()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4114, in _compile_context
context.statement = self._compound_eager_statement(context)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4180, in _compound_eager_statement
from_clause, eager_join, eager_join.stop_on
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 621, in splice_joins
right.onclause = adapter.traverse(right.onclause)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 240, in traverse
return replacement_traverse(obj, self.__traverse_options__, replace)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 484, in replacement_traverse
obj = clone(obj, **opts)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 473, in clone
newelem = replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 236, in replace
e = v.replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 848, in replace
return self._corresponding_column(col, True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 821, in _corresponding_column
col, require_embedded=require_embedded
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 560, in corresponding_column
if self.c.contains_column(column):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 1392, in _populate_column_collection
for col in self.element.columns._all_columns:
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 3903, in _populate_column_collection
c._make_proxy(self, key=key, name=name, name_is_truncatable=True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 844, in _make_proxy
key = str(self)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 491, in __str__
return str(self.compile())
File "<string>", line 1, in <lambda>
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 487, in _compiler
return dialect.statement_compiler(dialect, self, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 592, in __init__
Compiled.__init__(self, dialect, statement, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 432, in _wrap_existing_dispatch
from_=uce,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.CompileError: <class '__main__.combine_date_time'> construct has no default compilation handler.
|
AttributeError
|
def compiles(class_, *specs):
"""Register a function as a compiler for a
given :class:`_expression.ClauseElement` type."""
def decorate(fn):
# get an existing @compiles handler
existing = class_.__dict__.get("_compiler_dispatcher", None)
# get the original handler. All ClauseElement classes have one
# of these, but some TypeEngine classes will not.
existing_dispatch = getattr(class_, "_compiler_dispatch", None)
if not existing:
existing = _dispatcher()
if existing_dispatch:
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
util.raise_(
exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
),
from_=uce,
)
existing.specs["default"] = _wrap_existing_dispatch
# TODO: why is the lambda needed ?
setattr(
class_,
"_compiler_dispatch",
lambda *arg, **kw: existing(*arg, **kw),
)
setattr(class_, "_compiler_dispatcher", existing)
if specs:
for s in specs:
existing.specs[s] = fn
else:
existing.specs["default"] = fn
return fn
return decorate
|
def compiles(class_, *specs):
"""Register a function as a compiler for a
given :class:`_expression.ClauseElement` type."""
def decorate(fn):
# get an existing @compiles handler
existing = class_.__dict__.get("_compiler_dispatcher", None)
# get the original handler. All ClauseElement classes have one
# of these, but some TypeEngine classes will not.
existing_dispatch = getattr(class_, "_compiler_dispatch", None)
if not existing:
existing = _dispatcher()
if existing_dispatch:
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
util.raise_(
exc.CompileError(
"%s construct has no default "
"compilation handler." % type(element)
),
from_=uce,
)
existing.specs["default"] = _wrap_existing_dispatch
# TODO: why is the lambda needed ?
setattr(
class_,
"_compiler_dispatch",
lambda *arg, **kw: existing(*arg, **kw),
)
setattr(class_, "_compiler_dispatcher", existing)
if specs:
for s in specs:
existing.specs[s] = fn
else:
existing.specs["default"] = fn
return fn
return decorate
|
https://github.com/sqlalchemy/sqlalchemy/issues/5836
|
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 89, in _compiler_dispatch
meth = getter(visitor)
AttributeError: 'StrSQLCompiler' object has no attribute 'visit_column_element'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 425, in _wrap_existing_dispatch
return existing_dispatch(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 93, in _compiler_dispatch
replace_context=err,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy.sql.compiler.StrSQLCompiler object at 0x7f01d84cf8d0> can't render element of type <class 'sqlalchemy.sql.elements.ColumnElement'> (Background on this error at: http://sqlalche.me/e/13/l7de)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "repro.py", line 89, in <module>
main()
File "repro.py", line 85, in main
print(result)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 3538, in __str__
context = self._compile_context()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4114, in _compile_context
context.statement = self._compound_eager_statement(context)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4180, in _compound_eager_statement
from_clause, eager_join, eager_join.stop_on
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 621, in splice_joins
right.onclause = adapter.traverse(right.onclause)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 240, in traverse
return replacement_traverse(obj, self.__traverse_options__, replace)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 484, in replacement_traverse
obj = clone(obj, **opts)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 473, in clone
newelem = replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 236, in replace
e = v.replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 848, in replace
return self._corresponding_column(col, True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 821, in _corresponding_column
col, require_embedded=require_embedded
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 560, in corresponding_column
if self.c.contains_column(column):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 1392, in _populate_column_collection
for col in self.element.columns._all_columns:
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 3903, in _populate_column_collection
c._make_proxy(self, key=key, name=name, name_is_truncatable=True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 844, in _make_proxy
key = str(self)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 491, in __str__
return str(self.compile())
File "<string>", line 1, in <lambda>
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 487, in _compiler
return dialect.statement_compiler(dialect, self, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 592, in __init__
Compiled.__init__(self, dialect, statement, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 432, in _wrap_existing_dispatch
from_=uce,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.CompileError: <class '__main__.combine_date_time'> construct has no default compilation handler.
|
AttributeError
|
def decorate(fn):
# get an existing @compiles handler
existing = class_.__dict__.get("_compiler_dispatcher", None)
# get the original handler. All ClauseElement classes have one
# of these, but some TypeEngine classes will not.
existing_dispatch = getattr(class_, "_compiler_dispatch", None)
if not existing:
existing = _dispatcher()
if existing_dispatch:
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
util.raise_(
exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
),
from_=uce,
)
existing.specs["default"] = _wrap_existing_dispatch
# TODO: why is the lambda needed ?
setattr(
class_,
"_compiler_dispatch",
lambda *arg, **kw: existing(*arg, **kw),
)
setattr(class_, "_compiler_dispatcher", existing)
if specs:
for s in specs:
existing.specs[s] = fn
else:
existing.specs["default"] = fn
return fn
|
def decorate(fn):
# get an existing @compiles handler
existing = class_.__dict__.get("_compiler_dispatcher", None)
# get the original handler. All ClauseElement classes have one
# of these, but some TypeEngine classes will not.
existing_dispatch = getattr(class_, "_compiler_dispatch", None)
if not existing:
existing = _dispatcher()
if existing_dispatch:
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
util.raise_(
exc.CompileError(
"%s construct has no default "
"compilation handler." % type(element)
),
from_=uce,
)
existing.specs["default"] = _wrap_existing_dispatch
# TODO: why is the lambda needed ?
setattr(
class_,
"_compiler_dispatch",
lambda *arg, **kw: existing(*arg, **kw),
)
setattr(class_, "_compiler_dispatcher", existing)
if specs:
for s in specs:
existing.specs[s] = fn
else:
existing.specs["default"] = fn
return fn
|
https://github.com/sqlalchemy/sqlalchemy/issues/5836
|
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 89, in _compiler_dispatch
meth = getter(visitor)
AttributeError: 'StrSQLCompiler' object has no attribute 'visit_column_element'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 425, in _wrap_existing_dispatch
return existing_dispatch(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 93, in _compiler_dispatch
replace_context=err,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy.sql.compiler.StrSQLCompiler object at 0x7f01d84cf8d0> can't render element of type <class 'sqlalchemy.sql.elements.ColumnElement'> (Background on this error at: http://sqlalche.me/e/13/l7de)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "repro.py", line 89, in <module>
main()
File "repro.py", line 85, in main
print(result)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 3538, in __str__
context = self._compile_context()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4114, in _compile_context
context.statement = self._compound_eager_statement(context)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4180, in _compound_eager_statement
from_clause, eager_join, eager_join.stop_on
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 621, in splice_joins
right.onclause = adapter.traverse(right.onclause)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 240, in traverse
return replacement_traverse(obj, self.__traverse_options__, replace)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 484, in replacement_traverse
obj = clone(obj, **opts)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 473, in clone
newelem = replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 236, in replace
e = v.replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 848, in replace
return self._corresponding_column(col, True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 821, in _corresponding_column
col, require_embedded=require_embedded
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 560, in corresponding_column
if self.c.contains_column(column):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 1392, in _populate_column_collection
for col in self.element.columns._all_columns:
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 3903, in _populate_column_collection
c._make_proxy(self, key=key, name=name, name_is_truncatable=True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 844, in _make_proxy
key = str(self)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 491, in __str__
return str(self.compile())
File "<string>", line 1, in <lambda>
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 487, in _compiler
return dialect.statement_compiler(dialect, self, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 592, in __init__
Compiled.__init__(self, dialect, statement, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 432, in _wrap_existing_dispatch
from_=uce,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.CompileError: <class '__main__.combine_date_time'> construct has no default compilation handler.
|
AttributeError
|
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
util.raise_(
exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
),
from_=uce,
)
|
def _wrap_existing_dispatch(element, compiler, **kw):
try:
return existing_dispatch(element, compiler, **kw)
except exc.UnsupportedCompilationError as uce:
util.raise_(
exc.CompileError(
"%s construct has no default compilation handler." % type(element)
),
from_=uce,
)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5836
|
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 89, in _compiler_dispatch
meth = getter(visitor)
AttributeError: 'StrSQLCompiler' object has no attribute 'visit_column_element'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 425, in _wrap_existing_dispatch
return existing_dispatch(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 93, in _compiler_dispatch
replace_context=err,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy.sql.compiler.StrSQLCompiler object at 0x7f01d84cf8d0> can't render element of type <class 'sqlalchemy.sql.elements.ColumnElement'> (Background on this error at: http://sqlalche.me/e/13/l7de)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "repro.py", line 89, in <module>
main()
File "repro.py", line 85, in main
print(result)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 3538, in __str__
context = self._compile_context()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4114, in _compile_context
context.statement = self._compound_eager_statement(context)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4180, in _compound_eager_statement
from_clause, eager_join, eager_join.stop_on
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 621, in splice_joins
right.onclause = adapter.traverse(right.onclause)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 240, in traverse
return replacement_traverse(obj, self.__traverse_options__, replace)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 484, in replacement_traverse
obj = clone(obj, **opts)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 473, in clone
newelem = replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 236, in replace
e = v.replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 848, in replace
return self._corresponding_column(col, True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 821, in _corresponding_column
col, require_embedded=require_embedded
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 560, in corresponding_column
if self.c.contains_column(column):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 1392, in _populate_column_collection
for col in self.element.columns._all_columns:
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 3903, in _populate_column_collection
c._make_proxy(self, key=key, name=name, name_is_truncatable=True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 844, in _make_proxy
key = str(self)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 491, in __str__
return str(self.compile())
File "<string>", line 1, in <lambda>
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 487, in _compiler
return dialect.statement_compiler(dialect, self, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 592, in __init__
Compiled.__init__(self, dialect, statement, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 432, in _wrap_existing_dispatch
from_=uce,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.CompileError: <class '__main__.combine_date_time'> construct has no default compilation handler.
|
AttributeError
|
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
try:
fn = self.specs["default"]
except KeyError as ke:
util.raise_(
exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
),
replace_context=ke,
)
# if compilation includes add_to_result_map, collect add_to_result_map
# arguments from the user-defined callable, which are probably none
# because this is not public API. if it wasn't called, then call it
# ourselves.
arm = kw.get("add_to_result_map", None)
if arm:
arm_collection = []
kw["add_to_result_map"] = lambda *args: arm_collection.append(args)
expr = fn(element, compiler, **kw)
if arm:
if not arm_collection:
arm_collection.append((None, None, (element,), sqltypes.NULLTYPE))
for tup in arm_collection:
arm(*tup)
return expr
|
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
try:
fn = self.specs["default"]
except KeyError as ke:
util.raise_(
exc.CompileError(
"%s construct has no default compilation handler." % type(element)
),
replace_context=ke,
)
# if compilation includes add_to_result_map, collect add_to_result_map
# arguments from the user-defined callable, which are probably none
# because this is not public API. if it wasn't called, then call it
# ourselves.
arm = kw.get("add_to_result_map", None)
if arm:
arm_collection = []
kw["add_to_result_map"] = lambda *args: arm_collection.append(args)
expr = fn(element, compiler, **kw)
if arm:
if not arm_collection:
arm_collection.append((None, None, (element,), sqltypes.NULLTYPE))
for tup in arm_collection:
arm(*tup)
return expr
|
https://github.com/sqlalchemy/sqlalchemy/issues/5836
|
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 89, in _compiler_dispatch
meth = getter(visitor)
AttributeError: 'StrSQLCompiler' object has no attribute 'visit_column_element'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 425, in _wrap_existing_dispatch
return existing_dispatch(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 93, in _compiler_dispatch
replace_context=err,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy.sql.compiler.StrSQLCompiler object at 0x7f01d84cf8d0> can't render element of type <class 'sqlalchemy.sql.elements.ColumnElement'> (Background on this error at: http://sqlalche.me/e/13/l7de)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "repro.py", line 89, in <module>
main()
File "repro.py", line 85, in main
print(result)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 3538, in __str__
context = self._compile_context()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4114, in _compile_context
context.statement = self._compound_eager_statement(context)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4180, in _compound_eager_statement
from_clause, eager_join, eager_join.stop_on
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 621, in splice_joins
right.onclause = adapter.traverse(right.onclause)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 240, in traverse
return replacement_traverse(obj, self.__traverse_options__, replace)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 484, in replacement_traverse
obj = clone(obj, **opts)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 473, in clone
newelem = replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 236, in replace
e = v.replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 848, in replace
return self._corresponding_column(col, True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 821, in _corresponding_column
col, require_embedded=require_embedded
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 560, in corresponding_column
if self.c.contains_column(column):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 1392, in _populate_column_collection
for col in self.element.columns._all_columns:
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 3903, in _populate_column_collection
c._make_proxy(self, key=key, name=name, name_is_truncatable=True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 844, in _make_proxy
key = str(self)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 491, in __str__
return str(self.compile())
File "<string>", line 1, in <lambda>
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 487, in _compiler
return dialect.statement_compiler(dialect, self, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 592, in __init__
Compiled.__init__(self, dialect, statement, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 432, in _wrap_existing_dispatch
from_=uce,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.CompileError: <class '__main__.combine_date_time'> construct has no default compilation handler.
|
AttributeError
|
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
try:
fn = self.specs["default"]
except KeyError as ke:
util.raise_(
exc.UnsupportedCompilationError(
compiler,
type(element),
message="%s construct has no default "
"compilation handler." % type(element),
),
replace_context=ke,
)
return fn(element, compiler, **kw)
|
def __call__(self, element, compiler, **kw):
# TODO: yes, this could also switch off of DBAPI in use.
fn = self.specs.get(compiler.dialect.name, None)
if not fn:
try:
fn = self.specs["default"]
except KeyError as ke:
util.raise_(
exc.CompileError(
"%s construct has no default compilation handler." % type(element)
),
replace_context=ke,
)
return fn(element, compiler, **kw)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5836
|
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 89, in _compiler_dispatch
meth = getter(visitor)
AttributeError: 'StrSQLCompiler' object has no attribute 'visit_column_element'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 425, in _wrap_existing_dispatch
return existing_dispatch(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 93, in _compiler_dispatch
replace_context=err,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.UnsupportedCompilationError: Compiler <sqlalchemy.sql.compiler.StrSQLCompiler object at 0x7f01d84cf8d0> can't render element of type <class 'sqlalchemy.sql.elements.ColumnElement'> (Background on this error at: http://sqlalche.me/e/13/l7de)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "repro.py", line 89, in <module>
main()
File "repro.py", line 85, in main
print(result)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 3538, in __str__
context = self._compile_context()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4114, in _compile_context
context.statement = self._compound_eager_statement(context)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/orm/query.py", line 4180, in _compound_eager_statement
from_clause, eager_join, eager_join.stop_on
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 621, in splice_joins
right.onclause = adapter.traverse(right.onclause)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 240, in traverse
return replacement_traverse(obj, self.__traverse_options__, replace)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 484, in replacement_traverse
obj = clone(obj, **opts)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 473, in clone
newelem = replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py", line 236, in replace
e = v.replace(elem)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 848, in replace
return self._corresponding_column(col, True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/util.py", line 821, in _corresponding_column
col, require_embedded=require_embedded
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 560, in corresponding_column
if self.c.contains_column(column):
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 1392, in _populate_column_collection
for col in self.element.columns._all_columns:
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py", line 893, in __get__
obj.__dict__[self.__name__] = result = self.fget(obj)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 647, in columns
self._populate_column_collection()
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/selectable.py", line 3903, in _populate_column_collection
c._make_proxy(self, key=key, name=name, name_is_truncatable=True)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 844, in _make_proxy
key = str(self)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 491, in __str__
return str(self.compile())
File "<string>", line 1, in <lambda>
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/elements.py", line 487, in _compiler
return dialect.statement_compiler(dialect, self, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 592, in __init__
Compiled.__init__(self, dialect, statement, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 322, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/sql/compiler.py", line 352, in process
return obj._compiler_dispatch(self, **kwargs)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/ext/compiler.py", line 432, in _wrap_existing_dispatch
from_=uce,
File "/root/git/feed_test_results/venv/lib/python3.7/site-packages/sqlalchemy/util/compat.py", line 182, in raise_
raise exception
sqlalchemy.exc.CompileError: <class '__main__.combine_date_time'> construct has no default compilation handler.
|
AttributeError
|
def visit_create_index(self, create, include_schema=False):
index = create.element
self._verify_index_table(index)
preparer = self.preparer
text = "CREATE "
if index.unique:
text += "UNIQUE "
# handle clustering option
clustered = index.dialect_options["mssql"]["clustered"]
if clustered is not None:
if clustered:
text += "CLUSTERED "
else:
text += "NONCLUSTERED "
text += "INDEX %s ON %s (%s)" % (
self._prepared_index_name(index, include_schema=include_schema),
preparer.format_table(index.table),
", ".join(
self.sql_compiler.process(expr, include_table=False, literal_binds=True)
for expr in index.expressions
),
)
# handle other included columns
if index.dialect_options["mssql"]["include"]:
inclusions = [
index.table.c[col] if isinstance(col, util.string_types) else col
for col in index.dialect_options["mssql"]["include"]
]
text += " INCLUDE (%s)" % ", ".join(
[preparer.quote(c.name) for c in inclusions]
)
whereclause = index.dialect_options["mssql"]["where"]
if whereclause is not None:
whereclause = coercions.expect(roles.DDLExpressionRole, whereclause)
where_compiled = self.sql_compiler.process(
whereclause, include_table=False, literal_binds=True
)
text += " WHERE " + where_compiled
return text
|
def visit_create_index(self, create, include_schema=False):
index = create.element
self._verify_index_table(index)
preparer = self.preparer
text = "CREATE "
if index.unique:
text += "UNIQUE "
# handle clustering option
clustered = index.dialect_options["mssql"]["clustered"]
if clustered is not None:
if clustered:
text += "CLUSTERED "
else:
text += "NONCLUSTERED "
text += "INDEX %s ON %s (%s)" % (
self._prepared_index_name(index, include_schema=include_schema),
preparer.format_table(index.table),
", ".join(
self.sql_compiler.process(expr, include_table=False, literal_binds=True)
for expr in index.expressions
),
)
whereclause = index.dialect_options["mssql"]["where"]
if whereclause is not None:
whereclause = coercions.expect(roles.DDLExpressionRole, whereclause)
where_compiled = self.sql_compiler.process(
whereclause, include_table=False, literal_binds=True
)
text += " WHERE " + where_compiled
# handle other included columns
if index.dialect_options["mssql"]["include"]:
inclusions = [
index.table.c[col] if isinstance(col, util.string_types) else col
for col in index.dialect_options["mssql"]["include"]
]
text += " INCLUDE (%s)" % ", ".join(
[preparer.quote(c.name) for c in inclusions]
)
return text
|
https://github.com/sqlalchemy/sqlalchemy/issues/5751
|
Traceback (most recent call last):
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
pyodbc.ProgrammingError: ('42000', "[42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Incorrect syntax near '('. (102) (SQLExecDirectW)")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce.py", line 42, in <module>
setup()
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce.py", line 39, in setup
metadata.create_all(engine)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\schema.py", line 4553, in create_all
bind._run_visitor(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 783, in visit_metadata
self.traverse_single(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 839, in visit_table
self.traverse_single(index)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 868, in visit_index
self.connection.execute(CreateIndex(index))
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1011, in execute
return meth(self, multiparams, params)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1068, in _execute_ddl
ret = self._execute_context(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1316, in _execute_context
self._handle_dbapi_exception(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1510, in _handle_dbapi_exception
util.raise_(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\util\compat.py", line 182, in raise_
raise exception
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
sqlalchemy.exc.ProgrammingError: (pyodbc.ProgrammingError) ('42000', "[42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Incorrect syntax near '('. (102) (SQLExecDirectW)")
[SQL: CREATE INDEX idx_name_adult ON person (name) WHERE age > 18 INCLUDE (age)]
(Background on this error at: http://sqlalche.me/e/13/f405)
|
pyodbc.ProgrammingError
|
def visit_create_index(self, create, include_schema=False):
index = create.element
self._verify_index_table(index)
preparer = self.preparer
text = "CREATE "
if index.unique:
text += "UNIQUE "
# handle clustering option
clustered = index.dialect_options["mssql"]["clustered"]
if clustered is not None:
if clustered:
text += "CLUSTERED "
else:
text += "NONCLUSTERED "
text += "INDEX %s ON %s (%s)" % (
self._prepared_index_name(index, include_schema=include_schema),
preparer.format_table(index.table),
", ".join(
self.sql_compiler.process(expr, include_table=False, literal_binds=True)
for expr in index.expressions
),
)
# handle other included columns
if index.dialect_options["mssql"]["include"]:
inclusions = [
index.table.c[col] if isinstance(col, util.string_types) else col
for col in index.dialect_options["mssql"]["include"]
]
text += " INCLUDE (%s)" % ", ".join(
[preparer.quote(c.name) for c in inclusions]
)
whereclause = index.dialect_options["mssql"]["where"]
if whereclause is not None:
where_compiled = self.sql_compiler.process(
whereclause, include_table=False, literal_binds=True
)
text += " WHERE " + where_compiled
return text
|
def visit_create_index(self, create, include_schema=False):
index = create.element
self._verify_index_table(index)
preparer = self.preparer
text = "CREATE "
if index.unique:
text += "UNIQUE "
# handle clustering option
clustered = index.dialect_options["mssql"]["clustered"]
if clustered is not None:
if clustered:
text += "CLUSTERED "
else:
text += "NONCLUSTERED "
text += "INDEX %s ON %s (%s)" % (
self._prepared_index_name(index, include_schema=include_schema),
preparer.format_table(index.table),
", ".join(
self.sql_compiler.process(expr, include_table=False, literal_binds=True)
for expr in index.expressions
),
)
whereclause = index.dialect_options["mssql"]["where"]
if whereclause is not None:
where_compiled = self.sql_compiler.process(
whereclause, include_table=False, literal_binds=True
)
text += " WHERE " + where_compiled
# handle other included columns
if index.dialect_options["mssql"]["include"]:
inclusions = [
index.table.c[col] if isinstance(col, util.string_types) else col
for col in index.dialect_options["mssql"]["include"]
]
text += " INCLUDE (%s)" % ", ".join(
[preparer.quote(c.name) for c in inclusions]
)
return text
|
https://github.com/sqlalchemy/sqlalchemy/issues/5751
|
Traceback (most recent call last):
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
pyodbc.ProgrammingError: ('42000', "[42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Incorrect syntax near '('. (102) (SQLExecDirectW)")
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce.py", line 42, in <module>
setup()
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce.py", line 39, in setup
metadata.create_all(engine)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\schema.py", line 4553, in create_all
bind._run_visitor(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 783, in visit_metadata
self.traverse_single(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 839, in visit_table
self.traverse_single(index)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 868, in visit_index
self.connection.execute(CreateIndex(index))
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1011, in execute
return meth(self, multiparams, params)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\sql\ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1068, in _execute_ddl
ret = self._execute_context(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1316, in _execute_context
self._handle_dbapi_exception(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1510, in _handle_dbapi_exception
util.raise_(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\util\compat.py", line 182, in raise_
raise exception
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\base.py", line 1276, in _execute_context
self.dialect.do_execute(
File "C:\dev\sqlalchemy_reproduce\sqlalchemy_reproduce-EWF-LNSc\lib\site-packages\sqlalchemy\engine\default.py", line 593, in do_execute
cursor.execute(statement, parameters)
sqlalchemy.exc.ProgrammingError: (pyodbc.ProgrammingError) ('42000', "[42000] [Microsoft][ODBC SQL Server Driver][SQL Server]Incorrect syntax near '('. (102) (SQLExecDirectW)")
[SQL: CREATE INDEX idx_name_adult ON person (name) WHERE age > 18 INCLUDE (age)]
(Background on this error at: http://sqlalche.me/e/13/f405)
|
pyodbc.ProgrammingError
|
def visit_on_duplicate_key_update(self, on_duplicate, **kw):
statement = self.current_executable
if on_duplicate._parameter_ordering:
parameter_ordering = [
coercions.expect(roles.DMLColumnRole, key)
for key in on_duplicate._parameter_ordering
]
ordered_keys = set(parameter_ordering)
cols = [
statement.table.c[key]
for key in parameter_ordering
if key in statement.table.c
] + [c for c in statement.table.c if c.key not in ordered_keys]
else:
cols = statement.table.c
clauses = []
# traverses through all table columns to preserve table column order
for column in (col for col in cols if col.key in on_duplicate.update):
val = on_duplicate.update[column.key]
if coercions._is_literal(val):
val = elements.BindParameter(None, val, type_=column.type)
value_text = self.process(val.self_group(), use_schema=False)
else:
def replace(obj):
if isinstance(obj, elements.BindParameter) and obj.type._isnull:
obj = obj._clone()
obj.type = column.type
return obj
elif (
isinstance(obj, elements.ColumnClause)
and obj.table is on_duplicate.inserted_alias
):
obj = literal_column(
"VALUES(" + self.preparer.quote(column.name) + ")"
)
return obj
else:
# element is not replaced
return None
val = visitors.replacement_traverse(val, {}, replace)
value_text = self.process(val.self_group(), use_schema=False)
name_text = self.preparer.quote(column.name)
clauses.append("%s = %s" % (name_text, value_text))
non_matching = set(on_duplicate.update) - set(c.key for c in cols)
if non_matching:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.statement.table.name,
(", ".join("'%s'" % c for c in non_matching)),
)
)
return "ON DUPLICATE KEY UPDATE " + ", ".join(clauses)
|
def visit_on_duplicate_key_update(self, on_duplicate, **kw):
if on_duplicate._parameter_ordering:
parameter_ordering = [
coercions.expect(roles.DMLColumnRole, key)
for key in on_duplicate._parameter_ordering
]
ordered_keys = set(parameter_ordering)
cols = [
self.statement.table.c[key]
for key in parameter_ordering
if key in self.statement.table.c
] + [c for c in self.statement.table.c if c.key not in ordered_keys]
else:
cols = self.statement.table.c
clauses = []
# traverses through all table columns to preserve table column order
for column in (col for col in cols if col.key in on_duplicate.update):
val = on_duplicate.update[column.key]
if coercions._is_literal(val):
val = elements.BindParameter(None, val, type_=column.type)
value_text = self.process(val.self_group(), use_schema=False)
else:
def replace(obj):
if isinstance(obj, elements.BindParameter) and obj.type._isnull:
obj = obj._clone()
obj.type = column.type
return obj
elif (
isinstance(obj, elements.ColumnClause)
and obj.table is on_duplicate.inserted_alias
):
obj = literal_column(
"VALUES(" + self.preparer.quote(column.name) + ")"
)
return obj
else:
# element is not replaced
return None
val = visitors.replacement_traverse(val, {}, replace)
value_text = self.process(val.self_group(), use_schema=False)
name_text = self.preparer.quote(column.name)
clauses.append("%s = %s" % (name_text, value_text))
non_matching = set(on_duplicate.update) - set(c.key for c in cols)
if non_matching:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.statement.table.name,
(", ".join("'%s'" % c for c in non_matching)),
)
)
return "ON DUPLICATE KEY UPDATE " + ", ".join(clauses)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
def visit_on_conflict_do_update(self, on_conflict, **kw):
clause = on_conflict
target_text = self._on_conflict_target(on_conflict, **kw)
action_set_ops = []
set_parameters = dict(clause.update_values_to_set)
# create a list of column assignment clauses as tuples
insert_statement = self.stack[-1]["selectable"]
cols = insert_statement.table.c
for c in cols:
col_key = c.key
if col_key in set_parameters:
value = set_parameters.pop(col_key)
if coercions._is_literal(value):
value = elements.BindParameter(None, value, type_=c.type)
else:
if isinstance(value, elements.BindParameter) and value.type._isnull:
value = value._clone()
value.type = c.type
value_text = self.process(value.self_group(), use_schema=False)
key_text = self.preparer.quote(col_key)
action_set_ops.append("%s = %s" % (key_text, value_text))
# check for names that don't match columns
if set_parameters:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.current_executable.table.name,
(", ".join("'%s'" % c for c in set_parameters)),
)
)
for k, v in set_parameters.items():
key_text = (
self.preparer.quote(k)
if isinstance(k, util.string_types)
else self.process(k, use_schema=False)
)
value_text = self.process(
coercions.expect(roles.ExpressionElementRole, v),
use_schema=False,
)
action_set_ops.append("%s = %s" % (key_text, value_text))
action_text = ", ".join(action_set_ops)
if clause.update_whereclause is not None:
action_text += " WHERE %s" % self.process(
clause.update_whereclause, include_table=True, use_schema=False
)
return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text)
|
def visit_on_conflict_do_update(self, on_conflict, **kw):
clause = on_conflict
target_text = self._on_conflict_target(on_conflict, **kw)
action_set_ops = []
set_parameters = dict(clause.update_values_to_set)
# create a list of column assignment clauses as tuples
insert_statement = self.stack[-1]["selectable"]
cols = insert_statement.table.c
for c in cols:
col_key = c.key
if col_key in set_parameters:
value = set_parameters.pop(col_key)
if coercions._is_literal(value):
value = elements.BindParameter(None, value, type_=c.type)
else:
if isinstance(value, elements.BindParameter) and value.type._isnull:
value = value._clone()
value.type = c.type
value_text = self.process(value.self_group(), use_schema=False)
key_text = self.preparer.quote(col_key)
action_set_ops.append("%s = %s" % (key_text, value_text))
# check for names that don't match columns
if set_parameters:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.statement.table.name,
(", ".join("'%s'" % c for c in set_parameters)),
)
)
for k, v in set_parameters.items():
key_text = (
self.preparer.quote(k)
if isinstance(k, util.string_types)
else self.process(k, use_schema=False)
)
value_text = self.process(
coercions.expect(roles.ExpressionElementRole, v),
use_schema=False,
)
action_set_ops.append("%s = %s" % (key_text, value_text))
action_text = ", ".join(action_set_ops)
if clause.update_whereclause is not None:
action_text += " WHERE %s" % self.process(
clause.update_whereclause, include_table=True, use_schema=False
)
return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
def create_for_statement(cls, statement, compiler, **kw):
self = cls.__new__(cls)
ext_info = statement.table._annotations["parententity"]
self.mapper = mapper = ext_info.mapper
self.extra_criteria_entities = {}
self._resolved_values = cls._get_resolved_values(mapper, statement)
extra_criteria_attributes = {}
for opt in statement._with_options:
if opt._is_criteria_option:
opt.get_global_criteria(extra_criteria_attributes)
if not statement._preserve_parameter_order and statement._values:
self._resolved_values = dict(self._resolved_values)
new_stmt = sql.Update.__new__(sql.Update)
new_stmt.__dict__.update(statement.__dict__)
new_stmt.table = mapper.local_table
# note if the statement has _multi_values, these
# are passed through to the new statement, which will then raise
# InvalidRequestError because UPDATE doesn't support multi_values
# right now.
if statement._ordered_values:
new_stmt._ordered_values = self._resolved_values
elif statement._values:
new_stmt._values = self._resolved_values
new_crit = cls._adjust_for_extra_criteria(extra_criteria_attributes, mapper)
if new_crit:
new_stmt = new_stmt.where(*new_crit)
# if we are against a lambda statement we might not be the
# topmost object that received per-execute annotations
if (
compiler._annotations.get("synchronize_session", None) == "fetch"
and compiler.dialect.full_returning
):
new_stmt = new_stmt.returning(*mapper.primary_key)
UpdateDMLState.__init__(self, new_stmt, compiler, **kw)
return self
|
def create_for_statement(cls, statement, compiler, **kw):
self = cls.__new__(cls)
ext_info = statement.table._annotations["parententity"]
self.mapper = mapper = ext_info.mapper
self.extra_criteria_entities = {}
self._resolved_values = cls._get_resolved_values(mapper, statement)
extra_criteria_attributes = {}
for opt in statement._with_options:
if opt._is_criteria_option:
opt.get_global_criteria(extra_criteria_attributes)
if not statement._preserve_parameter_order and statement._values:
self._resolved_values = dict(self._resolved_values)
new_stmt = sql.Update.__new__(sql.Update)
new_stmt.__dict__.update(statement.__dict__)
new_stmt.table = mapper.local_table
# note if the statement has _multi_values, these
# are passed through to the new statement, which will then raise
# InvalidRequestError because UPDATE doesn't support multi_values
# right now.
if statement._ordered_values:
new_stmt._ordered_values = self._resolved_values
elif statement._values:
new_stmt._values = self._resolved_values
new_crit = cls._adjust_for_extra_criteria(extra_criteria_attributes, mapper)
if new_crit:
new_stmt = new_stmt.where(*new_crit)
# if we are against a lambda statement we might not be the
# topmost object that received per-execute annotations
top_level_stmt = compiler.statement
if (
top_level_stmt._annotations.get("synchronize_session", None) == "fetch"
and compiler.dialect.full_returning
):
new_stmt = new_stmt.returning(*mapper.primary_key)
UpdateDMLState.__init__(self, new_stmt, compiler, **kw)
return self
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
def create_for_statement(cls, statement, compiler, **kw):
self = cls.__new__(cls)
ext_info = statement.table._annotations["parententity"]
self.mapper = mapper = ext_info.mapper
self.extra_criteria_entities = {}
extra_criteria_attributes = {}
for opt in statement._with_options:
if opt._is_criteria_option:
opt.get_global_criteria(extra_criteria_attributes)
new_crit = cls._adjust_for_extra_criteria(extra_criteria_attributes, mapper)
if new_crit:
statement = statement.where(*new_crit)
if (
mapper
and compiler._annotations.get("synchronize_session", None) == "fetch"
and compiler.dialect.full_returning
):
statement = statement.returning(*mapper.primary_key)
DeleteDMLState.__init__(self, statement, compiler, **kw)
return self
|
def create_for_statement(cls, statement, compiler, **kw):
self = cls.__new__(cls)
ext_info = statement.table._annotations["parententity"]
self.mapper = mapper = ext_info.mapper
top_level_stmt = compiler.statement
self.extra_criteria_entities = {}
extra_criteria_attributes = {}
for opt in statement._with_options:
if opt._is_criteria_option:
opt.get_global_criteria(extra_criteria_attributes)
new_crit = cls._adjust_for_extra_criteria(extra_criteria_attributes, mapper)
if new_crit:
statement = statement.where(*new_crit)
if (
mapper
and top_level_stmt._annotations.get("synchronize_session", None) == "fetch"
and compiler.dialect.full_returning
):
statement = statement.returning(*mapper.primary_key)
DeleteDMLState.__init__(self, statement, compiler, **kw)
return self
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
def __init__(
self,
dialect,
statement,
schema_translate_map=None,
render_schema_translate=False,
compile_kwargs=util.immutabledict(),
):
"""Construct a new :class:`.Compiled` object.
:param dialect: :class:`.Dialect` to compile against.
:param statement: :class:`_expression.ClauseElement` to be compiled.
:param bind: Optional Engine or Connection to compile this
statement against.
:param schema_translate_map: dictionary of schema names to be
translated when forming the resultant SQL
.. versionadded:: 1.1
.. seealso::
:ref:`schema_translating`
:param compile_kwargs: additional kwargs that will be
passed to the initial call to :meth:`.Compiled.process`.
"""
self.dialect = dialect
self.preparer = self.dialect.identifier_preparer
if schema_translate_map:
self.schema_translate_map = schema_translate_map
self.preparer = self.preparer._with_schema_translate(schema_translate_map)
if statement is not None:
self.statement = statement
self.can_execute = statement.supports_execution
self._annotations = statement._annotations
if self.can_execute:
self.execution_options = statement._execution_options
self.string = self.process(self.statement, **compile_kwargs)
if render_schema_translate:
self.string = self.preparer._render_schema_translates(
self.string, schema_translate_map
)
self._gen_time = util.perf_counter()
|
def __init__(
self,
dialect,
statement,
schema_translate_map=None,
render_schema_translate=False,
compile_kwargs=util.immutabledict(),
):
"""Construct a new :class:`.Compiled` object.
:param dialect: :class:`.Dialect` to compile against.
:param statement: :class:`_expression.ClauseElement` to be compiled.
:param bind: Optional Engine or Connection to compile this
statement against.
:param schema_translate_map: dictionary of schema names to be
translated when forming the resultant SQL
.. versionadded:: 1.1
.. seealso::
:ref:`schema_translating`
:param compile_kwargs: additional kwargs that will be
passed to the initial call to :meth:`.Compiled.process`.
"""
self.dialect = dialect
self.preparer = self.dialect.identifier_preparer
if schema_translate_map:
self.schema_translate_map = schema_translate_map
self.preparer = self.preparer._with_schema_translate(schema_translate_map)
if statement is not None:
self.statement = statement
self.can_execute = statement.supports_execution
if self.can_execute:
self.execution_options = statement._execution_options
self.string = self.process(self.statement, **compile_kwargs)
if render_schema_translate:
self.string = self.preparer._render_schema_translates(
self.string, schema_translate_map
)
self._gen_time = util.perf_counter()
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
def visit_on_duplicate_key_update(self, on_duplicate, **kw):
statement = self.current_executable
if on_duplicate._parameter_ordering:
parameter_ordering = [
elements._column_as_key(key) for key in on_duplicate._parameter_ordering
]
ordered_keys = set(parameter_ordering)
cols = [
statement.table.c[key]
for key in parameter_ordering
if key in statement.table.c
] + [c for c in statement.table.c if c.key not in ordered_keys]
else:
cols = statement.table.c
clauses = []
# traverses through all table columns to preserve table column order
for column in (col for col in cols if col.key in on_duplicate.update):
val = on_duplicate.update[column.key]
if elements._is_literal(val):
val = elements.BindParameter(None, val, type_=column.type)
value_text = self.process(val.self_group(), use_schema=False)
else:
def replace(obj):
if isinstance(obj, elements.BindParameter) and obj.type._isnull:
obj = obj._clone()
obj.type = column.type
return obj
elif (
isinstance(obj, elements.ColumnClause)
and obj.table is on_duplicate.inserted_alias
):
obj = literal_column(
"VALUES(" + self.preparer.quote(column.name) + ")"
)
return obj
else:
# element is not replaced
return None
val = visitors.replacement_traverse(val, {}, replace)
value_text = self.process(val.self_group(), use_schema=False)
name_text = self.preparer.quote(column.name)
clauses.append("%s = %s" % (name_text, value_text))
non_matching = set(on_duplicate.update) - set(c.key for c in cols)
if non_matching:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.statement.table.name,
(", ".join("'%s'" % c for c in non_matching)),
)
)
return "ON DUPLICATE KEY UPDATE " + ", ".join(clauses)
|
def visit_on_duplicate_key_update(self, on_duplicate, **kw):
if on_duplicate._parameter_ordering:
parameter_ordering = [
elements._column_as_key(key) for key in on_duplicate._parameter_ordering
]
ordered_keys = set(parameter_ordering)
cols = [
self.statement.table.c[key]
for key in parameter_ordering
if key in self.statement.table.c
] + [c for c in self.statement.table.c if c.key not in ordered_keys]
else:
cols = self.statement.table.c
clauses = []
# traverses through all table columns to preserve table column order
for column in (col for col in cols if col.key in on_duplicate.update):
val = on_duplicate.update[column.key]
if elements._is_literal(val):
val = elements.BindParameter(None, val, type_=column.type)
value_text = self.process(val.self_group(), use_schema=False)
else:
def replace(obj):
if isinstance(obj, elements.BindParameter) and obj.type._isnull:
obj = obj._clone()
obj.type = column.type
return obj
elif (
isinstance(obj, elements.ColumnClause)
and obj.table is on_duplicate.inserted_alias
):
obj = literal_column(
"VALUES(" + self.preparer.quote(column.name) + ")"
)
return obj
else:
# element is not replaced
return None
val = visitors.replacement_traverse(val, {}, replace)
value_text = self.process(val.self_group(), use_schema=False)
name_text = self.preparer.quote(column.name)
clauses.append("%s = %s" % (name_text, value_text))
non_matching = set(on_duplicate.update) - set(c.key for c in cols)
if non_matching:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.statement.table.name,
(", ".join("'%s'" % c for c in non_matching)),
)
)
return "ON DUPLICATE KEY UPDATE " + ", ".join(clauses)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
def visit_on_conflict_do_update(self, on_conflict, **kw):
clause = on_conflict
target_text = self._on_conflict_target(on_conflict, **kw)
action_set_ops = []
set_parameters = dict(clause.update_values_to_set)
# create a list of column assignment clauses as tuples
insert_statement = self.stack[-1]["selectable"]
cols = insert_statement.table.c
for c in cols:
col_key = c.key
if col_key in set_parameters:
value = set_parameters.pop(col_key)
if elements._is_literal(value):
value = elements.BindParameter(None, value, type_=c.type)
else:
if isinstance(value, elements.BindParameter) and value.type._isnull:
value = value._clone()
value.type = c.type
value_text = self.process(value.self_group(), use_schema=False)
key_text = self.preparer.quote(col_key)
action_set_ops.append("%s = %s" % (key_text, value_text))
# check for names that don't match columns
if set_parameters:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.current_executable.table.name,
(", ".join("'%s'" % c for c in set_parameters)),
)
)
for k, v in set_parameters.items():
key_text = (
self.preparer.quote(k)
if isinstance(k, util.string_types)
else self.process(k, use_schema=False)
)
value_text = self.process(elements._literal_as_binds(v), use_schema=False)
action_set_ops.append("%s = %s" % (key_text, value_text))
action_text = ", ".join(action_set_ops)
if clause.update_whereclause is not None:
action_text += " WHERE %s" % self.process(
clause.update_whereclause, include_table=True, use_schema=False
)
return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text)
|
def visit_on_conflict_do_update(self, on_conflict, **kw):
clause = on_conflict
target_text = self._on_conflict_target(on_conflict, **kw)
action_set_ops = []
set_parameters = dict(clause.update_values_to_set)
# create a list of column assignment clauses as tuples
insert_statement = self.stack[-1]["selectable"]
cols = insert_statement.table.c
for c in cols:
col_key = c.key
if col_key in set_parameters:
value = set_parameters.pop(col_key)
if elements._is_literal(value):
value = elements.BindParameter(None, value, type_=c.type)
else:
if isinstance(value, elements.BindParameter) and value.type._isnull:
value = value._clone()
value.type = c.type
value_text = self.process(value.self_group(), use_schema=False)
key_text = self.preparer.quote(col_key)
action_set_ops.append("%s = %s" % (key_text, value_text))
# check for names that don't match columns
if set_parameters:
util.warn(
"Additional column names not matching "
"any column keys in table '%s': %s"
% (
self.statement.table.name,
(", ".join("'%s'" % c for c in set_parameters)),
)
)
for k, v in set_parameters.items():
key_text = (
self.preparer.quote(k)
if isinstance(k, util.string_types)
else self.process(k, use_schema=False)
)
value_text = self.process(elements._literal_as_binds(v), use_schema=False)
action_set_ops.append("%s = %s" % (key_text, value_text))
action_text = ", ".join(action_set_ops)
if clause.update_whereclause is not None:
action_text += " WHERE %s" % self.process(
clause.update_whereclause, include_table=True, use_schema=False
)
return "ON CONFLICT %s DO UPDATE SET %s" % (target_text, action_text)
|
https://github.com/sqlalchemy/sqlalchemy/issues/5656
|
Traceback (most recent call last):
File "./reproduction.py", line 315, in <module>
Score.__table__.create()
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/schema.py", line 925, in create
bind._run_visitor(ddl.SchemaGenerator, self, checkfirst=checkfirst)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 2095, in _run_visitor
conn._run_visitor(visitorcallable, element, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1656, in _run_visitor
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 145, in traverse_single
return meth(obj, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 854, in visit_table
_is_metadata_operation=_is_metadata_operation,
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/event/attr.py", line 322, in __call__
fn(*args, **kw)
File "./reproduction.py", line 152, in create_after_all_dependencies
CreateTrigger(self)(*args, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 256, in __call__
return bind.execute(self.against(target))
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1011, in execute
return meth(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 72, in _execute_on_connection
return connection._execute_ddl(self, multiparams, params)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/engine/base.py", line 1066, in _execute_ddl
else None,
File "<string>", line 1, in <lambda>
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/elements.py", line 481, in compile
return self._compiler(dialect, bind=bind, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/ddl.py", line 29, in _compiler
return dialect.ddl_compiler(dialect, self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 325, in __init__
self.string = self.process(self.statement, **compile_kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 441, in <lambda>
lambda *arg, **kw: existing(*arg, **kw),
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/ext/compiler.py", line 486, in __call__
return fn(element, compiler, **kw)
File "./reproduction.py", line 199, in visit_create_trigger_mysql
compiled_query = sql_compiler.process(element.trigger.query, literal_binds=True)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 2515, in visit_insert
insert_stmt._post_values_clause, **kw
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/compiler.py", line 355, in process
return obj._compiler_dispatch(self, **kwargs)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/sql/visitors.py", line 96, in _compiler_dispatch
return meth(self, **kw)
File "….virtualenvs/yeepa/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/base.py", line 1400, in visit_on_duplicate_key_update
cols = self.statement.table.c
AttributeError: 'MySQLCompiler_mysqldb' object has no attribute 'statement'
|
AttributeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.