repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
mbedmicro/pyOCD
pyocd/debug/svd/model.py
_check_type
def _check_type(value, expected_type): """Perform type checking on the provided value This is a helper that will raise ``TypeError`` if the provided value is not an instance of the provided type. This method should be used sparingly but can be good for preventing problems earlier when you want to restrict duck typing to make the types of fields more obvious. If the value passed the type check it will be returned from the call. """ if not isinstance(value, expected_type): raise TypeError("Value {value!r} has unexpected type {actual_type!r}, expected {expected_type!r}".format( value=value, expected_type=expected_type, actual_type=type(value), )) return value
python
def _check_type(value, expected_type): """Perform type checking on the provided value This is a helper that will raise ``TypeError`` if the provided value is not an instance of the provided type. This method should be used sparingly but can be good for preventing problems earlier when you want to restrict duck typing to make the types of fields more obvious. If the value passed the type check it will be returned from the call. """ if not isinstance(value, expected_type): raise TypeError("Value {value!r} has unexpected type {actual_type!r}, expected {expected_type!r}".format( value=value, expected_type=expected_type, actual_type=type(value), )) return value
[ "def", "_check_type", "(", "value", ",", "expected_type", ")", ":", "if", "not", "isinstance", "(", "value", ",", "expected_type", ")", ":", "raise", "TypeError", "(", "\"Value {value!r} has unexpected type {actual_type!r}, expected {expected_type!r}\"", ".", "format", ...
Perform type checking on the provided value This is a helper that will raise ``TypeError`` if the provided value is not an instance of the provided type. This method should be used sparingly but can be good for preventing problems earlier when you want to restrict duck typing to make the types of fields more obvious. If the value passed the type check it will be returned from the call.
[ "Perform", "type", "checking", "on", "the", "provided", "value" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/debug/svd/model.py#L26-L42
train
215,000
mbedmicro/pyOCD
pyocd/probe/stlink/detect/windows.py
_get_cached_mounted_points
def _get_cached_mounted_points(): """! Get the volumes present on the system @return List of mount points and their associated target id Ex. [{ 'mount_point': 'D:', 'target_id_usb_id': 'xxxx'}, ...] """ result = [] try: # Open the registry key for mounted devices mounted_devices_key = winreg.OpenKey( winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices" ) for v in _iter_vals(mounted_devices_key): # Valid entries have the following format: \DosDevices\D: if "DosDevices" not in v[0]: continue volume_string = v[1].decode("utf-16le", "ignore") if not _is_mbed_volume(volume_string): continue mount_point_match = re.match(".*\\\\(.:)$", v[0]) if not mount_point_match: logger.debug("Invalid disk pattern for entry %s, skipping", v[0]) continue mount_point = mount_point_match.group(1) result.append({"mount_point": mount_point, "volume_string": volume_string}) except OSError: logger.error('Failed to open "MountedDevices" in registry') return result
python
def _get_cached_mounted_points():
    """! Get the volumes present on the system
    @return List of mount points and their associated target id
    Ex. [{ 'mount_point': 'D:', 'target_id_usb_id': 'xxxx'}, ...]
    """
    result = []
    try:
        # Open the registry key for mounted devices. Use the key as a context
        # manager so the registry handle is released deterministically instead
        # of waiting for garbage collection (fixes a handle leak).
        with winreg.OpenKey(
            winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices"
        ) as mounted_devices_key:
            for v in _iter_vals(mounted_devices_key):
                # Valid entries have the following format: \DosDevices\D:
                if "DosDevices" not in v[0]:
                    continue
                # Registry stores the volume string as a UTF-16-LE blob.
                volume_string = v[1].decode("utf-16le", "ignore")
                if not _is_mbed_volume(volume_string):
                    continue
                # Extract the drive letter (e.g. "D:") from the entry name.
                mount_point_match = re.match(".*\\\\(.:)$", v[0])
                if not mount_point_match:
                    logger.debug("Invalid disk pattern for entry %s, skipping", v[0])
                    continue
                mount_point = mount_point_match.group(1)
                result.append(
                    {"mount_point": mount_point, "volume_string": volume_string}
                )
    except OSError:
        logger.error('Failed to open "MountedDevices" in registry')
    return result
[ "def", "_get_cached_mounted_points", "(", ")", ":", "result", "=", "[", "]", "try", ":", "# Open the registry key for mounted devices", "mounted_devices_key", "=", "winreg", ".", "OpenKey", "(", "winreg", ".", "HKEY_LOCAL_MACHINE", ",", "\"SYSTEM\\\\MountedDevices\"", "...
! Get the volumes present on the system @return List of mount points and their associated target id Ex. [{ 'mount_point': 'D:', 'target_id_usb_id': 'xxxx'}, ...]
[ "!", "Get", "the", "volumes", "present", "on", "the", "system" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/windows.py#L64-L96
train
215,001
mbedmicro/pyOCD
pyocd/probe/stlink/detect/windows.py
_vid_pid_path_to_usb_info
def _vid_pid_path_to_usb_info(vid_pid_path): """! Provide the vendor ID and product ID of a device based on its entry in the registry @return Returns {'vendor_id': '<vendor ID>', 'product': '<product ID>'} @details If the vendor ID or product ID can't be determined, they will be returned as None. """ result = {"vendor_id": None, "product_id": None} for component in vid_pid_path.split("&"): component_part = component.lower().split("_") if len(component_part) != 2: logger.debug("Unexpected VID/PID string structure %s", component) break if component_part[0] == "vid": result["vendor_id"] = component_part[1] elif component_part[0] == "pid": result["product_id"] = component_part[1] return result
python
def _vid_pid_path_to_usb_info(vid_pid_path): """! Provide the vendor ID and product ID of a device based on its entry in the registry @return Returns {'vendor_id': '<vendor ID>', 'product': '<product ID>'} @details If the vendor ID or product ID can't be determined, they will be returned as None. """ result = {"vendor_id": None, "product_id": None} for component in vid_pid_path.split("&"): component_part = component.lower().split("_") if len(component_part) != 2: logger.debug("Unexpected VID/PID string structure %s", component) break if component_part[0] == "vid": result["vendor_id"] = component_part[1] elif component_part[0] == "pid": result["product_id"] = component_part[1] return result
[ "def", "_vid_pid_path_to_usb_info", "(", "vid_pid_path", ")", ":", "result", "=", "{", "\"vendor_id\"", ":", "None", ",", "\"product_id\"", ":", "None", "}", "for", "component", "in", "vid_pid_path", ".", "split", "(", "\"&\"", ")", ":", "component_part", "=",...
! Provide the vendor ID and product ID of a device based on its entry in the registry @return Returns {'vendor_id': '<vendor ID>', 'product': '<product ID>'} @details If the vendor ID or product ID can't be determined, they will be returned as None.
[ "!", "Provide", "the", "vendor", "ID", "and", "product", "ID", "of", "a", "device", "based", "on", "its", "entry", "in", "the", "registry" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/windows.py#L189-L209
train
215,002
mbedmicro/pyOCD
pyocd/probe/stlink/detect/windows.py
_iter_keys_as_str
def _iter_keys_as_str(key): """! Iterate over subkeys of a key returning subkey as string """ for i in range(winreg.QueryInfoKey(key)[0]): yield winreg.EnumKey(key, i)
python
def _iter_keys_as_str(key):
    """! Iterate over subkeys of a key returning subkey as string """
    # QueryInfoKey returns (subkey_count, value_count, last_modified).
    subkey_count = winreg.QueryInfoKey(key)[0]
    for index in range(subkey_count):
        yield winreg.EnumKey(key, index)
[ "def", "_iter_keys_as_str", "(", "key", ")", ":", "for", "i", "in", "range", "(", "winreg", ".", "QueryInfoKey", "(", "key", ")", "[", "0", "]", ")", ":", "yield", "winreg", ".", "EnumKey", "(", "key", ",", "i", ")" ]
! Iterate over subkeys of a key returning subkey as string
[ "!", "Iterate", "over", "subkeys", "of", "a", "key", "returning", "subkey", "as", "string" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/windows.py#L212-L216
train
215,003
mbedmicro/pyOCD
pyocd/probe/stlink/detect/windows.py
_iter_keys
def _iter_keys(key): """! Iterate over subkeys of a key """ for i in range(winreg.QueryInfoKey(key)[0]): yield winreg.OpenKey(key, winreg.EnumKey(key, i))
python
def _iter_keys(key):
    """! Iterate over subkeys of a key """
    # QueryInfoKey returns (subkey_count, value_count, last_modified).
    subkey_count = winreg.QueryInfoKey(key)[0]
    for index in range(subkey_count):
        yield winreg.OpenKey(key, winreg.EnumKey(key, index))
[ "def", "_iter_keys", "(", "key", ")", ":", "for", "i", "in", "range", "(", "winreg", ".", "QueryInfoKey", "(", "key", ")", "[", "0", "]", ")", ":", "yield", "winreg", ".", "OpenKey", "(", "key", ",", "winreg", ".", "EnumKey", "(", "key", ",", "i"...
! Iterate over subkeys of a key
[ "!", "Iterate", "over", "subkeys", "of", "a", "key" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/windows.py#L219-L223
train
215,004
mbedmicro/pyOCD
pyocd/probe/stlink/detect/windows.py
_iter_vals
def _iter_vals(key): """! Iterate over values of a key """ for i in range(winreg.QueryInfoKey(key)[1]): yield winreg.EnumValue(key, i)
python
def _iter_vals(key):
    """! Iterate over values of a key """
    # Index 1 of QueryInfoKey is the number of values under the key.
    value_count = winreg.QueryInfoKey(key)[1]
    for index in range(value_count):
        yield winreg.EnumValue(key, index)
[ "def", "_iter_vals", "(", "key", ")", ":", "for", "i", "in", "range", "(", "winreg", ".", "QueryInfoKey", "(", "key", ")", "[", "1", "]", ")", ":", "yield", "winreg", ".", "EnumValue", "(", "key", ",", "i", ")" ]
! Iterate over values of a key
[ "!", "Iterate", "over", "values", "of", "a", "key" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/windows.py#L226-L230
train
215,005
mbedmicro/pyOCD
pyocd/probe/stlink/detect/windows.py
MbedLsToolsWindows.mount_point_ready
def mount_point_ready(self, path): """! Check if a mount point is ready for file operations @return Returns True if the given path exists, False otherwise @details Calling the Windows command `dir` instead of using the python `os.path.exists`. The latter causes a Python error box to appear claiming there is "No Disk" for some devices that are in the ejected state. Calling `dir` prevents this since it uses the Windows API to determine if the device is ready before accessing the file system. """ stdout, stderr, retcode = self._run_cli_process("dir %s" % path) result = True if retcode == 0 else False return result
python
def mount_point_ready(self, path):
    """! Check if a mount point is ready for file operations
    @return Returns True if the given path exists, False otherwise
    @details Calling the Windows command `dir` instead of using the python
    `os.path.exists`. The latter causes a Python error box to appear claiming
    there is "No Disk" for some devices that are in the ejected state. Calling
    `dir` prevents this since it uses the Windows API to determine if the
    device is ready before accessing the file system.
    """
    stdout, stderr, retcode = self._run_cli_process("dir %s" % path)
    # `dir` exits with status 0 only when the path is accessible.
    return retcode == 0
[ "def", "mount_point_ready", "(", "self", ",", "path", ")", ":", "stdout", ",", "stderr", ",", "retcode", "=", "self", ".", "_run_cli_process", "(", "\"dir %s\"", "%", "path", ")", "result", "=", "True", "if", "retcode", "==", "0", "else", "False", "retur...
! Check if a mount point is ready for file operations @return Returns True if the given path exists, False otherwise @details Calling the Windows command `dir` instead of using the python `os.path.exists`. The latter causes a Python error box to appear claiming there is "No Disk" for some devices that are in the ejected state. Calling `dir` prevents this since it uses the Windows API to determine if the device is ready before accessing the file system.
[ "!", "Check", "if", "a", "mount", "point", "is", "ready", "for", "file", "operations" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/windows.py#L478-L490
train
215,006
mbedmicro/pyOCD
pyocd/target/pack/flash_algo.py
PackFlashAlgo._create_algo_bin
def _create_algo_bin(self, ro_rw_zi): """Create a binary blob of the flash algo which can execute from ram""" sect_ro, sect_rw, sect_zi = ro_rw_zi algo_size = sect_ro.length + sect_rw.length + sect_zi.length algo_data = bytearray(algo_size) for section in (sect_ro, sect_rw): start = section.start size = section.length data = section.data assert len(data) == size algo_data[start:start + size] = data return algo_data
python
def _create_algo_bin(self, ro_rw_zi): """Create a binary blob of the flash algo which can execute from ram""" sect_ro, sect_rw, sect_zi = ro_rw_zi algo_size = sect_ro.length + sect_rw.length + sect_zi.length algo_data = bytearray(algo_size) for section in (sect_ro, sect_rw): start = section.start size = section.length data = section.data assert len(data) == size algo_data[start:start + size] = data return algo_data
[ "def", "_create_algo_bin", "(", "self", ",", "ro_rw_zi", ")", ":", "sect_ro", ",", "sect_rw", ",", "sect_zi", "=", "ro_rw_zi", "algo_size", "=", "sect_ro", ".", "length", "+", "sect_rw", ".", "length", "+", "sect_zi", ".", "length", "algo_data", "=", "byte...
Create a binary blob of the flash algo which can execute from ram
[ "Create", "a", "binary", "blob", "of", "the", "flash", "algo", "which", "can", "execute", "from", "ram" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/target/pack/flash_algo.py#L232-L243
train
215,007
mbedmicro/pyOCD
pyocd/target/pack/flash_algo.py
PackFlashInfo._sector_and_sz_itr
def _sector_and_sz_itr(self, elf, data_start): """Iterator which returns starting address and sector size""" for entry_start in itertools.count(data_start, self.FLASH_SECTORS_STRUCT_SIZE): data = elf.read(entry_start, self.FLASH_SECTORS_STRUCT_SIZE) size, start = struct.unpack(self.FLASH_SECTORS_STRUCT, data) start_and_size = start, size if start_and_size == (self.SECTOR_END, self.SECTOR_END): return yield start_and_size
python
def _sector_and_sz_itr(self, elf, data_start): """Iterator which returns starting address and sector size""" for entry_start in itertools.count(data_start, self.FLASH_SECTORS_STRUCT_SIZE): data = elf.read(entry_start, self.FLASH_SECTORS_STRUCT_SIZE) size, start = struct.unpack(self.FLASH_SECTORS_STRUCT, data) start_and_size = start, size if start_and_size == (self.SECTOR_END, self.SECTOR_END): return yield start_and_size
[ "def", "_sector_and_sz_itr", "(", "self", ",", "elf", ",", "data_start", ")", ":", "for", "entry_start", "in", "itertools", ".", "count", "(", "data_start", ",", "self", ".", "FLASH_SECTORS_STRUCT_SIZE", ")", ":", "data", "=", "elf", ".", "read", "(", "ent...
Iterator which returns starting address and sector size
[ "Iterator", "which", "returns", "starting", "address", "and", "sector", "size" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/target/pack/flash_algo.py#L292-L300
train
215,008
mbedmicro/pyOCD
pyocd/probe/stlink/detect/darwin.py
_prune
def _prune(current, keys): """ Reduce the amount of data we have to sift through to only include the specified keys, and children that contain the specified keys """ pruned_current = {k: current[k] for k in keys if k in current} pruned_children = list( filter( None, [_prune(c, keys) for c in current.get("IORegistryEntryChildren", [])] ) ) keep_current = any(k in current for k in keys) or pruned_children if keep_current: if pruned_children: pruned_current["IORegistryEntryChildren"] = pruned_children return pruned_current else: return {}
python
def _prune(current, keys): """ Reduce the amount of data we have to sift through to only include the specified keys, and children that contain the specified keys """ pruned_current = {k: current[k] for k in keys if k in current} pruned_children = list( filter( None, [_prune(c, keys) for c in current.get("IORegistryEntryChildren", [])] ) ) keep_current = any(k in current for k in keys) or pruned_children if keep_current: if pruned_children: pruned_current["IORegistryEntryChildren"] = pruned_children return pruned_current else: return {}
[ "def", "_prune", "(", "current", ",", "keys", ")", ":", "pruned_current", "=", "{", "k", ":", "current", "[", "k", "]", "for", "k", "in", "keys", "if", "k", "in", "current", "}", "pruned_children", "=", "list", "(", "filter", "(", "None", ",", "[",...
Reduce the amount of data we have to sift through to only include the specified keys, and children that contain the specified keys
[ "Reduce", "the", "amount", "of", "data", "we", "have", "to", "sift", "through", "to", "only", "include", "the", "specified", "keys", "and", "children", "that", "contain", "the", "specified", "keys" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/darwin.py#L46-L63
train
215,009
mbedmicro/pyOCD
pyocd/gdbserver/context_facade.py
GDBDebugContextFacade.get_register_context
def get_register_context(self): """ return hexadecimal dump of registers as expected by GDB """ logging.debug("GDB getting register context") resp = b'' reg_num_list = [reg.reg_num for reg in self._register_list] vals = self._context.read_core_registers_raw(reg_num_list) #print("Vals: %s" % vals) for reg, regValue in zip(self._register_list, vals): if reg.bitsize == 64: resp += six.b(conversion.u64_to_hex16le(regValue)) else: resp += six.b(conversion.u32_to_hex8le(regValue)) logging.debug("GDB reg: %s = 0x%X", reg.name, regValue) return resp
python
def get_register_context(self):
    """ return hexadecimal dump of registers as expected by GDB """
    logging.debug("GDB getting register context")
    reg_nums = [reg.reg_num for reg in self._register_list]
    raw_values = self._context.read_core_registers_raw(reg_nums)
    resp = b''
    for reg, value in zip(self._register_list, raw_values):
        # GDB expects little-endian hex: 16 chars for 64-bit registers,
        # 8 chars for 32-bit ones.
        if reg.bitsize == 64:
            encoded = conversion.u64_to_hex16le(value)
        else:
            encoded = conversion.u32_to_hex8le(value)
        resp += six.b(encoded)
        logging.debug("GDB reg: %s = 0x%X", reg.name, value)
    return resp
[ "def", "get_register_context", "(", "self", ")", ":", "logging", ".", "debug", "(", "\"GDB getting register context\"", ")", "resp", "=", "b''", "reg_num_list", "=", "[", "reg", ".", "reg_num", "for", "reg", "in", "self", ".", "_register_list", "]", "vals", ...
return hexadecimal dump of registers as expected by GDB
[ "return", "hexadecimal", "dump", "of", "registers", "as", "expected", "by", "GDB" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/gdbserver/context_facade.py#L60-L76
train
215,010
mbedmicro/pyOCD
pyocd/gdbserver/context_facade.py
GDBDebugContextFacade.set_register_context
def set_register_context(self, data): """ Set registers from GDB hexadecimal string. """ logging.debug("GDB setting register context") reg_num_list = [] reg_data_list = [] for reg in self._register_list: if reg.bitsize == 64: regValue = conversion.hex16_to_u64be(data) data = data[16:] else: regValue = conversion.hex8_to_u32be(data) data = data[8:] reg_num_list.append(reg.reg_num) reg_data_list.append(regValue) logging.debug("GDB reg: %s = 0x%X", reg.name, regValue) self._context.write_core_registers_raw(reg_num_list, reg_data_list)
python
def set_register_context(self, data):
    """ Set registers from GDB hexadecimal string. """
    logging.debug("GDB setting register context")
    reg_nums = []
    reg_values = []
    for reg in self._register_list:
        # Consume 16 hex chars for 64-bit registers, 8 for 32-bit ones.
        if reg.bitsize == 64:
            value = conversion.hex16_to_u64be(data)
            data = data[16:]
        else:
            value = conversion.hex8_to_u32be(data)
            data = data[8:]
        reg_nums.append(reg.reg_num)
        reg_values.append(value)
        logging.debug("GDB reg: %s = 0x%X", reg.name, value)
    self._context.write_core_registers_raw(reg_nums, reg_values)
[ "def", "set_register_context", "(", "self", ",", "data", ")", ":", "logging", ".", "debug", "(", "\"GDB setting register context\"", ")", "reg_num_list", "=", "[", "]", "reg_data_list", "=", "[", "]", "for", "reg", "in", "self", ".", "_register_list", ":", "...
Set registers from GDB hexadecimal string.
[ "Set", "registers", "from", "GDB", "hexadecimal", "string", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/gdbserver/context_facade.py#L78-L95
train
215,011
mbedmicro/pyOCD
pyocd/gdbserver/context_facade.py
GDBDebugContextFacade.set_register
def set_register(self, reg, data): """ Set single register from GDB hexadecimal string. reg parameter is the index of register in targetXML sent to GDB. """ if reg < 0: return elif reg < len(self._register_list): regName = self._register_list[reg].name regBits = self._register_list[reg].bitsize if regBits == 64: value = conversion.hex16_to_u64be(data) else: value = conversion.hex8_to_u32be(data) logging.debug("GDB: write reg %s: 0x%X", regName, value) self._context.write_core_register_raw(regName, value)
python
def set_register(self, reg, data):
    """ Set single register from GDB hexadecimal string.
    reg parameter is the index of register in targetXML sent to GDB.
    """
    if reg < 0:
        # Negative indices are silently ignored.
        return
    if reg < len(self._register_list):
        reg_info = self._register_list[reg]
        # Decode 16 hex chars for 64-bit registers, 8 for 32-bit ones.
        if reg_info.bitsize == 64:
            value = conversion.hex16_to_u64be(data)
        else:
            value = conversion.hex8_to_u32be(data)
        logging.debug("GDB: write reg %s: 0x%X", reg_info.name, value)
        self._context.write_core_register_raw(reg_info.name, value)
    # Indices past the end of the register list are ignored.
[ "def", "set_register", "(", "self", ",", "reg", ",", "data", ")", ":", "if", "reg", "<", "0", ":", "return", "elif", "reg", "<", "len", "(", "self", ".", "_register_list", ")", ":", "regName", "=", "self", ".", "_register_list", "[", "reg", "]", "....
Set single register from GDB hexadecimal string. reg parameter is the index of register in targetXML sent to GDB.
[ "Set", "single", "register", "from", "GDB", "hexadecimal", "string", ".", "reg", "parameter", "is", "the", "index", "of", "register", "in", "targetXML", "sent", "to", "GDB", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/gdbserver/context_facade.py#L97-L112
train
215,012
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
_get_interfaces
def _get_interfaces(): """Get the connected USB devices""" # Get CMSIS-DAPv1 interfaces. v1_interfaces = INTERFACE[USB_BACKEND].get_all_connected_interfaces() # Get CMSIS-DAPv2 interfaces. v2_interfaces = INTERFACE[USB_BACKEND_V2].get_all_connected_interfaces() # Prefer v2 over v1 if a device provides both. devices_in_both = [v1 for v1 in v1_interfaces for v2 in v2_interfaces if _get_unique_id(v1) == _get_unique_id(v2)] for dev in devices_in_both: v1_interfaces.remove(dev) # Return the combined list. return v1_interfaces + v2_interfaces
python
def _get_interfaces():
    """Get the connected USB devices

    CMSIS-DAPv1 and v2 interfaces are enumerated separately; when the same
    probe (matched by unique ID) shows up through both backends, the v2
    interface is preferred and the v1 duplicate is dropped.
    """
    # Get CMSIS-DAPv1 interfaces.
    v1_interfaces = INTERFACE[USB_BACKEND].get_all_connected_interfaces()

    # Get CMSIS-DAPv2 interfaces.
    v2_interfaces = INTERFACE[USB_BACKEND_V2].get_all_connected_interfaces()

    # Prefer v2 over v1 if a device provides both. Build the set of v2
    # unique IDs once so filtering is O(n) instead of comparing every
    # v1/v2 pair.
    v2_ids = set(_get_unique_id(v2) for v2 in v2_interfaces)
    v1_interfaces = [v1 for v1 in v1_interfaces if _get_unique_id(v1) not in v2_ids]

    # Return the combined list.
    return v1_interfaces + v2_interfaces
[ "def", "_get_interfaces", "(", ")", ":", "# Get CMSIS-DAPv1 interfaces.", "v1_interfaces", "=", "INTERFACE", "[", "USB_BACKEND", "]", ".", "get_all_connected_interfaces", "(", ")", "# Get CMSIS-DAPv2 interfaces.", "v2_interfaces", "=", "INTERFACE", "[", "USB_BACKEND_V2", ...
Get the connected USB devices
[ "Get", "the", "connected", "USB", "devices" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L51-L66
train
215,013
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
_Transfer.add_response
def add_response(self, data): """ Add data read from the remote device to this object. The size of data added must match exactly the size that get_data_size returns. """ assert len(data) == self._size_bytes result = [] for i in range(0, self._size_bytes, 4): word = ((data[0 + i] << 0) | (data[1 + i] << 8) | (data[2 + i] << 16) | (data[3 + i] << 24)) result.append(word) self._result = result
python
def add_response(self, data):
    """
    Add data read from the remote device to this object.

    The size of data added must match exactly the size
    that get_data_size returns.
    """
    assert len(data) == self._size_bytes
    # Reassemble little-endian 32-bit words from the raw byte stream.
    words = []
    for offset in range(0, self._size_bytes, 4):
        word = (data[offset]
                | (data[offset + 1] << 8)
                | (data[offset + 2] << 16)
                | (data[offset + 3] << 24))
        words.append(word)
    self._result = words
[ "def", "add_response", "(", "self", ",", "data", ")", ":", "assert", "len", "(", "data", ")", "==", "self", ".", "_size_bytes", "result", "=", "[", "]", "for", "i", "in", "range", "(", "0", ",", "self", ".", "_size_bytes", ",", "4", ")", ":", "wo...
Add data read from the remote device to this object. The size of data added must match exactly the size that get_data_size returns.
[ "Add", "data", "read", "from", "the", "remote", "device", "to", "this", "object", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L107-L120
train
215,014
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
_Transfer.get_result
def get_result(self): """ Get the result of this transfer. """ while self._result is None: if len(self.daplink._commands_to_read) > 0: self.daplink._read_packet() else: assert not self.daplink._crnt_cmd.get_empty() self.daplink.flush() if self._error is not None: # Pylint is confused and thinks self._error is None # since that is what it is initialized to. # Supress warnings for this. # pylint: disable=raising-bad-type raise self._error assert self._result is not None return self._result
python
def get_result(self):
    """
    Get the result of this transfer.
    """
    # Drain queued response packets — or force a flush of the command
    # currently being built — until this transfer's result is filled in.
    while self._result is None:
        if self.daplink._commands_to_read:
            self.daplink._read_packet()
        else:
            # If nothing is queued, the pending command must hold our
            # transfer; flushing it will eventually produce the result.
            assert not self.daplink._crnt_cmd.get_empty()
            self.daplink.flush()
    if self._error is not None:
        # Pylint is confused and thinks self._error is None
        # since that is what it is initialized to.
        # Supress warnings for this.
        # pylint: disable=raising-bad-type
        raise self._error
    assert self._result is not None
    return self._result
[ "def", "get_result", "(", "self", ")", ":", "while", "self", ".", "_result", "is", "None", ":", "if", "len", "(", "self", ".", "daplink", ".", "_commands_to_read", ")", ">", "0", ":", "self", ".", "daplink", ".", "_read_packet", "(", ")", "else", ":"...
Get the result of this transfer.
[ "Get", "the", "result", "of", "this", "transfer", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L129-L148
train
215,015
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
_Command._get_free_words
def _get_free_words(self, blockAllowed, isRead): """ Return the number of words free in the transmit packet """ if blockAllowed: # DAP_TransferBlock request packet: # BYTE | BYTE *****| SHORT**********| BYTE *************| WORD *********| # > 0x06 | DAP Index | Transfer Count | Transfer Request | Transfer Data | # ******|***********|****************|*******************|+++++++++++++++| send = self._size - 5 - 4 * self._write_count # DAP_TransferBlock response packet: # BYTE | SHORT *********| BYTE *************| WORD *********| # < 0x06 | Transfer Count | Transfer Response | Transfer Data | # ******|****************|*******************|+++++++++++++++| recv = self._size - 4 - 4 * self._read_count if isRead: return recv // 4 else: return send // 4 else: # DAP_Transfer request packet: # BYTE | BYTE *****| BYTE **********| BYTE *************| WORD *********| # > 0x05 | DAP Index | Transfer Count | Transfer Request | Transfer Data | # ******|***********|****************|+++++++++++++++++++++++++++++++++++| send = self._size - 3 - 1 * self._read_count - 5 * self._write_count # DAP_Transfer response packet: # BYTE | BYTE **********| BYTE *************| WORD *********| # < 0x05 | Transfer Count | Transfer Response | Transfer Data | # ******|****************|*******************|+++++++++++++++| recv = self._size - 3 - 4 * self._read_count if isRead: # 1 request byte in request packet, 4 data bytes in response packet return min(send, recv // 4) else: # 1 request byte + 4 data bytes return send // 5
python
def _get_free_words(self, blockAllowed, isRead): """ Return the number of words free in the transmit packet """ if blockAllowed: # DAP_TransferBlock request packet: # BYTE | BYTE *****| SHORT**********| BYTE *************| WORD *********| # > 0x06 | DAP Index | Transfer Count | Transfer Request | Transfer Data | # ******|***********|****************|*******************|+++++++++++++++| send = self._size - 5 - 4 * self._write_count # DAP_TransferBlock response packet: # BYTE | SHORT *********| BYTE *************| WORD *********| # < 0x06 | Transfer Count | Transfer Response | Transfer Data | # ******|****************|*******************|+++++++++++++++| recv = self._size - 4 - 4 * self._read_count if isRead: return recv // 4 else: return send // 4 else: # DAP_Transfer request packet: # BYTE | BYTE *****| BYTE **********| BYTE *************| WORD *********| # > 0x05 | DAP Index | Transfer Count | Transfer Request | Transfer Data | # ******|***********|****************|+++++++++++++++++++++++++++++++++++| send = self._size - 3 - 1 * self._read_count - 5 * self._write_count # DAP_Transfer response packet: # BYTE | BYTE **********| BYTE *************| WORD *********| # < 0x05 | Transfer Count | Transfer Response | Transfer Data | # ******|****************|*******************|+++++++++++++++| recv = self._size - 3 - 4 * self._read_count if isRead: # 1 request byte in request packet, 4 data bytes in response packet return min(send, recv // 4) else: # 1 request byte + 4 data bytes return send // 5
[ "def", "_get_free_words", "(", "self", ",", "blockAllowed", ",", "isRead", ")", ":", "if", "blockAllowed", ":", "# DAP_TransferBlock request packet:", "# BYTE | BYTE *****| SHORT**********| BYTE *************| WORD *********|", "# > 0x06 | DAP Index | Transfer Count | Transfer Reques...
Return the number of words free in the transmit packet
[ "Return", "the", "number", "of", "words", "free", "in", "the", "transmit", "packet" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L176-L215
train
215,016
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
_Command.add
def add(self, count, request, data, dap_index): """ Add a single or block register transfer operation to this command """ assert self._data_encoded is False if self._dap_index is None: self._dap_index = dap_index assert self._dap_index == dap_index if self._block_request is None: self._block_request = request elif request != self._block_request: self._block_allowed = False assert not self._block_allowed or self._block_request == request if request & READ: self._read_count += count else: self._write_count += count self._data.append((count, request, data)) if LOG_PACKET_BUILDS: self._logger.debug("add(%d, %02x:%s) -> [wc=%d, rc=%d, ba=%d]" % (count, request, 'r' if (request & READ) else 'w', self._write_count, self._read_count, self._block_allowed))
python
def add(self, count, request, data, dap_index): """ Add a single or block register transfer operation to this command """ assert self._data_encoded is False if self._dap_index is None: self._dap_index = dap_index assert self._dap_index == dap_index if self._block_request is None: self._block_request = request elif request != self._block_request: self._block_allowed = False assert not self._block_allowed or self._block_request == request if request & READ: self._read_count += count else: self._write_count += count self._data.append((count, request, data)) if LOG_PACKET_BUILDS: self._logger.debug("add(%d, %02x:%s) -> [wc=%d, rc=%d, ba=%d]" % (count, request, 'r' if (request & READ) else 'w', self._write_count, self._read_count, self._block_allowed))
[ "def", "add", "(", "self", ",", "count", ",", "request", ",", "data", ",", "dap_index", ")", ":", "assert", "self", ".", "_data_encoded", "is", "False", "if", "self", ".", "_dap_index", "is", "None", ":", "self", ".", "_dap_index", "=", "dap_index", "a...
Add a single or block register transfer operation to this command
[ "Add", "a", "single", "or", "block", "register", "transfer", "operation", "to", "this", "command" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L261-L284
train
215,017
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
_Command.decode_data
def decode_data(self, data): """ Decode the response data """ assert self.get_empty() is False assert self._data_encoded is True if self._block_allowed: data = self._decode_transfer_block_data(data) else: data = self._decode_transfer_data(data) return data
python
def decode_data(self, data): """ Decode the response data """ assert self.get_empty() is False assert self._data_encoded is True if self._block_allowed: data = self._decode_transfer_block_data(data) else: data = self._decode_transfer_data(data) return data
[ "def", "decode_data", "(", "self", ",", "data", ")", ":", "assert", "self", ".", "get_empty", "(", ")", "is", "False", "assert", "self", ".", "_data_encoded", "is", "True", "if", "self", ".", "_block_allowed", ":", "data", "=", "self", ".", "_decode_tran...
Decode the response data
[ "Decode", "the", "response", "data" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L429-L439
train
215,018
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP.get_connected_devices
def get_connected_devices(): """ Return an array of all mbed boards connected """ all_daplinks = [] all_interfaces = _get_interfaces() for interface in all_interfaces: try: new_daplink = DAPAccessCMSISDAP(None, interface=interface) all_daplinks.append(new_daplink) except DAPAccessIntf.TransferError: logger = logging.getLogger(__name__) logger.error('Failed to get unique id', exc_info=session.Session.get_current().log_tracebacks) return all_daplinks
python
def get_connected_devices(): """ Return an array of all mbed boards connected """ all_daplinks = [] all_interfaces = _get_interfaces() for interface in all_interfaces: try: new_daplink = DAPAccessCMSISDAP(None, interface=interface) all_daplinks.append(new_daplink) except DAPAccessIntf.TransferError: logger = logging.getLogger(__name__) logger.error('Failed to get unique id', exc_info=session.Session.get_current().log_tracebacks) return all_daplinks
[ "def", "get_connected_devices", "(", ")", ":", "all_daplinks", "=", "[", "]", "all_interfaces", "=", "_get_interfaces", "(", ")", "for", "interface", "in", "all_interfaces", ":", "try", ":", "new_daplink", "=", "DAPAccessCMSISDAP", "(", "None", ",", "interface",...
Return an array of all mbed boards connected
[ "Return", "an", "array", "of", "all", "mbed", "boards", "connected" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L452-L465
train
215,019
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP.set_deferred_transfer
def set_deferred_transfer(self, enable): """ Allow transfers to be delayed and buffered By default deferred transfers are turned off. All reads and writes will be completed by the time the function returns. When enabled packets are buffered and sent all at once, which increases speed. When memory is written to, the transfer might take place immediately, or might take place on a future memory write. This means that an invalid write could cause an exception to occur on a later, unrelated write. To guarantee that previous writes are complete call the flush() function. The behaviour of read operations is determined by the modes READ_START, READ_NOW and READ_END. The option READ_NOW is the default and will cause the read to flush all previous writes, and read the data immediately. To improve performance, multiple reads can be made using READ_START and finished later with READ_NOW. This allows the reads to be buffered and sent at once. Note - All READ_ENDs must be called before a call using READ_NOW can be made. """ if self._deferred_transfer and not enable: self.flush() self._deferred_transfer = enable
python
def set_deferred_transfer(self, enable): """ Allow transfers to be delayed and buffered By default deferred transfers are turned off. All reads and writes will be completed by the time the function returns. When enabled packets are buffered and sent all at once, which increases speed. When memory is written to, the transfer might take place immediately, or might take place on a future memory write. This means that an invalid write could cause an exception to occur on a later, unrelated write. To guarantee that previous writes are complete call the flush() function. The behaviour of read operations is determined by the modes READ_START, READ_NOW and READ_END. The option READ_NOW is the default and will cause the read to flush all previous writes, and read the data immediately. To improve performance, multiple reads can be made using READ_START and finished later with READ_NOW. This allows the reads to be buffered and sent at once. Note - All READ_ENDs must be called before a call using READ_NOW can be made. """ if self._deferred_transfer and not enable: self.flush() self._deferred_transfer = enable
[ "def", "set_deferred_transfer", "(", "self", ",", "enable", ")", ":", "if", "self", ".", "_deferred_transfer", "and", "not", "enable", ":", "self", ".", "flush", "(", ")", "self", ".", "_deferred_transfer", "=", "enable" ]
Allow transfers to be delayed and buffered By default deferred transfers are turned off. All reads and writes will be completed by the time the function returns. When enabled packets are buffered and sent all at once, which increases speed. When memory is written to, the transfer might take place immediately, or might take place on a future memory write. This means that an invalid write could cause an exception to occur on a later, unrelated write. To guarantee that previous writes are complete call the flush() function. The behaviour of read operations is determined by the modes READ_START, READ_NOW and READ_END. The option READ_NOW is the default and will cause the read to flush all previous writes, and read the data immediately. To improve performance, multiple reads can be made using READ_START and finished later with READ_NOW. This allows the reads to be buffered and sent at once. Note - All READ_ENDs must be called before a call using READ_NOW can be made.
[ "Allow", "transfers", "to", "be", "delayed", "and", "buffered" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L626-L650
train
215,020
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP._init_deferred_buffers
def _init_deferred_buffers(self): """ Initialize or reinitalize all the deferred transfer buffers Calling this method will drop all pending transactions so use with care. """ # List of transfers that have been started, but # not completed (started by write_reg, read_reg, # reg_write_repeat and reg_read_repeat) self._transfer_list = collections.deque() # The current packet - this can contain multiple # different transfers self._crnt_cmd = _Command(self._packet_size) # Packets that have been sent but not read self._commands_to_read = collections.deque() # Buffer for data returned for completed commands. # This data will be added to transfers self._command_response_buf = bytearray()
python
def _init_deferred_buffers(self): """ Initialize or reinitalize all the deferred transfer buffers Calling this method will drop all pending transactions so use with care. """ # List of transfers that have been started, but # not completed (started by write_reg, read_reg, # reg_write_repeat and reg_read_repeat) self._transfer_list = collections.deque() # The current packet - this can contain multiple # different transfers self._crnt_cmd = _Command(self._packet_size) # Packets that have been sent but not read self._commands_to_read = collections.deque() # Buffer for data returned for completed commands. # This data will be added to transfers self._command_response_buf = bytearray()
[ "def", "_init_deferred_buffers", "(", "self", ")", ":", "# List of transfers that have been started, but", "# not completed (started by write_reg, read_reg,", "# reg_write_repeat and reg_read_repeat)", "self", ".", "_transfer_list", "=", "collections", ".", "deque", "(", ")", "# ...
Initialize or reinitalize all the deferred transfer buffers Calling this method will drop all pending transactions so use with care.
[ "Initialize", "or", "reinitalize", "all", "the", "deferred", "transfer", "buffers" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L851-L869
train
215,021
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP._read_packet
def _read_packet(self): """ Reads and decodes a single packet Reads a single packet from the device and stores the data from it in the current Command object """ # Grab command, send it and decode response cmd = self._commands_to_read.popleft() try: raw_data = self._interface.read() raw_data = bytearray(raw_data) decoded_data = cmd.decode_data(raw_data) except Exception as exception: self._abort_all_transfers(exception) raise decoded_data = bytearray(decoded_data) self._command_response_buf.extend(decoded_data) # Attach data to transfers pos = 0 while True: size_left = len(self._command_response_buf) - pos if size_left == 0: # If size left is 0 then the transfer list might # be empty, so don't try to access element 0 break transfer = self._transfer_list[0] size = transfer.get_data_size() if size > size_left: break self._transfer_list.popleft() data = self._command_response_buf[pos:pos + size] pos += size transfer.add_response(data) # Remove used data from _command_response_buf if pos > 0: self._command_response_buf = self._command_response_buf[pos:]
python
def _read_packet(self): """ Reads and decodes a single packet Reads a single packet from the device and stores the data from it in the current Command object """ # Grab command, send it and decode response cmd = self._commands_to_read.popleft() try: raw_data = self._interface.read() raw_data = bytearray(raw_data) decoded_data = cmd.decode_data(raw_data) except Exception as exception: self._abort_all_transfers(exception) raise decoded_data = bytearray(decoded_data) self._command_response_buf.extend(decoded_data) # Attach data to transfers pos = 0 while True: size_left = len(self._command_response_buf) - pos if size_left == 0: # If size left is 0 then the transfer list might # be empty, so don't try to access element 0 break transfer = self._transfer_list[0] size = transfer.get_data_size() if size > size_left: break self._transfer_list.popleft() data = self._command_response_buf[pos:pos + size] pos += size transfer.add_response(data) # Remove used data from _command_response_buf if pos > 0: self._command_response_buf = self._command_response_buf[pos:]
[ "def", "_read_packet", "(", "self", ")", ":", "# Grab command, send it and decode response", "cmd", "=", "self", ".", "_commands_to_read", ".", "popleft", "(", ")", "try", ":", "raw_data", "=", "self", ".", "_interface", ".", "read", "(", ")", "raw_data", "=",...
Reads and decodes a single packet Reads a single packet from the device and stores the data from it in the current Command object
[ "Reads", "and", "decodes", "a", "single", "packet" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L871-L912
train
215,022
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP._send_packet
def _send_packet(self): """ Send a single packet to the interface This function guarentees that the number of packets that are stored in daplink's buffer (the number of packets written but not read) does not exceed the number supported by the given device. """ cmd = self._crnt_cmd if cmd.get_empty(): return max_packets = self._interface.get_packet_count() if len(self._commands_to_read) >= max_packets: self._read_packet() data = cmd.encode_data() try: self._interface.write(list(data)) except Exception as exception: self._abort_all_transfers(exception) raise self._commands_to_read.append(cmd) self._crnt_cmd = _Command(self._packet_size)
python
def _send_packet(self): """ Send a single packet to the interface This function guarentees that the number of packets that are stored in daplink's buffer (the number of packets written but not read) does not exceed the number supported by the given device. """ cmd = self._crnt_cmd if cmd.get_empty(): return max_packets = self._interface.get_packet_count() if len(self._commands_to_read) >= max_packets: self._read_packet() data = cmd.encode_data() try: self._interface.write(list(data)) except Exception as exception: self._abort_all_transfers(exception) raise self._commands_to_read.append(cmd) self._crnt_cmd = _Command(self._packet_size)
[ "def", "_send_packet", "(", "self", ")", ":", "cmd", "=", "self", ".", "_crnt_cmd", "if", "cmd", ".", "get_empty", "(", ")", ":", "return", "max_packets", "=", "self", ".", "_interface", ".", "get_packet_count", "(", ")", "if", "len", "(", "self", ".",...
Send a single packet to the interface This function guarentees that the number of packets that are stored in daplink's buffer (the number of packets written but not read) does not exceed the number supported by the given device.
[ "Send", "a", "single", "packet", "to", "the", "interface" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L914-L937
train
215,023
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP._write
def _write(self, dap_index, transfer_count, transfer_request, transfer_data): """ Write one or more commands """ assert dap_index == 0 # dap index currently unsupported assert isinstance(transfer_count, six.integer_types) assert isinstance(transfer_request, six.integer_types) assert transfer_data is None or len(transfer_data) > 0 # Create transfer and add to transfer list transfer = None if transfer_request & READ: transfer = _Transfer(self, dap_index, transfer_count, transfer_request, transfer_data) self._transfer_list.append(transfer) # Build physical packet by adding it to command cmd = self._crnt_cmd is_read = transfer_request & READ size_to_transfer = transfer_count trans_data_pos = 0 while size_to_transfer > 0: # Get the size remaining in the current packet for the given request. size = cmd.get_request_space(size_to_transfer, transfer_request, dap_index) # This request doesn't fit in the packet so send it. if size == 0: if LOG_PACKET_BUILDS: self._logger.debug("_write: send packet [size==0]") self._send_packet() cmd = self._crnt_cmd continue # Add request to packet. if transfer_data is None: data = None else: data = transfer_data[trans_data_pos:trans_data_pos + size] cmd.add(size, transfer_request, data, dap_index) size_to_transfer -= size trans_data_pos += size # Packet has been filled so send it if cmd.get_full(): if LOG_PACKET_BUILDS: self._logger.debug("_write: send packet [full]") self._send_packet() cmd = self._crnt_cmd if not self._deferred_transfer: self.flush() return transfer
python
def _write(self, dap_index, transfer_count, transfer_request, transfer_data): """ Write one or more commands """ assert dap_index == 0 # dap index currently unsupported assert isinstance(transfer_count, six.integer_types) assert isinstance(transfer_request, six.integer_types) assert transfer_data is None or len(transfer_data) > 0 # Create transfer and add to transfer list transfer = None if transfer_request & READ: transfer = _Transfer(self, dap_index, transfer_count, transfer_request, transfer_data) self._transfer_list.append(transfer) # Build physical packet by adding it to command cmd = self._crnt_cmd is_read = transfer_request & READ size_to_transfer = transfer_count trans_data_pos = 0 while size_to_transfer > 0: # Get the size remaining in the current packet for the given request. size = cmd.get_request_space(size_to_transfer, transfer_request, dap_index) # This request doesn't fit in the packet so send it. if size == 0: if LOG_PACKET_BUILDS: self._logger.debug("_write: send packet [size==0]") self._send_packet() cmd = self._crnt_cmd continue # Add request to packet. if transfer_data is None: data = None else: data = transfer_data[trans_data_pos:trans_data_pos + size] cmd.add(size, transfer_request, data, dap_index) size_to_transfer -= size trans_data_pos += size # Packet has been filled so send it if cmd.get_full(): if LOG_PACKET_BUILDS: self._logger.debug("_write: send packet [full]") self._send_packet() cmd = self._crnt_cmd if not self._deferred_transfer: self.flush() return transfer
[ "def", "_write", "(", "self", ",", "dap_index", ",", "transfer_count", ",", "transfer_request", ",", "transfer_data", ")", ":", "assert", "dap_index", "==", "0", "# dap index currently unsupported", "assert", "isinstance", "(", "transfer_count", ",", "six", ".", "...
Write one or more commands
[ "Write", "one", "or", "more", "commands" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L939-L992
train
215,024
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP._jtag_to_swd
def _jtag_to_swd(self): """ Send the command to switch from SWD to jtag """ data = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff] self._protocol.swj_sequence(data) data = [0x9e, 0xe7] self._protocol.swj_sequence(data) data = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff] self._protocol.swj_sequence(data) data = [0x00] self._protocol.swj_sequence(data)
python
def _jtag_to_swd(self): """ Send the command to switch from SWD to jtag """ data = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff] self._protocol.swj_sequence(data) data = [0x9e, 0xe7] self._protocol.swj_sequence(data) data = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff] self._protocol.swj_sequence(data) data = [0x00] self._protocol.swj_sequence(data)
[ "def", "_jtag_to_swd", "(", "self", ")", ":", "data", "=", "[", "0xff", ",", "0xff", ",", "0xff", ",", "0xff", ",", "0xff", ",", "0xff", ",", "0xff", "]", "self", ".", "_protocol", ".", "swj_sequence", "(", "data", ")", "data", "=", "[", "0x9e", ...
Send the command to switch from SWD to jtag
[ "Send", "the", "command", "to", "switch", "from", "SWD", "to", "jtag" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L994-L1008
train
215,025
mbedmicro/pyOCD
pyocd/probe/pydapaccess/dap_access_cmsis_dap.py
DAPAccessCMSISDAP._abort_all_transfers
def _abort_all_transfers(self, exception): """ Abort any ongoing transfers and clear all buffers """ pending_reads = len(self._commands_to_read) # invalidate _transfer_list for transfer in self._transfer_list: transfer.add_error(exception) # clear all deferred buffers self._init_deferred_buffers() # finish all pending reads and ignore the data # Only do this if the error is a tranfer error. # Otherwise this could cause another exception if isinstance(exception, DAPAccessIntf.TransferError): for _ in range(pending_reads): self._interface.read()
python
def _abort_all_transfers(self, exception): """ Abort any ongoing transfers and clear all buffers """ pending_reads = len(self._commands_to_read) # invalidate _transfer_list for transfer in self._transfer_list: transfer.add_error(exception) # clear all deferred buffers self._init_deferred_buffers() # finish all pending reads and ignore the data # Only do this if the error is a tranfer error. # Otherwise this could cause another exception if isinstance(exception, DAPAccessIntf.TransferError): for _ in range(pending_reads): self._interface.read()
[ "def", "_abort_all_transfers", "(", "self", ",", "exception", ")", ":", "pending_reads", "=", "len", "(", "self", ".", "_commands_to_read", ")", "# invalidate _transfer_list", "for", "transfer", "in", "self", ".", "_transfer_list", ":", "transfer", ".", "add_error...
Abort any ongoing transfers and clear all buffers
[ "Abort", "any", "ongoing", "transfers", "and", "clear", "all", "buffers" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/pydapaccess/dap_access_cmsis_dap.py#L1010-L1025
train
215,026
mbedmicro/pyOCD
pyocd/target/builtin/target_nRF51822_xxAA.py
NRF51.resetn
def resetn(self): """ reset a core. After a call to this function, the core is running """ #Regular reset will kick NRF out of DBG mode logging.debug("target_nrf51.reset: enable reset pin") self.write_memory(RESET, RESET_ENABLE) #reset logging.debug("target_nrf51.reset: trigger nRST pin") self.reset()
python
def resetn(self): """ reset a core. After a call to this function, the core is running """ #Regular reset will kick NRF out of DBG mode logging.debug("target_nrf51.reset: enable reset pin") self.write_memory(RESET, RESET_ENABLE) #reset logging.debug("target_nrf51.reset: trigger nRST pin") self.reset()
[ "def", "resetn", "(", "self", ")", ":", "#Regular reset will kick NRF out of DBG mode", "logging", ".", "debug", "(", "\"target_nrf51.reset: enable reset pin\"", ")", "self", ".", "write_memory", "(", "RESET", ",", "RESET_ENABLE", ")", "#reset", "logging", ".", "debug...
reset a core. After a call to this function, the core is running
[ "reset", "a", "core", ".", "After", "a", "call", "to", "this", "function", "the", "core", "is", "running" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/target/builtin/target_nRF51822_xxAA.py#L67-L77
train
215,027
mbedmicro/pyOCD
pyocd/probe/stlink/detect/base.py
StlinkDetectBase.list_mbeds
def list_mbeds(self): """ List details of connected devices @return Returns list of structures with detailed info about each mbed @details Function returns list of dictionaries with mbed attributes 'mount_point', TargetID name etc. Function returns mbed list with platform names if possible """ platform_count = {} candidates = list(self.find_candidates()) result = [] for device in candidates: if not device.get("mount_point", None): continue device["target_id"] = device["target_id_usb_id"] self._update_device_from_fs(device) result.append(device) return result
python
def list_mbeds(self): """ List details of connected devices @return Returns list of structures with detailed info about each mbed @details Function returns list of dictionaries with mbed attributes 'mount_point', TargetID name etc. Function returns mbed list with platform names if possible """ platform_count = {} candidates = list(self.find_candidates()) result = [] for device in candidates: if not device.get("mount_point", None): continue device["target_id"] = device["target_id_usb_id"] self._update_device_from_fs(device) result.append(device) return result
[ "def", "list_mbeds", "(", "self", ")", ":", "platform_count", "=", "{", "}", "candidates", "=", "list", "(", "self", ".", "find_candidates", "(", ")", ")", "result", "=", "[", "]", "for", "device", "in", "candidates", ":", "if", "not", "device", ".", ...
List details of connected devices @return Returns list of structures with detailed info about each mbed @details Function returns list of dictionaries with mbed attributes 'mount_point', TargetID name etc. Function returns mbed list with platform names if possible
[ "List", "details", "of", "connected", "devices" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/base.py#L52-L69
train
215,028
mbedmicro/pyOCD
pyocd/probe/stlink/detect/base.py
StlinkDetectBase._update_device_from_fs
def _update_device_from_fs(self, device): """ Updates the device information based on files from its 'mount_point' @param device Dictionary containing device information """ try: directory_entries = listdir(device["mount_point"]) # Always try to update using daplink compatible boards processself. # This is done for backwards compatibility. lowercase_directory_entries = [e.lower() for e in directory_entries] if self.MBED_HTM_NAME.lower() in lowercase_directory_entries: self._update_device_from_htm(device) except (OSError, IOError) as e: logger.warning( 'Marking device with mount point "%s" as unmounted due to the ' "following error: %s", device["mount_point"], e, ) device["mount_point"] = None
python
def _update_device_from_fs(self, device): """ Updates the device information based on files from its 'mount_point' @param device Dictionary containing device information """ try: directory_entries = listdir(device["mount_point"]) # Always try to update using daplink compatible boards processself. # This is done for backwards compatibility. lowercase_directory_entries = [e.lower() for e in directory_entries] if self.MBED_HTM_NAME.lower() in lowercase_directory_entries: self._update_device_from_htm(device) except (OSError, IOError) as e: logger.warning( 'Marking device with mount point "%s" as unmounted due to the ' "following error: %s", device["mount_point"], e, ) device["mount_point"] = None
[ "def", "_update_device_from_fs", "(", "self", ",", "device", ")", ":", "try", ":", "directory_entries", "=", "listdir", "(", "device", "[", "\"mount_point\"", "]", ")", "# Always try to update using daplink compatible boards processself.", "# This is done for backwards compat...
Updates the device information based on files from its 'mount_point' @param device Dictionary containing device information
[ "Updates", "the", "device", "information", "based", "on", "files", "from", "its", "mount_point" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/base.py#L71-L91
train
215,029
mbedmicro/pyOCD
pyocd/probe/stlink/detect/base.py
StlinkDetectBase._read_htm_ids
def _read_htm_ids(self, mount_point): """! Function scans mbed.htm to get information about TargetID. @param mount_point mbed mount point (disk / drive letter) @return Function returns targetID, in case of failure returns None. @details Note: This function should be improved to scan variety of boards' mbed.htm files """ result = {} target_id = None for line in self._htm_lines(mount_point): target_id = target_id or self._target_id_from_htm(line) return target_id, result
python
def _read_htm_ids(self, mount_point): """! Function scans mbed.htm to get information about TargetID. @param mount_point mbed mount point (disk / drive letter) @return Function returns targetID, in case of failure returns None. @details Note: This function should be improved to scan variety of boards' mbed.htm files """ result = {} target_id = None for line in self._htm_lines(mount_point): target_id = target_id or self._target_id_from_htm(line) return target_id, result
[ "def", "_read_htm_ids", "(", "self", ",", "mount_point", ")", ":", "result", "=", "{", "}", "target_id", "=", "None", "for", "line", "in", "self", ".", "_htm_lines", "(", "mount_point", ")", ":", "target_id", "=", "target_id", "or", "self", ".", "_target...
! Function scans mbed.htm to get information about TargetID. @param mount_point mbed mount point (disk / drive letter) @return Function returns targetID, in case of failure returns None. @details Note: This function should be improved to scan variety of boards' mbed.htm files
[ "!", "Function", "scans", "mbed", ".", "htm", "to", "get", "information", "about", "TargetID", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/base.py#L109-L120
train
215,030
mbedmicro/pyOCD
pyocd/probe/stlink/detect/base.py
StlinkDetectBase._target_id_from_htm
def _target_id_from_htm(self, line): """! Extract Target id from htm line. @return Target id or None """ # Detecting modern mbed.htm file format m = re.search("\\?code=([a-fA-F0-9]+)", line) if m: result = m.groups()[0] return result # Last resort, we can try to see if old mbed.htm format is there m = re.search("\\?auth=([a-fA-F0-9]+)", line) if m: result = m.groups()[0] return result return None
python
def _target_id_from_htm(self, line): """! Extract Target id from htm line. @return Target id or None """ # Detecting modern mbed.htm file format m = re.search("\\?code=([a-fA-F0-9]+)", line) if m: result = m.groups()[0] return result # Last resort, we can try to see if old mbed.htm format is there m = re.search("\\?auth=([a-fA-F0-9]+)", line) if m: result = m.groups()[0] return result return None
[ "def", "_target_id_from_htm", "(", "self", ",", "line", ")", ":", "# Detecting modern mbed.htm file format", "m", "=", "re", ".", "search", "(", "\"\\\\?code=([a-fA-F0-9]+)\"", ",", "line", ")", "if", "m", ":", "result", "=", "m", ".", "groups", "(", ")", "[...
! Extract Target id from htm line. @return Target id or None
[ "!", "Extract", "Target", "id", "from", "htm", "line", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/base.py#L128-L143
train
215,031
mbedmicro/pyOCD
pyocd/probe/stlink/detect/base.py
StlinkDetectBase._run_cli_process
def _run_cli_process(cmd, shell=True): """! Runs command as a process and return stdout, stderr and ret code @param cmd Command to execute @return Tuple of (stdout, stderr, returncode) """ from subprocess import Popen, PIPE p = Popen(cmd, shell=shell, stdout=PIPE, stderr=PIPE) _stdout, _stderr = p.communicate() return _stdout, _stderr, p.returncode
python
def _run_cli_process(cmd, shell=True): """! Runs command as a process and return stdout, stderr and ret code @param cmd Command to execute @return Tuple of (stdout, stderr, returncode) """ from subprocess import Popen, PIPE p = Popen(cmd, shell=shell, stdout=PIPE, stderr=PIPE) _stdout, _stderr = p.communicate() return _stdout, _stderr, p.returncode
[ "def", "_run_cli_process", "(", "cmd", ",", "shell", "=", "True", ")", ":", "from", "subprocess", "import", "Popen", ",", "PIPE", "p", "=", "Popen", "(", "cmd", ",", "shell", "=", "shell", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "PIPE", ")", ...
! Runs command as a process and return stdout, stderr and ret code @param cmd Command to execute @return Tuple of (stdout, stderr, returncode)
[ "!", "Runs", "command", "as", "a", "process", "and", "return", "stdout", "stderr", "and", "ret", "code" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/base.py#L151-L160
train
215,032
mbedmicro/pyOCD
pyocd/probe/stlink/detect/factory.py
create_mbed_detector
def create_mbed_detector(**kwargs): """! Factory used to create host OS specific mbed-lstools object :param kwargs: keyword arguments to pass along to the constructors @return Returns MbedLsTools object or None if host OS is not supported """ host_os = platform.system() if host_os == "Windows": from .windows import StlinkDetectWindows return StlinkDetectWindows(**kwargs) elif host_os == "Linux": from .linux import StlinkDetectLinuxGeneric return StlinkDetectLinuxGeneric(**kwargs) elif host_os == "Darwin": from .darwin import StlinkDetectDarwin return StlinkDetectDarwin(**kwargs) else: return None
python
def create_mbed_detector(**kwargs): """! Factory used to create host OS specific mbed-lstools object :param kwargs: keyword arguments to pass along to the constructors @return Returns MbedLsTools object or None if host OS is not supported """ host_os = platform.system() if host_os == "Windows": from .windows import StlinkDetectWindows return StlinkDetectWindows(**kwargs) elif host_os == "Linux": from .linux import StlinkDetectLinuxGeneric return StlinkDetectLinuxGeneric(**kwargs) elif host_os == "Darwin": from .darwin import StlinkDetectDarwin return StlinkDetectDarwin(**kwargs) else: return None
[ "def", "create_mbed_detector", "(", "*", "*", "kwargs", ")", ":", "host_os", "=", "platform", ".", "system", "(", ")", "if", "host_os", "==", "\"Windows\"", ":", "from", ".", "windows", "import", "StlinkDetectWindows", "return", "StlinkDetectWindows", "(", "*"...
! Factory used to create host OS specific mbed-lstools object :param kwargs: keyword arguments to pass along to the constructors @return Returns MbedLsTools object or None if host OS is not supported
[ "!", "Factory", "used", "to", "create", "host", "OS", "specific", "mbed", "-", "lstools", "object" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/factory.py#L22-L43
train
215,033
mbedmicro/pyOCD
pyocd/coresight/ap.py
_locked
def _locked(func): """! Decorator to automatically lock an AccessPort method.""" def _locking(self, *args, **kwargs): try: self.lock() return func(self, *args, **kwargs) finally: self.unlock() return _locking
python
def _locked(func): """! Decorator to automatically lock an AccessPort method.""" def _locking(self, *args, **kwargs): try: self.lock() return func(self, *args, **kwargs) finally: self.unlock() return _locking
[ "def", "_locked", "(", "func", ")", ":", "def", "_locking", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "self", ".", "lock", "(", ")", "return", "func", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ...
! Decorator to automatically lock an AccessPort method.
[ "!", "Decorator", "to", "automatically", "lock", "an", "AccessPort", "method", "." ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/coresight/ap.py#L136-L144
train
215,034
mbedmicro/pyOCD
pyocd/probe/stlink/detect/linux.py
StlinkDetectLinuxGeneric._dev_by_id
def _dev_by_id(self, device_type): """! Get a dict, USBID -> device, for a device class @param device_type The type of devices to search. For exmaple, "serial" looks for all serial devices connected to this computer @return A dict: Device USBID -> device file in /dev """ dir = os.path.join("/dev", device_type, "by-id") if os.path.isdir(dir): to_ret = dict( self._hex_ids([os.path.join(dir, f) for f in os.listdir(dir)]) ) return to_ret else: logger.error( "Could not get %s devices by id. " "This could be because your Linux distribution " "does not use udev, or does not create /dev/%s/by-id " "symlinks. Please submit an issue to github.com/" "armmbed/mbed-ls.", device_type, device_type, ) return {}
python
def _dev_by_id(self, device_type): """! Get a dict, USBID -> device, for a device class @param device_type The type of devices to search. For exmaple, "serial" looks for all serial devices connected to this computer @return A dict: Device USBID -> device file in /dev """ dir = os.path.join("/dev", device_type, "by-id") if os.path.isdir(dir): to_ret = dict( self._hex_ids([os.path.join(dir, f) for f in os.listdir(dir)]) ) return to_ret else: logger.error( "Could not get %s devices by id. " "This could be because your Linux distribution " "does not use udev, or does not create /dev/%s/by-id " "symlinks. Please submit an issue to github.com/" "armmbed/mbed-ls.", device_type, device_type, ) return {}
[ "def", "_dev_by_id", "(", "self", ",", "device_type", ")", ":", "dir", "=", "os", ".", "path", ".", "join", "(", "\"/dev\"", ",", "device_type", ",", "\"by-id\"", ")", "if", "os", ".", "path", ".", "isdir", "(", "dir", ")", ":", "to_ret", "=", "dic...
! Get a dict, USBID -> device, for a device class @param device_type The type of devices to search. For exmaple, "serial" looks for all serial devices connected to this computer @return A dict: Device USBID -> device file in /dev
[ "!", "Get", "a", "dict", "USBID", "-", ">", "device", "for", "a", "device", "class" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/linux.py#L63-L85
train
215,035
mbedmicro/pyOCD
pyocd/probe/stlink/detect/linux.py
StlinkDetectLinuxGeneric._hex_ids
def _hex_ids(self, dev_list): """! Build a USBID map for a device list @param disk_list List of disks in a system with USBID decoration @return Returns map USBID -> device file in /dev @details Uses regular expressions to get a USBID (TargeTIDs) a "by-id" symbolic link """ for dl in dev_list: match = self.nlp.search(dl) if match: yield match.group("usbid"), _readlink(dl)
python
def _hex_ids(self, dev_list): """! Build a USBID map for a device list @param disk_list List of disks in a system with USBID decoration @return Returns map USBID -> device file in /dev @details Uses regular expressions to get a USBID (TargeTIDs) a "by-id" symbolic link """ for dl in dev_list: match = self.nlp.search(dl) if match: yield match.group("usbid"), _readlink(dl)
[ "def", "_hex_ids", "(", "self", ",", "dev_list", ")", ":", "for", "dl", "in", "dev_list", ":", "match", "=", "self", ".", "nlp", ".", "search", "(", "dl", ")", "if", "match", ":", "yield", "match", ".", "group", "(", "\"usbid\"", ")", ",", "_readli...
! Build a USBID map for a device list @param disk_list List of disks in a system with USBID decoration @return Returns map USBID -> device file in /dev @details Uses regular expressions to get a USBID (TargeTIDs) a "by-id" symbolic link
[ "!", "Build", "a", "USBID", "map", "for", "a", "device", "list" ]
41a174718a9739f3cbe785c2ba21cb7fd1310c6f
https://github.com/mbedmicro/pyOCD/blob/41a174718a9739f3cbe785c2ba21cb7fd1310c6f/pyocd/probe/stlink/detect/linux.py#L100-L110
train
215,036
influxdata/influxdb-python
influxdb/line_protocol.py
_get_unicode
def _get_unicode(data, force=False): """Try to return a text aka unicode object from the given data.""" if isinstance(data, binary_type): return data.decode('utf-8') elif data is None: return '' elif force: if PY2: return unicode(data) else: return str(data) else: return data
python
def _get_unicode(data, force=False): """Try to return a text aka unicode object from the given data.""" if isinstance(data, binary_type): return data.decode('utf-8') elif data is None: return '' elif force: if PY2: return unicode(data) else: return str(data) else: return data
[ "def", "_get_unicode", "(", "data", ",", "force", "=", "False", ")", ":", "if", "isinstance", "(", "data", ",", "binary_type", ")", ":", "return", "data", ".", "decode", "(", "'utf-8'", ")", "elif", "data", "is", "None", ":", "return", "''", "elif", ...
Try to return a text aka unicode object from the given data.
[ "Try", "to", "return", "a", "text", "aka", "unicode", "object", "from", "the", "given", "data", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/line_protocol.py#L104-L116
train
215,037
influxdata/influxdb-python
influxdb/line_protocol.py
make_lines
def make_lines(data, precision=None): """Extract points from given dict. Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. """ lines = [] static_tags = data.get('tags') for point in data['points']: elements = [] # add measurement name measurement = _escape_tag(_get_unicode( point.get('measurement', data.get('measurement')))) key_values = [measurement] # add tags if static_tags: tags = dict(static_tags) # make a copy, since we'll modify tags.update(point.get('tags') or {}) else: tags = point.get('tags') or {} # tags should be sorted client-side to take load off server for tag_key, tag_value in sorted(iteritems(tags)): key = _escape_tag(tag_key) value = _escape_tag_value(tag_value) if key != '' and value != '': key_values.append(key + "=" + value) elements.append(','.join(key_values)) # add fields field_values = [] for field_key, field_value in sorted(iteritems(point['fields'])): key = _escape_tag(field_key) value = _escape_value(field_value) if key != '' and value != '': field_values.append(key + "=" + value) elements.append(','.join(field_values)) # add timestamp if 'time' in point: timestamp = _get_unicode(str(int( _convert_timestamp(point['time'], precision)))) elements.append(timestamp) line = ' '.join(elements) lines.append(line) return '\n'.join(lines) + '\n'
python
def make_lines(data, precision=None): """Extract points from given dict. Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. """ lines = [] static_tags = data.get('tags') for point in data['points']: elements = [] # add measurement name measurement = _escape_tag(_get_unicode( point.get('measurement', data.get('measurement')))) key_values = [measurement] # add tags if static_tags: tags = dict(static_tags) # make a copy, since we'll modify tags.update(point.get('tags') or {}) else: tags = point.get('tags') or {} # tags should be sorted client-side to take load off server for tag_key, tag_value in sorted(iteritems(tags)): key = _escape_tag(tag_key) value = _escape_tag_value(tag_value) if key != '' and value != '': key_values.append(key + "=" + value) elements.append(','.join(key_values)) # add fields field_values = [] for field_key, field_value in sorted(iteritems(point['fields'])): key = _escape_tag(field_key) value = _escape_value(field_value) if key != '' and value != '': field_values.append(key + "=" + value) elements.append(','.join(field_values)) # add timestamp if 'time' in point: timestamp = _get_unicode(str(int( _convert_timestamp(point['time'], precision)))) elements.append(timestamp) line = ' '.join(elements) lines.append(line) return '\n'.join(lines) + '\n'
[ "def", "make_lines", "(", "data", ",", "precision", "=", "None", ")", ":", "lines", "=", "[", "]", "static_tags", "=", "data", ".", "get", "(", "'tags'", ")", "for", "point", "in", "data", "[", "'points'", "]", ":", "elements", "=", "[", "]", "# ad...
Extract points from given dict. Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0.
[ "Extract", "points", "from", "given", "dict", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/line_protocol.py#L119-L172
train
215,038
influxdata/influxdb-python
examples/tutorial_pandas.py
main
def main(host='localhost', port=8086): """Instantiate the connection to the InfluxDB client.""" user = 'root' password = 'root' dbname = 'demo' protocol = 'json' client = DataFrameClient(host, port, user, password, dbname) print("Create pandas DataFrame") df = pd.DataFrame(data=list(range(30)), index=pd.date_range(start='2014-11-16', periods=30, freq='H'), columns=['0']) print("Create database: " + dbname) client.create_database(dbname) print("Write DataFrame") client.write_points(df, 'demo', protocol=protocol) print("Write DataFrame with Tags") client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'}, protocol=protocol) print("Read DataFrame") client.query("select * from demo") print("Delete database: " + dbname) client.drop_database(dbname)
python
def main(host='localhost', port=8086): """Instantiate the connection to the InfluxDB client.""" user = 'root' password = 'root' dbname = 'demo' protocol = 'json' client = DataFrameClient(host, port, user, password, dbname) print("Create pandas DataFrame") df = pd.DataFrame(data=list(range(30)), index=pd.date_range(start='2014-11-16', periods=30, freq='H'), columns=['0']) print("Create database: " + dbname) client.create_database(dbname) print("Write DataFrame") client.write_points(df, 'demo', protocol=protocol) print("Write DataFrame with Tags") client.write_points(df, 'demo', {'k1': 'v1', 'k2': 'v2'}, protocol=protocol) print("Read DataFrame") client.query("select * from demo") print("Delete database: " + dbname) client.drop_database(dbname)
[ "def", "main", "(", "host", "=", "'localhost'", ",", "port", "=", "8086", ")", ":", "user", "=", "'root'", "password", "=", "'root'", "dbname", "=", "'demo'", "protocol", "=", "'json'", "client", "=", "DataFrameClient", "(", "host", ",", "port", ",", "...
Instantiate the connection to the InfluxDB client.
[ "Instantiate", "the", "connection", "to", "the", "InfluxDB", "client", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/examples/tutorial_pandas.py#L10-L38
train
215,039
influxdata/influxdb-python
influxdb/client.py
_parse_dsn
def _parse_dsn(dsn): """Parse data source name. This is a helper function to split the data source name provided in the from_dsn classmethod """ conn_params = urlparse(dsn) init_args = {} scheme_info = conn_params.scheme.split('+') if len(scheme_info) == 1: scheme = scheme_info[0] modifier = None else: modifier, scheme = scheme_info if scheme != 'influxdb': raise ValueError('Unknown scheme "{0}".'.format(scheme)) if modifier: if modifier == 'udp': init_args['use_udp'] = True elif modifier == 'https': init_args['ssl'] = True else: raise ValueError('Unknown modifier "{0}".'.format(modifier)) netlocs = conn_params.netloc.split(',') init_args['hosts'] = [] for netloc in netlocs: parsed = _parse_netloc(netloc) init_args['hosts'].append((parsed['host'], int(parsed['port']))) init_args['username'] = parsed['username'] init_args['password'] = parsed['password'] if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] return init_args
python
def _parse_dsn(dsn): """Parse data source name. This is a helper function to split the data source name provided in the from_dsn classmethod """ conn_params = urlparse(dsn) init_args = {} scheme_info = conn_params.scheme.split('+') if len(scheme_info) == 1: scheme = scheme_info[0] modifier = None else: modifier, scheme = scheme_info if scheme != 'influxdb': raise ValueError('Unknown scheme "{0}".'.format(scheme)) if modifier: if modifier == 'udp': init_args['use_udp'] = True elif modifier == 'https': init_args['ssl'] = True else: raise ValueError('Unknown modifier "{0}".'.format(modifier)) netlocs = conn_params.netloc.split(',') init_args['hosts'] = [] for netloc in netlocs: parsed = _parse_netloc(netloc) init_args['hosts'].append((parsed['host'], int(parsed['port']))) init_args['username'] = parsed['username'] init_args['password'] = parsed['password'] if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] return init_args
[ "def", "_parse_dsn", "(", "dsn", ")", ":", "conn_params", "=", "urlparse", "(", "dsn", ")", "init_args", "=", "{", "}", "scheme_info", "=", "conn_params", ".", "scheme", ".", "split", "(", "'+'", ")", "if", "len", "(", "scheme_info", ")", "==", "1", ...
Parse data source name. This is a helper function to split the data source name provided in the from_dsn classmethod
[ "Parse", "data", "source", "name", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L1059-L1097
train
215,040
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.from_dsn
def from_dsn(cls, dsn, **kwargs): r"""Generate an instance of InfluxDBClient from given data source name. Return an instance of :class:`~.InfluxDBClient` from the provided data source name. Supported schemes are "influxdb", "https+influxdb" and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` constructor may also be passed to this method. :param dsn: data source name :type dsn: string :param kwargs: additional parameters for `InfluxDBClient` :type kwargs: dict :raises ValueError: if the provided DSN has any unexpected values :Example: :: >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ localhost:8086/databasename', timeout=5) >> type(cli) <class 'influxdb.client.InfluxDBClient'> >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 .. note:: parameters provided in `**kwargs` may override dsn parameters .. note:: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the UDP port with the additional `udp_port` parameter (cf. examples). """ init_args = _parse_dsn(dsn) host, port = init_args.pop('hosts')[0] init_args['host'] = host init_args['port'] = port init_args.update(kwargs) return cls(**init_args)
python
def from_dsn(cls, dsn, **kwargs): r"""Generate an instance of InfluxDBClient from given data source name. Return an instance of :class:`~.InfluxDBClient` from the provided data source name. Supported schemes are "influxdb", "https+influxdb" and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` constructor may also be passed to this method. :param dsn: data source name :type dsn: string :param kwargs: additional parameters for `InfluxDBClient` :type kwargs: dict :raises ValueError: if the provided DSN has any unexpected values :Example: :: >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ localhost:8086/databasename', timeout=5) >> type(cli) <class 'influxdb.client.InfluxDBClient'> >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 .. note:: parameters provided in `**kwargs` may override dsn parameters .. note:: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the UDP port with the additional `udp_port` parameter (cf. examples). """ init_args = _parse_dsn(dsn) host, port = init_args.pop('hosts')[0] init_args['host'] = host init_args['port'] = port init_args.update(kwargs) return cls(**init_args)
[ "def", "from_dsn", "(", "cls", ",", "dsn", ",", "*", "*", "kwargs", ")", ":", "init_args", "=", "_parse_dsn", "(", "dsn", ")", "host", ",", "port", "=", "init_args", ".", "pop", "(", "'hosts'", ")", "[", "0", "]", "init_args", "[", "'host'", "]", ...
r"""Generate an instance of InfluxDBClient from given data source name. Return an instance of :class:`~.InfluxDBClient` from the provided data source name. Supported schemes are "influxdb", "https+influxdb" and "udp+influxdb". Parameters for the :class:`~.InfluxDBClient` constructor may also be passed to this method. :param dsn: data source name :type dsn: string :param kwargs: additional parameters for `InfluxDBClient` :type kwargs: dict :raises ValueError: if the provided DSN has any unexpected values :Example: :: >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ localhost:8086/databasename', timeout=5) >> type(cli) <class 'influxdb.client.InfluxDBClient'> >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 .. note:: parameters provided in `**kwargs` may override dsn parameters .. note:: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the UDP port with the additional `udp_port` parameter (cf. examples).
[ "r", "Generate", "an", "instance", "of", "InfluxDBClient", "from", "given", "data", "source", "name", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L159-L197
train
215,041
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.switch_user
def switch_user(self, username, password): """Change the client's username. :param username: the username to switch to :type username: str :param password: the password for the username :type password: str """ self._username = username self._password = password
python
def switch_user(self, username, password): """Change the client's username. :param username: the username to switch to :type username: str :param password: the password for the username :type password: str """ self._username = username self._password = password
[ "def", "switch_user", "(", "self", ",", "username", ",", "password", ")", ":", "self", ".", "_username", "=", "username", "self", ".", "_password", "=", "password" ]
Change the client's username. :param username: the username to switch to :type username: str :param password: the password for the username :type password: str
[ "Change", "the", "client", "s", "username", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L207-L216
train
215,042
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.request
def request(self, url, method='GET', params=None, data=None, expected_response_code=200, headers=None): """Make a HTTP request to the InfluxDB API. :param url: the path of the HTTP request, e.g. write, query, etc. :type url: str :param method: the HTTP method for the request, defaults to GET :type method: str :param params: additional parameters for the request, defaults to None :type params: dict :param data: the data of the request, defaults to None :type data: str :param expected_response_code: the expected response code of the request, defaults to 200 :type expected_response_code: int :param headers: headers to add to the request :type headers: dict :returns: the response from the request :rtype: :class:`requests.Response` :raises InfluxDBServerError: if the response code is any server error code (5xx) :raises InfluxDBClientError: if the response code is not the same as `expected_response_code` and is not a server error code """ url = "{0}/{1}".format(self._baseurl, url) if headers is None: headers = self._headers if params is None: params = {} if isinstance(data, (dict, list)): data = json.dumps(data) # Try to send the request more than once by default (see #103) retry = True _try = 0 while retry: try: response = self._session.request( method=method, url=url, auth=(self._username, self._password), params=params, data=data, headers=headers, proxies=self._proxies, verify=self._verify_ssl, timeout=self._timeout ) break except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError, requests.exceptions.Timeout): _try += 1 if self._retries != 0: retry = _try < self._retries if method == "POST": time.sleep((2 ** _try) * random.random() / 100.0) if not retry: raise # if there's not an error, there must have been a successful response if 500 <= response.status_code < 600: raise InfluxDBServerError(response.content) elif response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code)
python
def request(self, url, method='GET', params=None, data=None, expected_response_code=200, headers=None): """Make a HTTP request to the InfluxDB API. :param url: the path of the HTTP request, e.g. write, query, etc. :type url: str :param method: the HTTP method for the request, defaults to GET :type method: str :param params: additional parameters for the request, defaults to None :type params: dict :param data: the data of the request, defaults to None :type data: str :param expected_response_code: the expected response code of the request, defaults to 200 :type expected_response_code: int :param headers: headers to add to the request :type headers: dict :returns: the response from the request :rtype: :class:`requests.Response` :raises InfluxDBServerError: if the response code is any server error code (5xx) :raises InfluxDBClientError: if the response code is not the same as `expected_response_code` and is not a server error code """ url = "{0}/{1}".format(self._baseurl, url) if headers is None: headers = self._headers if params is None: params = {} if isinstance(data, (dict, list)): data = json.dumps(data) # Try to send the request more than once by default (see #103) retry = True _try = 0 while retry: try: response = self._session.request( method=method, url=url, auth=(self._username, self._password), params=params, data=data, headers=headers, proxies=self._proxies, verify=self._verify_ssl, timeout=self._timeout ) break except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError, requests.exceptions.Timeout): _try += 1 if self._retries != 0: retry = _try < self._retries if method == "POST": time.sleep((2 ** _try) * random.random() / 100.0) if not retry: raise # if there's not an error, there must have been a successful response if 500 <= response.status_code < 600: raise InfluxDBServerError(response.content) elif response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code)
[ "def", "request", "(", "self", ",", "url", ",", "method", "=", "'GET'", ",", "params", "=", "None", ",", "data", "=", "None", ",", "expected_response_code", "=", "200", ",", "headers", "=", "None", ")", ":", "url", "=", "\"{0}/{1}\"", ".", "format", ...
Make a HTTP request to the InfluxDB API. :param url: the path of the HTTP request, e.g. write, query, etc. :type url: str :param method: the HTTP method for the request, defaults to GET :type method: str :param params: additional parameters for the request, defaults to None :type params: dict :param data: the data of the request, defaults to None :type data: str :param expected_response_code: the expected response code of the request, defaults to 200 :type expected_response_code: int :param headers: headers to add to the request :type headers: dict :returns: the response from the request :rtype: :class:`requests.Response` :raises InfluxDBServerError: if the response code is any server error code (5xx) :raises InfluxDBClientError: if the response code is not the same as `expected_response_code` and is not a server error code
[ "Make", "a", "HTTP", "request", "to", "the", "InfluxDB", "API", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L218-L286
train
215,043
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.write
def write(self, data, params=None, expected_response_code=204, protocol='json'): """Write data to InfluxDB. :param data: the data to be written :type data: (if protocol is 'json') dict (if protocol is 'line') sequence of line protocol strings or single string :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write operation, defaults to 204 :type expected_response_code: int :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :returns: True, if the write operation is successful :rtype: bool """ headers = self._headers headers['Content-Type'] = 'application/octet-stream' if params: precision = params.get('precision') else: precision = None if protocol == 'json': data = make_lines(data, precision).encode('utf-8') elif protocol == 'line': if isinstance(data, str): data = [data] data = ('\n'.join(data) + '\n').encode('utf-8') self.request( url="write", method='POST', params=params, data=data, expected_response_code=expected_response_code, headers=headers ) return True
python
def write(self, data, params=None, expected_response_code=204, protocol='json'): """Write data to InfluxDB. :param data: the data to be written :type data: (if protocol is 'json') dict (if protocol is 'line') sequence of line protocol strings or single string :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write operation, defaults to 204 :type expected_response_code: int :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :returns: True, if the write operation is successful :rtype: bool """ headers = self._headers headers['Content-Type'] = 'application/octet-stream' if params: precision = params.get('precision') else: precision = None if protocol == 'json': data = make_lines(data, precision).encode('utf-8') elif protocol == 'line': if isinstance(data, str): data = [data] data = ('\n'.join(data) + '\n').encode('utf-8') self.request( url="write", method='POST', params=params, data=data, expected_response_code=expected_response_code, headers=headers ) return True
[ "def", "write", "(", "self", ",", "data", ",", "params", "=", "None", ",", "expected_response_code", "=", "204", ",", "protocol", "=", "'json'", ")", ":", "headers", "=", "self", ".", "_headers", "headers", "[", "'Content-Type'", "]", "=", "'application/oc...
Write data to InfluxDB. :param data: the data to be written :type data: (if protocol is 'json') dict (if protocol is 'line') sequence of line protocol strings or single string :param params: additional parameters for the request, defaults to None :type params: dict :param expected_response_code: the expected response code of the write operation, defaults to 204 :type expected_response_code: int :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :returns: True, if the write operation is successful :rtype: bool
[ "Write", "data", "to", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L288-L329
train
215,044
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.query
def query(self, query, params=None, bind_params=None, epoch=None, expected_response_code=200, database=None, raise_errors=True, chunked=False, chunk_size=0, method="GET"): """Send a query to InfluxDB. .. danger:: In order to avoid injection vulnerabilities (similar to `SQL injection <https://www.owasp.org/index.php/SQL_Injection>`_ vulnerabilities), do not directly include untrusted data into the ``query`` parameter, use ``bind_params`` instead. :param query: the actual query string :type query: str :param params: additional parameters for the request, defaults to {} :type params: dict :param bind_params: bind parameters for the query: any variable in the query written as ``'$var_name'`` will be replaced with ``bind_params['var_name']``. Only works in the ``WHERE`` clause and takes precedence over ``params['params']`` :type bind_params: dict :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision :type epoch: str :param expected_response_code: the expected status code of response, defaults to 200 :type expected_response_code: int :param database: database to query, defaults to None :type database: str :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :type raise_errors: bool :param chunked: Enable to use chunked responses from InfluxDB. With ``chunked`` enabled, one ResultSet is returned per chunk containing all results within that chunk :type chunked: bool :param chunk_size: Size of each chunk to tell InfluxDB to use. 
:type chunk_size: int :param method: the HTTP method for the request, defaults to GET :type method: str :returns: the queried data :rtype: :class:`~.ResultSet` """ if params is None: params = {} if bind_params is not None: params_dict = json.loads(params.get('params', '{}')) params_dict.update(bind_params) params['params'] = json.dumps(params_dict) params['q'] = query params['db'] = database or self._database if epoch is not None: params['epoch'] = epoch if chunked: params['chunked'] = 'true' if chunk_size > 0: params['chunk_size'] = chunk_size if query.lower().startswith("select ") and " into " in query.lower(): method = "POST" response = self.request( url="query", method=method, params=params, data=None, expected_response_code=expected_response_code ) if chunked: return self._read_chunked_response(response) data = response.json() results = [ ResultSet(result, raise_errors=raise_errors) for result in data.get('results', []) ] # TODO(aviau): Always return a list. (This would be a breaking change) if len(results) == 1: return results[0] return results
python
def query(self, query, params=None, bind_params=None, epoch=None, expected_response_code=200, database=None, raise_errors=True, chunked=False, chunk_size=0, method="GET"): """Send a query to InfluxDB. .. danger:: In order to avoid injection vulnerabilities (similar to `SQL injection <https://www.owasp.org/index.php/SQL_Injection>`_ vulnerabilities), do not directly include untrusted data into the ``query`` parameter, use ``bind_params`` instead. :param query: the actual query string :type query: str :param params: additional parameters for the request, defaults to {} :type params: dict :param bind_params: bind parameters for the query: any variable in the query written as ``'$var_name'`` will be replaced with ``bind_params['var_name']``. Only works in the ``WHERE`` clause and takes precedence over ``params['params']`` :type bind_params: dict :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision :type epoch: str :param expected_response_code: the expected status code of response, defaults to 200 :type expected_response_code: int :param database: database to query, defaults to None :type database: str :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :type raise_errors: bool :param chunked: Enable to use chunked responses from InfluxDB. With ``chunked`` enabled, one ResultSet is returned per chunk containing all results within that chunk :type chunked: bool :param chunk_size: Size of each chunk to tell InfluxDB to use. 
:type chunk_size: int :param method: the HTTP method for the request, defaults to GET :type method: str :returns: the queried data :rtype: :class:`~.ResultSet` """ if params is None: params = {} if bind_params is not None: params_dict = json.loads(params.get('params', '{}')) params_dict.update(bind_params) params['params'] = json.dumps(params_dict) params['q'] = query params['db'] = database or self._database if epoch is not None: params['epoch'] = epoch if chunked: params['chunked'] = 'true' if chunk_size > 0: params['chunk_size'] = chunk_size if query.lower().startswith("select ") and " into " in query.lower(): method = "POST" response = self.request( url="query", method=method, params=params, data=None, expected_response_code=expected_response_code ) if chunked: return self._read_chunked_response(response) data = response.json() results = [ ResultSet(result, raise_errors=raise_errors) for result in data.get('results', []) ] # TODO(aviau): Always return a list. (This would be a breaking change) if len(results) == 1: return results[0] return results
[ "def", "query", "(", "self", ",", "query", ",", "params", "=", "None", ",", "bind_params", "=", "None", ",", "epoch", "=", "None", ",", "expected_response_code", "=", "200", ",", "database", "=", "None", ",", "raise_errors", "=", "True", ",", "chunked", ...
Send a query to InfluxDB. .. danger:: In order to avoid injection vulnerabilities (similar to `SQL injection <https://www.owasp.org/index.php/SQL_Injection>`_ vulnerabilities), do not directly include untrusted data into the ``query`` parameter, use ``bind_params`` instead. :param query: the actual query string :type query: str :param params: additional parameters for the request, defaults to {} :type params: dict :param bind_params: bind parameters for the query: any variable in the query written as ``'$var_name'`` will be replaced with ``bind_params['var_name']``. Only works in the ``WHERE`` clause and takes precedence over ``params['params']`` :type bind_params: dict :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision :type epoch: str :param expected_response_code: the expected status code of response, defaults to 200 :type expected_response_code: int :param database: database to query, defaults to None :type database: str :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :type raise_errors: bool :param chunked: Enable to use chunked responses from InfluxDB. With ``chunked`` enabled, one ResultSet is returned per chunk containing all results within that chunk :type chunked: bool :param chunk_size: Size of each chunk to tell InfluxDB to use. :type chunk_size: int :param method: the HTTP method for the request, defaults to GET :type method: str :returns: the queried data :rtype: :class:`~.ResultSet`
[ "Send", "a", "query", "to", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L345-L452
train
215,045
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.ping
def ping(self): """Check connectivity to InfluxDB. :returns: The version of the InfluxDB the client is connected to """ response = self.request( url="ping", method='GET', expected_response_code=204 ) return response.headers['X-Influxdb-Version']
python
def ping(self): """Check connectivity to InfluxDB. :returns: The version of the InfluxDB the client is connected to """ response = self.request( url="ping", method='GET', expected_response_code=204 ) return response.headers['X-Influxdb-Version']
[ "def", "ping", "(", "self", ")", ":", "response", "=", "self", ".", "request", "(", "url", "=", "\"ping\"", ",", "method", "=", "'GET'", ",", "expected_response_code", "=", "204", ")", "return", "response", ".", "headers", "[", "'X-Influxdb-Version'", "]" ...
Check connectivity to InfluxDB. :returns: The version of the InfluxDB the client is connected to
[ "Check", "connectivity", "to", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L516-L527
train
215,046
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.create_retention_policy
def create_retention_policy(self, name, duration, replication, database=None, default=False, shard_duration="0s"): """Create a retention policy for a database. :param name: the name of the new retention policy :type name: str :param duration: the duration of the new retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention - meaning the data will never be deleted - use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the replication of the retention policy :type replication: str :param database: the database for which the retention policy is created. Defaults to current client's database :type database: str :param default: whether or not to set the policy as default :type default: bool :param shard_duration: the shard duration of the retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. Infinite retention is not supported. As a workaround, specify a "1000w" duration to achieve an extremely long shard group duration. Defaults to "0s", which is interpreted by the database to mean the default value given the duration. The minimum shard group duration is 1 hour. :type shard_duration: str """ query_string = \ "CREATE RETENTION POLICY {0} ON {1} " \ "DURATION {2} REPLICATION {3} SHARD DURATION {4}".format( quote_ident(name), quote_ident(database or self._database), duration, replication, shard_duration) if default is True: query_string += " DEFAULT" self.query(query_string, method="POST")
python
def create_retention_policy(self, name, duration, replication, database=None, default=False, shard_duration="0s"): """Create a retention policy for a database. :param name: the name of the new retention policy :type name: str :param duration: the duration of the new retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention - meaning the data will never be deleted - use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the replication of the retention policy :type replication: str :param database: the database for which the retention policy is created. Defaults to current client's database :type database: str :param default: whether or not to set the policy as default :type default: bool :param shard_duration: the shard duration of the retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. Infinite retention is not supported. As a workaround, specify a "1000w" duration to achieve an extremely long shard group duration. Defaults to "0s", which is interpreted by the database to mean the default value given the duration. The minimum shard group duration is 1 hour. :type shard_duration: str """ query_string = \ "CREATE RETENTION POLICY {0} ON {1} " \ "DURATION {2} REPLICATION {3} SHARD DURATION {4}".format( quote_ident(name), quote_ident(database or self._database), duration, replication, shard_duration) if default is True: query_string += " DEFAULT" self.query(query_string, method="POST")
[ "def", "create_retention_policy", "(", "self", ",", "name", ",", "duration", ",", "replication", ",", "database", "=", "None", ",", "default", "=", "False", ",", "shard_duration", "=", "\"0s\"", ")", ":", "query_string", "=", "\"CREATE RETENTION POLICY {0} ON {1} ...
Create a retention policy for a database. :param name: the name of the new retention policy :type name: str :param duration: the duration of the new retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention - meaning the data will never be deleted - use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the replication of the retention policy :type replication: str :param database: the database for which the retention policy is created. Defaults to current client's database :type database: str :param default: whether or not to set the policy as default :type default: bool :param shard_duration: the shard duration of the retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. Infinite retention is not supported. As a workaround, specify a "1000w" duration to achieve an extremely long shard group duration. Defaults to "0s", which is interpreted by the database to mean the default value given the duration. The minimum shard group duration is 1 hour. :type shard_duration: str
[ "Create", "a", "retention", "policy", "for", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L648-L688
train
215,047
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.alter_retention_policy
def alter_retention_policy(self, name, database=None, duration=None, replication=None, default=None, shard_duration=None): """Modify an existing retention policy for a database. :param name: the name of the retention policy to modify :type name: str :param database: the database for which the retention policy is modified. Defaults to current client's database :type database: str :param duration: the new duration of the existing retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention, meaning the data will never be deleted, use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the new replication of the existing retention policy :type replication: int :param default: whether or not to set the modified policy as default :type default: bool :param shard_duration: the shard duration of the retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. Infinite retention is not supported. As a workaround, specify a "1000w" duration to achieve an extremely long shard group duration. The minimum shard group duration is 1 hour. :type shard_duration: str .. note:: at least one of duration, replication, or default flag should be set. Otherwise the operation will fail. """ query_string = ( "ALTER RETENTION POLICY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database), shard_duration) if duration: query_string += " DURATION {0}".format(duration) if shard_duration: query_string += " SHARD DURATION {0}".format(shard_duration) if replication: query_string += " REPLICATION {0}".format(replication) if default is True: query_string += " DEFAULT" self.query(query_string, method="POST")
python
def alter_retention_policy(self, name, database=None, duration=None, replication=None, default=None, shard_duration=None): """Modify an existing retention policy for a database. :param name: the name of the retention policy to modify :type name: str :param database: the database for which the retention policy is modified. Defaults to current client's database :type database: str :param duration: the new duration of the existing retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention, meaning the data will never be deleted, use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the new replication of the existing retention policy :type replication: int :param default: whether or not to set the modified policy as default :type default: bool :param shard_duration: the shard duration of the retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. Infinite retention is not supported. As a workaround, specify a "1000w" duration to achieve an extremely long shard group duration. The minimum shard group duration is 1 hour. :type shard_duration: str .. note:: at least one of duration, replication, or default flag should be set. Otherwise the operation will fail. """ query_string = ( "ALTER RETENTION POLICY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database), shard_duration) if duration: query_string += " DURATION {0}".format(duration) if shard_duration: query_string += " SHARD DURATION {0}".format(shard_duration) if replication: query_string += " REPLICATION {0}".format(replication) if default is True: query_string += " DEFAULT" self.query(query_string, method="POST")
[ "def", "alter_retention_policy", "(", "self", ",", "name", ",", "database", "=", "None", ",", "duration", "=", "None", ",", "replication", "=", "None", ",", "default", "=", "None", ",", "shard_duration", "=", "None", ")", ":", "query_string", "=", "(", "...
Modify an existing retention policy for a database. :param name: the name of the retention policy to modify :type name: str :param database: the database for which the retention policy is modified. Defaults to current client's database :type database: str :param duration: the new duration of the existing retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. For infinite retention, meaning the data will never be deleted, use 'INF' for duration. The minimum retention period is 1 hour. :type duration: str :param replication: the new replication of the existing retention policy :type replication: int :param default: whether or not to set the modified policy as default :type default: bool :param shard_duration: the shard duration of the retention policy. Durations such as 1h, 90m, 12h, 7d, and 4w, are all supported and mean 1 hour, 90 minutes, 12 hours, 7 day, and 4 weeks, respectively. Infinite retention is not supported. As a workaround, specify a "1000w" duration to achieve an extremely long shard group duration. The minimum shard group duration is 1 hour. :type shard_duration: str .. note:: at least one of duration, replication, or default flag should be set. Otherwise the operation will fail.
[ "Modify", "an", "existing", "retention", "policy", "for", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L690-L737
train
215,048
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.drop_retention_policy
def drop_retention_policy(self, name, database=None): """Drop an existing retention policy for a database. :param name: the name of the retention policy to drop :type name: str :param database: the database for which the retention policy is dropped. Defaults to current client's database :type database: str """ query_string = ( "DROP RETENTION POLICY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database)) self.query(query_string, method="POST")
python
def drop_retention_policy(self, name, database=None): """Drop an existing retention policy for a database. :param name: the name of the retention policy to drop :type name: str :param database: the database for which the retention policy is dropped. Defaults to current client's database :type database: str """ query_string = ( "DROP RETENTION POLICY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database)) self.query(query_string, method="POST")
[ "def", "drop_retention_policy", "(", "self", ",", "name", ",", "database", "=", "None", ")", ":", "query_string", "=", "(", "\"DROP RETENTION POLICY {0} ON {1}\"", ")", ".", "format", "(", "quote_ident", "(", "name", ")", ",", "quote_ident", "(", "database", "...
Drop an existing retention policy for a database. :param name: the name of the retention policy to drop :type name: str :param database: the database for which the retention policy is dropped. Defaults to current client's database :type database: str
[ "Drop", "an", "existing", "retention", "policy", "for", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L739-L751
train
215,049
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.get_list_retention_policies
def get_list_retention_policies(self, database=None): """Get the list of retention policies for a database. :param database: the name of the database, defaults to the client's current database :type database: str :returns: all retention policies for the database :rtype: list of dictionaries :Example: :: >> ret_policies = client.get_list_retention_policies('my_db') >> ret_policies [{u'default': True, u'duration': u'0', u'name': u'default', u'replicaN': 1}] """ if not (database or self._database): raise InfluxDBClientError( "get_list_retention_policies() requires a database as a " "parameter or the client to be using a database") rsp = self.query( "SHOW RETENTION POLICIES ON {0}".format( quote_ident(database or self._database)) ) return list(rsp.get_points())
python
def get_list_retention_policies(self, database=None): """Get the list of retention policies for a database. :param database: the name of the database, defaults to the client's current database :type database: str :returns: all retention policies for the database :rtype: list of dictionaries :Example: :: >> ret_policies = client.get_list_retention_policies('my_db') >> ret_policies [{u'default': True, u'duration': u'0', u'name': u'default', u'replicaN': 1}] """ if not (database or self._database): raise InfluxDBClientError( "get_list_retention_policies() requires a database as a " "parameter or the client to be using a database") rsp = self.query( "SHOW RETENTION POLICIES ON {0}".format( quote_ident(database or self._database)) ) return list(rsp.get_points())
[ "def", "get_list_retention_policies", "(", "self", ",", "database", "=", "None", ")", ":", "if", "not", "(", "database", "or", "self", ".", "_database", ")", ":", "raise", "InfluxDBClientError", "(", "\"get_list_retention_policies() requires a database as a \"", "\"pa...
Get the list of retention policies for a database. :param database: the name of the database, defaults to the client's current database :type database: str :returns: all retention policies for the database :rtype: list of dictionaries :Example: :: >> ret_policies = client.get_list_retention_policies('my_db') >> ret_policies [{u'default': True, u'duration': u'0', u'name': u'default', u'replicaN': 1}]
[ "Get", "the", "list", "of", "retention", "policies", "for", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L753-L782
train
215,050
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.create_user
def create_user(self, username, password, admin=False): """Create a new user in InfluxDB. :param username: the new username to create :type username: str :param password: the password for the new user :type password: str :param admin: whether the user should have cluster administration privileges or not :type admin: boolean """ text = "CREATE USER {0} WITH PASSWORD {1}".format( quote_ident(username), quote_literal(password)) if admin: text += ' WITH ALL PRIVILEGES' self.query(text, method="POST")
python
def create_user(self, username, password, admin=False): """Create a new user in InfluxDB. :param username: the new username to create :type username: str :param password: the password for the new user :type password: str :param admin: whether the user should have cluster administration privileges or not :type admin: boolean """ text = "CREATE USER {0} WITH PASSWORD {1}".format( quote_ident(username), quote_literal(password)) if admin: text += ' WITH ALL PRIVILEGES' self.query(text, method="POST")
[ "def", "create_user", "(", "self", ",", "username", ",", "password", ",", "admin", "=", "False", ")", ":", "text", "=", "\"CREATE USER {0} WITH PASSWORD {1}\"", ".", "format", "(", "quote_ident", "(", "username", ")", ",", "quote_literal", "(", "password", ")"...
Create a new user in InfluxDB. :param username: the new username to create :type username: str :param password: the password for the new user :type password: str :param admin: whether the user should have cluster administration privileges or not :type admin: boolean
[ "Create", "a", "new", "user", "in", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L802-L817
train
215,051
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.drop_user
def drop_user(self, username): """Drop a user from InfluxDB. :param username: the username to drop :type username: str """ text = "DROP USER {0}".format(quote_ident(username), method="POST") self.query(text, method="POST")
python
def drop_user(self, username): """Drop a user from InfluxDB. :param username: the username to drop :type username: str """ text = "DROP USER {0}".format(quote_ident(username), method="POST") self.query(text, method="POST")
[ "def", "drop_user", "(", "self", ",", "username", ")", ":", "text", "=", "\"DROP USER {0}\"", ".", "format", "(", "quote_ident", "(", "username", ")", ",", "method", "=", "\"POST\"", ")", "self", ".", "query", "(", "text", ",", "method", "=", "\"POST\"",...
Drop a user from InfluxDB. :param username: the username to drop :type username: str
[ "Drop", "a", "user", "from", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L819-L826
train
215,052
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.set_user_password
def set_user_password(self, username, password): """Change the password of an existing user. :param username: the username who's password is being changed :type username: str :param password: the new password for the user :type password: str """ text = "SET PASSWORD FOR {0} = {1}".format( quote_ident(username), quote_literal(password)) self.query(text)
python
def set_user_password(self, username, password): """Change the password of an existing user. :param username: the username who's password is being changed :type username: str :param password: the new password for the user :type password: str """ text = "SET PASSWORD FOR {0} = {1}".format( quote_ident(username), quote_literal(password)) self.query(text)
[ "def", "set_user_password", "(", "self", ",", "username", ",", "password", ")", ":", "text", "=", "\"SET PASSWORD FOR {0} = {1}\"", ".", "format", "(", "quote_ident", "(", "username", ")", ",", "quote_literal", "(", "password", ")", ")", "self", ".", "query", ...
Change the password of an existing user. :param username: the username who's password is being changed :type username: str :param password: the new password for the user :type password: str
[ "Change", "the", "password", "of", "an", "existing", "user", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L828-L838
train
215,053
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.delete_series
def delete_series(self, database=None, measurement=None, tags=None): """Delete series from a database. Series must be filtered by either measurement and tags. This method cannot be used to delete all series, use `drop_database` instead. :param database: the database from which the series should be deleted, defaults to client's current database :type database: str :param measurement: Delete all series from a measurement :type measurement: str :param tags: Delete all series that match given tags :type tags: dict """ database = database or self._database query_str = 'DROP SERIES' if measurement: query_str += ' FROM {0}'.format(quote_ident(measurement)) if tags: tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v)) for k, v in tags.items()] query_str += ' WHERE ' + ' AND '.join(tag_eq_list) self.query(query_str, database=database, method="POST")
python
def delete_series(self, database=None, measurement=None, tags=None): """Delete series from a database. Series must be filtered by either measurement and tags. This method cannot be used to delete all series, use `drop_database` instead. :param database: the database from which the series should be deleted, defaults to client's current database :type database: str :param measurement: Delete all series from a measurement :type measurement: str :param tags: Delete all series that match given tags :type tags: dict """ database = database or self._database query_str = 'DROP SERIES' if measurement: query_str += ' FROM {0}'.format(quote_ident(measurement)) if tags: tag_eq_list = ["{0}={1}".format(quote_ident(k), quote_literal(v)) for k, v in tags.items()] query_str += ' WHERE ' + ' AND '.join(tag_eq_list) self.query(query_str, database=database, method="POST")
[ "def", "delete_series", "(", "self", ",", "database", "=", "None", ",", "measurement", "=", "None", ",", "tags", "=", "None", ")", ":", "database", "=", "database", "or", "self", ".", "_database", "query_str", "=", "'DROP SERIES'", "if", "measurement", ":"...
Delete series from a database. Series must be filtered by either measurement and tags. This method cannot be used to delete all series, use `drop_database` instead. :param database: the database from which the series should be deleted, defaults to client's current database :type database: str :param measurement: Delete all series from a measurement :type measurement: str :param tags: Delete all series that match given tags :type tags: dict
[ "Delete", "series", "from", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L840-L864
train
215,054
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.get_list_privileges
def get_list_privileges(self, username): """Get the list of all privileges granted to given user. :param username: the username to get privileges of :type username: str :returns: all privileges granted to given user :rtype: list of dictionaries :Example: :: >> privileges = client.get_list_privileges('user1') >> privileges [{u'privilege': u'WRITE', u'database': u'db1'}, {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}] """ text = "SHOW GRANTS FOR {0}".format(quote_ident(username)) return list(self.query(text).get_points())
python
def get_list_privileges(self, username): """Get the list of all privileges granted to given user. :param username: the username to get privileges of :type username: str :returns: all privileges granted to given user :rtype: list of dictionaries :Example: :: >> privileges = client.get_list_privileges('user1') >> privileges [{u'privilege': u'WRITE', u'database': u'db1'}, {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}] """ text = "SHOW GRANTS FOR {0}".format(quote_ident(username)) return list(self.query(text).get_points())
[ "def", "get_list_privileges", "(", "self", ",", "username", ")", ":", "text", "=", "\"SHOW GRANTS FOR {0}\"", ".", "format", "(", "quote_ident", "(", "username", ")", ")", "return", "list", "(", "self", ".", "query", "(", "text", ")", ".", "get_points", "(...
Get the list of all privileges granted to given user. :param username: the username to get privileges of :type username: str :returns: all privileges granted to given user :rtype: list of dictionaries :Example: :: >> privileges = client.get_list_privileges('user1') >> privileges [{u'privilege': u'WRITE', u'database': u'db1'}, {u'privilege': u'ALL PRIVILEGES', u'database': u'db2'}, {u'privilege': u'NO PRIVILEGES', u'database': u'db3'}]
[ "Get", "the", "list", "of", "all", "privileges", "granted", "to", "given", "user", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L922-L942
train
215,055
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.get_list_continuous_queries
def get_list_continuous_queries(self): """Get the list of continuous queries in InfluxDB. :return: all CQs in InfluxDB :rtype: list of dictionaries :Example: :: >> cqs = client.get_list_cqs() >> cqs [ { u'db1': [] }, { u'db2': [ { u'name': u'vampire', u'query': u'CREATE CONTINUOUS QUERY vampire ON ' 'mydb BEGIN SELECT count(dracula) INTO ' 'mydb.autogen.all_of_them FROM ' 'mydb.autogen.one GROUP BY time(5m) END' } ] } ] """ query_string = "SHOW CONTINUOUS QUERIES" return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()]
python
def get_list_continuous_queries(self): """Get the list of continuous queries in InfluxDB. :return: all CQs in InfluxDB :rtype: list of dictionaries :Example: :: >> cqs = client.get_list_cqs() >> cqs [ { u'db1': [] }, { u'db2': [ { u'name': u'vampire', u'query': u'CREATE CONTINUOUS QUERY vampire ON ' 'mydb BEGIN SELECT count(dracula) INTO ' 'mydb.autogen.all_of_them FROM ' 'mydb.autogen.one GROUP BY time(5m) END' } ] } ] """ query_string = "SHOW CONTINUOUS QUERIES" return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()]
[ "def", "get_list_continuous_queries", "(", "self", ")", ":", "query_string", "=", "\"SHOW CONTINUOUS QUERIES\"", "return", "[", "{", "sk", "[", "0", "]", ":", "list", "(", "p", ")", "}", "for", "sk", ",", "p", "in", "self", ".", "query", "(", "query_stri...
Get the list of continuous queries in InfluxDB. :return: all CQs in InfluxDB :rtype: list of dictionaries :Example: :: >> cqs = client.get_list_cqs() >> cqs [ { u'db1': [] }, { u'db2': [ { u'name': u'vampire', u'query': u'CREATE CONTINUOUS QUERY vampire ON ' 'mydb BEGIN SELECT count(dracula) INTO ' 'mydb.autogen.all_of_them FROM ' 'mydb.autogen.one GROUP BY time(5m) END' } ] } ]
[ "Get", "the", "list", "of", "continuous", "queries", "in", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L944-L974
train
215,056
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.create_continuous_query
def create_continuous_query(self, name, select, database=None, resample_opts=None): r"""Create a continuous query for a database. :param name: the name of continuous query to create :type name: str :param select: select statement for the continuous query :type select: str :param database: the database for which the continuous query is created. Defaults to current client's database :type database: str :param resample_opts: resample options :type resample_opts: str :Example: :: >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \ ... 'FROM "cpu" GROUP BY time(1m)' >> client.create_continuous_query( ... 'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m' ... ) >> client.get_list_continuous_queries() [ { 'db_name': [ { 'name': 'cpu_mean', 'query': 'CREATE CONTINUOUS QUERY "cpu_mean" ' 'ON "db_name" ' 'RESAMPLE EVERY 10s FOR 2m ' 'BEGIN SELECT mean("value") ' 'INTO "cpu_mean" FROM "cpu" ' 'GROUP BY time(1m) END' } ] } ] """ query_string = ( "CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END" ).format(quote_ident(name), quote_ident(database or self._database), ' RESAMPLE ' + resample_opts if resample_opts else '', select) self.query(query_string)
python
def create_continuous_query(self, name, select, database=None, resample_opts=None): r"""Create a continuous query for a database. :param name: the name of continuous query to create :type name: str :param select: select statement for the continuous query :type select: str :param database: the database for which the continuous query is created. Defaults to current client's database :type database: str :param resample_opts: resample options :type resample_opts: str :Example: :: >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \ ... 'FROM "cpu" GROUP BY time(1m)' >> client.create_continuous_query( ... 'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m' ... ) >> client.get_list_continuous_queries() [ { 'db_name': [ { 'name': 'cpu_mean', 'query': 'CREATE CONTINUOUS QUERY "cpu_mean" ' 'ON "db_name" ' 'RESAMPLE EVERY 10s FOR 2m ' 'BEGIN SELECT mean("value") ' 'INTO "cpu_mean" FROM "cpu" ' 'GROUP BY time(1m) END' } ] } ] """ query_string = ( "CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END" ).format(quote_ident(name), quote_ident(database or self._database), ' RESAMPLE ' + resample_opts if resample_opts else '', select) self.query(query_string)
[ "def", "create_continuous_query", "(", "self", ",", "name", ",", "select", ",", "database", "=", "None", ",", "resample_opts", "=", "None", ")", ":", "query_string", "=", "(", "\"CREATE CONTINUOUS QUERY {0} ON {1}{2} BEGIN {3} END\"", ")", ".", "format", "(", "quo...
r"""Create a continuous query for a database. :param name: the name of continuous query to create :type name: str :param select: select statement for the continuous query :type select: str :param database: the database for which the continuous query is created. Defaults to current client's database :type database: str :param resample_opts: resample options :type resample_opts: str :Example: :: >> select_clause = 'SELECT mean("value") INTO "cpu_mean" ' \ ... 'FROM "cpu" GROUP BY time(1m)' >> client.create_continuous_query( ... 'cpu_mean', select_clause, 'db_name', 'EVERY 10s FOR 2m' ... ) >> client.get_list_continuous_queries() [ { 'db_name': [ { 'name': 'cpu_mean', 'query': 'CREATE CONTINUOUS QUERY "cpu_mean" ' 'ON "db_name" ' 'RESAMPLE EVERY 10s FOR 2m ' 'BEGIN SELECT mean("value") ' 'INTO "cpu_mean" FROM "cpu" ' 'GROUP BY time(1m) END' } ] } ]
[ "r", "Create", "a", "continuous", "query", "for", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L976-L1020
train
215,057
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.drop_continuous_query
def drop_continuous_query(self, name, database=None): """Drop an existing continuous query for a database. :param name: the name of continuous query to drop :type name: str :param database: the database for which the continuous query is dropped. Defaults to current client's database :type database: str """ query_string = ( "DROP CONTINUOUS QUERY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database)) self.query(query_string)
python
def drop_continuous_query(self, name, database=None): """Drop an existing continuous query for a database. :param name: the name of continuous query to drop :type name: str :param database: the database for which the continuous query is dropped. Defaults to current client's database :type database: str """ query_string = ( "DROP CONTINUOUS QUERY {0} ON {1}" ).format(quote_ident(name), quote_ident(database or self._database)) self.query(query_string)
[ "def", "drop_continuous_query", "(", "self", ",", "name", ",", "database", "=", "None", ")", ":", "query_string", "=", "(", "\"DROP CONTINUOUS QUERY {0} ON {1}\"", ")", ".", "format", "(", "quote_ident", "(", "name", ")", ",", "quote_ident", "(", "database", "...
Drop an existing continuous query for a database. :param name: the name of continuous query to drop :type name: str :param database: the database for which the continuous query is dropped. Defaults to current client's database :type database: str
[ "Drop", "an", "existing", "continuous", "query", "for", "a", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L1022-L1034
train
215,058
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.send_packet
def send_packet(self, packet, protocol='json', time_precision=None): """Send an UDP packet. :param packet: the packet to be sent :type packet: (if protocol is 'json') dict (if protocol is 'line') list of line protocol strings :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str """ if protocol == 'json': data = make_lines(packet, time_precision).encode('utf-8') elif protocol == 'line': data = ('\n'.join(packet) + '\n').encode('utf-8') self.udp_socket.sendto(data, (self._host, self._udp_port))
python
def send_packet(self, packet, protocol='json', time_precision=None): """Send an UDP packet. :param packet: the packet to be sent :type packet: (if protocol is 'json') dict (if protocol is 'line') list of line protocol strings :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str """ if protocol == 'json': data = make_lines(packet, time_precision).encode('utf-8') elif protocol == 'line': data = ('\n'.join(packet) + '\n').encode('utf-8') self.udp_socket.sendto(data, (self._host, self._udp_port))
[ "def", "send_packet", "(", "self", ",", "packet", ",", "protocol", "=", "'json'", ",", "time_precision", "=", "None", ")", ":", "if", "protocol", "==", "'json'", ":", "data", "=", "make_lines", "(", "packet", ",", "time_precision", ")", ".", "encode", "(...
Send an UDP packet. :param packet: the packet to be sent :type packet: (if protocol is 'json') dict (if protocol is 'line') list of line protocol strings :param protocol: protocol of input data, either 'json' or 'line' :type protocol: str :param time_precision: Either 's', 'm', 'ms' or 'u', defaults to None :type time_precision: str
[ "Send", "an", "UDP", "packet", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L1036-L1051
train
215,059
influxdata/influxdb-python
influxdb/client.py
InfluxDBClient.close
def close(self): """Close http session.""" if isinstance(self._session, requests.Session): self._session.close()
python
def close(self): """Close http session.""" if isinstance(self._session, requests.Session): self._session.close()
[ "def", "close", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "_session", ",", "requests", ".", "Session", ")", ":", "self", ".", "_session", ".", "close", "(", ")" ]
Close http session.
[ "Close", "http", "session", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/client.py#L1053-L1056
train
215,060
influxdata/influxdb-python
influxdb/chunked_json.py
loads
def loads(s): """Generate a sequence of JSON values from a string.""" _decoder = json.JSONDecoder() while s: s = s.strip() obj, pos = _decoder.raw_decode(s) if not pos: raise ValueError('no JSON object found at %i' % pos) yield obj s = s[pos:]
python
def loads(s): """Generate a sequence of JSON values from a string.""" _decoder = json.JSONDecoder() while s: s = s.strip() obj, pos = _decoder.raw_decode(s) if not pos: raise ValueError('no JSON object found at %i' % pos) yield obj s = s[pos:]
[ "def", "loads", "(", "s", ")", ":", "_decoder", "=", "json", ".", "JSONDecoder", "(", ")", "while", "s", ":", "s", "=", "s", ".", "strip", "(", ")", "obj", ",", "pos", "=", "_decoder", ".", "raw_decode", "(", "s", ")", "if", "not", "pos", ":", ...
Generate a sequence of JSON values from a string.
[ "Generate", "a", "sequence", "of", "JSON", "values", "from", "a", "string", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/chunked_json.py#L17-L27
train
215,061
influxdata/influxdb-python
influxdb/influxdb08/helper.py
SeriesHelper.commit
def commit(cls, client=None): """Commit everything from datapoints via the client. :param client: InfluxDBClient instance for writing points to InfluxDB. :attention: any provided client will supersede the class client. :return: result of client.write_points. """ if not client: client = cls._client rtn = client.write_points(cls._json_body_()) cls._reset_() return rtn
python
def commit(cls, client=None): """Commit everything from datapoints via the client. :param client: InfluxDBClient instance for writing points to InfluxDB. :attention: any provided client will supersede the class client. :return: result of client.write_points. """ if not client: client = cls._client rtn = client.write_points(cls._json_body_()) cls._reset_() return rtn
[ "def", "commit", "(", "cls", ",", "client", "=", "None", ")", ":", "if", "not", "client", ":", "client", "=", "cls", ".", "_client", "rtn", "=", "client", ".", "write_points", "(", "cls", ".", "_json_body_", "(", ")", ")", "cls", ".", "_reset_", "(...
Commit everything from datapoints via the client. :param client: InfluxDBClient instance for writing points to InfluxDB. :attention: any provided client will supersede the class client. :return: result of client.write_points.
[ "Commit", "everything", "from", "datapoints", "via", "the", "client", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/helper.py#L122-L133
train
215,062
influxdata/influxdb-python
influxdb/influxdb08/helper.py
SeriesHelper._json_body_
def _json_body_(cls): """Return JSON body of the datapoints. :return: JSON body of the datapoints. """ json = [] for series_name, data in six.iteritems(cls._datapoints): json.append({'name': series_name, 'columns': cls._fields, 'points': [[getattr(point, k) for k in cls._fields] for point in data] }) return json
python
def _json_body_(cls): """Return JSON body of the datapoints. :return: JSON body of the datapoints. """ json = [] for series_name, data in six.iteritems(cls._datapoints): json.append({'name': series_name, 'columns': cls._fields, 'points': [[getattr(point, k) for k in cls._fields] for point in data] }) return json
[ "def", "_json_body_", "(", "cls", ")", ":", "json", "=", "[", "]", "for", "series_name", ",", "data", "in", "six", ".", "iteritems", "(", "cls", ".", "_datapoints", ")", ":", "json", ".", "append", "(", "{", "'name'", ":", "series_name", ",", "'colum...
Return JSON body of the datapoints. :return: JSON body of the datapoints.
[ "Return", "JSON", "body", "of", "the", "datapoints", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/helper.py#L136-L148
train
215,063
influxdata/influxdb-python
influxdb/_dataframe_client.py
DataFrameClient.query
def query(self, query, params=None, bind_params=None, epoch=None, expected_response_code=200, database=None, raise_errors=True, chunked=False, chunk_size=0, method="GET", dropna=True): """ Query data into a DataFrame. .. danger:: In order to avoid injection vulnerabilities (similar to `SQL injection <https://www.owasp.org/index.php/SQL_Injection>`_ vulnerabilities), do not directly include untrusted data into the ``query`` parameter, use ``bind_params`` instead. :param query: the actual query string :param params: additional parameters for the request, defaults to {} :param bind_params: bind parameters for the query: any variable in the query written as ``'$var_name'`` will be replaced with ``bind_params['var_name']``. Only works in the ``WHERE`` clause and takes precedence over ``params['params']`` :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision :param expected_response_code: the expected status code of response, defaults to 200 :param database: database to query, defaults to None :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :param chunked: Enable to use chunked responses from InfluxDB. With ``chunked`` enabled, one ResultSet is returned per chunk containing all results within that chunk :param chunk_size: Size of each chunk to tell InfluxDB to use. :param dropna: drop columns where all values are missing :returns: the queried data :rtype: :class:`~.ResultSet` """ query_args = dict(params=params, bind_params=bind_params, epoch=epoch, expected_response_code=expected_response_code, raise_errors=raise_errors, chunked=chunked, database=database, method=method, chunk_size=chunk_size) results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): if len(results) > 0: return self._to_dataframe(results, dropna) else: return {} else: return results
python
def query(self, query, params=None, bind_params=None, epoch=None, expected_response_code=200, database=None, raise_errors=True, chunked=False, chunk_size=0, method="GET", dropna=True): """ Query data into a DataFrame. .. danger:: In order to avoid injection vulnerabilities (similar to `SQL injection <https://www.owasp.org/index.php/SQL_Injection>`_ vulnerabilities), do not directly include untrusted data into the ``query`` parameter, use ``bind_params`` instead. :param query: the actual query string :param params: additional parameters for the request, defaults to {} :param bind_params: bind parameters for the query: any variable in the query written as ``'$var_name'`` will be replaced with ``bind_params['var_name']``. Only works in the ``WHERE`` clause and takes precedence over ``params['params']`` :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision :param expected_response_code: the expected status code of response, defaults to 200 :param database: database to query, defaults to None :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :param chunked: Enable to use chunked responses from InfluxDB. With ``chunked`` enabled, one ResultSet is returned per chunk containing all results within that chunk :param chunk_size: Size of each chunk to tell InfluxDB to use. :param dropna: drop columns where all values are missing :returns: the queried data :rtype: :class:`~.ResultSet` """ query_args = dict(params=params, bind_params=bind_params, epoch=epoch, expected_response_code=expected_response_code, raise_errors=raise_errors, chunked=chunked, database=database, method=method, chunk_size=chunk_size) results = super(DataFrameClient, self).query(query, **query_args) if query.strip().upper().startswith("SELECT"): if len(results) > 0: return self._to_dataframe(results, dropna) else: return {} else: return results
[ "def", "query", "(", "self", ",", "query", ",", "params", "=", "None", ",", "bind_params", "=", "None", ",", "epoch", "=", "None", ",", "expected_response_code", "=", "200", ",", "database", "=", "None", ",", "raise_errors", "=", "True", ",", "chunked", ...
Query data into a DataFrame. .. danger:: In order to avoid injection vulnerabilities (similar to `SQL injection <https://www.owasp.org/index.php/SQL_Injection>`_ vulnerabilities), do not directly include untrusted data into the ``query`` parameter, use ``bind_params`` instead. :param query: the actual query string :param params: additional parameters for the request, defaults to {} :param bind_params: bind parameters for the query: any variable in the query written as ``'$var_name'`` will be replaced with ``bind_params['var_name']``. Only works in the ``WHERE`` clause and takes precedence over ``params['params']`` :param epoch: response timestamps to be in epoch format either 'h', 'm', 's', 'ms', 'u', or 'ns',defaults to `None` which is RFC3339 UTC format with nanosecond precision :param expected_response_code: the expected status code of response, defaults to 200 :param database: database to query, defaults to None :param raise_errors: Whether or not to raise exceptions when InfluxDB returns errors, defaults to True :param chunked: Enable to use chunked responses from InfluxDB. With ``chunked`` enabled, one ResultSet is returned per chunk containing all results within that chunk :param chunk_size: Size of each chunk to tell InfluxDB to use. :param dropna: drop columns where all values are missing :returns: the queried data :rtype: :class:`~.ResultSet`
[ "Query", "data", "into", "a", "DataFrame", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/_dataframe_client.py#L142-L201
train
215,064
influxdata/influxdb-python
examples/tutorial.py
main
def main(host='localhost', port=8086): """Instantiate a connection to the InfluxDB.""" user = 'root' password = 'root' dbname = 'example' dbuser = 'smly' dbuser_password = 'my_secret_password' query = 'select Float_value from cpu_load_short;' query_where = 'select Int_value from cpu_load_short where host=$host;' bind_params = {'host': 'server01'} json_body = [ { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "fields": { "Float_value": 0.64, "Int_value": 3, "String_value": "Text", "Bool_value": True } } ] client = InfluxDBClient(host, port, user, password, dbname) print("Create database: " + dbname) client.create_database(dbname) print("Create a retention policy") client.create_retention_policy('awesome_policy', '3d', 3, default=True) print("Switch user: " + dbuser) client.switch_user(dbuser, dbuser_password) print("Write points: {0}".format(json_body)) client.write_points(json_body) print("Querying data: " + query) result = client.query(query) print("Result: {0}".format(result)) print("Querying data: " + query_where) result = client.query(query_where, bind_params=bind_params) print("Result: {0}".format(result)) print("Switch user: " + user) client.switch_user(user, password) print("Drop database: " + dbname) client.drop_database(dbname)
python
def main(host='localhost', port=8086): """Instantiate a connection to the InfluxDB.""" user = 'root' password = 'root' dbname = 'example' dbuser = 'smly' dbuser_password = 'my_secret_password' query = 'select Float_value from cpu_load_short;' query_where = 'select Int_value from cpu_load_short where host=$host;' bind_params = {'host': 'server01'} json_body = [ { "measurement": "cpu_load_short", "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "fields": { "Float_value": 0.64, "Int_value": 3, "String_value": "Text", "Bool_value": True } } ] client = InfluxDBClient(host, port, user, password, dbname) print("Create database: " + dbname) client.create_database(dbname) print("Create a retention policy") client.create_retention_policy('awesome_policy', '3d', 3, default=True) print("Switch user: " + dbuser) client.switch_user(dbuser, dbuser_password) print("Write points: {0}".format(json_body)) client.write_points(json_body) print("Querying data: " + query) result = client.query(query) print("Result: {0}".format(result)) print("Querying data: " + query_where) result = client.query(query_where, bind_params=bind_params) print("Result: {0}".format(result)) print("Switch user: " + user) client.switch_user(user, password) print("Drop database: " + dbname) client.drop_database(dbname)
[ "def", "main", "(", "host", "=", "'localhost'", ",", "port", "=", "8086", ")", ":", "user", "=", "'root'", "password", "=", "'root'", "dbname", "=", "'example'", "dbuser", "=", "'smly'", "dbuser_password", "=", "'my_secret_password'", "query", "=", "'select ...
Instantiate a connection to the InfluxDB.
[ "Instantiate", "a", "connection", "to", "the", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/examples/tutorial.py#L9-L64
train
215,065
influxdata/influxdb-python
examples/tutorial_udp.py
main
def main(uport): """Instantiate connection to the InfluxDB.""" # NOTE: structure of the UDP packet is different than that of information # sent via HTTP json_body = { "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "points": [{ "measurement": "cpu_load_short", "fields": { "value": 0.64 } }, { "measurement": "cpu_load_short", "fields": { "value": 0.67 } }] } # make `use_udp` True and add `udp_port` number from `influxdb.conf` file # no need to mention the database name since it is already configured client = InfluxDBClient(use_udp=True, udp_port=uport) # Instead of `write_points` use `send_packet` client.send_packet(json_body)
python
def main(uport): """Instantiate connection to the InfluxDB.""" # NOTE: structure of the UDP packet is different than that of information # sent via HTTP json_body = { "tags": { "host": "server01", "region": "us-west" }, "time": "2009-11-10T23:00:00Z", "points": [{ "measurement": "cpu_load_short", "fields": { "value": 0.64 } }, { "measurement": "cpu_load_short", "fields": { "value": 0.67 } }] } # make `use_udp` True and add `udp_port` number from `influxdb.conf` file # no need to mention the database name since it is already configured client = InfluxDBClient(use_udp=True, udp_port=uport) # Instead of `write_points` use `send_packet` client.send_packet(json_body)
[ "def", "main", "(", "uport", ")", ":", "# NOTE: structure of the UDP packet is different than that of information", "# sent via HTTP", "json_body", "=", "{", "\"tags\"", ":", "{", "\"host\"", ":", "\"server01\"", ",", "\"region\"", ":", "\"us-west\"", "}", ",", "\...
Instantiate connection to the InfluxDB.
[ "Instantiate", "connection", "to", "the", "InfluxDB", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/examples/tutorial_udp.py#L23-L52
train
215,066
influxdata/influxdb-python
influxdb/helper.py
SeriesHelper._json_body_
def _json_body_(cls): """Return the JSON body of given datapoints. :return: JSON body of these datapoints. """ json = [] for series_name, data in six.iteritems(cls._datapoints): for point in data: json_point = { "measurement": series_name, "fields": {}, "tags": {}, "time": getattr(point, "time") } for field in cls._fields: value = getattr(point, field) if value is not None: json_point['fields'][field] = value for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) json.append(json_point) return json
python
def _json_body_(cls): """Return the JSON body of given datapoints. :return: JSON body of these datapoints. """ json = [] for series_name, data in six.iteritems(cls._datapoints): for point in data: json_point = { "measurement": series_name, "fields": {}, "tags": {}, "time": getattr(point, "time") } for field in cls._fields: value = getattr(point, field) if value is not None: json_point['fields'][field] = value for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) json.append(json_point) return json
[ "def", "_json_body_", "(", "cls", ")", ":", "json", "=", "[", "]", "for", "series_name", ",", "data", "in", "six", ".", "iteritems", "(", "cls", ".", "_datapoints", ")", ":", "for", "point", "in", "data", ":", "json_point", "=", "{", "\"measurement\"",...
Return the JSON body of given datapoints. :return: JSON body of these datapoints.
[ "Return", "the", "JSON", "body", "of", "given", "datapoints", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/helper.py#L151-L175
train
215,067
influxdata/influxdb-python
examples/tutorial_server_data.py
main
def main(host='localhost', port=8086, nb_day=15): """Instantiate a connection to the backend.""" nb_day = 15 # number of day to generate time series timeinterval_min = 5 # create an event every x minutes total_minutes = 1440 * nb_day total_records = int(total_minutes / timeinterval_min) now = datetime.datetime.today() metric = "server_data.cpu_idle" series = [] for i in range(0, total_records): past_date = now - datetime.timedelta(minutes=i * timeinterval_min) value = random.randint(0, 200) hostName = "server-%d" % random.randint(1, 5) # pointValues = [int(past_date.strftime('%s')), value, hostName] pointValues = { "time": int(past_date.strftime('%s')), "measurement": metric, "fields": { "value": value, }, "tags": { "hostName": hostName, }, } series.append(pointValues) print(series) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) try: client.create_database(DBNAME) except InfluxDBClientError: # Drop and create client.drop_database(DBNAME) client.create_database(DBNAME) print("Create a retention policy") retention_policy = 'server_data' client.create_retention_policy(retention_policy, '3d', 3, default=True) print("Write points #: {0}".format(total_records)) client.write_points(series, retention_policy=retention_policy) time.sleep(2) query = "SELECT MEAN(value) FROM {} WHERE \ time > now() - 10d GROUP BY time(500m)".format(metric) result = client.query(query, database=DBNAME) print(result) print("Result: {0}".format(result)) print("Drop database: {}".format(DBNAME)) client.drop_database(DBNAME)
python
def main(host='localhost', port=8086, nb_day=15): """Instantiate a connection to the backend.""" nb_day = 15 # number of day to generate time series timeinterval_min = 5 # create an event every x minutes total_minutes = 1440 * nb_day total_records = int(total_minutes / timeinterval_min) now = datetime.datetime.today() metric = "server_data.cpu_idle" series = [] for i in range(0, total_records): past_date = now - datetime.timedelta(minutes=i * timeinterval_min) value = random.randint(0, 200) hostName = "server-%d" % random.randint(1, 5) # pointValues = [int(past_date.strftime('%s')), value, hostName] pointValues = { "time": int(past_date.strftime('%s')), "measurement": metric, "fields": { "value": value, }, "tags": { "hostName": hostName, }, } series.append(pointValues) print(series) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) try: client.create_database(DBNAME) except InfluxDBClientError: # Drop and create client.drop_database(DBNAME) client.create_database(DBNAME) print("Create a retention policy") retention_policy = 'server_data' client.create_retention_policy(retention_policy, '3d', 3, default=True) print("Write points #: {0}".format(total_records)) client.write_points(series, retention_policy=retention_policy) time.sleep(2) query = "SELECT MEAN(value) FROM {} WHERE \ time > now() - 10d GROUP BY time(500m)".format(metric) result = client.query(query, database=DBNAME) print(result) print("Result: {0}".format(result)) print("Drop database: {}".format(DBNAME)) client.drop_database(DBNAME)
[ "def", "main", "(", "host", "=", "'localhost'", ",", "port", "=", "8086", ",", "nb_day", "=", "15", ")", ":", "nb_day", "=", "15", "# number of day to generate time series", "timeinterval_min", "=", "5", "# create an event every x minutes", "total_minutes", "=", "...
Instantiate a connection to the backend.
[ "Instantiate", "a", "connection", "to", "the", "backend", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/examples/tutorial_server_data.py#L19-L74
train
215,068
influxdata/influxdb-python
examples/tutorial_sine_wave.py
main
def main(host='localhost', port=8086): """Define function to generate the sin wave.""" now = datetime.datetime.today() points = [] for angle in range(0, 360): y = 10 + math.sin(math.radians(angle)) * 10 point = { "measurement": 'foobar', "time": int(now.strftime('%s')) + angle, "fields": { "value": y } } points.append(point) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) client.create_database(DBNAME) client.switch_database(DBNAME) # Write points client.write_points(points) time.sleep(3) query = 'SELECT * FROM foobar' print("Querying data: " + query) result = client.query(query, database=DBNAME) print("Result: {0}".format(result)) """ You might want to comment the delete and plot the result on InfluxDB Interface. Connect on InfluxDB Interface at http://127.0.0.1:8083/ Select the database tutorial -> Explore Data Then run the following query: SELECT * from foobar """ print("Delete database: " + DBNAME) client.drop_database(DBNAME)
python
def main(host='localhost', port=8086): """Define function to generate the sin wave.""" now = datetime.datetime.today() points = [] for angle in range(0, 360): y = 10 + math.sin(math.radians(angle)) * 10 point = { "measurement": 'foobar', "time": int(now.strftime('%s')) + angle, "fields": { "value": y } } points.append(point) client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME) print("Create database: " + DBNAME) client.create_database(DBNAME) client.switch_database(DBNAME) # Write points client.write_points(points) time.sleep(3) query = 'SELECT * FROM foobar' print("Querying data: " + query) result = client.query(query, database=DBNAME) print("Result: {0}".format(result)) """ You might want to comment the delete and plot the result on InfluxDB Interface. Connect on InfluxDB Interface at http://127.0.0.1:8083/ Select the database tutorial -> Explore Data Then run the following query: SELECT * from foobar """ print("Delete database: " + DBNAME) client.drop_database(DBNAME)
[ "def", "main", "(", "host", "=", "'localhost'", ",", "port", "=", "8086", ")", ":", "now", "=", "datetime", ".", "datetime", ".", "today", "(", ")", "points", "=", "[", "]", "for", "angle", "in", "range", "(", "0", ",", "360", ")", ":", "y", "=...
Define function to generate the sin wave.
[ "Define", "function", "to", "generate", "the", "sin", "wave", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/examples/tutorial_sine_wave.py#L17-L61
train
215,069
influxdata/influxdb-python
influxdb/resultset.py
ResultSet.get_points
def get_points(self, measurement=None, tags=None): """Return a generator for all the points that match the given filters. :param measurement: The measurement name :type measurement: str :param tags: Tags to look for :type tags: dict :return: Points generator """ # Raise error if measurement is not str or bytes if not isinstance(measurement, (bytes, type(b''.decode()), type(None))): raise TypeError('measurement must be an str or None') for series in self._get_series(): series_name = series.get('measurement', series.get('name', 'results')) if series_name is None: # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. if tags is None: for item in self._get_points_for_series(series): yield item elif measurement in (None, series_name): # by default if no tags was provided then # we will matches every returned series series_tags = series.get('tags', {}) for item in self._get_points_for_series(series): if tags is None or \ self._tag_matches(item, tags) or \ self._tag_matches(series_tags, tags): yield item
python
def get_points(self, measurement=None, tags=None): """Return a generator for all the points that match the given filters. :param measurement: The measurement name :type measurement: str :param tags: Tags to look for :type tags: dict :return: Points generator """ # Raise error if measurement is not str or bytes if not isinstance(measurement, (bytes, type(b''.decode()), type(None))): raise TypeError('measurement must be an str or None') for series in self._get_series(): series_name = series.get('measurement', series.get('name', 'results')) if series_name is None: # this is a "system" query or a query which # doesn't return a name attribute. # like 'show retention policies' .. if tags is None: for item in self._get_points_for_series(series): yield item elif measurement in (None, series_name): # by default if no tags was provided then # we will matches every returned series series_tags = series.get('tags', {}) for item in self._get_points_for_series(series): if tags is None or \ self._tag_matches(item, tags) or \ self._tag_matches(series_tags, tags): yield item
[ "def", "get_points", "(", "self", ",", "measurement", "=", "None", ",", "tags", "=", "None", ")", ":", "# Raise error if measurement is not str or bytes", "if", "not", "isinstance", "(", "measurement", ",", "(", "bytes", ",", "type", "(", "b''", ".", "decode",...
Return a generator for all the points that match the given filters. :param measurement: The measurement name :type measurement: str :param tags: Tags to look for :type tags: dict :return: Points generator
[ "Return", "a", "generator", "for", "all", "the", "points", "that", "match", "the", "given", "filters", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/resultset.py#L80-L115
train
215,070
influxdata/influxdb-python
influxdb/resultset.py
ResultSet.keys
def keys(self): """Return the list of keys in the ResultSet. :return: List of keys. Keys are tuples (series_name, tags) """ keys = [] for series in self._get_series(): keys.append( (series.get('measurement', series.get('name', 'results')), series.get('tags', None)) ) return keys
python
def keys(self): """Return the list of keys in the ResultSet. :return: List of keys. Keys are tuples (series_name, tags) """ keys = [] for series in self._get_series(): keys.append( (series.get('measurement', series.get('name', 'results')), series.get('tags', None)) ) return keys
[ "def", "keys", "(", "self", ")", ":", "keys", "=", "[", "]", "for", "series", "in", "self", ".", "_get_series", "(", ")", ":", "keys", ".", "append", "(", "(", "series", ".", "get", "(", "'measurement'", ",", "series", ".", "get", "(", "'name'", ...
Return the list of keys in the ResultSet. :return: List of keys. Keys are tuples (series_name, tags)
[ "Return", "the", "list", "of", "keys", "in", "the", "ResultSet", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/resultset.py#L153-L165
train
215,071
influxdata/influxdb-python
influxdb/resultset.py
ResultSet.items
def items(self): """Return the set of items from the ResultSet. :return: List of tuples, (key, generator) """ items = [] for series in self._get_series(): series_key = (series.get('measurement', series.get('name', 'results')), series.get('tags', None)) items.append( (series_key, self._get_points_for_series(series)) ) return items
python
def items(self): """Return the set of items from the ResultSet. :return: List of tuples, (key, generator) """ items = [] for series in self._get_series(): series_key = (series.get('measurement', series.get('name', 'results')), series.get('tags', None)) items.append( (series_key, self._get_points_for_series(series)) ) return items
[ "def", "items", "(", "self", ")", ":", "items", "=", "[", "]", "for", "series", "in", "self", ".", "_get_series", "(", ")", ":", "series_key", "=", "(", "series", ".", "get", "(", "'measurement'", ",", "series", ".", "get", "(", "'name'", ",", "'re...
Return the set of items from the ResultSet. :return: List of tuples, (key, generator)
[ "Return", "the", "set", "of", "items", "from", "the", "ResultSet", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/resultset.py#L167-L180
train
215,072
influxdata/influxdb-python
influxdb/resultset.py
ResultSet._get_points_for_series
def _get_points_for_series(self, series): """Return generator of dict from columns and values of a series. :param series: One series :return: Generator of dicts """ for point in series.get('values', []): yield self.point_from_cols_vals( series['columns'], point )
python
def _get_points_for_series(self, series): """Return generator of dict from columns and values of a series. :param series: One series :return: Generator of dicts """ for point in series.get('values', []): yield self.point_from_cols_vals( series['columns'], point )
[ "def", "_get_points_for_series", "(", "self", ",", "series", ")", ":", "for", "point", "in", "series", ".", "get", "(", "'values'", ",", "[", "]", ")", ":", "yield", "self", ".", "point_from_cols_vals", "(", "series", "[", "'columns'", "]", ",", "point",...
Return generator of dict from columns and values of a series. :param series: One series :return: Generator of dicts
[ "Return", "generator", "of", "dict", "from", "columns", "and", "values", "of", "a", "series", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/resultset.py#L182-L192
train
215,073
influxdata/influxdb-python
influxdb/resultset.py
ResultSet.point_from_cols_vals
def point_from_cols_vals(cols, vals): """Create a dict from columns and values lists. :param cols: List of columns :param vals: List of values :return: Dict where keys are columns. """ point = {} for col_index, col_name in enumerate(cols): point[col_name] = vals[col_index] return point
python
def point_from_cols_vals(cols, vals): """Create a dict from columns and values lists. :param cols: List of columns :param vals: List of values :return: Dict where keys are columns. """ point = {} for col_index, col_name in enumerate(cols): point[col_name] = vals[col_index] return point
[ "def", "point_from_cols_vals", "(", "cols", ",", "vals", ")", ":", "point", "=", "{", "}", "for", "col_index", ",", "col_name", "in", "enumerate", "(", "cols", ")", ":", "point", "[", "col_name", "]", "=", "vals", "[", "col_index", "]", "return", "poin...
Create a dict from columns and values lists. :param cols: List of columns :param vals: List of values :return: Dict where keys are columns.
[ "Create", "a", "dict", "from", "columns", "and", "values", "lists", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/resultset.py#L195-L206
train
215,074
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.from_dsn
def from_dsn(dsn, **kwargs): r"""Return an instaance of InfluxDBClient from given data source name. Returns an instance of InfluxDBClient from the provided data source name. Supported schemes are "influxdb", "https+influxdb", "udp+influxdb". Parameters for the InfluxDBClient constructor may be also be passed to this function. Examples: >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ ... localhost:8086/databasename', timeout=5) >> type(cli) <class 'influxdb.client.InfluxDBClient'> >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ ... localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 :param dsn: data source name :type dsn: string :param **kwargs: additional parameters for InfluxDBClient. :type **kwargs: dict :note: parameters provided in **kwargs may override dsn parameters. :note: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the udp port with the additional udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. 
""" init_args = {} conn_params = urlparse(dsn) scheme_info = conn_params.scheme.split('+') if len(scheme_info) == 1: scheme = scheme_info[0] modifier = None else: modifier, scheme = scheme_info if scheme != 'influxdb': raise ValueError('Unknown scheme "{0}".'.format(scheme)) if modifier: if modifier == 'udp': init_args['use_udp'] = True elif modifier == 'https': init_args['ssl'] = True else: raise ValueError('Unknown modifier "{0}".'.format(modifier)) if conn_params.hostname: init_args['host'] = conn_params.hostname if conn_params.port: init_args['port'] = conn_params.port if conn_params.username: init_args['username'] = conn_params.username if conn_params.password: init_args['password'] = conn_params.password if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] init_args.update(kwargs) return InfluxDBClient(**init_args)
python
def from_dsn(dsn, **kwargs): r"""Return an instaance of InfluxDBClient from given data source name. Returns an instance of InfluxDBClient from the provided data source name. Supported schemes are "influxdb", "https+influxdb", "udp+influxdb". Parameters for the InfluxDBClient constructor may be also be passed to this function. Examples: >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ ... localhost:8086/databasename', timeout=5) >> type(cli) <class 'influxdb.client.InfluxDBClient'> >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ ... localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 :param dsn: data source name :type dsn: string :param **kwargs: additional parameters for InfluxDBClient. :type **kwargs: dict :note: parameters provided in **kwargs may override dsn parameters. :note: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the udp port with the additional udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value. 
""" init_args = {} conn_params = urlparse(dsn) scheme_info = conn_params.scheme.split('+') if len(scheme_info) == 1: scheme = scheme_info[0] modifier = None else: modifier, scheme = scheme_info if scheme != 'influxdb': raise ValueError('Unknown scheme "{0}".'.format(scheme)) if modifier: if modifier == 'udp': init_args['use_udp'] = True elif modifier == 'https': init_args['ssl'] = True else: raise ValueError('Unknown modifier "{0}".'.format(modifier)) if conn_params.hostname: init_args['host'] = conn_params.hostname if conn_params.port: init_args['port'] = conn_params.port if conn_params.username: init_args['username'] = conn_params.username if conn_params.password: init_args['password'] = conn_params.password if conn_params.path and len(conn_params.path) > 1: init_args['database'] = conn_params.path[1:] init_args.update(kwargs) return InfluxDBClient(**init_args)
[ "def", "from_dsn", "(", "dsn", ",", "*", "*", "kwargs", ")", ":", "init_args", "=", "{", "}", "conn_params", "=", "urlparse", "(", "dsn", ")", "scheme_info", "=", "conn_params", ".", "scheme", ".", "split", "(", "'+'", ")", "if", "len", "(", "scheme_...
r"""Return an instaance of InfluxDBClient from given data source name. Returns an instance of InfluxDBClient from the provided data source name. Supported schemes are "influxdb", "https+influxdb", "udp+influxdb". Parameters for the InfluxDBClient constructor may be also be passed to this function. Examples: >> cli = InfluxDBClient.from_dsn('influxdb://username:password@\ ... localhost:8086/databasename', timeout=5) >> type(cli) <class 'influxdb.client.InfluxDBClient'> >> cli = InfluxDBClient.from_dsn('udp+influxdb://username:pass@\ ... localhost:8086/databasename', timeout=5, udp_port=159) >> print('{0._baseurl} - {0.use_udp} {0.udp_port}'.format(cli)) http://localhost:8086 - True 159 :param dsn: data source name :type dsn: string :param **kwargs: additional parameters for InfluxDBClient. :type **kwargs: dict :note: parameters provided in **kwargs may override dsn parameters. :note: when using "udp+influxdb" the specified port (if any) will be used for the TCP connection; specify the udp port with the additional udp_port parameter (cf. examples). :raise ValueError: if the provided DSN has any unexpected value.
[ "r", "Return", "an", "instaance", "of", "InfluxDBClient", "from", "given", "data", "source", "name", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L106-L169
train
215,075
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.request
def request(self, url, method='GET', params=None, data=None, expected_response_code=200): """Make a http request to API.""" url = "{0}/{1}".format(self._baseurl, url) if params is None: params = {} auth = { 'u': self._username, 'p': self._password } params.update(auth) if data is not None and not isinstance(data, str): data = json.dumps(data) retry = True _try = 0 # Try to send the request more than once by default (see #103) while retry: try: response = session.request( method=method, url=url, params=params, data=data, headers=self._headers, verify=self._verify_ssl, timeout=self._timeout ) break except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): _try += 1 if self._retries != 0: retry = _try < self._retries else: raise requests.exceptions.ConnectionError if response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code)
python
def request(self, url, method='GET', params=None, data=None, expected_response_code=200): """Make a http request to API.""" url = "{0}/{1}".format(self._baseurl, url) if params is None: params = {} auth = { 'u': self._username, 'p': self._password } params.update(auth) if data is not None and not isinstance(data, str): data = json.dumps(data) retry = True _try = 0 # Try to send the request more than once by default (see #103) while retry: try: response = session.request( method=method, url=url, params=params, data=data, headers=self._headers, verify=self._verify_ssl, timeout=self._timeout ) break except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): _try += 1 if self._retries != 0: retry = _try < self._retries else: raise requests.exceptions.ConnectionError if response.status_code == expected_response_code: return response else: raise InfluxDBClientError(response.content, response.status_code)
[ "def", "request", "(", "self", ",", "url", ",", "method", "=", "'GET'", ",", "params", "=", "None", ",", "data", "=", "None", ",", "expected_response_code", "=", "200", ")", ":", "url", "=", "\"{0}/{1}\"", ".", "format", "(", "self", ".", "_baseurl", ...
Make a http request to API.
[ "Make", "a", "http", "request", "to", "API", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L204-L248
train
215,076
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.write
def write(self, data): """Provide as convenience for influxdb v0.9.0, this may change.""" self.request( url="write", method='POST', params=None, data=data, expected_response_code=200 ) return True
python
def write(self, data): """Provide as convenience for influxdb v0.9.0, this may change.""" self.request( url="write", method='POST', params=None, data=data, expected_response_code=200 ) return True
[ "def", "write", "(", "self", ",", "data", ")", ":", "self", ".", "request", "(", "url", "=", "\"write\"", ",", "method", "=", "'POST'", ",", "params", "=", "None", ",", "data", "=", "data", ",", "expected_response_code", "=", "200", ")", "return", "T...
Provide as convenience for influxdb v0.9.0, this may change.
[ "Provide", "as", "convenience", "for", "influxdb", "v0", ".", "9", ".", "0", "this", "may", "change", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L250-L259
train
215,077
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.delete_points
def delete_points(self, name): """Delete an entire series.""" url = "db/{0}/series/{1}".format(self._database, name) self.request( url=url, method='DELETE', expected_response_code=204 ) return True
python
def delete_points(self, name): """Delete an entire series.""" url = "db/{0}/series/{1}".format(self._database, name) self.request( url=url, method='DELETE', expected_response_code=204 ) return True
[ "def", "delete_points", "(", "self", ",", "name", ")", ":", "url", "=", "\"db/{0}/series/{1}\"", ".", "format", "(", "self", ".", "_database", ",", "name", ")", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'DELETE'", ",", "expec...
Delete an entire series.
[ "Delete", "an", "entire", "series", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L364-L374
train
215,078
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.query
def query(self, query, time_precision='s', chunked=False): """Query data from the influxdb v0.8 database. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. """ return self._query(query, time_precision=time_precision, chunked=chunked)
python
def query(self, query, time_precision='s', chunked=False): """Query data from the influxdb v0.8 database. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. """ return self._query(query, time_precision=time_precision, chunked=chunked)
[ "def", "query", "(", "self", ",", "query", ",", "time_precision", "=", "'s'", ",", "chunked", "=", "False", ")", ":", "return", "self", ".", "_query", "(", "query", ",", "time_precision", "=", "time_precision", ",", "chunked", "=", "chunked", ")" ]
Query data from the influxdb v0.8 database. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise.
[ "Query", "data", "from", "the", "influxdb", "v0", ".", "8", "database", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L414-L423
train
215,079
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.create_database
def create_database(self, database): """Create a database on the InfluxDB server. :param database: the name of the database to create :type database: string :rtype: boolean """ url = "db" data = {'name': database} self.request( url=url, method='POST', data=data, expected_response_code=201 ) return True
python
def create_database(self, database): """Create a database on the InfluxDB server. :param database: the name of the database to create :type database: string :rtype: boolean """ url = "db" data = {'name': database} self.request( url=url, method='POST', data=data, expected_response_code=201 ) return True
[ "def", "create_database", "(", "self", ",", "database", ")", ":", "url", "=", "\"db\"", "data", "=", "{", "'name'", ":", "database", "}", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'POST'", ",", "data", "=", "data", ",", "...
Create a database on the InfluxDB server. :param database: the name of the database to create :type database: string :rtype: boolean
[ "Create", "a", "database", "on", "the", "InfluxDB", "server", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L472-L490
train
215,080
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.delete_database
def delete_database(self, database): """Drop a database on the InfluxDB server. :param database: the name of the database to delete :type database: string :rtype: boolean """ url = "db/{0}".format(database) self.request( url=url, method='DELETE', expected_response_code=204 ) return True
python
def delete_database(self, database): """Drop a database on the InfluxDB server. :param database: the name of the database to delete :type database: string :rtype: boolean """ url = "db/{0}".format(database) self.request( url=url, method='DELETE', expected_response_code=204 ) return True
[ "def", "delete_database", "(", "self", ",", "database", ")", ":", "url", "=", "\"db/{0}\"", ".", "format", "(", "database", ")", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'DELETE'", ",", "expected_response_code", "=", "204", ")...
Drop a database on the InfluxDB server. :param database: the name of the database to delete :type database: string :rtype: boolean
[ "Drop", "a", "database", "on", "the", "InfluxDB", "server", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L492-L507
train
215,081
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.get_list_database
def get_list_database(self): """Get the list of databases.""" url = "db" response = self.request( url=url, method='GET', expected_response_code=200 ) return response.json()
python
def get_list_database(self): """Get the list of databases.""" url = "db" response = self.request( url=url, method='GET', expected_response_code=200 ) return response.json()
[ "def", "get_list_database", "(", "self", ")", ":", "url", "=", "\"db\"", "response", "=", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'GET'", ",", "expected_response_code", "=", "200", ")", "return", "response", ".", "json", "(",...
Get the list of databases.
[ "Get", "the", "list", "of", "databases", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L512-L522
train
215,082
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.delete_series
def delete_series(self, series): """Drop a series on the InfluxDB server. :param series: the name of the series to delete :type series: string :rtype: boolean """ url = "db/{0}/series/{1}".format( self._database, series ) self.request( url=url, method='DELETE', expected_response_code=204 ) return True
python
def delete_series(self, series): """Drop a series on the InfluxDB server. :param series: the name of the series to delete :type series: string :rtype: boolean """ url = "db/{0}/series/{1}".format( self._database, series ) self.request( url=url, method='DELETE', expected_response_code=204 ) return True
[ "def", "delete_series", "(", "self", ",", "series", ")", ":", "url", "=", "\"db/{0}/series/{1}\"", ".", "format", "(", "self", ".", "_database", ",", "series", ")", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'DELETE'", ",", "e...
Drop a series on the InfluxDB server. :param series: the name of the series to delete :type series: string :rtype: boolean
[ "Drop", "a", "series", "on", "the", "InfluxDB", "server", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L536-L554
train
215,083
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.get_list_cluster_admins
def get_list_cluster_admins(self): """Get list of cluster admins.""" response = self.request( url="cluster_admins", method='GET', expected_response_code=200 ) return response.json()
python
def get_list_cluster_admins(self): """Get list of cluster admins.""" response = self.request( url="cluster_admins", method='GET', expected_response_code=200 ) return response.json()
[ "def", "get_list_cluster_admins", "(", "self", ")", ":", "response", "=", "self", ".", "request", "(", "url", "=", "\"cluster_admins\"", ",", "method", "=", "'GET'", ",", "expected_response_code", "=", "200", ")", "return", "response", ".", "json", "(", ")" ...
Get list of cluster admins.
[ "Get", "list", "of", "cluster", "admins", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L597-L605
train
215,084
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.add_cluster_admin
def add_cluster_admin(self, new_username, new_password): """Add cluster admin.""" data = { 'name': new_username, 'password': new_password } self.request( url="cluster_admins", method='POST', data=data, expected_response_code=200 ) return True
python
def add_cluster_admin(self, new_username, new_password): """Add cluster admin.""" data = { 'name': new_username, 'password': new_password } self.request( url="cluster_admins", method='POST', data=data, expected_response_code=200 ) return True
[ "def", "add_cluster_admin", "(", "self", ",", "new_username", ",", "new_password", ")", ":", "data", "=", "{", "'name'", ":", "new_username", ",", "'password'", ":", "new_password", "}", "self", ".", "request", "(", "url", "=", "\"cluster_admins\"", ",", "me...
Add cluster admin.
[ "Add", "cluster", "admin", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L607-L621
train
215,085
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.update_cluster_admin_password
def update_cluster_admin_password(self, username, new_password): """Update cluster admin password.""" url = "cluster_admins/{0}".format(username) data = { 'password': new_password } self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True
python
def update_cluster_admin_password(self, username, new_password): """Update cluster admin password.""" url = "cluster_admins/{0}".format(username) data = { 'password': new_password } self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True
[ "def", "update_cluster_admin_password", "(", "self", ",", "username", ",", "new_password", ")", ":", "url", "=", "\"cluster_admins/{0}\"", ".", "format", "(", "username", ")", "data", "=", "{", "'password'", ":", "new_password", "}", "self", ".", "request", "(...
Update cluster admin password.
[ "Update", "cluster", "admin", "password", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L623-L638
train
215,086
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.delete_cluster_admin
def delete_cluster_admin(self, username): """Delete cluster admin.""" url = "cluster_admins/{0}".format(username) self.request( url=url, method='DELETE', expected_response_code=200 ) return True
python
def delete_cluster_admin(self, username): """Delete cluster admin.""" url = "cluster_admins/{0}".format(username) self.request( url=url, method='DELETE', expected_response_code=200 ) return True
[ "def", "delete_cluster_admin", "(", "self", ",", "username", ")", ":", "url", "=", "\"cluster_admins/{0}\"", ".", "format", "(", "username", ")", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'DELETE'", ",", "expected_response_code", "...
Delete cluster admin.
[ "Delete", "cluster", "admin", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L640-L650
train
215,087
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.alter_database_admin
def alter_database_admin(self, username, is_admin): """Alter the database admin.""" url = "db/{0}/users/{1}".format(self._database, username) data = {'admin': is_admin} self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True
python
def alter_database_admin(self, username, is_admin): """Alter the database admin.""" url = "db/{0}/users/{1}".format(self._database, username) data = {'admin': is_admin} self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True
[ "def", "alter_database_admin", "(", "self", ",", "username", ",", "is_admin", ")", ":", "url", "=", "\"db/{0}/users/{1}\"", ".", "format", "(", "self", ".", "_database", ",", "username", ")", "data", "=", "{", "'admin'", ":", "is_admin", "}", "self", ".", ...
Alter the database admin.
[ "Alter", "the", "database", "admin", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L660-L673
train
215,088
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.get_database_users
def get_database_users(self): """Get list of database users.""" url = "db/{0}/users".format(self._database) response = self.request( url=url, method='GET', expected_response_code=200 ) return response.json()
python
def get_database_users(self): """Get list of database users.""" url = "db/{0}/users".format(self._database) response = self.request( url=url, method='GET', expected_response_code=200 ) return response.json()
[ "def", "get_database_users", "(", "self", ")", ":", "url", "=", "\"db/{0}/users\"", ".", "format", "(", "self", ".", "_database", ")", "response", "=", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'GET'", ",", "expected_response_cod...
Get list of database users.
[ "Get", "list", "of", "database", "users", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L733-L743
train
215,089
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.add_database_user
def add_database_user(self, new_username, new_password, permissions=None): """Add database user. :param permissions: A ``(readFrom, writeTo)`` tuple """ url = "db/{0}/users".format(self._database) data = { 'name': new_username, 'password': new_password } if permissions: try: data['readFrom'], data['writeTo'] = permissions except (ValueError, TypeError): raise TypeError( "'permissions' must be (readFrom, writeTo) tuple" ) self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True
python
def add_database_user(self, new_username, new_password, permissions=None): """Add database user. :param permissions: A ``(readFrom, writeTo)`` tuple """ url = "db/{0}/users".format(self._database) data = { 'name': new_username, 'password': new_password } if permissions: try: data['readFrom'], data['writeTo'] = permissions except (ValueError, TypeError): raise TypeError( "'permissions' must be (readFrom, writeTo) tuple" ) self.request( url=url, method='POST', data=data, expected_response_code=200 ) return True
[ "def", "add_database_user", "(", "self", ",", "new_username", ",", "new_password", ",", "permissions", "=", "None", ")", ":", "url", "=", "\"db/{0}/users\"", ".", "format", "(", "self", ".", "_database", ")", "data", "=", "{", "'name'", ":", "new_username", ...
Add database user. :param permissions: A ``(readFrom, writeTo)`` tuple
[ "Add", "database", "user", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L745-L772
train
215,090
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.delete_database_user
def delete_database_user(self, username): """Delete database user.""" url = "db/{0}/users/{1}".format(self._database, username) self.request( url=url, method='DELETE', expected_response_code=200 ) return True
python
def delete_database_user(self, username): """Delete database user.""" url = "db/{0}/users/{1}".format(self._database, username) self.request( url=url, method='DELETE', expected_response_code=200 ) return True
[ "def", "delete_database_user", "(", "self", ",", "username", ")", ":", "url", "=", "\"db/{0}/users/{1}\"", ".", "format", "(", "self", ".", "_database", ",", "username", ")", "self", ".", "request", "(", "url", "=", "url", ",", "method", "=", "'DELETE'", ...
Delete database user.
[ "Delete", "database", "user", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L815-L825
train
215,091
influxdata/influxdb-python
influxdb/influxdb08/client.py
InfluxDBClient.send_packet
def send_packet(self, packet): """Send a UDP packet along the wire.""" data = json.dumps(packet) byte = data.encode('utf-8') self.udp_socket.sendto(byte, (self._host, self._udp_port))
python
def send_packet(self, packet): """Send a UDP packet along the wire.""" data = json.dumps(packet) byte = data.encode('utf-8') self.udp_socket.sendto(byte, (self._host, self._udp_port))
[ "def", "send_packet", "(", "self", ",", "packet", ")", ":", "data", "=", "json", ".", "dumps", "(", "packet", ")", "byte", "=", "data", ".", "encode", "(", "'utf-8'", ")", "self", ".", "udp_socket", ".", "sendto", "(", "byte", ",", "(", "self", "."...
Send a UDP packet along the wire.
[ "Send", "a", "UDP", "packet", "along", "the", "wire", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/client.py#L839-L843
train
215,092
influxdata/influxdb-python
influxdb/influxdb08/dataframe_client.py
DataFrameClient.query
def query(self, query, time_precision='s', chunked=False): """Query data into DataFrames. Returns a DataFrame for a single time series and a map for multiple time series with the time series as value and its name as key. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. """ result = InfluxDBClient.query(self, query=query, time_precision=time_precision, chunked=chunked) if len(result) == 0: return result elif len(result) == 1: return self._to_dataframe(result[0], time_precision) else: ret = {} for time_series in result: ret[time_series['name']] = self._to_dataframe(time_series, time_precision) return ret
python
def query(self, query, time_precision='s', chunked=False): """Query data into DataFrames. Returns a DataFrame for a single time series and a map for multiple time series with the time series as value and its name as key. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise. """ result = InfluxDBClient.query(self, query=query, time_precision=time_precision, chunked=chunked) if len(result) == 0: return result elif len(result) == 1: return self._to_dataframe(result[0], time_precision) else: ret = {} for time_series in result: ret[time_series['name']] = self._to_dataframe(time_series, time_precision) return ret
[ "def", "query", "(", "self", ",", "query", ",", "time_precision", "=", "'s'", ",", "chunked", "=", "False", ")", ":", "result", "=", "InfluxDBClient", ".", "query", "(", "self", ",", "query", "=", "query", ",", "time_precision", "=", "time_precision", ",...
Query data into DataFrames. Returns a DataFrame for a single time series and a map for multiple time series with the time series as value and its name as key. :param time_precision: [Optional, default 's'] Either 's', 'm', 'ms' or 'u'. :param chunked: [Optional, default=False] True if the data shall be retrieved in chunks, False otherwise.
[ "Query", "data", "into", "DataFrames", "." ]
d5d12499f3755199d5eedd8b363450f1cf4073bd
https://github.com/influxdata/influxdb-python/blob/d5d12499f3755199d5eedd8b363450f1cf4073bd/influxdb/influxdb08/dataframe_client.py#L85-L108
train
215,093
groveco/django-sql-explorer
explorer/forms.py
SqlField.validate
def validate(self, value): """ Ensure that the SQL passes the blacklist. :param value: The SQL for this Query model. """ query = Query(sql=value) passes_blacklist, failing_words = query.passes_blacklist() error = MSG_FAILED_BLACKLIST % ', '.join(failing_words) if not passes_blacklist else None if error: raise ValidationError( error, code="InvalidSql" )
python
def validate(self, value): """ Ensure that the SQL passes the blacklist. :param value: The SQL for this Query model. """ query = Query(sql=value) passes_blacklist, failing_words = query.passes_blacklist() error = MSG_FAILED_BLACKLIST % ', '.join(failing_words) if not passes_blacklist else None if error: raise ValidationError( error, code="InvalidSql" )
[ "def", "validate", "(", "self", ",", "value", ")", ":", "query", "=", "Query", "(", "sql", "=", "value", ")", "passes_blacklist", ",", "failing_words", "=", "query", ".", "passes_blacklist", "(", ")", "error", "=", "MSG_FAILED_BLACKLIST", "%", "', '", ".",...
Ensure that the SQL passes the blacklist. :param value: The SQL for this Query model.
[ "Ensure", "that", "the", "SQL", "passes", "the", "blacklist", "." ]
622b96354e1d7ae7f7105ae90b8da3247e028623
https://github.com/groveco/django-sql-explorer/blob/622b96354e1d7ae7f7105ae90b8da3247e028623/explorer/forms.py#L11-L28
train
215,094
groveco/django-sql-explorer
explorer/schema.py
build_schema_info
def build_schema_info(connection_alias): """ Construct schema information via engine-specific queries of the tables in the DB. :return: Schema information of the following form, sorted by db_table_name. [ ("db_table_name", [ ("db_column_name", "DbFieldType"), (...), ] ) ] """ connection = get_valid_connection(connection_alias) ret = [] with connection.cursor() as cursor: tables_to_introspect = connection.introspection.table_names(cursor, include_views=_include_views()) for table_name in tables_to_introspect: if not _include_table(table_name): continue td = [] table_description = connection.introspection.get_table_description(cursor, table_name) for row in table_description: column_name = row[0] try: field_type = connection.introspection.get_field_type(row[1], row) except KeyError as e: field_type = 'Unknown' td.append((column_name, field_type)) ret.append((table_name, td)) return ret
python
def build_schema_info(connection_alias): """ Construct schema information via engine-specific queries of the tables in the DB. :return: Schema information of the following form, sorted by db_table_name. [ ("db_table_name", [ ("db_column_name", "DbFieldType"), (...), ] ) ] """ connection = get_valid_connection(connection_alias) ret = [] with connection.cursor() as cursor: tables_to_introspect = connection.introspection.table_names(cursor, include_views=_include_views()) for table_name in tables_to_introspect: if not _include_table(table_name): continue td = [] table_description = connection.introspection.get_table_description(cursor, table_name) for row in table_description: column_name = row[0] try: field_type = connection.introspection.get_field_type(row[1], row) except KeyError as e: field_type = 'Unknown' td.append((column_name, field_type)) ret.append((table_name, td)) return ret
[ "def", "build_schema_info", "(", "connection_alias", ")", ":", "connection", "=", "get_valid_connection", "(", "connection_alias", ")", "ret", "=", "[", "]", "with", "connection", ".", "cursor", "(", ")", "as", "cursor", ":", "tables_to_introspect", "=", "connec...
Construct schema information via engine-specific queries of the tables in the DB. :return: Schema information of the following form, sorted by db_table_name. [ ("db_table_name", [ ("db_column_name", "DbFieldType"), (...), ] ) ]
[ "Construct", "schema", "information", "via", "engine", "-", "specific", "queries", "of", "the", "tables", "in", "the", "DB", "." ]
622b96354e1d7ae7f7105ae90b8da3247e028623
https://github.com/groveco/django-sql-explorer/blob/622b96354e1d7ae7f7105ae90b8da3247e028623/explorer/schema.py#L52-L85
train
215,095
CodeReclaimers/neat-python
neat/graphs.py
creates_cycle
def creates_cycle(connections, test): """ Returns true if the addition of the 'test' connection would create a cycle, assuming that no cycle already exists in the graph represented by 'connections'. """ i, o = test if i == o: return True visited = {o} while True: num_added = 0 for a, b in connections: if a in visited and b not in visited: if b == i: return True visited.add(b) num_added += 1 if num_added == 0: return False
python
def creates_cycle(connections, test): """ Returns true if the addition of the 'test' connection would create a cycle, assuming that no cycle already exists in the graph represented by 'connections'. """ i, o = test if i == o: return True visited = {o} while True: num_added = 0 for a, b in connections: if a in visited and b not in visited: if b == i: return True visited.add(b) num_added += 1 if num_added == 0: return False
[ "def", "creates_cycle", "(", "connections", ",", "test", ")", ":", "i", ",", "o", "=", "test", "if", "i", "==", "o", ":", "return", "True", "visited", "=", "{", "o", "}", "while", "True", ":", "num_added", "=", "0", "for", "a", ",", "b", "in", ...
Returns true if the addition of the 'test' connection would create a cycle, assuming that no cycle already exists in the graph represented by 'connections'.
[ "Returns", "true", "if", "the", "addition", "of", "the", "test", "connection", "would", "create", "a", "cycle", "assuming", "that", "no", "cycle", "already", "exists", "in", "the", "graph", "represented", "by", "connections", "." ]
e3dbe77c0d776eae41d598e6439e6ac02ab90b18
https://github.com/CodeReclaimers/neat-python/blob/e3dbe77c0d776eae41d598e6439e6ac02ab90b18/neat/graphs.py#L3-L24
train
215,096
CodeReclaimers/neat-python
examples/xor/evolve-feedforward-threaded.py
run
def run(config_file): """load the config, create a population, evolve and show the result""" # Load configuration. config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat.DefaultSpeciesSet, neat.DefaultStagnation, config_file) # Create the population, which is the top-level object for a NEAT run. p = neat.Population(config) # Add a stdout reporter to show progress in the terminal. p.add_reporter(neat.StdOutReporter(True)) stats = neat.StatisticsReporter() p.add_reporter(stats) # Run for up to 300 generations. pe = neat.ThreadedEvaluator(4, eval_genome) winner = p.run(pe.evaluate, 300) pe.stop() # Display the winning genome. print('\nBest genome:\n{!s}'.format(winner)) # Show output of the most fit genome against training data. print('\nOutput:') winner_net = neat.nn.FeedForwardNetwork.create(winner, config) for xi, xo in zip(xor_inputs, xor_outputs): output = winner_net.activate(xi) print( "input {!r}, expected output {!r}, got {!r}".format(xi, xo, output) ) if visualize is not None: node_names = {-1: 'A', -2: 'B', 0: 'A XOR B'} visualize.draw_net(config, winner, True, node_names=node_names) visualize.plot_stats(stats, ylog=False, view=True) visualize.plot_species(stats, view=True)
python
def run(config_file): """load the config, create a population, evolve and show the result""" # Load configuration. config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction, neat.DefaultSpeciesSet, neat.DefaultStagnation, config_file) # Create the population, which is the top-level object for a NEAT run. p = neat.Population(config) # Add a stdout reporter to show progress in the terminal. p.add_reporter(neat.StdOutReporter(True)) stats = neat.StatisticsReporter() p.add_reporter(stats) # Run for up to 300 generations. pe = neat.ThreadedEvaluator(4, eval_genome) winner = p.run(pe.evaluate, 300) pe.stop() # Display the winning genome. print('\nBest genome:\n{!s}'.format(winner)) # Show output of the most fit genome against training data. print('\nOutput:') winner_net = neat.nn.FeedForwardNetwork.create(winner, config) for xi, xo in zip(xor_inputs, xor_outputs): output = winner_net.activate(xi) print( "input {!r}, expected output {!r}, got {!r}".format(xi, xo, output) ) if visualize is not None: node_names = {-1: 'A', -2: 'B', 0: 'A XOR B'} visualize.draw_net(config, winner, True, node_names=node_names) visualize.plot_stats(stats, ylog=False, view=True) visualize.plot_species(stats, view=True)
[ "def", "run", "(", "config_file", ")", ":", "# Load configuration.", "config", "=", "neat", ".", "Config", "(", "neat", ".", "DefaultGenome", ",", "neat", ".", "DefaultReproduction", ",", "neat", ".", "DefaultSpeciesSet", ",", "neat", ".", "DefaultStagnation", ...
load the config, create a population, evolve and show the result
[ "load", "the", "config", "create", "a", "population", "evolve", "and", "show", "the", "result" ]
e3dbe77c0d776eae41d598e6439e6ac02ab90b18
https://github.com/CodeReclaimers/neat-python/blob/e3dbe77c0d776eae41d598e6439e6ac02ab90b18/examples/xor/evolve-feedforward-threaded.py#L49-L85
train
215,097
CodeReclaimers/neat-python
examples/memory-variable/visualize.py
plot_stats
def plot_stats(statistics, ylog=False, view=False, filename='avg_fitness.svg'): """ Plots the population's average and best fitness. """ if plt is None: warnings.warn("This display is not available due to a missing optional dependency (matplotlib)") return generation = range(len(statistics.most_fit_genomes)) best_fitness = [c.fitness for c in statistics.most_fit_genomes] avg_fitness = np.array(statistics.get_fitness_mean()) stdev_fitness = np.array(statistics.get_fitness_stdev()) plt.plot(generation, avg_fitness, 'b-', label="average") plt.plot(generation, avg_fitness - stdev_fitness, 'g-.', label="-1 sd") plt.plot(generation, avg_fitness + stdev_fitness, 'g-.', label="+1 sd") plt.plot(generation, best_fitness, 'r-', label="best") plt.title("Population's average and best fitness") plt.xlabel("Generations") plt.ylabel("Fitness") plt.grid() plt.legend(loc="best") if ylog: plt.gca().set_yscale('symlog') plt.savefig(filename) if view: plt.show() plt.close()
python
def plot_stats(statistics, ylog=False, view=False, filename='avg_fitness.svg'): """ Plots the population's average and best fitness. """ if plt is None: warnings.warn("This display is not available due to a missing optional dependency (matplotlib)") return generation = range(len(statistics.most_fit_genomes)) best_fitness = [c.fitness for c in statistics.most_fit_genomes] avg_fitness = np.array(statistics.get_fitness_mean()) stdev_fitness = np.array(statistics.get_fitness_stdev()) plt.plot(generation, avg_fitness, 'b-', label="average") plt.plot(generation, avg_fitness - stdev_fitness, 'g-.', label="-1 sd") plt.plot(generation, avg_fitness + stdev_fitness, 'g-.', label="+1 sd") plt.plot(generation, best_fitness, 'r-', label="best") plt.title("Population's average and best fitness") plt.xlabel("Generations") plt.ylabel("Fitness") plt.grid() plt.legend(loc="best") if ylog: plt.gca().set_yscale('symlog') plt.savefig(filename) if view: plt.show() plt.close()
[ "def", "plot_stats", "(", "statistics", ",", "ylog", "=", "False", ",", "view", "=", "False", ",", "filename", "=", "'avg_fitness.svg'", ")", ":", "if", "plt", "is", "None", ":", "warnings", ".", "warn", "(", "\"This display is not available due to a missing opt...
Plots the population's average and best fitness.
[ "Plots", "the", "population", "s", "average", "and", "best", "fitness", "." ]
e3dbe77c0d776eae41d598e6439e6ac02ab90b18
https://github.com/CodeReclaimers/neat-python/blob/e3dbe77c0d776eae41d598e6439e6ac02ab90b18/examples/memory-variable/visualize.py#L12-L40
train
215,098
CodeReclaimers/neat-python
examples/memory-variable/visualize.py
plot_species
def plot_species(statistics, view=False, filename='speciation.svg'): """ Visualizes speciation throughout evolution. """ if plt is None: warnings.warn("This display is not available due to a missing optional dependency (matplotlib)") return species_sizes = statistics.get_species_sizes() num_generations = len(species_sizes) curves = np.array(species_sizes).T fig, ax = plt.subplots() ax.stackplot(range(num_generations), *curves) plt.title("Speciation") plt.ylabel("Size per Species") plt.xlabel("Generations") plt.savefig(filename) if view: plt.show() plt.close()
python
def plot_species(statistics, view=False, filename='speciation.svg'): """ Visualizes speciation throughout evolution. """ if plt is None: warnings.warn("This display is not available due to a missing optional dependency (matplotlib)") return species_sizes = statistics.get_species_sizes() num_generations = len(species_sizes) curves = np.array(species_sizes).T fig, ax = plt.subplots() ax.stackplot(range(num_generations), *curves) plt.title("Speciation") plt.ylabel("Size per Species") plt.xlabel("Generations") plt.savefig(filename) if view: plt.show() plt.close()
[ "def", "plot_species", "(", "statistics", ",", "view", "=", "False", ",", "filename", "=", "'speciation.svg'", ")", ":", "if", "plt", "is", "None", ":", "warnings", ".", "warn", "(", "\"This display is not available due to a missing optional dependency (matplotlib)\"", ...
Visualizes speciation throughout evolution.
[ "Visualizes", "speciation", "throughout", "evolution", "." ]
e3dbe77c0d776eae41d598e6439e6ac02ab90b18
https://github.com/CodeReclaimers/neat-python/blob/e3dbe77c0d776eae41d598e6439e6ac02ab90b18/examples/memory-variable/visualize.py#L89-L111
train
215,099