body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
14d3891132368121e54044031141e893e0d1669ab2fb530f36e2bc5cad97188a
def testMalformedHTTP_ACCEPT_CHARSET(self): 'Test for Launchpad #253362.' request = {'HTTP_ACCEPT_CHARSET': 'utf-8;q=0.7,iso-8859-1;q=0.2*;q=0.1'} browser_charsets = HTTPCharsets(request) self.assertEqual(list(browser_charsets.getPreferredCharsets()), ['utf-8', 'iso-8859-1'])
Test for Launchpad #253362.
src/zope/publisher/tests/test_httpcharsets.py
testMalformedHTTP_ACCEPT_CHARSET
Shoobx/zope.publisher
3
python
def testMalformedHTTP_ACCEPT_CHARSET(self): request = {'HTTP_ACCEPT_CHARSET': 'utf-8;q=0.7,iso-8859-1;q=0.2*;q=0.1'} browser_charsets = HTTPCharsets(request) self.assertEqual(list(browser_charsets.getPreferredCharsets()), ['utf-8', 'iso-8859-1'])
def testMalformedHTTP_ACCEPT_CHARSET(self): request = {'HTTP_ACCEPT_CHARSET': 'utf-8;q=0.7,iso-8859-1;q=0.2*;q=0.1'} browser_charsets = HTTPCharsets(request) self.assertEqual(list(browser_charsets.getPreferredCharsets()), ['utf-8', 'iso-8859-1'])<|docstring|>Test for Launchpad #253362.<|endoftext|>
89887afa29f82af26a0e83b744e9443c7df3939d3548bf9207a34a5751e07a8f
def useradd(self, username, expiration=None, comment=None): "\n Create user account with 'username'\n " userentry = self.get_userentry(username) if (userentry is not None): logger.warn('User {0} already exists, skip useradd', username) return if (expiration is not None): cmd = ['pw', 'useradd', username, '-e', expiration, '-m'] else: cmd = ['pw', 'useradd', username, '-m'] if (comment is not None): cmd.extend(['-c', comment]) self._run_command_raising_OSUtilError(cmd, err_msg='Failed to create user account:{0}'.format(username))
Create user account with 'username'
azurelinuxagent/common/osutil/freebsd.py
useradd
magnologan/WALinuxAgent
423
python
def useradd(self, username, expiration=None, comment=None): "\n \n " userentry = self.get_userentry(username) if (userentry is not None): logger.warn('User {0} already exists, skip useradd', username) return if (expiration is not None): cmd = ['pw', 'useradd', username, '-e', expiration, '-m'] else: cmd = ['pw', 'useradd', username, '-m'] if (comment is not None): cmd.extend(['-c', comment]) self._run_command_raising_OSUtilError(cmd, err_msg='Failed to create user account:{0}'.format(username))
def useradd(self, username, expiration=None, comment=None): "\n \n " userentry = self.get_userentry(username) if (userentry is not None): logger.warn('User {0} already exists, skip useradd', username) return if (expiration is not None): cmd = ['pw', 'useradd', username, '-e', expiration, '-m'] else: cmd = ['pw', 'useradd', username, '-m'] if (comment is not None): cmd.extend(['-c', comment]) self._run_command_raising_OSUtilError(cmd, err_msg='Failed to create user account:{0}'.format(username))<|docstring|>Create user account with 'username'<|endoftext|>
3d3ba74d583d8f45d6afd5977398743979815dbe6fa94c7ed4fcd0aedf90aa8d
@staticmethod def read_route_table(): '\n Return a list of strings comprising the route table as in the Linux /proc/net/route format. The input taken is from FreeBSDs\n `netstat -rn -f inet` command. Here is what the function does in detail:\n\n 1. Runs `netstat -rn -f inet` which outputs a column formatted list of ipv4 routes in priority order like so:\n\n > Routing tables\n > \n > Internet:\n > Destination Gateway Flags Refs Use Netif Expire\n > default 61.221.xx.yy UGS 0 247 em1\n > 10 10.10.110.5 UGS 0 50 em0\n > 10.10.110/26 link#1 UC 0 0 em0\n > 10.10.110.5 00:1b:0d:e6:58:40 UHLW 2 0 em0 1145\n > 61.221.xx.yy/29 link#2 UC 0 0 em1\n > 61.221.xx.yy 00:1b:0d:e6:57:c0 UHLW 2 0 em1 1055\n > 61.221.xx/24 link#2 UC 0 0 em1\n > 127.0.0.1 127.0.0.1 UH 0 0 lo0\n \n 2. Convert it to an array of lines that resemble an equivalent /proc/net/route content on a Linux system like so:\n\n > Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT\n > gre828 00000000 00000000 0001 0 0 0 000000F8 0 0 0\n > ens160 00000000 FE04700A 0003 0 0 100 00000000 0 0 0\n > gre828 00000008 00000000 0001 0 0 0 000000FE 0 0 0\n > ens160 0004700A 00000000 0001 0 0 100 00FFFFFF 0 0 0\n > gre828 2504700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n > gre828 3704700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n > gre828 4104700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n\n :return: Entries in the ipv4 route priority list from `netstat -rn -f inet` in the linux `/proc/net/route` style\n :rtype: list(str)\n ' def _get_netstat_rn_ipv4_routes(): '\n Runs `netstat -rn -f inet` and parses its output and returns a list of routes where the key is the column name\n and the value is the value in the column, stripped of leading and trailing whitespace.\n\n :return: List of dictionaries representing routes in the ipv4 route priority list from `netstat -rn -f inet`\n :rtype: list(dict)\n ' cmd = ['netstat', '-rn', '-f', 'inet'] output = shellutil.run_command(cmd, log_error=True) output_lines = output.split('\n') if 
(len(output_lines) < 3): raise OSUtilError('`netstat -rn -f inet` output seems to be empty') output_lines = [line.strip() for line in output_lines if line] if ('Internet:' not in output_lines): raise OSUtilError('`netstat -rn -f inet` output seems to contain no ipv4 routes') route_header_line = (output_lines.index('Internet:') + 1) route_start_line = (route_header_line + 1) route_line_length = max([len(line) for line in output_lines[route_header_line:]]) netstat_route_list = [line.ljust(route_line_length) for line in output_lines[route_start_line:]] _route_headers = output_lines[route_header_line].split() n_route_headers = len(_route_headers) route_columns = {} for i in range(0, (n_route_headers - 1)): route_columns[_route_headers[i]] = (output_lines[route_header_line].index(_route_headers[i]), (output_lines[route_header_line].index(_route_headers[(i + 1)]) - 1)) route_columns[_route_headers[(n_route_headers - 1)]] = (output_lines[route_header_line].index(_route_headers[(n_route_headers - 1)]), None) netstat_routes = [] n_netstat_routes = len(netstat_route_list) for i in range(0, n_netstat_routes): netstat_route = {} for column in route_columns: netstat_route[column] = netstat_route_list[i][route_columns[column][0]:route_columns[column][1]].strip() netstat_route['Metric'] = (n_netstat_routes - i) netstat_routes.append(netstat_route) return netstat_routes def _ipv4_ascii_address_to_hex(ipv4_ascii_address): '\n Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation\n (ie. "0100007F") string.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return ('%08X' % int(binascii.hexlify(struct.pack('!I', struct.unpack('=I', socket.inet_pton(socket.AF_INET, ipv4_ascii_address))[0])), 16)) def _ipv4_cidr_mask_to_hex(ipv4_cidr_mask): '\n Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation\n (ie. 
"FFFFFFFF") string representing its bitmask form.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return '{0:08x}'.format(struct.unpack('=I', struct.pack('!I', ((4294967295 << (32 - ipv4_cidr_mask)) & 4294967295)))[0]).upper() def _ipv4_cidr_destination_to_hex(destination): '\n Converts an destination address from its CIDR notation (ie. 127.0.0.1/32 or default or localhost) to an 8\n digit padded hex notation (ie. "0100007F" or "00000000" or "0100007F") string and its subnet bitmask\n also in hex (FFFFFFFF).\n\n :return: tuple of 8 character long hex string representation of the IP and 8 character long hex string representation of the subnet mask\n :rtype: tuple(string, int)\n ' destination_ip = '0.0.0.0' destination_subnetmask = 32 if (destination != 'default'): if (destination == 'localhost'): destination_ip = '127.0.0.1' else: destination_ip = destination.split('/') if (len(destination_ip) > 1): destination_subnetmask = int(destination_ip[1]) destination_ip = destination_ip[0] hex_destination_ip = _ipv4_ascii_address_to_hex(destination_ip) hex_destination_subnetmask = _ipv4_cidr_mask_to_hex(destination_subnetmask) return (hex_destination_ip, hex_destination_subnetmask) def _try_ipv4_gateway_to_hex(gateway): '\n If the gateway is an IPv4 address, return its IP in hex, else, return "00000000"\n\n :return: 8 character long hex string representation of the IP of the gateway\n :rtype: string\n ' try: return _ipv4_ascii_address_to_hex(gateway) except socket.error: return '00000000' def _ascii_route_flags_to_bitmask(ascii_route_flags): '\n Converts route flags to a bitmask of their equivalent linux/route.h values.\n\n :return: integer representation of a 16 bit mask\n :rtype: int\n ' bitmask_flags = 0 RTF_UP = 1 RTF_GATEWAY = 2 RTF_HOST = 4 RTF_DYNAMIC = 16 if ('U' in ascii_route_flags): bitmask_flags |= RTF_UP if ('G' in ascii_route_flags): bitmask_flags |= RTF_GATEWAY if ('H' in ascii_route_flags): bitmask_flags |= RTF_HOST if 
('S' not in ascii_route_flags): bitmask_flags |= RTF_DYNAMIC return bitmask_flags def _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route): '\n Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie:\n > default 0.0.0.0 UGS 0 247 em1\n to\n > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0\n\n :return: string representation of the equivalent /proc/net/route line\n :rtype: string\n ' network_interface = netstat_route['Netif'] (hex_destination_ip, hex_destination_subnetmask) = _ipv4_cidr_destination_to_hex(netstat_route['Destination']) hex_gateway = _try_ipv4_gateway_to_hex(netstat_route['Gateway']) bitmask_flags = _ascii_route_flags_to_bitmask(netstat_route['Flags']) dummy_refcount = 0 dummy_use = 0 route_metric = netstat_route['Metric'] dummy_mtu = 0 dummy_window = 0 dummy_irtt = 0 return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}'.format(network_interface, hex_destination_ip, hex_gateway, bitmask_flags, dummy_refcount, dummy_use, route_metric, hex_destination_subnetmask, dummy_mtu, dummy_window, dummy_irtt) linux_style_route_file = ['Iface\tDestination\tGateway\tFlags\tRefCnt\tUse\tMetric\tMask\tMTU\tWindow\tIRTT'] try: netstat_routes = _get_netstat_rn_ipv4_routes() if (len(netstat_routes) > 0): missing_headers = [] if ('Netif' not in netstat_routes[0]): missing_headers.append('Netif') if ('Destination' not in netstat_routes[0]): missing_headers.append('Destination') if ('Gateway' not in netstat_routes[0]): missing_headers.append('Gateway') if ('Flags' not in netstat_routes[0]): missing_headers.append('Flags') if missing_headers: raise KeyError('`netstat -rn -f inet` output is missing columns required to convert to the Linux /proc/net/route format; columns are [{0}]'.format(missing_headers)) for netstat_route in netstat_routes: try: linux_style_route = _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route) linux_style_route_file.append(linux_style_route) except Exception: continue except 
Exception as e: logger.error('Cannot read route table [{0}]', ustr(e)) return linux_style_route_file
Return a list of strings comprising the route table as in the Linux /proc/net/route format. The input taken is from FreeBSDs `netstat -rn -f inet` command. Here is what the function does in detail: 1. Runs `netstat -rn -f inet` which outputs a column formatted list of ipv4 routes in priority order like so: > Routing tables > > Internet: > Destination Gateway Flags Refs Use Netif Expire > default 61.221.xx.yy UGS 0 247 em1 > 10 10.10.110.5 UGS 0 50 em0 > 10.10.110/26 link#1 UC 0 0 em0 > 10.10.110.5 00:1b:0d:e6:58:40 UHLW 2 0 em0 1145 > 61.221.xx.yy/29 link#2 UC 0 0 em1 > 61.221.xx.yy 00:1b:0d:e6:57:c0 UHLW 2 0 em1 1055 > 61.221.xx/24 link#2 UC 0 0 em1 > 127.0.0.1 127.0.0.1 UH 0 0 lo0 2. Convert it to an array of lines that resemble an equivalent /proc/net/route content on a Linux system like so: > Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT > gre828 00000000 00000000 0001 0 0 0 000000F8 0 0 0 > ens160 00000000 FE04700A 0003 0 0 100 00000000 0 0 0 > gre828 00000008 00000000 0001 0 0 0 000000FE 0 0 0 > ens160 0004700A 00000000 0001 0 0 100 00FFFFFF 0 0 0 > gre828 2504700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0 > gre828 3704700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0 > gre828 4104700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0 :return: Entries in the ipv4 route priority list from `netstat -rn -f inet` in the linux `/proc/net/route` style :rtype: list(str)
azurelinuxagent/common/osutil/freebsd.py
read_route_table
magnologan/WALinuxAgent
423
python
@staticmethod def read_route_table(): '\n Return a list of strings comprising the route table as in the Linux /proc/net/route format. The input taken is from FreeBSDs\n `netstat -rn -f inet` command. Here is what the function does in detail:\n\n 1. Runs `netstat -rn -f inet` which outputs a column formatted list of ipv4 routes in priority order like so:\n\n > Routing tables\n > \n > Internet:\n > Destination Gateway Flags Refs Use Netif Expire\n > default 61.221.xx.yy UGS 0 247 em1\n > 10 10.10.110.5 UGS 0 50 em0\n > 10.10.110/26 link#1 UC 0 0 em0\n > 10.10.110.5 00:1b:0d:e6:58:40 UHLW 2 0 em0 1145\n > 61.221.xx.yy/29 link#2 UC 0 0 em1\n > 61.221.xx.yy 00:1b:0d:e6:57:c0 UHLW 2 0 em1 1055\n > 61.221.xx/24 link#2 UC 0 0 em1\n > 127.0.0.1 127.0.0.1 UH 0 0 lo0\n \n 2. Convert it to an array of lines that resemble an equivalent /proc/net/route content on a Linux system like so:\n\n > Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT\n > gre828 00000000 00000000 0001 0 0 0 000000F8 0 0 0\n > ens160 00000000 FE04700A 0003 0 0 100 00000000 0 0 0\n > gre828 00000008 00000000 0001 0 0 0 000000FE 0 0 0\n > ens160 0004700A 00000000 0001 0 0 100 00FFFFFF 0 0 0\n > gre828 2504700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n > gre828 3704700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n > gre828 4104700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n\n :return: Entries in the ipv4 route priority list from `netstat -rn -f inet` in the linux `/proc/net/route` style\n :rtype: list(str)\n ' def _get_netstat_rn_ipv4_routes(): '\n Runs `netstat -rn -f inet` and parses its output and returns a list of routes where the key is the column name\n and the value is the value in the column, stripped of leading and trailing whitespace.\n\n :return: List of dictionaries representing routes in the ipv4 route priority list from `netstat -rn -f inet`\n :rtype: list(dict)\n ' cmd = ['netstat', '-rn', '-f', 'inet'] output = shellutil.run_command(cmd, log_error=True) output_lines = output.split('\n') if 
(len(output_lines) < 3): raise OSUtilError('`netstat -rn -f inet` output seems to be empty') output_lines = [line.strip() for line in output_lines if line] if ('Internet:' not in output_lines): raise OSUtilError('`netstat -rn -f inet` output seems to contain no ipv4 routes') route_header_line = (output_lines.index('Internet:') + 1) route_start_line = (route_header_line + 1) route_line_length = max([len(line) for line in output_lines[route_header_line:]]) netstat_route_list = [line.ljust(route_line_length) for line in output_lines[route_start_line:]] _route_headers = output_lines[route_header_line].split() n_route_headers = len(_route_headers) route_columns = {} for i in range(0, (n_route_headers - 1)): route_columns[_route_headers[i]] = (output_lines[route_header_line].index(_route_headers[i]), (output_lines[route_header_line].index(_route_headers[(i + 1)]) - 1)) route_columns[_route_headers[(n_route_headers - 1)]] = (output_lines[route_header_line].index(_route_headers[(n_route_headers - 1)]), None) netstat_routes = [] n_netstat_routes = len(netstat_route_list) for i in range(0, n_netstat_routes): netstat_route = {} for column in route_columns: netstat_route[column] = netstat_route_list[i][route_columns[column][0]:route_columns[column][1]].strip() netstat_route['Metric'] = (n_netstat_routes - i) netstat_routes.append(netstat_route) return netstat_routes def _ipv4_ascii_address_to_hex(ipv4_ascii_address): '\n Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation\n (ie. "0100007F") string.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return ('%08X' % int(binascii.hexlify(struct.pack('!I', struct.unpack('=I', socket.inet_pton(socket.AF_INET, ipv4_ascii_address))[0])), 16)) def _ipv4_cidr_mask_to_hex(ipv4_cidr_mask): '\n Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation\n (ie. 
"FFFFFFFF") string representing its bitmask form.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return '{0:08x}'.format(struct.unpack('=I', struct.pack('!I', ((4294967295 << (32 - ipv4_cidr_mask)) & 4294967295)))[0]).upper() def _ipv4_cidr_destination_to_hex(destination): '\n Converts an destination address from its CIDR notation (ie. 127.0.0.1/32 or default or localhost) to an 8\n digit padded hex notation (ie. "0100007F" or "00000000" or "0100007F") string and its subnet bitmask\n also in hex (FFFFFFFF).\n\n :return: tuple of 8 character long hex string representation of the IP and 8 character long hex string representation of the subnet mask\n :rtype: tuple(string, int)\n ' destination_ip = '0.0.0.0' destination_subnetmask = 32 if (destination != 'default'): if (destination == 'localhost'): destination_ip = '127.0.0.1' else: destination_ip = destination.split('/') if (len(destination_ip) > 1): destination_subnetmask = int(destination_ip[1]) destination_ip = destination_ip[0] hex_destination_ip = _ipv4_ascii_address_to_hex(destination_ip) hex_destination_subnetmask = _ipv4_cidr_mask_to_hex(destination_subnetmask) return (hex_destination_ip, hex_destination_subnetmask) def _try_ipv4_gateway_to_hex(gateway): '\n If the gateway is an IPv4 address, return its IP in hex, else, return "00000000"\n\n :return: 8 character long hex string representation of the IP of the gateway\n :rtype: string\n ' try: return _ipv4_ascii_address_to_hex(gateway) except socket.error: return '00000000' def _ascii_route_flags_to_bitmask(ascii_route_flags): '\n Converts route flags to a bitmask of their equivalent linux/route.h values.\n\n :return: integer representation of a 16 bit mask\n :rtype: int\n ' bitmask_flags = 0 RTF_UP = 1 RTF_GATEWAY = 2 RTF_HOST = 4 RTF_DYNAMIC = 16 if ('U' in ascii_route_flags): bitmask_flags |= RTF_UP if ('G' in ascii_route_flags): bitmask_flags |= RTF_GATEWAY if ('H' in ascii_route_flags): bitmask_flags |= RTF_HOST if 
('S' not in ascii_route_flags): bitmask_flags |= RTF_DYNAMIC return bitmask_flags def _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route): '\n Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie:\n > default 0.0.0.0 UGS 0 247 em1\n to\n > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0\n\n :return: string representation of the equivalent /proc/net/route line\n :rtype: string\n ' network_interface = netstat_route['Netif'] (hex_destination_ip, hex_destination_subnetmask) = _ipv4_cidr_destination_to_hex(netstat_route['Destination']) hex_gateway = _try_ipv4_gateway_to_hex(netstat_route['Gateway']) bitmask_flags = _ascii_route_flags_to_bitmask(netstat_route['Flags']) dummy_refcount = 0 dummy_use = 0 route_metric = netstat_route['Metric'] dummy_mtu = 0 dummy_window = 0 dummy_irtt = 0 return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}'.format(network_interface, hex_destination_ip, hex_gateway, bitmask_flags, dummy_refcount, dummy_use, route_metric, hex_destination_subnetmask, dummy_mtu, dummy_window, dummy_irtt) linux_style_route_file = ['Iface\tDestination\tGateway\tFlags\tRefCnt\tUse\tMetric\tMask\tMTU\tWindow\tIRTT'] try: netstat_routes = _get_netstat_rn_ipv4_routes() if (len(netstat_routes) > 0): missing_headers = [] if ('Netif' not in netstat_routes[0]): missing_headers.append('Netif') if ('Destination' not in netstat_routes[0]): missing_headers.append('Destination') if ('Gateway' not in netstat_routes[0]): missing_headers.append('Gateway') if ('Flags' not in netstat_routes[0]): missing_headers.append('Flags') if missing_headers: raise KeyError('`netstat -rn -f inet` output is missing columns required to convert to the Linux /proc/net/route format; columns are [{0}]'.format(missing_headers)) for netstat_route in netstat_routes: try: linux_style_route = _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route) linux_style_route_file.append(linux_style_route) except Exception: continue except 
Exception as e: logger.error('Cannot read route table [{0}]', ustr(e)) return linux_style_route_file
@staticmethod def read_route_table(): '\n Return a list of strings comprising the route table as in the Linux /proc/net/route format. The input taken is from FreeBSDs\n `netstat -rn -f inet` command. Here is what the function does in detail:\n\n 1. Runs `netstat -rn -f inet` which outputs a column formatted list of ipv4 routes in priority order like so:\n\n > Routing tables\n > \n > Internet:\n > Destination Gateway Flags Refs Use Netif Expire\n > default 61.221.xx.yy UGS 0 247 em1\n > 10 10.10.110.5 UGS 0 50 em0\n > 10.10.110/26 link#1 UC 0 0 em0\n > 10.10.110.5 00:1b:0d:e6:58:40 UHLW 2 0 em0 1145\n > 61.221.xx.yy/29 link#2 UC 0 0 em1\n > 61.221.xx.yy 00:1b:0d:e6:57:c0 UHLW 2 0 em1 1055\n > 61.221.xx/24 link#2 UC 0 0 em1\n > 127.0.0.1 127.0.0.1 UH 0 0 lo0\n \n 2. Convert it to an array of lines that resemble an equivalent /proc/net/route content on a Linux system like so:\n\n > Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT\n > gre828 00000000 00000000 0001 0 0 0 000000F8 0 0 0\n > ens160 00000000 FE04700A 0003 0 0 100 00000000 0 0 0\n > gre828 00000008 00000000 0001 0 0 0 000000FE 0 0 0\n > ens160 0004700A 00000000 0001 0 0 100 00FFFFFF 0 0 0\n > gre828 2504700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n > gre828 3704700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n > gre828 4104700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0\n\n :return: Entries in the ipv4 route priority list from `netstat -rn -f inet` in the linux `/proc/net/route` style\n :rtype: list(str)\n ' def _get_netstat_rn_ipv4_routes(): '\n Runs `netstat -rn -f inet` and parses its output and returns a list of routes where the key is the column name\n and the value is the value in the column, stripped of leading and trailing whitespace.\n\n :return: List of dictionaries representing routes in the ipv4 route priority list from `netstat -rn -f inet`\n :rtype: list(dict)\n ' cmd = ['netstat', '-rn', '-f', 'inet'] output = shellutil.run_command(cmd, log_error=True) output_lines = output.split('\n') if 
(len(output_lines) < 3): raise OSUtilError('`netstat -rn -f inet` output seems to be empty') output_lines = [line.strip() for line in output_lines if line] if ('Internet:' not in output_lines): raise OSUtilError('`netstat -rn -f inet` output seems to contain no ipv4 routes') route_header_line = (output_lines.index('Internet:') + 1) route_start_line = (route_header_line + 1) route_line_length = max([len(line) for line in output_lines[route_header_line:]]) netstat_route_list = [line.ljust(route_line_length) for line in output_lines[route_start_line:]] _route_headers = output_lines[route_header_line].split() n_route_headers = len(_route_headers) route_columns = {} for i in range(0, (n_route_headers - 1)): route_columns[_route_headers[i]] = (output_lines[route_header_line].index(_route_headers[i]), (output_lines[route_header_line].index(_route_headers[(i + 1)]) - 1)) route_columns[_route_headers[(n_route_headers - 1)]] = (output_lines[route_header_line].index(_route_headers[(n_route_headers - 1)]), None) netstat_routes = [] n_netstat_routes = len(netstat_route_list) for i in range(0, n_netstat_routes): netstat_route = {} for column in route_columns: netstat_route[column] = netstat_route_list[i][route_columns[column][0]:route_columns[column][1]].strip() netstat_route['Metric'] = (n_netstat_routes - i) netstat_routes.append(netstat_route) return netstat_routes def _ipv4_ascii_address_to_hex(ipv4_ascii_address): '\n Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation\n (ie. "0100007F") string.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return ('%08X' % int(binascii.hexlify(struct.pack('!I', struct.unpack('=I', socket.inet_pton(socket.AF_INET, ipv4_ascii_address))[0])), 16)) def _ipv4_cidr_mask_to_hex(ipv4_cidr_mask): '\n Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation\n (ie. 
"FFFFFFFF") string representing its bitmask form.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return '{0:08x}'.format(struct.unpack('=I', struct.pack('!I', ((4294967295 << (32 - ipv4_cidr_mask)) & 4294967295)))[0]).upper() def _ipv4_cidr_destination_to_hex(destination): '\n Converts an destination address from its CIDR notation (ie. 127.0.0.1/32 or default or localhost) to an 8\n digit padded hex notation (ie. "0100007F" or "00000000" or "0100007F") string and its subnet bitmask\n also in hex (FFFFFFFF).\n\n :return: tuple of 8 character long hex string representation of the IP and 8 character long hex string representation of the subnet mask\n :rtype: tuple(string, int)\n ' destination_ip = '0.0.0.0' destination_subnetmask = 32 if (destination != 'default'): if (destination == 'localhost'): destination_ip = '127.0.0.1' else: destination_ip = destination.split('/') if (len(destination_ip) > 1): destination_subnetmask = int(destination_ip[1]) destination_ip = destination_ip[0] hex_destination_ip = _ipv4_ascii_address_to_hex(destination_ip) hex_destination_subnetmask = _ipv4_cidr_mask_to_hex(destination_subnetmask) return (hex_destination_ip, hex_destination_subnetmask) def _try_ipv4_gateway_to_hex(gateway): '\n If the gateway is an IPv4 address, return its IP in hex, else, return "00000000"\n\n :return: 8 character long hex string representation of the IP of the gateway\n :rtype: string\n ' try: return _ipv4_ascii_address_to_hex(gateway) except socket.error: return '00000000' def _ascii_route_flags_to_bitmask(ascii_route_flags): '\n Converts route flags to a bitmask of their equivalent linux/route.h values.\n\n :return: integer representation of a 16 bit mask\n :rtype: int\n ' bitmask_flags = 0 RTF_UP = 1 RTF_GATEWAY = 2 RTF_HOST = 4 RTF_DYNAMIC = 16 if ('U' in ascii_route_flags): bitmask_flags |= RTF_UP if ('G' in ascii_route_flags): bitmask_flags |= RTF_GATEWAY if ('H' in ascii_route_flags): bitmask_flags |= RTF_HOST if 
('S' not in ascii_route_flags): bitmask_flags |= RTF_DYNAMIC return bitmask_flags def _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route): '\n Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie:\n > default 0.0.0.0 UGS 0 247 em1\n to\n > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0\n\n :return: string representation of the equivalent /proc/net/route line\n :rtype: string\n ' network_interface = netstat_route['Netif'] (hex_destination_ip, hex_destination_subnetmask) = _ipv4_cidr_destination_to_hex(netstat_route['Destination']) hex_gateway = _try_ipv4_gateway_to_hex(netstat_route['Gateway']) bitmask_flags = _ascii_route_flags_to_bitmask(netstat_route['Flags']) dummy_refcount = 0 dummy_use = 0 route_metric = netstat_route['Metric'] dummy_mtu = 0 dummy_window = 0 dummy_irtt = 0 return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}'.format(network_interface, hex_destination_ip, hex_gateway, bitmask_flags, dummy_refcount, dummy_use, route_metric, hex_destination_subnetmask, dummy_mtu, dummy_window, dummy_irtt) linux_style_route_file = ['Iface\tDestination\tGateway\tFlags\tRefCnt\tUse\tMetric\tMask\tMTU\tWindow\tIRTT'] try: netstat_routes = _get_netstat_rn_ipv4_routes() if (len(netstat_routes) > 0): missing_headers = [] if ('Netif' not in netstat_routes[0]): missing_headers.append('Netif') if ('Destination' not in netstat_routes[0]): missing_headers.append('Destination') if ('Gateway' not in netstat_routes[0]): missing_headers.append('Gateway') if ('Flags' not in netstat_routes[0]): missing_headers.append('Flags') if missing_headers: raise KeyError('`netstat -rn -f inet` output is missing columns required to convert to the Linux /proc/net/route format; columns are [{0}]'.format(missing_headers)) for netstat_route in netstat_routes: try: linux_style_route = _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route) linux_style_route_file.append(linux_style_route) except Exception: continue except 
Exception as e: logger.error('Cannot read route table [{0}]', ustr(e)) return linux_style_route_file<|docstring|>Return a list of strings comprising the route table as in the Linux /proc/net/route format. The input taken is from FreeBSDs `netstat -rn -f inet` command. Here is what the function does in detail: 1. Runs `netstat -rn -f inet` which outputs a column formatted list of ipv4 routes in priority order like so: > Routing tables > > Internet: > Destination Gateway Flags Refs Use Netif Expire > default 61.221.xx.yy UGS 0 247 em1 > 10 10.10.110.5 UGS 0 50 em0 > 10.10.110/26 link#1 UC 0 0 em0 > 10.10.110.5 00:1b:0d:e6:58:40 UHLW 2 0 em0 1145 > 61.221.xx.yy/29 link#2 UC 0 0 em1 > 61.221.xx.yy 00:1b:0d:e6:57:c0 UHLW 2 0 em1 1055 > 61.221.xx/24 link#2 UC 0 0 em1 > 127.0.0.1 127.0.0.1 UH 0 0 lo0 2. Convert it to an array of lines that resemble an equivalent /proc/net/route content on a Linux system like so: > Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT > gre828 00000000 00000000 0001 0 0 0 000000F8 0 0 0 > ens160 00000000 FE04700A 0003 0 0 100 00000000 0 0 0 > gre828 00000008 00000000 0001 0 0 0 000000FE 0 0 0 > ens160 0004700A 00000000 0001 0 0 100 00FFFFFF 0 0 0 > gre828 2504700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0 > gre828 3704700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0 > gre828 4104700A 00000000 0005 0 0 0 FFFFFFFF 0 0 0 :return: Entries in the ipv4 route priority list from `netstat -rn -f inet` in the linux `/proc/net/route` style :rtype: list(str)<|endoftext|>
a88464226f4c4b638cfad0c43e285579f92fb9ec031f03681c439b2075e48fa6
@staticmethod
def get_list_of_routes(route_table):
    """
    Construct a list of all network routes known to this system.

    :param list(str) route_table: List of text entries from route table, including headers
    :return: a list of network routes
    :rtype: list(RouteEntry)
    """
    entry_count = len(route_table)
    if entry_count < 1:
        # Not even a header line came back from netstat.
        logger.error('netstat -rn -f inet is missing headers')
        return []
    if entry_count == 1:
        # Header only - the table holds no route entries.
        logger.error('netstat -rn -f inet contains no routes')
        return []
    return DefaultOSUtil._build_route_list(route_table)
01dbdad105ff75c049e8eb277360784a580ef6878f98919216d54fe27f25528e
def get_primary_interface(self):
    """
    Get the name of the primary interface, which is the one with the
    default route attached to it; if there are multiple default routes,
    the primary has the lowest Metric.

    :return: the interface which has the default route
    """
    RTF_GATEWAY = 0x02
    DEFAULT_DEST = '00000000'

    if not self.disable_route_warning:
        logger.info('Examine `netstat -rn -f inet` for primary interface')

    route_table = self.read_route_table()
    # Default routes: all-zero destination with the gateway flag set.
    default_routes = [
        route for route in self.get_list_of_routes(route_table)
        if route.destination == DEFAULT_DEST and (RTF_GATEWAY & route.flags)
    ]

    primary_interface = None
    if default_routes:
        # When several default routes exist, the lowest metric wins.
        best_route = min(default_routes, key=lambda route: int(route.metric))
        primary_interface = best_route.interface

    if primary_interface is None:
        primary_interface = ''
        if not self.disable_route_warning:
            logger.warn('Could not determine primary interface, '
                        'please ensure routes are correct')
            logger.warn('Primary interface examination will retry silently')
            # Suppress repeated warnings on subsequent retries.
            self.disable_route_warning = True
    else:
        logger.info('Primary interface is [{0}]'.format(primary_interface))
        self.disable_route_warning = False
    return primary_interface
f07caa3c43e467683a916d539f3326c583c040acd825e09ada7bb0714b28a528
def is_primary_interface(self, ifname):
    """
    Indicate whether the specified interface is the primary.

    :param ifname: the name of the interface - eth0, lo, etc.
    :return: True if this interface binds the default route
    """
    primary = self.get_primary_interface()
    return primary == ifname
8c15409856eec786555e52427c4b65bd98044bd5558c0c904c431f10a5b3f1c7
def is_loopback(self, ifname):
    """
    Determine if a named interface is loopback.
    """
    # FreeBSD loopback devices are named lo0, lo1, ...
    looks_like_loopback = ifname.startswith('lo')
    return looks_like_loopback
c0b6548cfa9a3ee55aaecee898906d60321109cb0cb7290e66ee574eeab68fe6
def is_missing_default_route(self):
    """
    For FreeBSD, the default broadcast goes to current default gw, not a all-ones broadcast address, need to
    specify the route manually to get it work in a VNET environment.
    SEE ALSO: man ip(4) IP_ONESBCAST,
    """
    RTF_GATEWAY = 0x02
    DEFAULT_DEST = '00000000'

    route_table = self.read_route_table()
    # A default route has an all-zero destination and the gateway flag set.
    has_default_route = any(
        route.destination == DEFAULT_DEST and (RTF_GATEWAY & route.flags)
        for route in self.get_list_of_routes(route_table)
    )
    return not has_default_route
b478cd7a4084d46fb1018f8b60fb1b83656e8fbfc3a094b98df3137376f23db4
@staticmethod
def _get_net_info():
    """
    There is no SIOCGIFCONF
    on freeBSD - just parse ifconfig.
    Returns strings: iface, inet4_addr, and mac
    or 'None,None,None' if unable to parse.
    We will sleep and retry as the network must be up.
    """
    inet = ''
    mac = ''

    err, output = shellutil.run_get_output('ifconfig -l ether', chk_err=False)
    if err:
        raise OSUtilError("Can't find ether interface:{0}".format(output))
    ifaces = output.split()
    if not ifaces:
        raise OSUtilError("Can't find ether interface.")
    # Use the first ethernet interface ifconfig reports.
    iface = ifaces[0]

    err, output = shellutil.run_get_output('ifconfig ' + iface, chk_err=False)
    if err:
        raise OSUtilError("Can't get info for interface:{0}".format(iface))

    # Pick the IPv4 address and MAC out of the per-interface output;
    # the wanted value is the second whitespace-separated token.
    for line in output.split('\n'):
        if 'inet ' in line:
            inet = line.split()[1]
        elif 'ether ' in line:
            mac = line.split()[1]
    logger.verbose('Interface info: ({0},{1},{2})', iface, inet, mac)
    return iface, inet, mac
d674c0debb984db8b08041dd3d096da036abf43c3c4ea854082272a19c0ef838
def device_for_ide_port(self, port_id):
    """
    Return device name attached to ide port 'n'.
    """
    # Only four IDE ports (0-3) are supported.
    if port_id > 3:
        return None
    # Ports 0/1 map to controller suffix 00000000, ports 2/3 to 00000001
    # with the port number rebased - together g0/g1 form the deviceid prefix.
    g0 = '00000000'
    if port_id > 1:
        g0 = '00000001'
        port_id = port_id - 2
    err, output = shellutil.run_get_output('sysctl dev.storvsc | grep pnpinfo | grep deviceid=')
    if err:
        return None
    g1 = '000' + ustr(port_id)
    g0g1 = '{0}-{1}'.format(g0, g1)

    # Find the storvsc unit number 'X' in lines like
    # 'dev.storvsc.X.%pnpinfo: classid=... deviceid=00000000-0001-...'
    cmd_search_ide = 'sysctl dev.storvsc | grep pnpinfo | grep deviceid={0}'.format(g0g1)
    err, output = shellutil.run_get_output(cmd_search_ide)
    if err:
        return None
    cmd_extract_id = cmd_search_ide + "|awk -F . '{print $3}'"
    err, output = shellutil.run_get_output(cmd_extract_id)

    # Resolve the device name: try 'blkvscX' first, then 'storvscX'.
    output = output.rstrip()
    cmd_search_blkvsc = "camcontrol devlist -b | grep blkvsc{0} | awk '{{print $1}}'".format(output)
    err, output = shellutil.run_get_output(cmd_search_blkvsc)
    if err == 0:
        output = output.rstrip()
        cmd_search_dev = "camcontrol devlist | grep {0} | awk -F \\( '{{print $2}}'|sed -e 's/.*(//'| sed -e 's/).*//'".format(output)
        err, output = shellutil.run_get_output(cmd_search_dev)
        if err == 0:
            for candidate in output.rstrip().split(','):
                # skip 'pass*' entries - not usable device names here
                if not candidate.startswith('pass'):
                    return candidate

    cmd_search_storvsc = "camcontrol devlist -b | grep storvsc{0} | awk '{{print $1}}'".format(output)
    err, output = shellutil.run_get_output(cmd_search_storvsc)
    if err == 0:
        output = output.rstrip()
        cmd_search_dev = "camcontrol devlist | grep {0} | awk -F \\( '{{print $2}}'|sed -e 's/.*(//'| sed -e 's/).*//'".format(output)
        err, output = shellutil.run_get_output(cmd_search_dev)
        if err == 0:
            for candidate in output.rstrip().split(','):
                if not candidate.startswith('pass'):
                    return candidate
    return None
f0eed52d6f13939503dd47999de0e2a93f85997adbd49bb594b3397f43e5b23f
def _get_netstat_rn_ipv4_routes():
    """
    Runs `netstat -rn -f inet` and parses its output and returns a list of routes where the key is the column name
    and the value is the value in the column, stripped of leading and trailing whitespace.

    :return: List of dictionaries representing routes in the ipv4 route priority list from `netstat -rn -f inet`
    :rtype: list(dict)
    """
    output = shellutil.run_command(['netstat', '-rn', '-f', 'inet'], log_error=True)
    lines = output.split('\n')
    if len(lines) < 3:
        raise OSUtilError('`netstat -rn -f inet` output seems to be empty')
    lines = [line.strip() for line in lines if line]
    if 'Internet:' not in lines:
        raise OSUtilError('`netstat -rn -f inet` output seems to contain no ipv4 routes')

    # The column header follows the 'Internet:' marker; routes follow it.
    header_idx = lines.index('Internet:') + 1
    header_line = lines[header_idx]
    # Pad every route line to the widest line so fixed-offset slicing is safe.
    width = max(len(line) for line in lines[header_idx:])
    padded_routes = [line.ljust(width) for line in lines[header_idx + 1:]]

    # Map each column name to its (start, end) character offsets, derived
    # from where the header words begin; the last column runs to end-of-line.
    headers = header_line.split()
    columns = {}
    for pos in range(len(headers) - 1):
        columns[headers[pos]] = (header_line.index(headers[pos]),
                                 header_line.index(headers[pos + 1]) - 1)
    columns[headers[-1]] = (header_line.index(headers[-1]), None)

    routes = []
    total = len(padded_routes)
    for idx in range(total):
        route = {}
        for name in columns:
            start, end = columns[name]
            route[name] = padded_routes[idx][start:end].strip()
        # netstat lists routes in priority order; synthesize a Metric so
        # earlier (higher-priority) entries carry the larger value.
        route['Metric'] = total - idx
        routes.append(route)
    return routes
f7ab335f925e548c934a1411573f0e1c592e63bf45f14fdd278093de37f81440
def _ipv4_ascii_address_to_hex(ipv4_ascii_address): '\n Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation\n (ie. "0100007F") string.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return ('%08X' % int(binascii.hexlify(struct.pack('!I', struct.unpack('=I', socket.inet_pton(socket.AF_INET, ipv4_ascii_address))[0])), 16))
Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation (ie. "0100007F") string. :return: 8 character long hex string representation of the IP :rtype: string
azurelinuxagent/common/osutil/freebsd.py
_ipv4_ascii_address_to_hex
magnologan/WALinuxAgent
423
python
def _ipv4_ascii_address_to_hex(ipv4_ascii_address): '\n Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation\n (ie. "0100007F") string.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return ('%08X' % int(binascii.hexlify(struct.pack('!I', struct.unpack('=I', socket.inet_pton(socket.AF_INET, ipv4_ascii_address))[0])), 16))
def _ipv4_ascii_address_to_hex(ipv4_ascii_address): '\n Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation\n (ie. "0100007F") string.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return ('%08X' % int(binascii.hexlify(struct.pack('!I', struct.unpack('=I', socket.inet_pton(socket.AF_INET, ipv4_ascii_address))[0])), 16))<|docstring|>Converts an IPv4 32bit address from its ASCII notation (ie. 127.0.0.1) to an 8 digit padded hex notation (ie. "0100007F") string. :return: 8 character long hex string representation of the IP :rtype: string<|endoftext|>
54f6dcb36b2f537e8bdbdbe17e033c6d3503f5c673c157e63aea1a5bf74e0cce
def _ipv4_cidr_mask_to_hex(ipv4_cidr_mask): '\n Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation\n (ie. "FFFFFFFF") string representing its bitmask form.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return '{0:08x}'.format(struct.unpack('=I', struct.pack('!I', ((4294967295 << (32 - ipv4_cidr_mask)) & 4294967295)))[0]).upper()
Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation (ie. "FFFFFFFF") string representing its bitmask form. :return: 8 character long hex string representation of the IP :rtype: string
azurelinuxagent/common/osutil/freebsd.py
_ipv4_cidr_mask_to_hex
magnologan/WALinuxAgent
423
python
def _ipv4_cidr_mask_to_hex(ipv4_cidr_mask): '\n Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation\n (ie. "FFFFFFFF") string representing its bitmask form.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return '{0:08x}'.format(struct.unpack('=I', struct.pack('!I', ((4294967295 << (32 - ipv4_cidr_mask)) & 4294967295)))[0]).upper()
def _ipv4_cidr_mask_to_hex(ipv4_cidr_mask): '\n Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation\n (ie. "FFFFFFFF") string representing its bitmask form.\n\n :return: 8 character long hex string representation of the IP\n :rtype: string\n ' return '{0:08x}'.format(struct.unpack('=I', struct.pack('!I', ((4294967295 << (32 - ipv4_cidr_mask)) & 4294967295)))[0]).upper()<|docstring|>Converts an subnet mask from its CIDR integer notation (ie. 32) to an 8 digit padded hex notation (ie. "FFFFFFFF") string representing its bitmask form. :return: 8 character long hex string representation of the IP :rtype: string<|endoftext|>
fd19175b0da53cfa1f96ae418d0d2f6e0501a845d2682bf2c63aa9165b6ad06a
def _ipv4_cidr_destination_to_hex(destination):
    """
    Converts an destination address from its CIDR notation (ie. 127.0.0.1/32 or default or localhost) to an 8
    digit padded hex notation (ie. "0100007F" or "00000000" or "0100007F") string and its subnet bitmask
    also in hex (FFFFFFFF).

    :return: tuple of 8 character long hex string representation of the IP and 8 character long hex string representation of the subnet mask
    :rtype: tuple(string, int)
    """
    address = '0.0.0.0'
    prefix_len = 32
    if destination != 'default':
        if destination == 'localhost':
            address = '127.0.0.1'
        else:
            # Split '<ip>/<prefix>'; a bare IP keeps the /32 default.
            parts = destination.split('/')
            address = parts[0]
            if len(parts) > 1:
                prefix_len = int(parts[1])
    return (_ipv4_ascii_address_to_hex(address),
            _ipv4_cidr_mask_to_hex(prefix_len))
983602b7474b556d7c8201fadb5aa87defa65b2989f9853b3396abec947a38ae
def _try_ipv4_gateway_to_hex(gateway): '\n If the gateway is an IPv4 address, return its IP in hex, else, return "00000000"\n\n :return: 8 character long hex string representation of the IP of the gateway\n :rtype: string\n ' try: return _ipv4_ascii_address_to_hex(gateway) except socket.error: return '00000000'
If the gateway is an IPv4 address, return its IP in hex, else, return "00000000" :return: 8 character long hex string representation of the IP of the gateway :rtype: string
azurelinuxagent/common/osutil/freebsd.py
_try_ipv4_gateway_to_hex
magnologan/WALinuxAgent
423
python
def _try_ipv4_gateway_to_hex(gateway): '\n If the gateway is an IPv4 address, return its IP in hex, else, return "00000000"\n\n :return: 8 character long hex string representation of the IP of the gateway\n :rtype: string\n ' try: return _ipv4_ascii_address_to_hex(gateway) except socket.error: return '00000000'
def _try_ipv4_gateway_to_hex(gateway): '\n If the gateway is an IPv4 address, return its IP in hex, else, return "00000000"\n\n :return: 8 character long hex string representation of the IP of the gateway\n :rtype: string\n ' try: return _ipv4_ascii_address_to_hex(gateway) except socket.error: return '00000000'<|docstring|>If the gateway is an IPv4 address, return its IP in hex, else, return "00000000" :return: 8 character long hex string representation of the IP of the gateway :rtype: string<|endoftext|>
a827aa723028381e323b01e30e07443832e7b5e44ccd3fee8ef4c61b43a75476
def _ascii_route_flags_to_bitmask(ascii_route_flags): '\n Converts route flags to a bitmask of their equivalent linux/route.h values.\n\n :return: integer representation of a 16 bit mask\n :rtype: int\n ' bitmask_flags = 0 RTF_UP = 1 RTF_GATEWAY = 2 RTF_HOST = 4 RTF_DYNAMIC = 16 if ('U' in ascii_route_flags): bitmask_flags |= RTF_UP if ('G' in ascii_route_flags): bitmask_flags |= RTF_GATEWAY if ('H' in ascii_route_flags): bitmask_flags |= RTF_HOST if ('S' not in ascii_route_flags): bitmask_flags |= RTF_DYNAMIC return bitmask_flags
Converts route flags to a bitmask of their equivalent linux/route.h values. :return: integer representation of a 16 bit mask :rtype: int
azurelinuxagent/common/osutil/freebsd.py
_ascii_route_flags_to_bitmask
magnologan/WALinuxAgent
423
python
def _ascii_route_flags_to_bitmask(ascii_route_flags): '\n Converts route flags to a bitmask of their equivalent linux/route.h values.\n\n :return: integer representation of a 16 bit mask\n :rtype: int\n ' bitmask_flags = 0 RTF_UP = 1 RTF_GATEWAY = 2 RTF_HOST = 4 RTF_DYNAMIC = 16 if ('U' in ascii_route_flags): bitmask_flags |= RTF_UP if ('G' in ascii_route_flags): bitmask_flags |= RTF_GATEWAY if ('H' in ascii_route_flags): bitmask_flags |= RTF_HOST if ('S' not in ascii_route_flags): bitmask_flags |= RTF_DYNAMIC return bitmask_flags
def _ascii_route_flags_to_bitmask(ascii_route_flags): '\n Converts route flags to a bitmask of their equivalent linux/route.h values.\n\n :return: integer representation of a 16 bit mask\n :rtype: int\n ' bitmask_flags = 0 RTF_UP = 1 RTF_GATEWAY = 2 RTF_HOST = 4 RTF_DYNAMIC = 16 if ('U' in ascii_route_flags): bitmask_flags |= RTF_UP if ('G' in ascii_route_flags): bitmask_flags |= RTF_GATEWAY if ('H' in ascii_route_flags): bitmask_flags |= RTF_HOST if ('S' not in ascii_route_flags): bitmask_flags |= RTF_DYNAMIC return bitmask_flags<|docstring|>Converts route flags to a bitmask of their equivalent linux/route.h values. :return: integer representation of a 16 bit mask :rtype: int<|endoftext|>
efa67d1553e16296608891cd54c499d0b7f21621847e108252989f3382a98cd5
def _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route): '\n Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie:\n > default 0.0.0.0 UGS 0 247 em1\n to\n > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0\n\n :return: string representation of the equivalent /proc/net/route line\n :rtype: string\n ' network_interface = netstat_route['Netif'] (hex_destination_ip, hex_destination_subnetmask) = _ipv4_cidr_destination_to_hex(netstat_route['Destination']) hex_gateway = _try_ipv4_gateway_to_hex(netstat_route['Gateway']) bitmask_flags = _ascii_route_flags_to_bitmask(netstat_route['Flags']) dummy_refcount = 0 dummy_use = 0 route_metric = netstat_route['Metric'] dummy_mtu = 0 dummy_window = 0 dummy_irtt = 0 return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}'.format(network_interface, hex_destination_ip, hex_gateway, bitmask_flags, dummy_refcount, dummy_use, route_metric, hex_destination_subnetmask, dummy_mtu, dummy_window, dummy_irtt)
Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie: > default 0.0.0.0 UGS 0 247 em1 to > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0 :return: string representation of the equivalent /proc/net/route line :rtype: string
azurelinuxagent/common/osutil/freebsd.py
_freebsd_netstat_rn_route_to_linux_proc_net_route
magnologan/WALinuxAgent
423
python
def _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route): '\n Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie:\n > default 0.0.0.0 UGS 0 247 em1\n to\n > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0\n\n :return: string representation of the equivalent /proc/net/route line\n :rtype: string\n ' network_interface = netstat_route['Netif'] (hex_destination_ip, hex_destination_subnetmask) = _ipv4_cidr_destination_to_hex(netstat_route['Destination']) hex_gateway = _try_ipv4_gateway_to_hex(netstat_route['Gateway']) bitmask_flags = _ascii_route_flags_to_bitmask(netstat_route['Flags']) dummy_refcount = 0 dummy_use = 0 route_metric = netstat_route['Metric'] dummy_mtu = 0 dummy_window = 0 dummy_irtt = 0 return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}'.format(network_interface, hex_destination_ip, hex_gateway, bitmask_flags, dummy_refcount, dummy_use, route_metric, hex_destination_subnetmask, dummy_mtu, dummy_window, dummy_irtt)
def _freebsd_netstat_rn_route_to_linux_proc_net_route(netstat_route): '\n Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie:\n > default 0.0.0.0 UGS 0 247 em1\n to\n > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0\n\n :return: string representation of the equivalent /proc/net/route line\n :rtype: string\n ' network_interface = netstat_route['Netif'] (hex_destination_ip, hex_destination_subnetmask) = _ipv4_cidr_destination_to_hex(netstat_route['Destination']) hex_gateway = _try_ipv4_gateway_to_hex(netstat_route['Gateway']) bitmask_flags = _ascii_route_flags_to_bitmask(netstat_route['Flags']) dummy_refcount = 0 dummy_use = 0 route_metric = netstat_route['Metric'] dummy_mtu = 0 dummy_window = 0 dummy_irtt = 0 return '{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}'.format(network_interface, hex_destination_ip, hex_gateway, bitmask_flags, dummy_refcount, dummy_use, route_metric, hex_destination_subnetmask, dummy_mtu, dummy_window, dummy_irtt)<|docstring|>Converts a single FreeBSD `netstat -rn -f inet` route to its equivalent /proc/net/route line. ie: > default 0.0.0.0 UGS 0 247 em1 to > em1 00000000 00000000 0003 0 0 0 FFFFFFFF 0 0 0 :return: string representation of the equivalent /proc/net/route line :rtype: string<|endoftext|>
446a19564ac5ee6538e58d1c8d52515c068d7fffc9d76bd7ad9bd809b21c8624
def quat_to_axis_rotation(*args): 'Converts quaternion to euler angles\n ' if ((len(args) == 4) and all(map((lambda x: isinstance(x, float)), args))): Quaternion(args).unit elif ((len(args) == 1) and isinstance(args[0], Quaternion)): quat = args[0].unit else: raise TypeError('Use either 4 floats (w, x, y, z) or one Quaternion object.') angle = math.atan2(math.sqrt(sum(((i ** 2) for i in quat.vector))), quat.w) s = math.sqrt((1 - (quat.w * quat.w))) if (s < 0.001): x = quat.x y = quat.y z = quat.z else: x = (quat.x / s) y = (quat.y / s) z = (quat.z / s) return (math.degrees(angle), z, x, (- y))
Converts quaternion to euler angles
simulation.py
quat_to_axis_rotation
rodrigost23/automailx
3
python
def quat_to_axis_rotation(*args): '\n ' if ((len(args) == 4) and all(map((lambda x: isinstance(x, float)), args))): Quaternion(args).unit elif ((len(args) == 1) and isinstance(args[0], Quaternion)): quat = args[0].unit else: raise TypeError('Use either 4 floats (w, x, y, z) or one Quaternion object.') angle = math.atan2(math.sqrt(sum(((i ** 2) for i in quat.vector))), quat.w) s = math.sqrt((1 - (quat.w * quat.w))) if (s < 0.001): x = quat.x y = quat.y z = quat.z else: x = (quat.x / s) y = (quat.y / s) z = (quat.z / s) return (math.degrees(angle), z, x, (- y))
def quat_to_axis_rotation(*args): '\n ' if ((len(args) == 4) and all(map((lambda x: isinstance(x, float)), args))): Quaternion(args).unit elif ((len(args) == 1) and isinstance(args[0], Quaternion)): quat = args[0].unit else: raise TypeError('Use either 4 floats (w, x, y, z) or one Quaternion object.') angle = math.atan2(math.sqrt(sum(((i ** 2) for i in quat.vector))), quat.w) s = math.sqrt((1 - (quat.w * quat.w))) if (s < 0.001): x = quat.x y = quat.y z = quat.z else: x = (quat.x / s) y = (quat.y / s) z = (quat.z / s) return (math.degrees(angle), z, x, (- y))<|docstring|>Converts quaternion to euler angles<|endoftext|>
f2aec64d2f4c09df46f3cb7578a6e96ad190314719200320bb497e509ab2ac05
def translate_range(self, value, leftMin, leftMax, rightMin, rightMax): 'Translates one range to another' leftSpan = (leftMax - leftMin) rightSpan = (rightMax - rightMin) valueScaled = (float((value - leftMin)) / float(leftSpan)) return (rightMin + (valueScaled * rightSpan))
Translates one range to another
simulation.py
translate_range
rodrigost23/automailx
3
python
def translate_range(self, value, leftMin, leftMax, rightMin, rightMax): leftSpan = (leftMax - leftMin) rightSpan = (rightMax - rightMin) valueScaled = (float((value - leftMin)) / float(leftSpan)) return (rightMin + (valueScaled * rightSpan))
def translate_range(self, value, leftMin, leftMax, rightMin, rightMax): leftSpan = (leftMax - leftMin) rightSpan = (rightMax - rightMin) valueScaled = (float((value - leftMin)) / float(leftSpan)) return (rightMin + (valueScaled * rightSpan))<|docstring|>Translates one range to another<|endoftext|>
d70b0398a289400b42aaa8bd4257112f17a4b78d575e91335bd3ca1bdd2b3ff3
def __init__(self, width: int, height: int): '\n Arguments:\n width {int} -- Window width in pixels\n height {int} -- Window height in pixels\n ' self.resize(width, height) self.quad = glu.gluNewQuadric() glu.gluQuadricDrawStyle(self.quad, gl.GL_LINE) glu.gluQuadricTexture(self.quad, gl.GL_TRUE)
Arguments: width {int} -- Window width in pixels height {int} -- Window height in pixels
simulation.py
__init__
rodrigost23/automailx
3
python
def __init__(self, width: int, height: int): '\n Arguments:\n width {int} -- Window width in pixels\n height {int} -- Window height in pixels\n ' self.resize(width, height) self.quad = glu.gluNewQuadric() glu.gluQuadricDrawStyle(self.quad, gl.GL_LINE) glu.gluQuadricTexture(self.quad, gl.GL_TRUE)
def __init__(self, width: int, height: int): '\n Arguments:\n width {int} -- Window width in pixels\n height {int} -- Window height in pixels\n ' self.resize(width, height) self.quad = glu.gluNewQuadric() glu.gluQuadricDrawStyle(self.quad, gl.GL_LINE) glu.gluQuadricTexture(self.quad, gl.GL_TRUE)<|docstring|>Arguments: width {int} -- Window width in pixels height {int} -- Window height in pixels<|endoftext|>
0f924214cd7e4d75c3d885da4a78411b7ed5b91fef16877118763cc6b33c8866
def nextPose(self): 'Show next pose of the foot\n ' self.setPose(((self.pose + 1) % self.__num_poses))
Show next pose of the foot
simulation.py
nextPose
rodrigost23/automailx
3
python
def nextPose(self): '\n ' self.setPose(((self.pose + 1) % self.__num_poses))
def nextPose(self): '\n ' self.setPose(((self.pose + 1) % self.__num_poses))<|docstring|>Show next pose of the foot<|endoftext|>
eddba3fcde4870afd15e97081232a9409938420946ecc20b61ac2b80dab77ee7
def prevPose(self): 'Show previous pose of the foot\n ' self.setPose(((self.pose - 1) % self.__num_poses))
Show previous pose of the foot
simulation.py
prevPose
rodrigost23/automailx
3
python
def prevPose(self): '\n ' self.setPose(((self.pose - 1) % self.__num_poses))
def prevPose(self): '\n ' self.setPose(((self.pose - 1) % self.__num_poses))<|docstring|>Show previous pose of the foot<|endoftext|>
57af6d8a1f77b399ea48f8b81194f2ab76284a43959761ec1ce9dcf733fda994
def setPose(self, pose: int): 'Sets a specific pose\n\n Arguments:\n pose {int} -- The pose number\n ' self.pose = pose
Sets a specific pose Arguments: pose {int} -- The pose number
simulation.py
setPose
rodrigost23/automailx
3
python
def setPose(self, pose: int): 'Sets a specific pose\n\n Arguments:\n pose {int} -- The pose number\n ' self.pose = pose
def setPose(self, pose: int): 'Sets a specific pose\n\n Arguments:\n pose {int} -- The pose number\n ' self.pose = pose<|docstring|>Sets a specific pose Arguments: pose {int} -- The pose number<|endoftext|>
3661d9ed880211ce99a0d39358be32effe77d374f97fde80b249c1cf8b12ee88
def recenter(self, data: SensorData=None): 'Sets an offset to define the resting standing pose\n\n Keyword Arguments:\n data {SensorData} -- the sensor data to set as the resting pose, or\n {None} to set the current sensor data (default: {None})\n ' if (data is None): data = self.sensor_data self.offset = copy.deepcopy(data)
Sets an offset to define the resting standing pose Keyword Arguments: data {SensorData} -- the sensor data to set as the resting pose, or {None} to set the current sensor data (default: {None})
simulation.py
recenter
rodrigost23/automailx
3
python
def recenter(self, data: SensorData=None): 'Sets an offset to define the resting standing pose\n\n Keyword Arguments:\n data {SensorData} -- the sensor data to set as the resting pose, or\n {None} to set the current sensor data (default: {None})\n ' if (data is None): data = self.sensor_data self.offset = copy.deepcopy(data)
def recenter(self, data: SensorData=None): 'Sets an offset to define the resting standing pose\n\n Keyword Arguments:\n data {SensorData} -- the sensor data to set as the resting pose, or\n {None} to set the current sensor data (default: {None})\n ' if (data is None): data = self.sensor_data self.offset = copy.deepcopy(data)<|docstring|>Sets an offset to define the resting standing pose Keyword Arguments: data {SensorData} -- the sensor data to set as the resting pose, or {None} to set the current sensor data (default: {None})<|endoftext|>
47b0de4a1624fb04b69af30feebfea1b1e8cdd739e6037e6984f265ff7d552de
def draw(self): 'Draws one frame in the OpenGL window\n ' blue = (0.27, 0.388, 0.678) dark_grey = (0.235, 0.243, 0.266) grey = (0.309, 0.309, 0.309) light_grey = (0.447, 0.435, 0.449) sensor_data = self.sensor_data print(('\r%s' % sensor_data), end='') quat = Quaternion(sensor_data.gyro.w, sensor_data.gyro.x, sensor_data.gyro.y, sensor_data.gyro.z) offset = Quaternion(self.offset.gyro.w, self.offset.gyro.x, self.offset.gyro.y, self.offset.gyro.z) if offset: quat = (quat - offset) self.flex_bent = ((self.offset.flex - self.flex_straight) + self.flex_bent) self.flex_straight = self.offset.flex gyro_euler = quat_to_euler(quat) rotation = quat_to_axis_rotation(quat) flex_angle = (self.translate_range(self.sensor_data.flex, self.flex_straight, self.flex_bent, 0.0, 90.0) if (self.sensor_data.flex != 0) else 0) flex_angle = min(170, max((- 20), flex_angle)) gl.glClearColor(0.8, 0.8, 0.8, 1.0) gl.glClearDepth(1.0) gl.glEnable(gl.GL_DEPTH_TEST) gl.glEnable(gl.GL_LIGHTING) gl.glShadeModel(gl.GL_SMOOTH) gl.glDisable(gl.GL_COLOR_MATERIAL) gl.glDepthFunc(gl.GL_LEQUAL) gl.glHint(gl.GL_PERSPECTIVE_CORRECTION_HINT, gl.GL_NICEST) gl.glClear(((gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) | gl.GL_STENCIL_BUFFER_BIT)) gl.glEnable(gl.GL_LIGHT0) gl.glLightfv(gl.GL_LIGHT0, gl.GL_POSITION, (1, 2, 3)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_AMBIENT, (0.5, 0.5, 0.5)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_DIFFUSE, (0.6, 0.6, 0.6)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_SPECULAR, (0, 0, 0)) gl.glLightf(gl.GL_LIGHT0, gl.GL_SPOT_CUTOFF, 180) gl.glLoadIdentity() gl.glTranslatef(0, 0.0, (- 7.0)) osd_line = ((('x: {0:<7.2f}'.format(quat.x) + 'y: {0:<7.2f}'.format(quat.y)) + 'z: {0:<7.2f}'.format(quat.z)) + 'flex: {0:>8}'.format('{0:.2f}°'.format(flex_angle))) self.drawText(((- 2), 1.9, 2), osd_line) gl.glPushMatrix() gl.glTranslatef(0, 2.0, 0.0) gl.glNormal3f(0.0, (- 1.0), 0.0) gl.glRotatef((2 * quat.y), 0, 0, 1) gl.glRotatef(quat.z, 1, 0, 0) gl.glRotatef(120, 0.5, 0.5, (- 0.5)) 
gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, blue) glu.gluCylinder(self.quad, 0.2, 0.15, 2, 10, 1) gl.glTranslatef(0, 0, 2) gl.glRotatef(flex_angle, 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.2, 6, 6) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, blue) glu.gluCylinder(self.quad, 0.15, 0.125, 1.8, 9, 1) gl.glTranslatef(0, 0, 1.8) if (self.pose == 0): pass elif (self.pose == 1): gl.glRotatef(60.0, 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.2, 6, 6) gl.glBegin(gl.GL_QUADS) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, grey) gl.glNormal3f(0, (- 1), 0) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glNormal3f((- 1), 0, 0) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glNormal3f(1, 0, 0) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glVertex3f(0.2, 0.8, 0.3) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glNormal3f(0, 0, (- 1)) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glNormal3f(0, 1, 0) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f(0.2, 0.8, 0.3) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glNormal3f(0, 0, 1) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.3) gl.glEnd() gl.glTranslatef(0, 0.8, 0.1) if (self.pose == 0): pass elif (self.pose == 1): gl.glRotatef((- 60.0), 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.1, 6, 6) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, grey) 
gl.glBegin(gl.GL_QUADS) gl.glNormal3f(0, (- 1), 0) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glVertex3f(0.2, 0.02, 0.0) gl.glVertex3f(0.2, 0.02, 0.2) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glNormal3f((- 1), 0, 0) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glNormal3f(1, 0, 0) gl.glVertex3f(0.2, 0.02, 0.2) gl.glVertex3f(0.2, 0.4, 0.2) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.02, 0.0) gl.glNormal3f(0, 0, (- 1)) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.02, 0.0) gl.glNormal3f(0, 1, 0) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f(0.2, 0.4, 0.2) gl.glVertex3f(0.2, 0.02, 0.2) gl.glNormal3f(0, 0, 1) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.2) gl.glEnd() gl.glPopMatrix()
Draws one frame in the OpenGL window
simulation.py
draw
rodrigost23/automailx
3
python
def draw(self): '\n ' blue = (0.27, 0.388, 0.678) dark_grey = (0.235, 0.243, 0.266) grey = (0.309, 0.309, 0.309) light_grey = (0.447, 0.435, 0.449) sensor_data = self.sensor_data print(('\r%s' % sensor_data), end=) quat = Quaternion(sensor_data.gyro.w, sensor_data.gyro.x, sensor_data.gyro.y, sensor_data.gyro.z) offset = Quaternion(self.offset.gyro.w, self.offset.gyro.x, self.offset.gyro.y, self.offset.gyro.z) if offset: quat = (quat - offset) self.flex_bent = ((self.offset.flex - self.flex_straight) + self.flex_bent) self.flex_straight = self.offset.flex gyro_euler = quat_to_euler(quat) rotation = quat_to_axis_rotation(quat) flex_angle = (self.translate_range(self.sensor_data.flex, self.flex_straight, self.flex_bent, 0.0, 90.0) if (self.sensor_data.flex != 0) else 0) flex_angle = min(170, max((- 20), flex_angle)) gl.glClearColor(0.8, 0.8, 0.8, 1.0) gl.glClearDepth(1.0) gl.glEnable(gl.GL_DEPTH_TEST) gl.glEnable(gl.GL_LIGHTING) gl.glShadeModel(gl.GL_SMOOTH) gl.glDisable(gl.GL_COLOR_MATERIAL) gl.glDepthFunc(gl.GL_LEQUAL) gl.glHint(gl.GL_PERSPECTIVE_CORRECTION_HINT, gl.GL_NICEST) gl.glClear(((gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) | gl.GL_STENCIL_BUFFER_BIT)) gl.glEnable(gl.GL_LIGHT0) gl.glLightfv(gl.GL_LIGHT0, gl.GL_POSITION, (1, 2, 3)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_AMBIENT, (0.5, 0.5, 0.5)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_DIFFUSE, (0.6, 0.6, 0.6)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_SPECULAR, (0, 0, 0)) gl.glLightf(gl.GL_LIGHT0, gl.GL_SPOT_CUTOFF, 180) gl.glLoadIdentity() gl.glTranslatef(0, 0.0, (- 7.0)) osd_line = ((('x: {0:<7.2f}'.format(quat.x) + 'y: {0:<7.2f}'.format(quat.y)) + 'z: {0:<7.2f}'.format(quat.z)) + 'flex: {0:>8}'.format('{0:.2f}°'.format(flex_angle))) self.drawText(((- 2), 1.9, 2), osd_line) gl.glPushMatrix() gl.glTranslatef(0, 2.0, 0.0) gl.glNormal3f(0.0, (- 1.0), 0.0) gl.glRotatef((2 * quat.y), 0, 0, 1) gl.glRotatef(quat.z, 1, 0, 0) gl.glRotatef(120, 0.5, 0.5, (- 0.5)) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, blue) 
glu.gluCylinder(self.quad, 0.2, 0.15, 2, 10, 1) gl.glTranslatef(0, 0, 2) gl.glRotatef(flex_angle, 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.2, 6, 6) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, blue) glu.gluCylinder(self.quad, 0.15, 0.125, 1.8, 9, 1) gl.glTranslatef(0, 0, 1.8) if (self.pose == 0): pass elif (self.pose == 1): gl.glRotatef(60.0, 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.2, 6, 6) gl.glBegin(gl.GL_QUADS) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, grey) gl.glNormal3f(0, (- 1), 0) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glNormal3f((- 1), 0, 0) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glNormal3f(1, 0, 0) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glVertex3f(0.2, 0.8, 0.3) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glNormal3f(0, 0, (- 1)) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glNormal3f(0, 1, 0) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f(0.2, 0.8, 0.3) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glNormal3f(0, 0, 1) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.3) gl.glEnd() gl.glTranslatef(0, 0.8, 0.1) if (self.pose == 0): pass elif (self.pose == 1): gl.glRotatef((- 60.0), 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.1, 6, 6) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, grey) gl.glBegin(gl.GL_QUADS) gl.glNormal3f(0, (- 1), 0) gl.glVertex3f((- 0.2), 0.02, 
0.0) gl.glVertex3f(0.2, 0.02, 0.0) gl.glVertex3f(0.2, 0.02, 0.2) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glNormal3f((- 1), 0, 0) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glNormal3f(1, 0, 0) gl.glVertex3f(0.2, 0.02, 0.2) gl.glVertex3f(0.2, 0.4, 0.2) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.02, 0.0) gl.glNormal3f(0, 0, (- 1)) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.02, 0.0) gl.glNormal3f(0, 1, 0) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f(0.2, 0.4, 0.2) gl.glVertex3f(0.2, 0.02, 0.2) gl.glNormal3f(0, 0, 1) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.2) gl.glEnd() gl.glPopMatrix()
def draw(self): '\n ' blue = (0.27, 0.388, 0.678) dark_grey = (0.235, 0.243, 0.266) grey = (0.309, 0.309, 0.309) light_grey = (0.447, 0.435, 0.449) sensor_data = self.sensor_data print(('\r%s' % sensor_data), end=) quat = Quaternion(sensor_data.gyro.w, sensor_data.gyro.x, sensor_data.gyro.y, sensor_data.gyro.z) offset = Quaternion(self.offset.gyro.w, self.offset.gyro.x, self.offset.gyro.y, self.offset.gyro.z) if offset: quat = (quat - offset) self.flex_bent = ((self.offset.flex - self.flex_straight) + self.flex_bent) self.flex_straight = self.offset.flex gyro_euler = quat_to_euler(quat) rotation = quat_to_axis_rotation(quat) flex_angle = (self.translate_range(self.sensor_data.flex, self.flex_straight, self.flex_bent, 0.0, 90.0) if (self.sensor_data.flex != 0) else 0) flex_angle = min(170, max((- 20), flex_angle)) gl.glClearColor(0.8, 0.8, 0.8, 1.0) gl.glClearDepth(1.0) gl.glEnable(gl.GL_DEPTH_TEST) gl.glEnable(gl.GL_LIGHTING) gl.glShadeModel(gl.GL_SMOOTH) gl.glDisable(gl.GL_COLOR_MATERIAL) gl.glDepthFunc(gl.GL_LEQUAL) gl.glHint(gl.GL_PERSPECTIVE_CORRECTION_HINT, gl.GL_NICEST) gl.glClear(((gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) | gl.GL_STENCIL_BUFFER_BIT)) gl.glEnable(gl.GL_LIGHT0) gl.glLightfv(gl.GL_LIGHT0, gl.GL_POSITION, (1, 2, 3)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_AMBIENT, (0.5, 0.5, 0.5)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_DIFFUSE, (0.6, 0.6, 0.6)) gl.glLightfv(gl.GL_LIGHT0, gl.GL_SPECULAR, (0, 0, 0)) gl.glLightf(gl.GL_LIGHT0, gl.GL_SPOT_CUTOFF, 180) gl.glLoadIdentity() gl.glTranslatef(0, 0.0, (- 7.0)) osd_line = ((('x: {0:<7.2f}'.format(quat.x) + 'y: {0:<7.2f}'.format(quat.y)) + 'z: {0:<7.2f}'.format(quat.z)) + 'flex: {0:>8}'.format('{0:.2f}°'.format(flex_angle))) self.drawText(((- 2), 1.9, 2), osd_line) gl.glPushMatrix() gl.glTranslatef(0, 2.0, 0.0) gl.glNormal3f(0.0, (- 1.0), 0.0) gl.glRotatef((2 * quat.y), 0, 0, 1) gl.glRotatef(quat.z, 1, 0, 0) gl.glRotatef(120, 0.5, 0.5, (- 0.5)) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, blue) 
glu.gluCylinder(self.quad, 0.2, 0.15, 2, 10, 1) gl.glTranslatef(0, 0, 2) gl.glRotatef(flex_angle, 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.2, 6, 6) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, blue) glu.gluCylinder(self.quad, 0.15, 0.125, 1.8, 9, 1) gl.glTranslatef(0, 0, 1.8) if (self.pose == 0): pass elif (self.pose == 1): gl.glRotatef(60.0, 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.2, 6, 6) gl.glBegin(gl.GL_QUADS) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, grey) gl.glNormal3f(0, (- 1), 0) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glNormal3f((- 1), 0, 0) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glNormal3f(1, 0, 0) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glVertex3f(0.2, 0.8, 0.3) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glNormal3f(0, 0, (- 1)) gl.glVertex3f((- 0.2), (- 0.1), 0.0) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, (- 0.1), 0.0) gl.glNormal3f(0, 1, 0) gl.glVertex3f((- 0.2), (- 0.1), 0.3) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f(0.2, 0.8, 0.3) gl.glVertex3f(0.2, (- 0.1), 0.3) gl.glNormal3f(0, 0, 1) gl.glVertex3f((- 0.2), 0.8, 0.3) gl.glVertex3f((- 0.2), 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.1) gl.glVertex3f(0.2, 0.8, 0.3) gl.glEnd() gl.glTranslatef(0, 0.8, 0.1) if (self.pose == 0): pass elif (self.pose == 1): gl.glRotatef((- 60.0), 1.0, 0.0, 0.0) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, dark_grey) glu.gluSphere(self.quad, 0.1, 6, 6) gl.glMaterialfv(gl.GL_FRONT_AND_BACK, gl.GL_AMBIENT_AND_DIFFUSE, grey) gl.glBegin(gl.GL_QUADS) gl.glNormal3f(0, (- 1), 0) gl.glVertex3f((- 0.2), 0.02, 
0.0) gl.glVertex3f(0.2, 0.02, 0.0) gl.glVertex3f(0.2, 0.02, 0.2) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glNormal3f((- 1), 0, 0) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glNormal3f(1, 0, 0) gl.glVertex3f(0.2, 0.02, 0.2) gl.glVertex3f(0.2, 0.4, 0.2) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.02, 0.0) gl.glNormal3f(0, 0, (- 1)) gl.glVertex3f((- 0.2), 0.02, 0.0) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.02, 0.0) gl.glNormal3f(0, 1, 0) gl.glVertex3f((- 0.2), 0.02, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f(0.2, 0.4, 0.2) gl.glVertex3f(0.2, 0.02, 0.2) gl.glNormal3f(0, 0, 1) gl.glVertex3f((- 0.2), 0.4, 0.2) gl.glVertex3f((- 0.2), 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.1) gl.glVertex3f(0.2, 0.4, 0.2) gl.glEnd() gl.glPopMatrix()<|docstring|>Draws one frame in the OpenGL window<|endoftext|>
3e3be78dfa8fc42aa5da38d3e851e7922baebf2754249b093da5e41d44949d3c
def make_time_formatter(request, stags): 'Return a function that propertly formats timestamps for a\n particular request.\n ' if ('timefmt' in request.args): try: tz = stags['Properties']['Timezone'] except KeyError: tz = 'Utc' tz = dtutil.gettz(tz) if (request.args['timefmt'][0] == 'iso8601'): fmt = dtutil.iso8601 elif (request.args['timefmt'][0] == 'excel'): fmt = fmt = dtutil.excel else: fmt = (lambda dt, tz: dtutil.strftime_tz(dt, '%s')) tz = dtutil.gettz('Utc') def format(t): return fmt(dtutil.ts2dt((t / 1000)), tz) return format else: return (lambda x: str(int(x)))
Return a function that propertly formats timestamps for a particular request.
python/smap/archiver/consumers.py
make_time_formatter
jf87/smap
21
python
def make_time_formatter(request, stags): 'Return a function that propertly formats timestamps for a\n particular request.\n ' if ('timefmt' in request.args): try: tz = stags['Properties']['Timezone'] except KeyError: tz = 'Utc' tz = dtutil.gettz(tz) if (request.args['timefmt'][0] == 'iso8601'): fmt = dtutil.iso8601 elif (request.args['timefmt'][0] == 'excel'): fmt = fmt = dtutil.excel else: fmt = (lambda dt, tz: dtutil.strftime_tz(dt, '%s')) tz = dtutil.gettz('Utc') def format(t): return fmt(dtutil.ts2dt((t / 1000)), tz) return format else: return (lambda x: str(int(x)))
def make_time_formatter(request, stags): 'Return a function that propertly formats timestamps for a\n particular request.\n ' if ('timefmt' in request.args): try: tz = stags['Properties']['Timezone'] except KeyError: tz = 'Utc' tz = dtutil.gettz(tz) if (request.args['timefmt'][0] == 'iso8601'): fmt = dtutil.iso8601 elif (request.args['timefmt'][0] == 'excel'): fmt = fmt = dtutil.excel else: fmt = (lambda dt, tz: dtutil.strftime_tz(dt, '%s')) tz = dtutil.gettz('Utc') def format(t): return fmt(dtutil.ts2dt((t / 1000)), tz) return format else: return (lambda x: str(int(x)))<|docstring|>Return a function that propertly formats timestamps for a particular request.<|endoftext|>
158e29a89a6fa56bdedfa6c58cfb8f5d6af0f65d0d3679aac30d6c0313a13c62
def construct_feature_dataframe(self, parameters_df: pd.DataFrame, context_df: pd.DataFrame=None, product: bool=False): 'Construct feature value dataframe from config value and context value dataframes.\n\n If product is True, creates a cartesian product, otherwise appends columns.\n\n ' if ((self.context_space is not None) and (context_df is None)): raise ValueError('Context required by optimization problem but not provided.') features_df = parameters_df.rename((lambda x: f'{self.parameter_space.name}.{x}'), axis=1) if ((context_df is not None) and (len(context_df) > 0)): renamed_context_values = context_df.rename((lambda x: f'{self.context_space.name}.{x}'), axis=1) features_df['contains_context'] = True if product: renamed_context_values['contains_context'] = True features_df = features_df.merge(renamed_context_values, how='outer', on='contains_context') features_df.index = parameters_df.index.copy() else: if (len(parameters_df) != len(context_df)): raise ValueError(f'Incompatible shape of parameters and context: {parameters_df.shape} and {context_df.shape}.') features_df = pd.concat([features_df, renamed_context_values], axis=1) else: features_df['contains_context'] = False return features_df
Construct feature value dataframe from config value and context value dataframes. If product is True, creates a cartesian product, otherwise appends columns.
source/Mlos.Python/mlos/Optimizers/OptimizationProblem.py
construct_feature_dataframe
kkanellis/MLOS
81
python
def construct_feature_dataframe(self, parameters_df: pd.DataFrame, context_df: pd.DataFrame=None, product: bool=False): 'Construct feature value dataframe from config value and context value dataframes.\n\n If product is True, creates a cartesian product, otherwise appends columns.\n\n ' if ((self.context_space is not None) and (context_df is None)): raise ValueError('Context required by optimization problem but not provided.') features_df = parameters_df.rename((lambda x: f'{self.parameter_space.name}.{x}'), axis=1) if ((context_df is not None) and (len(context_df) > 0)): renamed_context_values = context_df.rename((lambda x: f'{self.context_space.name}.{x}'), axis=1) features_df['contains_context'] = True if product: renamed_context_values['contains_context'] = True features_df = features_df.merge(renamed_context_values, how='outer', on='contains_context') features_df.index = parameters_df.index.copy() else: if (len(parameters_df) != len(context_df)): raise ValueError(f'Incompatible shape of parameters and context: {parameters_df.shape} and {context_df.shape}.') features_df = pd.concat([features_df, renamed_context_values], axis=1) else: features_df['contains_context'] = False return features_df
def construct_feature_dataframe(self, parameters_df: pd.DataFrame, context_df: pd.DataFrame=None, product: bool=False): 'Construct feature value dataframe from config value and context value dataframes.\n\n If product is True, creates a cartesian product, otherwise appends columns.\n\n ' if ((self.context_space is not None) and (context_df is None)): raise ValueError('Context required by optimization problem but not provided.') features_df = parameters_df.rename((lambda x: f'{self.parameter_space.name}.{x}'), axis=1) if ((context_df is not None) and (len(context_df) > 0)): renamed_context_values = context_df.rename((lambda x: f'{self.context_space.name}.{x}'), axis=1) features_df['contains_context'] = True if product: renamed_context_values['contains_context'] = True features_df = features_df.merge(renamed_context_values, how='outer', on='contains_context') features_df.index = parameters_df.index.copy() else: if (len(parameters_df) != len(context_df)): raise ValueError(f'Incompatible shape of parameters and context: {parameters_df.shape} and {context_df.shape}.') features_df = pd.concat([features_df, renamed_context_values], axis=1) else: features_df['contains_context'] = False return features_df<|docstring|>Construct feature value dataframe from config value and context value dataframes. If product is True, creates a cartesian product, otherwise appends columns.<|endoftext|>
95fd0a4e4da6fe7b946f546c45b1dcce6ce07196d03cb1cc77a4f6f331b5cb74
def deconstruct_feature_dataframe(self, features_df: pd.DataFrame) -> Tuple[(pd.DataFrame, pd.DataFrame)]: 'Splits the feature dataframe back into parameters and context dataframes.\n\n This is a workaround. What we should really do is implement this functionality as a proper operator on Hypergrids.\n ' parameter_column_names_mapping = {f'{self.parameter_space.name}.{dimension_name}': dimension_name for dimension_name in self.parameter_space.dimension_names} existing_parameter_names = [parameter_name for parameter_name in parameter_column_names_mapping.keys() if (parameter_name in features_df.columns)] parameters_df = features_df[existing_parameter_names] parameters_df.rename(columns=parameter_column_names_mapping, inplace=True) if (self.context_space is not None): context_column_names_mapping = {f'{self.context_space.name}.{dimension_name}': dimension_name for dimension_name in self.context_space.dimension_names} existing_context_column_names = [column_name for column_name in context_column_names_mapping.keys() if (column_name in features_df.columns)] context_df = features_df[existing_context_column_names] context_df.rename(columns=context_column_names_mapping, inplace=True) else: context_df = None return (parameters_df, context_df)
Splits the feature dataframe back into parameters and context dataframes. This is a workaround. What we should really do is implement this functionality as a proper operator on Hypergrids.
source/Mlos.Python/mlos/Optimizers/OptimizationProblem.py
deconstruct_feature_dataframe
kkanellis/MLOS
81
python
def deconstruct_feature_dataframe(self, features_df: pd.DataFrame) -> Tuple[(pd.DataFrame, pd.DataFrame)]: 'Splits the feature dataframe back into parameters and context dataframes.\n\n This is a workaround. What we should really do is implement this functionality as a proper operator on Hypergrids.\n ' parameter_column_names_mapping = {f'{self.parameter_space.name}.{dimension_name}': dimension_name for dimension_name in self.parameter_space.dimension_names} existing_parameter_names = [parameter_name for parameter_name in parameter_column_names_mapping.keys() if (parameter_name in features_df.columns)] parameters_df = features_df[existing_parameter_names] parameters_df.rename(columns=parameter_column_names_mapping, inplace=True) if (self.context_space is not None): context_column_names_mapping = {f'{self.context_space.name}.{dimension_name}': dimension_name for dimension_name in self.context_space.dimension_names} existing_context_column_names = [column_name for column_name in context_column_names_mapping.keys() if (column_name in features_df.columns)] context_df = features_df[existing_context_column_names] context_df.rename(columns=context_column_names_mapping, inplace=True) else: context_df = None return (parameters_df, context_df)
def deconstruct_feature_dataframe(self, features_df: pd.DataFrame) -> Tuple[(pd.DataFrame, pd.DataFrame)]: 'Splits the feature dataframe back into parameters and context dataframes.\n\n This is a workaround. What we should really do is implement this functionality as a proper operator on Hypergrids.\n ' parameter_column_names_mapping = {f'{self.parameter_space.name}.{dimension_name}': dimension_name for dimension_name in self.parameter_space.dimension_names} existing_parameter_names = [parameter_name for parameter_name in parameter_column_names_mapping.keys() if (parameter_name in features_df.columns)] parameters_df = features_df[existing_parameter_names] parameters_df.rename(columns=parameter_column_names_mapping, inplace=True) if (self.context_space is not None): context_column_names_mapping = {f'{self.context_space.name}.{dimension_name}': dimension_name for dimension_name in self.context_space.dimension_names} existing_context_column_names = [column_name for column_name in context_column_names_mapping.keys() if (column_name in features_df.columns)] context_df = features_df[existing_context_column_names] context_df.rename(columns=context_column_names_mapping, inplace=True) else: context_df = None return (parameters_df, context_df)<|docstring|>Splits the feature dataframe back into parameters and context dataframes. This is a workaround. What we should really do is implement this functionality as a proper operator on Hypergrids.<|endoftext|>
b1ce081fb2ea504d3f459245df88d7205e80d2b1a383d2d3ab4bd6e2cbdc62fb
def predict_error(self, x, error_calc='sem'): '\n returns percent error\n ' return_single = False if (not hasattr(x, '__iter__')): x = [x] return_single = True sef = self.calculate_standard_error_fit() (r, _) = self._covariance.shape def calc_error(xi): Xk = matrix(([xi] * r)).T varY_hat = ((Xk.T * self._covariance) * Xk) if (error_calc == 'sem'): se = (sef * sqrt(varY_hat)) else: se = sqrt(((sef ** 2) + ((sef ** 2) * varY_hat))) return se[(0, 0)] fx = array([calc_error(xi) for xi in x]) if return_single: fx = fx[0] return fx
returns percent error
pychron/core/regression/least_squares_regressor.py
predict_error
aelamspychron/pychron
1
python
def predict_error(self, x, error_calc='sem'): '\n \n ' return_single = False if (not hasattr(x, '__iter__')): x = [x] return_single = True sef = self.calculate_standard_error_fit() (r, _) = self._covariance.shape def calc_error(xi): Xk = matrix(([xi] * r)).T varY_hat = ((Xk.T * self._covariance) * Xk) if (error_calc == 'sem'): se = (sef * sqrt(varY_hat)) else: se = sqrt(((sef ** 2) + ((sef ** 2) * varY_hat))) return se[(0, 0)] fx = array([calc_error(xi) for xi in x]) if return_single: fx = fx[0] return fx
def predict_error(self, x, error_calc='sem'): '\n \n ' return_single = False if (not hasattr(x, '__iter__')): x = [x] return_single = True sef = self.calculate_standard_error_fit() (r, _) = self._covariance.shape def calc_error(xi): Xk = matrix(([xi] * r)).T varY_hat = ((Xk.T * self._covariance) * Xk) if (error_calc == 'sem'): se = (sef * sqrt(varY_hat)) else: se = sqrt(((sef ** 2) + ((sef ** 2) * varY_hat))) return se[(0, 0)] fx = array([calc_error(xi) for xi in x]) if return_single: fx = fx[0] return fx<|docstring|>returns percent error<|endoftext|>
552cc861f7c71ae11f6b74452041f82be89c9df48faac5a58287dfaf0bd81bf8
def data_received(self, chunk): "Prevents warning 'must implement all abstract methods'"
Prevents warning 'must implement all abstract methods'
pynogram/web/common.py
data_received
tsionyx/pynogram
17
python
def data_received(self, chunk):
def data_received(self, chunk): <|docstring|>Prevents warning 'must implement all abstract methods'<|endoftext|>
628b58ec6ed86cb55278f1ece4c6be2346cbe17e4a93011e729f1464823a4388
def write_as_json(self, chunk, pretty=True): '\n Respond by JSON-ify given object\n ' if isinstance(chunk, (dict, list, tuple)): indent = (None if (pretty is None) else 2) chunk = json.dumps(chunk, indent=indent, sort_keys=True, ensure_ascii=False) if (pretty is not None): chunk += '\n' self.set_header(str('Content-Type'), 'application/json') return super(BaseHandler, self).write(chunk)
Respond by JSON-ify given object
pynogram/web/common.py
write_as_json
tsionyx/pynogram
17
python
def write_as_json(self, chunk, pretty=True): '\n \n ' if isinstance(chunk, (dict, list, tuple)): indent = (None if (pretty is None) else 2) chunk = json.dumps(chunk, indent=indent, sort_keys=True, ensure_ascii=False) if (pretty is not None): chunk += '\n' self.set_header(str('Content-Type'), 'application/json') return super(BaseHandler, self).write(chunk)
def write_as_json(self, chunk, pretty=True): '\n \n ' if isinstance(chunk, (dict, list, tuple)): indent = (None if (pretty is None) else 2) chunk = json.dumps(chunk, indent=indent, sort_keys=True, ensure_ascii=False) if (pretty is not None): chunk += '\n' self.set_header(str('Content-Type'), 'application/json') return super(BaseHandler, self).write(chunk)<|docstring|>Respond by JSON-ify given object<|endoftext|>
ba33b25c819b8aec2529999e86cc5f3927134a63cf46b97a3b3f24608656f3ff
def write_error(self, status_code, **kwargs): '\n Respond with JSON-formatted error instead of standard one\n ' message = '' exc_info = kwargs.get('exc_info') if exc_info: exception = exc_info[1] if hasattr(exception, 'log_message'): message = exception.log_message if exception.args: message = (message % exception.args) else: message = str(exception) error = dict(status=('%d: %s' % (status_code, self._reason)), message=message) if (self.settings.get('serve_traceback') and exc_info): error['exc_info'] = ''.join(traceback.format_exception(*exc_info)) if ((status_code // 100) == 4): LOG.info('Client request problem') elif ((status_code // 100) == 5): LOG.error('Server problem', exc_info=exc_info) self.set_header(str('Content-Type'), 'application/json') self.write_as_json(dict(error=error))
Respond with JSON-formatted error instead of standard one
pynogram/web/common.py
write_error
tsionyx/pynogram
17
python
def write_error(self, status_code, **kwargs): '\n \n ' message = exc_info = kwargs.get('exc_info') if exc_info: exception = exc_info[1] if hasattr(exception, 'log_message'): message = exception.log_message if exception.args: message = (message % exception.args) else: message = str(exception) error = dict(status=('%d: %s' % (status_code, self._reason)), message=message) if (self.settings.get('serve_traceback') and exc_info): error['exc_info'] = .join(traceback.format_exception(*exc_info)) if ((status_code // 100) == 4): LOG.info('Client request problem') elif ((status_code // 100) == 5): LOG.error('Server problem', exc_info=exc_info) self.set_header(str('Content-Type'), 'application/json') self.write_as_json(dict(error=error))
def write_error(self, status_code, **kwargs): '\n \n ' message = exc_info = kwargs.get('exc_info') if exc_info: exception = exc_info[1] if hasattr(exception, 'log_message'): message = exception.log_message if exception.args: message = (message % exception.args) else: message = str(exception) error = dict(status=('%d: %s' % (status_code, self._reason)), message=message) if (self.settings.get('serve_traceback') and exc_info): error['exc_info'] = .join(traceback.format_exception(*exc_info)) if ((status_code // 100) == 4): LOG.info('Client request problem') elif ((status_code // 100) == 5): LOG.error('Server problem', exc_info=exc_info) self.set_header(str('Content-Type'), 'application/json') self.write_as_json(dict(error=error))<|docstring|>Respond with JSON-formatted error instead of standard one<|endoftext|>
b8680875ea868b1c3e570112b81571cb514629dbaf3365071ccb877124bd4f10
def register(self, callback): '\n Registers the function to call when the\n `notify_callbacks` will be fired.\n ' self.callbacks.append(callback)
Registers the function to call when the `notify_callbacks` will be fired.
pynogram/web/common.py
register
tsionyx/pynogram
17
python
def register(self, callback): '\n Registers the function to call when the\n `notify_callbacks` will be fired.\n ' self.callbacks.append(callback)
def register(self, callback): '\n Registers the function to call when the\n `notify_callbacks` will be fired.\n ' self.callbacks.append(callback)<|docstring|>Registers the function to call when the `notify_callbacks` will be fired.<|endoftext|>
0cd0fc8c9cc519e0cdf746a3d8a241dd093f5b769220395b54cfdef80e093df7
def notify_callbacks(self, *args, **kwargs): '\n Run the callbacks previously collected.\n In case of long-polling the callback should call\n `finish()` on `tornado.web.RequestHandler` instance\n to send the answer to the client.\n ' for callback in self.callbacks: self.callback_helper(callback, *args, **kwargs) self.callbacks = []
Run the callbacks previously collected. In case of long-polling the callback should call `finish()` on `tornado.web.RequestHandler` instance to send the answer to the client.
pynogram/web/common.py
notify_callbacks
tsionyx/pynogram
17
python
def notify_callbacks(self, *args, **kwargs): '\n Run the callbacks previously collected.\n In case of long-polling the callback should call\n `finish()` on `tornado.web.RequestHandler` instance\n to send the answer to the client.\n ' for callback in self.callbacks: self.callback_helper(callback, *args, **kwargs) self.callbacks = []
def notify_callbacks(self, *args, **kwargs): '\n Run the callbacks previously collected.\n In case of long-polling the callback should call\n `finish()` on `tornado.web.RequestHandler` instance\n to send the answer to the client.\n ' for callback in self.callbacks: self.callback_helper(callback, *args, **kwargs) self.callbacks = []<|docstring|>Run the callbacks previously collected. In case of long-polling the callback should call `finish()` on `tornado.web.RequestHandler` instance to send the answer to the client.<|endoftext|>
bbc7572fd2aeb35a510b84e88b5823bbbf89c383704c74c77af1ab13433bc0c5
@classmethod def callback_helper(cls, callback, *args, **kwargs): '\n Simply call the callback with the parameters provided.\n\n You should override this to pass additional arguments.\n ' LOG.debug(args) LOG.debug(kwargs) callback(*args, **kwargs)
Simply call the callback with the parameters provided. You should override this to pass additional arguments.
pynogram/web/common.py
callback_helper
tsionyx/pynogram
17
python
@classmethod def callback_helper(cls, callback, *args, **kwargs): '\n Simply call the callback with the parameters provided.\n\n You should override this to pass additional arguments.\n ' LOG.debug(args) LOG.debug(kwargs) callback(*args, **kwargs)
@classmethod def callback_helper(cls, callback, *args, **kwargs): '\n Simply call the callback with the parameters provided.\n\n You should override this to pass additional arguments.\n ' LOG.debug(args) LOG.debug(kwargs) callback(*args, **kwargs)<|docstring|>Simply call the callback with the parameters provided. You should override this to pass additional arguments.<|endoftext|>
cad00c21f5fc6c0021c56f3fe3936f00a492a60f274aa883b5837599cc9d204b
@abstractmethod async def load(self): '\n Load full data from khl server\n\n :return: empty\n ' raise NotImplementedError
Load full data from khl server :return: empty
khl/interface.py
load
TWT233/khl.py
44
python
@abstractmethod async def load(self): '\n Load full data from khl server\n\n :return: empty\n ' raise NotImplementedError
@abstractmethod async def load(self): '\n Load full data from khl server\n\n :return: empty\n ' raise NotImplementedError<|docstring|>Load full data from khl server :return: empty<|endoftext|>
67505b62ff959ddc91c686ce87033c838449851ff5aee5f79d2a155a4e7a060f
def is_loaded(self) -> bool: '\n Check if loaded\n\n :return: bool\n ' return self._loaded
Check if loaded :return: bool
khl/interface.py
is_loaded
TWT233/khl.py
44
python
def is_loaded(self) -> bool: '\n Check if loaded\n\n :return: bool\n ' return self._loaded
def is_loaded(self) -> bool: '\n Check if loaded\n\n :return: bool\n ' return self._loaded<|docstring|>Check if loaded :return: bool<|endoftext|>
965a93c3e1a5013980d473c022adb349b2d7655215940d48322e1e7b807c5ed1
def __init__(self, parent, list_id): '\n Initialize a new RecordList object, without metadta (yet).\n\n parent is the parent collection in which the list is defined.\n list_id the local identifier for the record list\n altparent is a site object to search for this new entity,\n allowing site-wide RecordType values to be found.\n ' super(RecordList, self).__init__(parent, list_id) self._parent = parent return
Initialize a new RecordList object, without metadta (yet). parent is the parent collection in which the list is defined. list_id the local identifier for the record list altparent is a site object to search for this new entity, allowing site-wide RecordType values to be found.
src/annalist_root/annalist/models/recordlist.py
__init__
gklyne/annalist
18
python
def __init__(self, parent, list_id): '\n Initialize a new RecordList object, without metadta (yet).\n\n parent is the parent collection in which the list is defined.\n list_id the local identifier for the record list\n altparent is a site object to search for this new entity,\n allowing site-wide RecordType values to be found.\n ' super(RecordList, self).__init__(parent, list_id) self._parent = parent return
def __init__(self, parent, list_id): '\n Initialize a new RecordList object, without metadta (yet).\n\n parent is the parent collection in which the list is defined.\n list_id the local identifier for the record list\n altparent is a site object to search for this new entity,\n allowing site-wide RecordType values to be found.\n ' super(RecordList, self).__init__(parent, list_id) self._parent = parent return<|docstring|>Initialize a new RecordList object, without metadta (yet). parent is the parent collection in which the list is defined. list_id the local identifier for the record list altparent is a site object to search for this new entity, allowing site-wide RecordType values to be found.<|endoftext|>
c480418f199a69e29b8e910e0e159585f31b72c59f1af66852a511df6d77f806
def _migrate_filenames(self): '\n Override EntityData method\n ' return None
Override EntityData method
src/annalist_root/annalist/models/recordlist.py
_migrate_filenames
gklyne/annalist
18
python
def _migrate_filenames(self): '\n \n ' return None
def _migrate_filenames(self): '\n \n ' return None<|docstring|>Override EntityData method<|endoftext|>
a5c485b6b7cf2b4bca42c517f8d4eaf054ca28f1c1124a2a57d6c713d22b6ad3
def _migrate_values(self, entitydata): '\n List description entity format migration method.\n\n The specification for this method is that it returns an entitydata value\n which is a copy of the supplied entitydata with format migrations applied.\n\n NOTE: implementations are free to apply migrations in-place. The resulting \n entitydata should be exactly as the supplied data *should* appear in storage\n to conform to the current format of the data. The migration function should \n be idempotent; i.e.\n x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)\n ' for (fkey, ftype) in [(ANNAL.CURIE.display_type, '_enum_list_type')]: entitydata[fkey] = make_type_entity_id(ftype, extract_entity_id(entitydata[fkey])) migration_map = [(ANNAL.CURIE.record_type, ANNAL.CURIE.list_entity_type)] entitydata = self._migrate_values_map_field_names(migration_map, entitydata) if (ANNAL.CURIE.list_fields in entitydata): for f in entitydata[ANNAL.CURIE.list_fields]: field_id = extract_entity_id(f[ANNAL.CURIE.field_id]) if (field_id == 'Field_render'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/Field_render_type') if (field_id == 'Field_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/Field_value_type') if (field_id == 'View_target_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/View_entity_type') if (field_id == 'List_target_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/List_entity_type') return entitydata
List description entity format migration method. The specification for this method is that it returns an entitydata value which is a copy of the supplied entitydata with format migrations applied. NOTE: implementations are free to apply migrations in-place. The resulting entitydata should be exactly as the supplied data *should* appear in storage to conform to the current format of the data. The migration function should be idempotent; i.e. x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)
src/annalist_root/annalist/models/recordlist.py
_migrate_values
gklyne/annalist
18
python
def _migrate_values(self, entitydata): '\n List description entity format migration method.\n\n The specification for this method is that it returns an entitydata value\n which is a copy of the supplied entitydata with format migrations applied.\n\n NOTE: implementations are free to apply migrations in-place. The resulting \n entitydata should be exactly as the supplied data *should* appear in storage\n to conform to the current format of the data. The migration function should \n be idempotent; i.e.\n x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)\n ' for (fkey, ftype) in [(ANNAL.CURIE.display_type, '_enum_list_type')]: entitydata[fkey] = make_type_entity_id(ftype, extract_entity_id(entitydata[fkey])) migration_map = [(ANNAL.CURIE.record_type, ANNAL.CURIE.list_entity_type)] entitydata = self._migrate_values_map_field_names(migration_map, entitydata) if (ANNAL.CURIE.list_fields in entitydata): for f in entitydata[ANNAL.CURIE.list_fields]: field_id = extract_entity_id(f[ANNAL.CURIE.field_id]) if (field_id == 'Field_render'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/Field_render_type') if (field_id == 'Field_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/Field_value_type') if (field_id == 'View_target_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/View_entity_type') if (field_id == 'List_target_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/List_entity_type') return entitydata
def _migrate_values(self, entitydata): '\n List description entity format migration method.\n\n The specification for this method is that it returns an entitydata value\n which is a copy of the supplied entitydata with format migrations applied.\n\n NOTE: implementations are free to apply migrations in-place. The resulting \n entitydata should be exactly as the supplied data *should* appear in storage\n to conform to the current format of the data. The migration function should \n be idempotent; i.e.\n x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)\n ' for (fkey, ftype) in [(ANNAL.CURIE.display_type, '_enum_list_type')]: entitydata[fkey] = make_type_entity_id(ftype, extract_entity_id(entitydata[fkey])) migration_map = [(ANNAL.CURIE.record_type, ANNAL.CURIE.list_entity_type)] entitydata = self._migrate_values_map_field_names(migration_map, entitydata) if (ANNAL.CURIE.list_fields in entitydata): for f in entitydata[ANNAL.CURIE.list_fields]: field_id = extract_entity_id(f[ANNAL.CURIE.field_id]) if (field_id == 'Field_render'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/Field_render_type') if (field_id == 'Field_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/Field_value_type') if (field_id == 'View_target_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/View_entity_type') if (field_id == 'List_target_type'): f[ANNAL.CURIE.field_id] = (layout.FIELD_TYPEID + '/List_entity_type') return entitydata<|docstring|>List description entity format migration method. The specification for this method is that it returns an entitydata value which is a copy of the supplied entitydata with format migrations applied. NOTE: implementations are free to apply migrations in-place. The resulting entitydata should be exactly as the supplied data *should* appear in storage to conform to the current format of the data. The migration function should be idempotent; i.e. x._migrate_values(x._migrate_values(e)) == x._migrate_values(e)<|endoftext|>
0dd2908fad3118eb6384f4a48a7df52ae737310fc697886ebb5c7b34c11d2c69
@staticmethod def render(msg='OK', data=None, status=200, header=None): '\n render json response\n :param msg: message for user\n :param data: response of data\n :param status: http response status\n :param header: http response header\n :return: response in dict type\n ' if (header is None): header = {} host = request.host if (':' in host): host = host.split(':')[0] resp = {'msg': msg, 'traceId': '', 'server': host} if (data is not None): resp['data'] = data return (resp, status, header)
render json response :param msg: message for user :param data: response of data :param status: http response status :param header: http response header :return: response in dict type
api/curve/v1/api/resource.py
render
QiliangFan/Baidu-Curve
478
python
@staticmethod def render(msg='OK', data=None, status=200, header=None): '\n render json response\n :param msg: message for user\n :param data: response of data\n :param status: http response status\n :param header: http response header\n :return: response in dict type\n ' if (header is None): header = {} host = request.host if (':' in host): host = host.split(':')[0] resp = {'msg': msg, 'traceId': , 'server': host} if (data is not None): resp['data'] = data return (resp, status, header)
@staticmethod def render(msg='OK', data=None, status=200, header=None): '\n render json response\n :param msg: message for user\n :param data: response of data\n :param status: http response status\n :param header: http response header\n :return: response in dict type\n ' if (header is None): header = {} host = request.host if (':' in host): host = host.split(':')[0] resp = {'msg': msg, 'traceId': , 'server': host} if (data is not None): resp['data'] = data return (resp, status, header)<|docstring|>render json response :param msg: message for user :param data: response of data :param status: http response status :param header: http response header :return: response in dict type<|endoftext|>
e54f1c86a60f57748d47dbf6f60b5801925e7aef5fe573761aae23e736951a1c
@staticmethod def render_file(filename, content): '\n render file response\n :param filename:\n :param content:\n :return: response in response type\n ' content = content response = make_response(content) response.headers['Content-Disposition'] = ('attachment; filename=%s' % filename) response.headers['Content-Type'] = 'text/plain' return response
render file response :param filename: :param content: :return: response in response type
api/curve/v1/api/resource.py
render_file
QiliangFan/Baidu-Curve
478
python
@staticmethod def render_file(filename, content): '\n render file response\n :param filename:\n :param content:\n :return: response in response type\n ' content = content response = make_response(content) response.headers['Content-Disposition'] = ('attachment; filename=%s' % filename) response.headers['Content-Type'] = 'text/plain' return response
@staticmethod def render_file(filename, content): '\n render file response\n :param filename:\n :param content:\n :return: response in response type\n ' content = content response = make_response(content) response.headers['Content-Disposition'] = ('attachment; filename=%s' % filename) response.headers['Content-Type'] = 'text/plain' return response<|docstring|>render file response :param filename: :param content: :return: response in response type<|endoftext|>
8fc75bdb8d85d7bb08e33d6cd06d7591645eb2e4b37e1f99b42fd719199bca5e
@staticmethod def render_json_str(content): '\n render plain response\n :param content:\n :return: response in response type\n ' content = content response = make_response(content) response.headers['Content-Type'] = 'application/json' return response
render plain response :param content: :return: response in response type
api/curve/v1/api/resource.py
render_json_str
QiliangFan/Baidu-Curve
478
python
@staticmethod def render_json_str(content): '\n render plain response\n :param content:\n :return: response in response type\n ' content = content response = make_response(content) response.headers['Content-Type'] = 'application/json' return response
@staticmethod def render_json_str(content): '\n render plain response\n :param content:\n :return: response in response type\n ' content = content response = make_response(content) response.headers['Content-Type'] = 'application/json' return response<|docstring|>render plain response :param content: :return: response in response type<|endoftext|>
792cfa127fed3b8c4333c59b9a5163f4feeeef9a420da90a029dcdec963a80a8
def read_mibitiff(file, channels=None): ' Reads MIBI data from an IonpathMIBI TIFF file.\n\n Currently, only SIMS data is supported\n\n Args:\n file (str): The string path or an open file object to a MIBItiff file.\n channels (list): Targets to load. If None, all targets/channels are loaded\n\n Returns:\n tuple (np.ndarray, list[tuple]):\n - image data\n - channel data\n ' return_channels = [] img_data = [] with TiffFile(file) as tif: _check_version(tif) for page in tif.pages: description = json.loads(page.tags['image_description'].value.decode('utf-8')) if ((channels is not None) and (description['channel.target'] not in channels)): continue return_channels.append((description['channel.mass'], description['channel.target'])) img_data.append(page.asarray()) if (channels is not None): try: channel_names = [return_channel[1] for return_channel in return_channels] verify_in_list(passed_channels=channels, in_tiff=channel_names) except ValueError as exc: raise IndexError('Passed unknown channels...') from exc return (np.stack(img_data, axis=2), return_channels)
Reads MIBI data from an IonpathMIBI TIFF file. Currently, only SIMS data is supported Args: file (str): The string path or an open file object to a MIBItiff file. channels (list): Targets to load. If None, all targets/channels are loaded Returns: tuple (np.ndarray, list[tuple]): - image data - channel data
ark/utils/tiff_utils.py
read_mibitiff
ngreenwald/segmentation
17
python
def read_mibitiff(file, channels=None): ' Reads MIBI data from an IonpathMIBI TIFF file.\n\n Currently, only SIMS data is supported\n\n Args:\n file (str): The string path or an open file object to a MIBItiff file.\n channels (list): Targets to load. If None, all targets/channels are loaded\n\n Returns:\n tuple (np.ndarray, list[tuple]):\n - image data\n - channel data\n ' return_channels = [] img_data = [] with TiffFile(file) as tif: _check_version(tif) for page in tif.pages: description = json.loads(page.tags['image_description'].value.decode('utf-8')) if ((channels is not None) and (description['channel.target'] not in channels)): continue return_channels.append((description['channel.mass'], description['channel.target'])) img_data.append(page.asarray()) if (channels is not None): try: channel_names = [return_channel[1] for return_channel in return_channels] verify_in_list(passed_channels=channels, in_tiff=channel_names) except ValueError as exc: raise IndexError('Passed unknown channels...') from exc return (np.stack(img_data, axis=2), return_channels)
def read_mibitiff(file, channels=None): ' Reads MIBI data from an IonpathMIBI TIFF file.\n\n Currently, only SIMS data is supported\n\n Args:\n file (str): The string path or an open file object to a MIBItiff file.\n channels (list): Targets to load. If None, all targets/channels are loaded\n\n Returns:\n tuple (np.ndarray, list[tuple]):\n - image data\n - channel data\n ' return_channels = [] img_data = [] with TiffFile(file) as tif: _check_version(tif) for page in tif.pages: description = json.loads(page.tags['image_description'].value.decode('utf-8')) if ((channels is not None) and (description['channel.target'] not in channels)): continue return_channels.append((description['channel.mass'], description['channel.target'])) img_data.append(page.asarray()) if (channels is not None): try: channel_names = [return_channel[1] for return_channel in return_channels] verify_in_list(passed_channels=channels, in_tiff=channel_names) except ValueError as exc: raise IndexError('Passed unknown channels...') from exc return (np.stack(img_data, axis=2), return_channels)<|docstring|>Reads MIBI data from an IonpathMIBI TIFF file. Currently, only SIMS data is supported Args: file (str): The string path or an open file object to a MIBItiff file. channels (list): Targets to load. If None, all targets/channels are loaded Returns: tuple (np.ndarray, list[tuple]): - image data - channel data<|endoftext|>
26a8b3f0955a1874d7b0754a0b53ea964fd4b454475a9c93c25d864f758d9ff6
def _check_version(file): ' Checks that file is MIBItiff\n\n Args:\n file (TiffFile): opened tiff file\n\n Raises:\n ValueError\n ' filetype = file.pages[0].tags.get('software') if (not (filetype and filetype.value.decode('utf-8').startswith('IonpathMIBI'))): raise ValueError('File is not of type IonpathMIBI...')
Checks that file is MIBItiff Args: file (TiffFile): opened tiff file Raises: ValueError
ark/utils/tiff_utils.py
_check_version
ngreenwald/segmentation
17
python
def _check_version(file): ' Checks that file is MIBItiff\n\n Args:\n file (TiffFile): opened tiff file\n\n Raises:\n ValueError\n ' filetype = file.pages[0].tags.get('software') if (not (filetype and filetype.value.decode('utf-8').startswith('IonpathMIBI'))): raise ValueError('File is not of type IonpathMIBI...')
def _check_version(file): ' Checks that file is MIBItiff\n\n Args:\n file (TiffFile): opened tiff file\n\n Raises:\n ValueError\n ' filetype = file.pages[0].tags.get('software') if (not (filetype and filetype.value.decode('utf-8').startswith('IonpathMIBI'))): raise ValueError('File is not of type IonpathMIBI...')<|docstring|>Checks that file is MIBItiff Args: file (TiffFile): opened tiff file Raises: ValueError<|endoftext|>
663febf8bc26bcd818c8b36d9e1fd8a259ab90201f9981d68c5c9b8c7fb35ffd
def write_mibitiff(filepath, img_data, channel_tuples, metadata): ' Writes MIBI data to a multipage TIFF.\n\n Args:\n filepath (str):\n The path to the target file\n img_data (np.ndarray):\n Image data\n channel_tuples (iterable):\n Iterable of tuples corresponding to image channel massess and target names\n metadata (dict):\n MIBItiff specific metadata\n ' ranges = [(0, m) for m in img_data.max(axis=(0, 1))] range_dtype = _range_dtype_map(img_data.dtype) coordinates = [(286, '2i', 1, _micron_to_cm(metadata['coordinates'][0])), (287, '2i', 1, _micron_to_cm(metadata['coordinates'][1]))] resolution = (((img_data.shape[0] * 10000.0) / float(metadata['size'])), ((img_data.shape[1] * 10000.0) / float(metadata['size'])), 'cm') description = {} for (key, value) in metadata.items(): if (key in _PREFIXED_METADATA_ATTRIBUTES): description[f'mibi.{key}'] = value with TiffWriter(filepath, software='IonpathMIBIv1.0') as infile: for (index, channel_tuple) in enumerate(channel_tuples): (mass, target) = channel_tuple _metadata = description.copy() _metadata.update({'image.type': 'SIMS', 'channel.mass': int(mass), 'channel.target': target}) page_name = (285, 's', 0, '{} ({})'.format(target, mass)) min_value = (340, range_dtype, 1, ranges[index][0]) max_value = (341, range_dtype, 1, ranges[index][1]) page_tags = (coordinates + [page_name, min_value, max_value]) infile.save(img_data[(:, :, index)], compress=6, resolution=resolution, extratags=page_tags, metadata=_metadata, datetime=datetime.datetime.strptime(metadata['date'], '%Y-%m-%dT%H:%M:%S'))
Writes MIBI data to a multipage TIFF. Args: filepath (str): The path to the target file img_data (np.ndarray): Image data channel_tuples (iterable): Iterable of tuples corresponding to image channel massess and target names metadata (dict): MIBItiff specific metadata
ark/utils/tiff_utils.py
write_mibitiff
ngreenwald/segmentation
17
python
def write_mibitiff(filepath, img_data, channel_tuples, metadata): ' Writes MIBI data to a multipage TIFF.\n\n Args:\n filepath (str):\n The path to the target file\n img_data (np.ndarray):\n Image data\n channel_tuples (iterable):\n Iterable of tuples corresponding to image channel massess and target names\n metadata (dict):\n MIBItiff specific metadata\n ' ranges = [(0, m) for m in img_data.max(axis=(0, 1))] range_dtype = _range_dtype_map(img_data.dtype) coordinates = [(286, '2i', 1, _micron_to_cm(metadata['coordinates'][0])), (287, '2i', 1, _micron_to_cm(metadata['coordinates'][1]))] resolution = (((img_data.shape[0] * 10000.0) / float(metadata['size'])), ((img_data.shape[1] * 10000.0) / float(metadata['size'])), 'cm') description = {} for (key, value) in metadata.items(): if (key in _PREFIXED_METADATA_ATTRIBUTES): description[f'mibi.{key}'] = value with TiffWriter(filepath, software='IonpathMIBIv1.0') as infile: for (index, channel_tuple) in enumerate(channel_tuples): (mass, target) = channel_tuple _metadata = description.copy() _metadata.update({'image.type': 'SIMS', 'channel.mass': int(mass), 'channel.target': target}) page_name = (285, 's', 0, '{} ({})'.format(target, mass)) min_value = (340, range_dtype, 1, ranges[index][0]) max_value = (341, range_dtype, 1, ranges[index][1]) page_tags = (coordinates + [page_name, min_value, max_value]) infile.save(img_data[(:, :, index)], compress=6, resolution=resolution, extratags=page_tags, metadata=_metadata, datetime=datetime.datetime.strptime(metadata['date'], '%Y-%m-%dT%H:%M:%S'))
def write_mibitiff(filepath, img_data, channel_tuples, metadata): ' Writes MIBI data to a multipage TIFF.\n\n Args:\n filepath (str):\n The path to the target file\n img_data (np.ndarray):\n Image data\n channel_tuples (iterable):\n Iterable of tuples corresponding to image channel massess and target names\n metadata (dict):\n MIBItiff specific metadata\n ' ranges = [(0, m) for m in img_data.max(axis=(0, 1))] range_dtype = _range_dtype_map(img_data.dtype) coordinates = [(286, '2i', 1, _micron_to_cm(metadata['coordinates'][0])), (287, '2i', 1, _micron_to_cm(metadata['coordinates'][1]))] resolution = (((img_data.shape[0] * 10000.0) / float(metadata['size'])), ((img_data.shape[1] * 10000.0) / float(metadata['size'])), 'cm') description = {} for (key, value) in metadata.items(): if (key in _PREFIXED_METADATA_ATTRIBUTES): description[f'mibi.{key}'] = value with TiffWriter(filepath, software='IonpathMIBIv1.0') as infile: for (index, channel_tuple) in enumerate(channel_tuples): (mass, target) = channel_tuple _metadata = description.copy() _metadata.update({'image.type': 'SIMS', 'channel.mass': int(mass), 'channel.target': target}) page_name = (285, 's', 0, '{} ({})'.format(target, mass)) min_value = (340, range_dtype, 1, ranges[index][0]) max_value = (341, range_dtype, 1, ranges[index][1]) page_tags = (coordinates + [page_name, min_value, max_value]) infile.save(img_data[(:, :, index)], compress=6, resolution=resolution, extratags=page_tags, metadata=_metadata, datetime=datetime.datetime.strptime(metadata['date'], '%Y-%m-%dT%H:%M:%S'))<|docstring|>Writes MIBI data to a multipage TIFF. Args: filepath (str): The path to the target file img_data (np.ndarray): Image data channel_tuples (iterable): Iterable of tuples corresponding to image channel massess and target names metadata (dict): MIBItiff specific metadata<|endoftext|>
05e337c5de17023c3837fd9461f014b9cf0cba107c831998a1ab742a884732a7
def _micron_to_cm(um): ' Converts microns to a fraction tuple in cm\n ' frac = Fraction((float(um) / 10000)).limit_denominator(1000000) return (frac.numerator, frac.denominator)
Converts microns to a fraction tuple in cm
ark/utils/tiff_utils.py
_micron_to_cm
ngreenwald/segmentation
17
python
def _micron_to_cm(um): ' \n ' frac = Fraction((float(um) / 10000)).limit_denominator(1000000) return (frac.numerator, frac.denominator)
def _micron_to_cm(um): ' \n ' frac = Fraction((float(um) / 10000)).limit_denominator(1000000) return (frac.numerator, frac.denominator)<|docstring|>Converts microns to a fraction tuple in cm<|endoftext|>
e065a4c6154f63d7b18bb5b00a192feee20b0c22b1167e5e3b85dcfeaea71fc2
@_if_not_installed('embossversion') def install_emboss(env): 'EMBOSS: A high-quality package of free, Open Source software for molecular biology.\n http://emboss.sourceforge.net/\n Emboss target for platforms without packages (CentOS -- rpm systems).\n ' default_version = '6.4.0' version = env.get('tool_version', default_version) url = ('ftp://emboss.open-bio.org/pub/EMBOSS/EMBOSS-%s.tar.gz' % version) _get_install(url, env, _configure_make)
EMBOSS: A high-quality package of free, Open Source software for molecular biology. http://emboss.sourceforge.net/ Emboss target for platforms without packages (CentOS -- rpm systems).
cloudbio/custom/bio_general.py
install_emboss
afgane/cloudbiolinux
1
python
@_if_not_installed('embossversion') def install_emboss(env): 'EMBOSS: A high-quality package of free, Open Source software for molecular biology.\n http://emboss.sourceforge.net/\n Emboss target for platforms without packages (CentOS -- rpm systems).\n ' default_version = '6.4.0' version = env.get('tool_version', default_version) url = ('ftp://emboss.open-bio.org/pub/EMBOSS/EMBOSS-%s.tar.gz' % version) _get_install(url, env, _configure_make)
@_if_not_installed('embossversion') def install_emboss(env): 'EMBOSS: A high-quality package of free, Open Source software for molecular biology.\n http://emboss.sourceforge.net/\n Emboss target for platforms without packages (CentOS -- rpm systems).\n ' default_version = '6.4.0' version = env.get('tool_version', default_version) url = ('ftp://emboss.open-bio.org/pub/EMBOSS/EMBOSS-%s.tar.gz' % version) _get_install(url, env, _configure_make)<|docstring|>EMBOSS: A high-quality package of free, Open Source software for molecular biology. http://emboss.sourceforge.net/ Emboss target for platforms without packages (CentOS -- rpm systems).<|endoftext|>
84e0f570d361e55b189feaa6cf567314b5f8cd5a5a59f8d45b894a2b7f945907
@_if_not_installed('PGDSpider2.sh') def install_pgdspider(env): 'PGDSpider format conversion for population genetics programs.\n http://www.cmpg.unibe.ch/software/PGDSpider/\n ' version = '2.0.2.0' url = 'http://www.cmpg.unibe.ch/software/PGDSpider/PGDSpider_{v}.zip'.format(v=version) def _install_fn(env, install_dir): env.safe_sudo(('mv *.jar %s' % install_dir)) bin_dir = os.path.join(env.system_install, 'bin') exe_file = 'PGDSpider2.sh' jar = 'PGDSpider2.jar' sed(exe_file, jar, '{dir}/{jar}'.format(dir=install_dir, jar=jar)) run('chmod a+x {0}'.format(exe_file)) env.safe_sudo('mv {exe} {bin}'.format(exe=exe_file, bin=bin_dir)) _java_install('PGDSpider', version, url, env, install_fn=_install_fn)
PGDSpider format conversion for population genetics programs. http://www.cmpg.unibe.ch/software/PGDSpider/
cloudbio/custom/bio_general.py
install_pgdspider
afgane/cloudbiolinux
1
python
@_if_not_installed('PGDSpider2.sh') def install_pgdspider(env): 'PGDSpider format conversion for population genetics programs.\n http://www.cmpg.unibe.ch/software/PGDSpider/\n ' version = '2.0.2.0' url = 'http://www.cmpg.unibe.ch/software/PGDSpider/PGDSpider_{v}.zip'.format(v=version) def _install_fn(env, install_dir): env.safe_sudo(('mv *.jar %s' % install_dir)) bin_dir = os.path.join(env.system_install, 'bin') exe_file = 'PGDSpider2.sh' jar = 'PGDSpider2.jar' sed(exe_file, jar, '{dir}/{jar}'.format(dir=install_dir, jar=jar)) run('chmod a+x {0}'.format(exe_file)) env.safe_sudo('mv {exe} {bin}'.format(exe=exe_file, bin=bin_dir)) _java_install('PGDSpider', version, url, env, install_fn=_install_fn)
@_if_not_installed('PGDSpider2.sh') def install_pgdspider(env): 'PGDSpider format conversion for population genetics programs.\n http://www.cmpg.unibe.ch/software/PGDSpider/\n ' version = '2.0.2.0' url = 'http://www.cmpg.unibe.ch/software/PGDSpider/PGDSpider_{v}.zip'.format(v=version) def _install_fn(env, install_dir): env.safe_sudo(('mv *.jar %s' % install_dir)) bin_dir = os.path.join(env.system_install, 'bin') exe_file = 'PGDSpider2.sh' jar = 'PGDSpider2.jar' sed(exe_file, jar, '{dir}/{jar}'.format(dir=install_dir, jar=jar)) run('chmod a+x {0}'.format(exe_file)) env.safe_sudo('mv {exe} {bin}'.format(exe=exe_file, bin=bin_dir)) _java_install('PGDSpider', version, url, env, install_fn=_install_fn)<|docstring|>PGDSpider format conversion for population genetics programs. http://www.cmpg.unibe.ch/software/PGDSpider/<|endoftext|>
3d9c0fe6d3f4918231b22119eb00328644a2709ba60b75b255010f9d4d586a7b
def install_bio4j(env): 'Bio4j graph based database built on Neo4j with UniProt, GO, RefSeq and more.\n http://www.bio4j.com/\n ' version = '0.8' url = 'https://s3-eu-west-1.amazonaws.com/bio4j-public/releases/{v}/bio4j-{v}.zip'.format(v=version) def _install_fn(env, install_dir): targets = ['conf', 'doc', 'jars', 'lib', 'README'] for x in targets: env.safe_sudo('mv {0} {1}'.format(x, install_dir)) _java_install('bio4j', version, url, env, install_fn=_install_fn)
Bio4j graph based database built on Neo4j with UniProt, GO, RefSeq and more. http://www.bio4j.com/
cloudbio/custom/bio_general.py
install_bio4j
afgane/cloudbiolinux
1
python
def install_bio4j(env): 'Bio4j graph based database built on Neo4j with UniProt, GO, RefSeq and more.\n http://www.bio4j.com/\n ' version = '0.8' url = 'https://s3-eu-west-1.amazonaws.com/bio4j-public/releases/{v}/bio4j-{v}.zip'.format(v=version) def _install_fn(env, install_dir): targets = ['conf', 'doc', 'jars', 'lib', 'README'] for x in targets: env.safe_sudo('mv {0} {1}'.format(x, install_dir)) _java_install('bio4j', version, url, env, install_fn=_install_fn)
def install_bio4j(env): 'Bio4j graph based database built on Neo4j with UniProt, GO, RefSeq and more.\n http://www.bio4j.com/\n ' version = '0.8' url = 'https://s3-eu-west-1.amazonaws.com/bio4j-public/releases/{v}/bio4j-{v}.zip'.format(v=version) def _install_fn(env, install_dir): targets = ['conf', 'doc', 'jars', 'lib', 'README'] for x in targets: env.safe_sudo('mv {0} {1}'.format(x, install_dir)) _java_install('bio4j', version, url, env, install_fn=_install_fn)<|docstring|>Bio4j graph based database built on Neo4j with UniProt, GO, RefSeq and more. http://www.bio4j.com/<|endoftext|>
4f2254d22365640df37684ebfe97cbc9482517cc880b27f3f0e1fde7dd548920
@contextmanager def skip_run(flag, f): 'To skip a block of code.\n\n Parameters\n ----------\n flag : str\n skip or run.\n\n Returns\n -------\n None\n\n ' @contextmanager def check_active(): deactivated = ['skip'] if (flag in deactivated): print(('Skipping the block: ' + f)) raise SkipWith() else: print(('Running the block: ' + f)) (yield) try: (yield check_active) except SkipWith: pass
To skip a block of code. Parameters ---------- flag : str skip or run. Returns ------- None
src/utils.py
skip_run
srisadhan/speech_emotions
3
python
@contextmanager def skip_run(flag, f): 'To skip a block of code.\n\n Parameters\n ----------\n flag : str\n skip or run.\n\n Returns\n -------\n None\n\n ' @contextmanager def check_active(): deactivated = ['skip'] if (flag in deactivated): print(('Skipping the block: ' + f)) raise SkipWith() else: print(('Running the block: ' + f)) (yield) try: (yield check_active) except SkipWith: pass
@contextmanager def skip_run(flag, f): 'To skip a block of code.\n\n Parameters\n ----------\n flag : str\n skip or run.\n\n Returns\n -------\n None\n\n ' @contextmanager def check_active(): deactivated = ['skip'] if (flag in deactivated): print(('Skipping the block: ' + f)) raise SkipWith() else: print(('Running the block: ' + f)) (yield) try: (yield check_active) except SkipWith: pass<|docstring|>To skip a block of code. Parameters ---------- flag : str skip or run. Returns ------- None<|endoftext|>
30da39eafafd476e17f4fc2c24c1585515da142c1249ebdb18d46948d56db66a
def generate_2D_molecule_from_reference(smiles, num): 'generate molecules with similar connectivity with the reference molecule\n smiles: input molecule\n num: number of augmented molecules to generate\n '
generate molecules with similar connectivity with the reference molecule smiles: input molecule num: number of augmented molecules to generate
chem/molecule_generator.py
generate_2D_molecule_from_reference
LanceKnight/pretrain-gnns
0
python
def generate_2D_molecule_from_reference(smiles, num): 'generate molecules with similar connectivity with the reference molecule\n smiles: input molecule\n num: number of augmented molecules to generate\n '
def generate_2D_molecule_from_reference(smiles, num): 'generate molecules with similar connectivity with the reference molecule\n smiles: input molecule\n num: number of augmented molecules to generate\n '<|docstring|>generate molecules with similar connectivity with the reference molecule smiles: input molecule num: number of augmented molecules to generate<|endoftext|>
8908e8cec658bcd1b6cb172474889d5a363c020e9574aad7396a5937dec77a37
def unpickle(file_path): '\n Load pickled python object from file path\n ' if file_path.endswith('.gz'): f = gzip.open(file_path, 'rb') else: f = open(file_path, 'rb') unpickled = CustomUnpickler(f).load() return unpickled
Load pickled python object from file path
app/utils/models.py
unpickle
Tim-ty-tang/mlflow-fastapi-deploy
0
python
def unpickle(file_path): '\n \n ' if file_path.endswith('.gz'): f = gzip.open(file_path, 'rb') else: f = open(file_path, 'rb') unpickled = CustomUnpickler(f).load() return unpickled
def unpickle(file_path): '\n \n ' if file_path.endswith('.gz'): f = gzip.open(file_path, 'rb') else: f = open(file_path, 'rb') unpickled = CustomUnpickler(f).load() return unpickled<|docstring|>Load pickled python object from file path<|endoftext|>
bfc1721d7f3ccfb4ab5449374d412aea47977909fa5c3e81155d604b7041ac34
def tearDown(self): "\n we delete the objects from the database to make sure other tests don't fail because of them...\n " with settings.backend.transaction(): for model in self.models: settings.backend.filter(model, {}).delete()
we delete the objects from the database to make sure other tests don't fail because of them...
quantifiedcode/test/helpers.py
tearDown
marcinguy/scanmycode-ce
138
python
def tearDown(self): "\n \n " with settings.backend.transaction(): for model in self.models: settings.backend.filter(model, {}).delete()
def tearDown(self): "\n \n " with settings.backend.transaction(): for model in self.models: settings.backend.filter(model, {}).delete()<|docstring|>we delete the objects from the database to make sure other tests don't fail because of them...<|endoftext|>
a586eb82a73a89df04cfad799a29a67678b9b7803114a28f7d2554076f6e4aa3
def __init__(self, config, handlers): '\n Create a new instance of the SNMPCollector class\n ' diamond.collector.Collector.__init__(self, config, handlers)
Create a new instance of the SNMPCollector class
src/collectors/snmp/snmp.py
__init__
lixiaocheng18/ops
6
python
def __init__(self, config, handlers): '\n \n ' diamond.collector.Collector.__init__(self, config, handlers)
def __init__(self, config, handlers): '\n \n ' diamond.collector.Collector.__init__(self, config, handlers)<|docstring|>Create a new instance of the SNMPCollector class<|endoftext|>
8caaf81a435ebe57641f6d5dd74100e967e7fd042e1d31a0d4fda56f08f058db
def get_schedule(self): '\n Override SNMPCollector.get_schedule\n ' schedule = {} if (not cmdgen): self.log.error('pysnmp.entity.rfc3413.oneliner.cmdgen failed to load') return self.snmpCmdGen = cmdgen.CommandGenerator() if ('devices' in self.config): for device in self.config['devices']: c = self.config['devices'][device] task = '_'.join([self.__class__.__name__, device]) if (task in schedule): raise KeyError('Duplicate device scheduled') schedule[task] = (self.collect_snmp, (device, c['host'], int(c['port']), c['community']), int(self.config['splay']), int(self.config['interval'])) return schedule
Override SNMPCollector.get_schedule
src/collectors/snmp/snmp.py
get_schedule
lixiaocheng18/ops
6
python
def get_schedule(self): '\n \n ' schedule = {} if (not cmdgen): self.log.error('pysnmp.entity.rfc3413.oneliner.cmdgen failed to load') return self.snmpCmdGen = cmdgen.CommandGenerator() if ('devices' in self.config): for device in self.config['devices']: c = self.config['devices'][device] task = '_'.join([self.__class__.__name__, device]) if (task in schedule): raise KeyError('Duplicate device scheduled') schedule[task] = (self.collect_snmp, (device, c['host'], int(c['port']), c['community']), int(self.config['splay']), int(self.config['interval'])) return schedule
def get_schedule(self): '\n \n ' schedule = {} if (not cmdgen): self.log.error('pysnmp.entity.rfc3413.oneliner.cmdgen failed to load') return self.snmpCmdGen = cmdgen.CommandGenerator() if ('devices' in self.config): for device in self.config['devices']: c = self.config['devices'][device] task = '_'.join([self.__class__.__name__, device]) if (task in schedule): raise KeyError('Duplicate device scheduled') schedule[task] = (self.collect_snmp, (device, c['host'], int(c['port']), c['community']), int(self.config['splay']), int(self.config['interval'])) return schedule<|docstring|>Override SNMPCollector.get_schedule<|endoftext|>
2fbfc42ab63eb8f490489442d687e80b4955871bca1250de1ef350f467818a89
def get(self, oid, host, port, community): '\n Perform SNMP get for a given OID\n ' ret = {} if (not isinstance(oid, tuple)): oid = self._convert_to_oid(oid) host = socket.gethostbyname(host) snmpAuthData = cmdgen.CommunityData('agent', community) snmpTransportData = cmdgen.UdpTransportTarget((host, port), int(self.config['timeout']), int(self.config['retries'])) result = self.snmpCmdGen.getCmd(snmpAuthData, snmpTransportData, oid) varBind = result[3] for (o, v) in varBind: ret[o.prettyPrint()] = v.prettyPrint() return ret
Perform SNMP get for a given OID
src/collectors/snmp/snmp.py
get
lixiaocheng18/ops
6
python
def get(self, oid, host, port, community): '\n \n ' ret = {} if (not isinstance(oid, tuple)): oid = self._convert_to_oid(oid) host = socket.gethostbyname(host) snmpAuthData = cmdgen.CommunityData('agent', community) snmpTransportData = cmdgen.UdpTransportTarget((host, port), int(self.config['timeout']), int(self.config['retries'])) result = self.snmpCmdGen.getCmd(snmpAuthData, snmpTransportData, oid) varBind = result[3] for (o, v) in varBind: ret[o.prettyPrint()] = v.prettyPrint() return ret
def get(self, oid, host, port, community): '\n \n ' ret = {} if (not isinstance(oid, tuple)): oid = self._convert_to_oid(oid) host = socket.gethostbyname(host) snmpAuthData = cmdgen.CommunityData('agent', community) snmpTransportData = cmdgen.UdpTransportTarget((host, port), int(self.config['timeout']), int(self.config['retries'])) result = self.snmpCmdGen.getCmd(snmpAuthData, snmpTransportData, oid) varBind = result[3] for (o, v) in varBind: ret[o.prettyPrint()] = v.prettyPrint() return ret<|docstring|>Perform SNMP get for a given OID<|endoftext|>
0c7b3872dc71a8523df4cd4f6d2496cdf17649b7cdf49225f0363c703aa818ca
def walk(self, oid, host, port, community): '\n Perform an SNMP walk on a given OID\n ' ret = {} if (not isinstance(oid, tuple)): oid = self._convert_to_oid(oid) host = socket.gethostbyname(host) snmpAuthData = cmdgen.CommunityData('agent', community) snmpTransportData = cmdgen.UdpTransportTarget((host, port), int(self.config['timeout']), int(self.config['retries'])) resultTable = self.snmpCmdGen.nextCmd(snmpAuthData, snmpTransportData, oid) varBindTable = resultTable[3] for varBindTableRow in varBindTable: for (o, v) in varBindTableRow: ret[o.prettyPrint()] = v.prettyPrint() return ret
Perform an SNMP walk on a given OID
src/collectors/snmp/snmp.py
walk
lixiaocheng18/ops
6
python
def walk(self, oid, host, port, community): '\n \n ' ret = {} if (not isinstance(oid, tuple)): oid = self._convert_to_oid(oid) host = socket.gethostbyname(host) snmpAuthData = cmdgen.CommunityData('agent', community) snmpTransportData = cmdgen.UdpTransportTarget((host, port), int(self.config['timeout']), int(self.config['retries'])) resultTable = self.snmpCmdGen.nextCmd(snmpAuthData, snmpTransportData, oid) varBindTable = resultTable[3] for varBindTableRow in varBindTable: for (o, v) in varBindTableRow: ret[o.prettyPrint()] = v.prettyPrint() return ret
def walk(self, oid, host, port, community): '\n \n ' ret = {} if (not isinstance(oid, tuple)): oid = self._convert_to_oid(oid) host = socket.gethostbyname(host) snmpAuthData = cmdgen.CommunityData('agent', community) snmpTransportData = cmdgen.UdpTransportTarget((host, port), int(self.config['timeout']), int(self.config['retries'])) resultTable = self.snmpCmdGen.nextCmd(snmpAuthData, snmpTransportData, oid) varBindTable = resultTable[3] for varBindTableRow in varBindTable: for (o, v) in varBindTableRow: ret[o.prettyPrint()] = v.prettyPrint() return ret<|docstring|>Perform an SNMP walk on a given OID<|endoftext|>
e3221ee71f0a52d3f7a5ca1d0e3d04e31bdb46101eb33ead6d5d91a14f7275cb
def save_summary_to_txt(summary: Tuple[(np.ndarray, np.ndarray, np.ndarray, str, Union[(int, None)])], performance_name: str) -> None: '\n output summary to text file.\n useful when plotting data with pgfplots on overleaf.org.\n\n notes:\n 1. latex does not like spaces in file name\n ' (x, y_mean, h, label, job_id) = summary fn = (performance_name + '_') fn += label.replace('\n', '-').replace(' ', '') path = (configs.Dirs.summaries / f'{fn}.txt') if (not path.parent.exists()): path.parent.mkdir() df = pd.DataFrame(data={'mean': y_mean, 'margin_of_error': h}, index=list(x)) df.index.name = 'step' df.round(3).to_csv(path, sep=' ') print(f'Saved summary to {path}')
output summary to text file. useful when plotting data with pgfplots on overleaf.org. notes: 1. latex does not like spaces in file name
entropicstarttheory/summary.py
save_summary_to_txt
phueb/Provident
0
python
def save_summary_to_txt(summary: Tuple[(np.ndarray, np.ndarray, np.ndarray, str, Union[(int, None)])], performance_name: str) -> None: '\n output summary to text file.\n useful when plotting data with pgfplots on overleaf.org.\n\n notes:\n 1. latex does not like spaces in file name\n ' (x, y_mean, h, label, job_id) = summary fn = (performance_name + '_') fn += label.replace('\n', '-').replace(' ', ) path = (configs.Dirs.summaries / f'{fn}.txt') if (not path.parent.exists()): path.parent.mkdir() df = pd.DataFrame(data={'mean': y_mean, 'margin_of_error': h}, index=list(x)) df.index.name = 'step' df.round(3).to_csv(path, sep=' ') print(f'Saved summary to {path}')
def save_summary_to_txt(summary: Tuple[(np.ndarray, np.ndarray, np.ndarray, str, Union[(int, None)])], performance_name: str) -> None: '\n output summary to text file.\n useful when plotting data with pgfplots on overleaf.org.\n\n notes:\n 1. latex does not like spaces in file name\n ' (x, y_mean, h, label, job_id) = summary fn = (performance_name + '_') fn += label.replace('\n', '-').replace(' ', ) path = (configs.Dirs.summaries / f'{fn}.txt') if (not path.parent.exists()): path.parent.mkdir() df = pd.DataFrame(data={'mean': y_mean, 'margin_of_error': h}, index=list(x)) df.index.name = 'step' df.round(3).to_csv(path, sep=' ') print(f'Saved summary to {path}')<|docstring|>output summary to text file. useful when plotting data with pgfplots on overleaf.org. notes: 1. latex does not like spaces in file name<|endoftext|>
7b16ceb211067481a6bba7fff3b3d8d7a1e67e042df6580c0c57d5248aa3622f
def make_summary(pattern: str, param_path: Path, label: str, confidence: float) -> Tuple[(np.ndarray, np.ndarray, np.ndarray, str, Union[(int, None)])]: '\n load all csv files matching pattern and return mean and std across their contents\n ' pattern = f'{pattern}.csv' series_list = [pd.read_csv(p, index_col=0, squeeze=True) for p in param_path.rglob(pattern)] n = len(series_list) if (not series_list): raise RuntimeError(f'Did not find any csv files matching pattern="{pattern}"') concatenated_df = pd.concat(series_list, axis=1) x = concatenated_df.index.values y_mean = concatenated_df.mean(axis=1).values.flatten() y_sem = sem(concatenated_df.values, axis=1) h = (y_sem * t.ppf(((1 + confidence) / 2), (n - 1))) with (param_path / 'param2val.yaml').open('r') as f: param2val = yaml.load(f, Loader=yaml.FullLoader) params = Params.from_param2val(param2val) if (params.start != 'none'): num_probes = 0 for structure in configs.Eval.structures: probe2cat = load_probe2cat(configs.Dirs.root, structure, params.corpus) num_probes += len(probe2cat) num_start_tokens = None assert num_start_tokens num_shifted_steps = ((num_start_tokens // params.batch_size) * params.num_iterations[0]) print(f'Shifting x axis by {num_shifted_steps}') x -= num_shifted_steps if (params.probe_embeddings_info[0] is not None): start_step = params.probe_embeddings_info[2] x += start_step job_id = None return (x, y_mean, h, label, job_id)
load all csv files matching pattern and return mean and std across their contents
entropicstarttheory/summary.py
make_summary
phueb/Provident
0
python
def make_summary(pattern: str, param_path: Path, label: str, confidence: float) -> Tuple[(np.ndarray, np.ndarray, np.ndarray, str, Union[(int, None)])]: '\n \n ' pattern = f'{pattern}.csv' series_list = [pd.read_csv(p, index_col=0, squeeze=True) for p in param_path.rglob(pattern)] n = len(series_list) if (not series_list): raise RuntimeError(f'Did not find any csv files matching pattern="{pattern}"') concatenated_df = pd.concat(series_list, axis=1) x = concatenated_df.index.values y_mean = concatenated_df.mean(axis=1).values.flatten() y_sem = sem(concatenated_df.values, axis=1) h = (y_sem * t.ppf(((1 + confidence) / 2), (n - 1))) with (param_path / 'param2val.yaml').open('r') as f: param2val = yaml.load(f, Loader=yaml.FullLoader) params = Params.from_param2val(param2val) if (params.start != 'none'): num_probes = 0 for structure in configs.Eval.structures: probe2cat = load_probe2cat(configs.Dirs.root, structure, params.corpus) num_probes += len(probe2cat) num_start_tokens = None assert num_start_tokens num_shifted_steps = ((num_start_tokens // params.batch_size) * params.num_iterations[0]) print(f'Shifting x axis by {num_shifted_steps}') x -= num_shifted_steps if (params.probe_embeddings_info[0] is not None): start_step = params.probe_embeddings_info[2] x += start_step job_id = None return (x, y_mean, h, label, job_id)
def make_summary(pattern: str, param_path: Path, label: str, confidence: float) -> Tuple[(np.ndarray, np.ndarray, np.ndarray, str, Union[(int, None)])]: '\n \n ' pattern = f'{pattern}.csv' series_list = [pd.read_csv(p, index_col=0, squeeze=True) for p in param_path.rglob(pattern)] n = len(series_list) if (not series_list): raise RuntimeError(f'Did not find any csv files matching pattern="{pattern}"') concatenated_df = pd.concat(series_list, axis=1) x = concatenated_df.index.values y_mean = concatenated_df.mean(axis=1).values.flatten() y_sem = sem(concatenated_df.values, axis=1) h = (y_sem * t.ppf(((1 + confidence) / 2), (n - 1))) with (param_path / 'param2val.yaml').open('r') as f: param2val = yaml.load(f, Loader=yaml.FullLoader) params = Params.from_param2val(param2val) if (params.start != 'none'): num_probes = 0 for structure in configs.Eval.structures: probe2cat = load_probe2cat(configs.Dirs.root, structure, params.corpus) num_probes += len(probe2cat) num_start_tokens = None assert num_start_tokens num_shifted_steps = ((num_start_tokens // params.batch_size) * params.num_iterations[0]) print(f'Shifting x axis by {num_shifted_steps}') x -= num_shifted_steps if (params.probe_embeddings_info[0] is not None): start_step = params.probe_embeddings_info[2] x += start_step job_id = None return (x, y_mean, h, label, job_id)<|docstring|>load all csv files matching pattern and return mean and std across their contents<|endoftext|>
a35a0cb5a4b795f7eff969b43f17de184a4b7a737a0b22c46b6d139db447576d
def part1(points, folds): "\n Let's fold a piece of transparent paper\n 0,0 represents the top-left coordinate\n The first value, x, increases to the right.\n The second value, y, increases downward\n\n There is a list of fold instructions\n fold the paper up (for horizontal y=... lines)\n fold the paper left (for vertical x=... lines)\n\n Some of the dots might end up overlapping after the fold is complete\n Dots will never appear exactly on a fold line\n Overlapping dots - in this case, the dots merge together and become a single dot\n\n How many dots are visible after completing just the first fold instruction on your transparent paper?\n " (bigx, bigy) = (0, 0) for coords in points: x = coords[0] y = coords[1] if (x > bigx): bigx = x if (y > bigy): bigy = y grid = {} for iy in range((bigy + 1)): grid[iy] = [' ' for ix in range((bigx + 1))] for coords in points: x = coords[0] y = coords[1] grid[y][x] = '#' fold = folds[0] fold_axis = fold[0] fold_line = fold[1] if (fold_axis == 'y'): grid = fold_up_along_y(grid, fold_line) elif (fold_axis == 'x'): grid = fold_left_along_x(grid, fold_line) total_dots = 0 for line in grid: for point in grid[line]: if (point == '#'): total_dots += 1 return total_dots
Let's fold a piece of transparent paper 0,0 represents the top-left coordinate The first value, x, increases to the right. The second value, y, increases downward There is a list of fold instructions fold the paper up (for horizontal y=... lines) fold the paper left (for vertical x=... lines) Some of the dots might end up overlapping after the fold is complete Dots will never appear exactly on a fold line Overlapping dots - in this case, the dots merge together and become a single dot How many dots are visible after completing just the first fold instruction on your transparent paper?
2021/day13/day13.py
part1
jeremy-frank/advent-of-code
0
python
def part1(points, folds): "\n Let's fold a piece of transparent paper\n 0,0 represents the top-left coordinate\n The first value, x, increases to the right.\n The second value, y, increases downward\n\n There is a list of fold instructions\n fold the paper up (for horizontal y=... lines)\n fold the paper left (for vertical x=... lines)\n\n Some of the dots might end up overlapping after the fold is complete\n Dots will never appear exactly on a fold line\n Overlapping dots - in this case, the dots merge together and become a single dot\n\n How many dots are visible after completing just the first fold instruction on your transparent paper?\n " (bigx, bigy) = (0, 0) for coords in points: x = coords[0] y = coords[1] if (x > bigx): bigx = x if (y > bigy): bigy = y grid = {} for iy in range((bigy + 1)): grid[iy] = [' ' for ix in range((bigx + 1))] for coords in points: x = coords[0] y = coords[1] grid[y][x] = '#' fold = folds[0] fold_axis = fold[0] fold_line = fold[1] if (fold_axis == 'y'): grid = fold_up_along_y(grid, fold_line) elif (fold_axis == 'x'): grid = fold_left_along_x(grid, fold_line) total_dots = 0 for line in grid: for point in grid[line]: if (point == '#'): total_dots += 1 return total_dots
def part1(points, folds): "\n Let's fold a piece of transparent paper\n 0,0 represents the top-left coordinate\n The first value, x, increases to the right.\n The second value, y, increases downward\n\n There is a list of fold instructions\n fold the paper up (for horizontal y=... lines)\n fold the paper left (for vertical x=... lines)\n\n Some of the dots might end up overlapping after the fold is complete\n Dots will never appear exactly on a fold line\n Overlapping dots - in this case, the dots merge together and become a single dot\n\n How many dots are visible after completing just the first fold instruction on your transparent paper?\n " (bigx, bigy) = (0, 0) for coords in points: x = coords[0] y = coords[1] if (x > bigx): bigx = x if (y > bigy): bigy = y grid = {} for iy in range((bigy + 1)): grid[iy] = [' ' for ix in range((bigx + 1))] for coords in points: x = coords[0] y = coords[1] grid[y][x] = '#' fold = folds[0] fold_axis = fold[0] fold_line = fold[1] if (fold_axis == 'y'): grid = fold_up_along_y(grid, fold_line) elif (fold_axis == 'x'): grid = fold_left_along_x(grid, fold_line) total_dots = 0 for line in grid: for point in grid[line]: if (point == '#'): total_dots += 1 return total_dots<|docstring|>Let's fold a piece of transparent paper 0,0 represents the top-left coordinate The first value, x, increases to the right. The second value, y, increases downward There is a list of fold instructions fold the paper up (for horizontal y=... lines) fold the paper left (for vertical x=... lines) Some of the dots might end up overlapping after the fold is complete Dots will never appear exactly on a fold line Overlapping dots - in this case, the dots merge together and become a single dot How many dots are visible after completing just the first fold instruction on your transparent paper?<|endoftext|>
193a0bef3f7dd27682089215979b2daa2b568d442b9cab1d9773f7ba035979a4
def part2(points, folds): '\n Finish folding the transparent paper according to the instructions.\n The manual says the code is always eight capital letters.\n\n What code do you use to activate the infrared thermal imaging camera system?\n ' (bigx, bigy) = (0, 0) for coords in points: x = coords[0] y = coords[1] if (x > bigx): bigx = x if (y > bigy): bigy = y grid = {} for iy in range((bigy + 1)): grid[iy] = ['.' for ix in range((bigx + 1))] for coords in points: x = coords[0] y = coords[1] grid[y][x] = '#' for fold in folds: fold_axis = fold[0] fold_line = fold[1] if (fold_axis == 'y'): grid = fold_up_along_y(grid, fold_line) elif (fold_axis == 'x'): grid = fold_left_along_x(grid, fold_line) print('Part 2 - final grid:') for line in grid: print(''.join(grid[line]))
Finish folding the transparent paper according to the instructions. The manual says the code is always eight capital letters. What code do you use to activate the infrared thermal imaging camera system?
2021/day13/day13.py
part2
jeremy-frank/advent-of-code
0
python
def part2(points, folds): '\n Finish folding the transparent paper according to the instructions.\n The manual says the code is always eight capital letters.\n\n What code do you use to activate the infrared thermal imaging camera system?\n ' (bigx, bigy) = (0, 0) for coords in points: x = coords[0] y = coords[1] if (x > bigx): bigx = x if (y > bigy): bigy = y grid = {} for iy in range((bigy + 1)): grid[iy] = ['.' for ix in range((bigx + 1))] for coords in points: x = coords[0] y = coords[1] grid[y][x] = '#' for fold in folds: fold_axis = fold[0] fold_line = fold[1] if (fold_axis == 'y'): grid = fold_up_along_y(grid, fold_line) elif (fold_axis == 'x'): grid = fold_left_along_x(grid, fold_line) print('Part 2 - final grid:') for line in grid: print(.join(grid[line]))
def part2(points, folds): '\n Finish folding the transparent paper according to the instructions.\n The manual says the code is always eight capital letters.\n\n What code do you use to activate the infrared thermal imaging camera system?\n ' (bigx, bigy) = (0, 0) for coords in points: x = coords[0] y = coords[1] if (x > bigx): bigx = x if (y > bigy): bigy = y grid = {} for iy in range((bigy + 1)): grid[iy] = ['.' for ix in range((bigx + 1))] for coords in points: x = coords[0] y = coords[1] grid[y][x] = '#' for fold in folds: fold_axis = fold[0] fold_line = fold[1] if (fold_axis == 'y'): grid = fold_up_along_y(grid, fold_line) elif (fold_axis == 'x'): grid = fold_left_along_x(grid, fold_line) print('Part 2 - final grid:') for line in grid: print(.join(grid[line]))<|docstring|>Finish folding the transparent paper according to the instructions. The manual says the code is always eight capital letters. What code do you use to activate the infrared thermal imaging camera system?<|endoftext|>
42666d591f8a621cd3b1cd0938289d48e340a8e59d040bab299246bf0b5b0d41
def focusInEvent(self, focusEvent): '获得焦点事件' super(EXEdit, self).focusInEvent(focusEvent) self.parent().pen = self.parent().pen_style['press'] self.parent().update() focusEvent.accept()
获得焦点事件
TLineEdit.py
focusInEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def focusInEvent(self, focusEvent): super(EXEdit, self).focusInEvent(focusEvent) self.parent().pen = self.parent().pen_style['press'] self.parent().update() focusEvent.accept()
def focusInEvent(self, focusEvent): super(EXEdit, self).focusInEvent(focusEvent) self.parent().pen = self.parent().pen_style['press'] self.parent().update() focusEvent.accept()<|docstring|>获得焦点事件<|endoftext|>
0a108a159819be2b252d70710b89df7ee5ff7b7ec8bf99d11a1878164795d5f5
def focusOutEvent(self, focusEvent): '失去焦点事件' super(EXEdit, self).focusOutEvent(focusEvent) self.parent().pen = self.parent().pen_style['leave'] self.parent().update() focusEvent.accept()
失去焦点事件
TLineEdit.py
focusOutEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def focusOutEvent(self, focusEvent): super(EXEdit, self).focusOutEvent(focusEvent) self.parent().pen = self.parent().pen_style['leave'] self.parent().update() focusEvent.accept()
def focusOutEvent(self, focusEvent): super(EXEdit, self).focusOutEvent(focusEvent) self.parent().pen = self.parent().pen_style['leave'] self.parent().update() focusEvent.accept()<|docstring|>失去焦点事件<|endoftext|>
dae0409058ca340e475b33ca1032439697971abf0431e7ec0482a808ca945f72
def paintEvent(self, event): '绘制文本框' pat = QPainter(self) pat.setRenderHint(pat.Antialiasing) pat.setPen(self.pen) font = QFont('微软雅黑', 13, QFont.Normal) font.setPixelSize((0.45 * self.height())) fm = QFontMetricsF(font) w = fm.width(self.title) pat.setFont(font) pat.drawText(self.rect(), (Qt.AlignVCenter | Qt.AlignLeft), self.title) pat.drawLine(QPointF(w, self.height()), QPointF(self.width(), self.height())) self.Edit.setFont(font) self.Edit.setGeometry(w, (0.05 * self.height()), ((self.width() - w) - 5), (0.9 * self.height()))
绘制文本框
TLineEdit.py
paintEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def paintEvent(self, event): pat = QPainter(self) pat.setRenderHint(pat.Antialiasing) pat.setPen(self.pen) font = QFont('微软雅黑', 13, QFont.Normal) font.setPixelSize((0.45 * self.height())) fm = QFontMetricsF(font) w = fm.width(self.title) pat.setFont(font) pat.drawText(self.rect(), (Qt.AlignVCenter | Qt.AlignLeft), self.title) pat.drawLine(QPointF(w, self.height()), QPointF(self.width(), self.height())) self.Edit.setFont(font) self.Edit.setGeometry(w, (0.05 * self.height()), ((self.width() - w) - 5), (0.9 * self.height()))
def paintEvent(self, event): pat = QPainter(self) pat.setRenderHint(pat.Antialiasing) pat.setPen(self.pen) font = QFont('微软雅黑', 13, QFont.Normal) font.setPixelSize((0.45 * self.height())) fm = QFontMetricsF(font) w = fm.width(self.title) pat.setFont(font) pat.drawText(self.rect(), (Qt.AlignVCenter | Qt.AlignLeft), self.title) pat.drawLine(QPointF(w, self.height()), QPointF(self.width(), self.height())) self.Edit.setFont(font) self.Edit.setGeometry(w, (0.05 * self.height()), ((self.width() - w) - 5), (0.9 * self.height()))<|docstring|>绘制文本框<|endoftext|>
28cd4154acd53c13d447f585ec90430ec0bbebfaaa932f2eed8339c4610e5d56
def enterEvent(self, QMouseEvent): '检测鼠标是否移动至文本框并变色' self.pen = self.pen_style['enter'] self.update() QMouseEvent.accept()
检测鼠标是否移动至文本框并变色
TLineEdit.py
enterEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def enterEvent(self, QMouseEvent): self.pen = self.pen_style['enter'] self.update() QMouseEvent.accept()
def enterEvent(self, QMouseEvent): self.pen = self.pen_style['enter'] self.update() QMouseEvent.accept()<|docstring|>检测鼠标是否移动至文本框并变色<|endoftext|>
4944ca626f5043285744687932daabf81c406b37ce8c1b9be50bdcf30088b62e
def mousePressEvent(self, QMouseEvent): '按下文本框 变色' self.pen = self.pen_style['press'] self.Edit.setFocus() self.update() QMouseEvent.accept()
按下文本框 变色
TLineEdit.py
mousePressEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def mousePressEvent(self, QMouseEvent): self.pen = self.pen_style['press'] self.Edit.setFocus() self.update() QMouseEvent.accept()
def mousePressEvent(self, QMouseEvent): self.pen = self.pen_style['press'] self.Edit.setFocus() self.update() QMouseEvent.accept()<|docstring|>按下文本框 变色<|endoftext|>
de354d0bff715f770380d8dc1862afa7d78092dcd4726bb1740b546df8d9bad5
def leaveEvent(self, QMouseEvent): '未按下时移开鼠标变色' if (self.pen == self.pen_style['enter']): self.pen = self.pen_style['leave'] self.update() QMouseEvent.accept()
未按下时移开鼠标变色
TLineEdit.py
leaveEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def leaveEvent(self, QMouseEvent): if (self.pen == self.pen_style['enter']): self.pen = self.pen_style['leave'] self.update() QMouseEvent.accept()
def leaveEvent(self, QMouseEvent): if (self.pen == self.pen_style['enter']): self.pen = self.pen_style['leave'] self.update() QMouseEvent.accept()<|docstring|>未按下时移开鼠标变色<|endoftext|>
257c994be0322f75726ffc599ec8981d1534a745ac71f278dd83beb802b23e69
def focusInEvent(self, focusEvent): '获得焦点事件' self.pen = self.pen_style['press'] self.update() focusEvent.accept()
获得焦点事件
TLineEdit.py
focusInEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def focusInEvent(self, focusEvent): self.pen = self.pen_style['press'] self.update() focusEvent.accept()
def focusInEvent(self, focusEvent): self.pen = self.pen_style['press'] self.update() focusEvent.accept()<|docstring|>获得焦点事件<|endoftext|>
d1fa75b84fb1903fc91c02191c82d4a5b002863b126da16f5eeb0e9ad463bc7d
def focusOutEvent(self, focusEvent): '失去焦点事件' self.pen = self.pen_style['leave'] self.update() focusEvent.accept()
失去焦点事件
TLineEdit.py
focusOutEvent
Drelf2018/Automatic-Goodnight-Algorithm
0
python
def focusOutEvent(self, focusEvent): self.pen = self.pen_style['leave'] self.update() focusEvent.accept()
def focusOutEvent(self, focusEvent): self.pen = self.pen_style['leave'] self.update() focusEvent.accept()<|docstring|>失去焦点事件<|endoftext|>
3305efd87c8e768d50e7075c3bcd8a75ec9d45b304b35eb4531a856bf52c70e7
def __init__(self, mode='full', history=None): '\n Calculate position loss in global coordinate frame\n Target :- Global Velocity\n Prediction :- Global Velocity\n ' super(GlobalPosLoss, self).__init__() self.mse_loss = torch.nn.MSELoss(reduction='none') assert (mode in ['full', 'part']) self.mode = mode if (self.mode == 'part'): assert (history is not None) self.history = history elif (self.mode == 'full'): self.history = 1
Calculate position loss in global coordinate frame Target :- Global Velocity Prediction :- Global Velocity
ronin_3d/source/ronin_lstm_tcn.py
__init__
zju3dv/rnin-vio
10
python
def __init__(self, mode='full', history=None): '\n Calculate position loss in global coordinate frame\n Target :- Global Velocity\n Prediction :- Global Velocity\n ' super(GlobalPosLoss, self).__init__() self.mse_loss = torch.nn.MSELoss(reduction='none') assert (mode in ['full', 'part']) self.mode = mode if (self.mode == 'part'): assert (history is not None) self.history = history elif (self.mode == 'full'): self.history = 1
def __init__(self, mode='full', history=None): '\n Calculate position loss in global coordinate frame\n Target :- Global Velocity\n Prediction :- Global Velocity\n ' super(GlobalPosLoss, self).__init__() self.mse_loss = torch.nn.MSELoss(reduction='none') assert (mode in ['full', 'part']) self.mode = mode if (self.mode == 'part'): assert (history is not None) self.history = history elif (self.mode == 'full'): self.history = 1<|docstring|>Calculate position loss in global coordinate frame Target :- Global Velocity Prediction :- Global Velocity<|endoftext|>
045de4d20847a8aa5e4b285cc08f1f6e2e3d9693aa79b5b8715bcd2b57860d0f
def print_scores(scores): '"Print CV Scores in a standard format' print(f'{len(scores)} Scores min:{scores.min():.3f} max:{scores.max():.3f}') print(f'CV Mean Score: {scores.mean():.3f} +/- {scores.std():.3f}')
"Print CV Scores in a standard format
projects/titanic/titanic_helper_code.py
print_scores
sdiehl28/tutorial-jupyter-notebooks
0
python
def print_scores(scores): print(f'{len(scores)} Scores min:{scores.min():.3f} max:{scores.max():.3f}') print(f'CV Mean Score: {scores.mean():.3f} +/- {scores.std():.3f}')
def print_scores(scores): print(f'{len(scores)} Scores min:{scores.min():.3f} max:{scores.max():.3f}') print(f'CV Mean Score: {scores.mean():.3f} +/- {scores.std():.3f}')<|docstring|>"Print CV Scores in a standard format<|endoftext|>
23bff068e6522836f9a6db3ed3f55fa3b0de753f777e548c31a08dfa74d02c07
def print_grid(grid, pandas=False): 'Print Best and Return Results in a DataFrame' sd = grid.cv_results_['std_test_score'][grid.best_index_] print(f'Best: {grid.best_score_:0.3f} +/- {sd:0.3f}') for (key, value) in grid.best_params_.items(): print(f'{key}: {value}') if pandas: results = [] for i in range(len(grid.cv_results_['mean_test_score'])): score = grid.cv_results_['mean_test_score'][i] std = grid.cv_results_['std_test_score'][i] params = grid.cv_results_['params'][i] params['score'] = score params['std'] = std results.append(params) return pd.DataFrame(results)
Print Best and Return Results in a DataFrame
projects/titanic/titanic_helper_code.py
print_grid
sdiehl28/tutorial-jupyter-notebooks
0
python
def print_grid(grid, pandas=False): sd = grid.cv_results_['std_test_score'][grid.best_index_] print(f'Best: {grid.best_score_:0.3f} +/- {sd:0.3f}') for (key, value) in grid.best_params_.items(): print(f'{key}: {value}') if pandas: results = [] for i in range(len(grid.cv_results_['mean_test_score'])): score = grid.cv_results_['mean_test_score'][i] std = grid.cv_results_['std_test_score'][i] params = grid.cv_results_['params'][i] params['score'] = score params['std'] = std results.append(params) return pd.DataFrame(results)
def print_grid(grid, pandas=False): sd = grid.cv_results_['std_test_score'][grid.best_index_] print(f'Best: {grid.best_score_:0.3f} +/- {sd:0.3f}') for (key, value) in grid.best_params_.items(): print(f'{key}: {value}') if pandas: results = [] for i in range(len(grid.cv_results_['mean_test_score'])): score = grid.cv_results_['mean_test_score'][i] std = grid.cv_results_['std_test_score'][i] params = grid.cv_results_['params'][i] params['score'] = score params['std'] = std results.append(params) return pd.DataFrame(results)<|docstring|>Print Best and Return Results in a DataFrame<|endoftext|>
bc7c2ad37175257810aab01512b9968f4dc612c3a634cd4da8785e98c7aecf9c
def get_ct_v1(): 'Column Transform for Features\n\n Version 1\n * without Categorical Variable Encoding\n * uses SimpleImputer for Age\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() si = SimpleImputer() kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ss_si_pipe = Pipeline([('ss', ss), ('si', si)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ss_si_cols = ['Age'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ss_si_tr', ss_si_pipe, ss_si_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ss_si_cols) + ['is_fare_high']) + bool_cols) return (cols, ct)
Column Transform for Features Version 1 * without Categorical Variable Encoding * uses SimpleImputer for Age Returns column names and ColumnTransform instance.
projects/titanic/titanic_helper_code.py
get_ct_v1
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_ct_v1(): 'Column Transform for Features\n\n Version 1\n * without Categorical Variable Encoding\n * uses SimpleImputer for Age\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() si = SimpleImputer() kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ss_si_pipe = Pipeline([('ss', ss), ('si', si)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ss_si_cols = ['Age'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ss_si_tr', ss_si_pipe, ss_si_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ss_si_cols) + ['is_fare_high']) + bool_cols) return (cols, ct)
def get_ct_v1(): 'Column Transform for Features\n\n Version 1\n * without Categorical Variable Encoding\n * uses SimpleImputer for Age\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() si = SimpleImputer() kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ss_si_pipe = Pipeline([('ss', ss), ('si', si)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ss_si_cols = ['Age'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ss_si_tr', ss_si_pipe, ss_si_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ss_si_cols) + ['is_fare_high']) + bool_cols) return (cols, ct)<|docstring|>Column Transform for Features Version 1 * without Categorical Variable Encoding * uses SimpleImputer for Age Returns column names and ColumnTransform instance.<|endoftext|>
9c470468d3e47bdec70c58ee57c9931d698fe6c381d6d4331fc474d2ee3864cd
def get_ct_v2(): 'Column Transform for Features\n\n Version 2\n * without Categorical Variable Encoding\n * uses Wrapped IterativeImputer for Age\n\n The IterativeImputer needs many columns in order to impute well.\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare', 'family_size', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_SS_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)
Column Transform for Features Version 2 * without Categorical Variable Encoding * uses Wrapped IterativeImputer for Age The IterativeImputer needs many columns in order to impute well. Returns column names and ColumnTransform instance.
projects/titanic/titanic_helper_code.py
get_ct_v2
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_ct_v2(): 'Column Transform for Features\n\n Version 2\n * without Categorical Variable Encoding\n * uses Wrapped IterativeImputer for Age\n\n The IterativeImputer needs many columns in order to impute well.\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare', 'family_size', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_SS_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)
def get_ct_v2(): 'Column Transform for Features\n\n Version 2\n * without Categorical Variable Encoding\n * uses Wrapped IterativeImputer for Age\n\n The IterativeImputer needs many columns in order to impute well.\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare', 'family_size', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_SS_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)<|docstring|>Column Transform for Features Version 2 * without Categorical Variable Encoding * uses Wrapped IterativeImputer for Age The IterativeImputer needs many columns in order to impute well. Returns column names and ColumnTransform instance.<|endoftext|>
4687b658666944ade531430d5088f8e02339bdf4d59f0a1ec58abbfde84b281a
def get_ct_v3(): 'Column Transform for Features\n\n Version 3\n * with Categorical Variable Encoding\n * uses all columns for Wrapped IterativeImputer\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare', 'family_size', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)
Column Transform for Features Version 3 * with Categorical Variable Encoding * uses all columns for Wrapped IterativeImputer Returns column names and ColumnTransform instance.
projects/titanic/titanic_helper_code.py
get_ct_v3
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_ct_v3(): 'Column Transform for Features\n\n Version 3\n * with Categorical Variable Encoding\n * uses all columns for Wrapped IterativeImputer\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare', 'family_size', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)
def get_ct_v3(): 'Column Transform for Features\n\n Version 3\n * with Categorical Variable Encoding\n * uses all columns for Wrapped IterativeImputer\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'SibSp', 'Parch', 'Fare', 'family_size', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)<|docstring|>Column Transform for Features Version 3 * with Categorical Variable Encoding * uses all columns for Wrapped IterativeImputer Returns column names and ColumnTransform instance.<|endoftext|>
19228d38b143d5d818e83464480a580d302204683a0dbe30622f5a1b2325b1c1
def get_ct_v4(): 'Column Transform for Features\n\n Version 4\n * with Categorical Variable Encoding\n * use subset of variables for Wrapped IterativeImputer\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)
Column Transform for Features Version 4 * with Categorical Variable Encoding * use subset of variables for Wrapped IterativeImputer Returns column names and ColumnTransform instance.
projects/titanic/titanic_helper_code.py
get_ct_v4
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_ct_v4(): 'Column Transform for Features\n\n Version 4\n * with Categorical Variable Encoding\n * use subset of variables for Wrapped IterativeImputer\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)
def get_ct_v4(): 'Column Transform for Features\n\n Version 4\n * with Categorical Variable Encoding\n * use subset of variables for Wrapped IterativeImputer\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') kbin = KBinsDiscretizer(n_bins=2, encode='ordinal', strategy='quantile') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) kbin_pipe = Pipeline([('kbin', kbin)]) ss_cols = ['Pclass', 'SibSp', 'Parch', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] kbin_cols = ['Fare'] bool_cols = ['Sex', 'is_cabin_notnull', 'is_large_family', 'is_child', 'is_sibsp_zero', 'is_parch_zero', 'is_boy', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('kbin_tr', kbin_pipe, kbin_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = (((ss_cols + ['Age']) + ['is_fare_high']) + bool_cols) return (cols, ct)<|docstring|>Column Transform for Features Version 4 * with Categorical Variable Encoding * use subset of variables for Wrapped IterativeImputer Returns column names and ColumnTransform instance.<|endoftext|>
1a690c7cfca0e7277923e381d00ec864ecdd8b3d98cc3ecca260d472cd542f47
def get_ct_v5(): 'Column Transform for Features\n\n Version 5\n * with Categorical Variable Encoding\n * use subset of variables for Wrapped IterativeImputer\n * use subset of variables for prediction\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) ss_cols = ['Pclass', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] bool_cols = ['Sex', 'is_cabin_notnull', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = ((ss_cols + ['Age']) + bool_cols) return (cols, ct)
Column Transform for Features Version 5 * with Categorical Variable Encoding * use subset of variables for Wrapped IterativeImputer * use subset of variables for prediction Returns column names and ColumnTransform instance.
projects/titanic/titanic_helper_code.py
get_ct_v5
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_ct_v5(): 'Column Transform for Features\n\n Version 5\n * with Categorical Variable Encoding\n * use subset of variables for Wrapped IterativeImputer\n * use subset of variables for prediction\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) ss_cols = ['Pclass', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] bool_cols = ['Sex', 'is_cabin_notnull', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = ((ss_cols + ['Age']) + bool_cols) return (cols, ct)
def get_ct_v5(): 'Column Transform for Features\n\n Version 5\n * with Categorical Variable Encoding\n * use subset of variables for Wrapped IterativeImputer\n * use subset of variables for prediction\n\n Returns column names and ColumnTransform instance.\n ' ss = StandardScaler() ii = WrappedIterativeImputer('Age') ss_pipe = Pipeline([('ss', ss)]) ii_ss_pipe = Pipeline([('ii', ii), ('ss', ss)]) ss_cols = ['Pclass', 'Fare', 'family_size'] ii_ss_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] bool_cols = ['Sex', 'is_cabin_notnull', 'Port_C', 'Port_Q', 'Port_S', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] transformers = [('ss_tr', ss_pipe, ss_cols), ('ii_ss_tr', ii_ss_pipe, ii_ss_cols), ('as_is', 'passthrough', bool_cols)] ct = ColumnTransformer(transformers=transformers) cols = ((ss_cols + ['Age']) + bool_cols) return (cols, ct)<|docstring|>Column Transform for Features Version 5 * with Categorical Variable Encoding * use subset of variables for Wrapped IterativeImputer * use subset of variables for prediction Returns column names and ColumnTransform instance.<|endoftext|>
07f2d7158dfe8800c1e70135bcf012dd99a9743140f1221dca75cf943fcea60e
def get_Xy_v1(filename='./data/train.csv'): 'Data Encoding\n\n Version 1\n * Pclass and Sex encoded as 1/0\n ' all_data = pd.read_csv(filename) X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) drop_columns = ['PassengerId', 'Name', 'Age', 'SibSp', 'Parch', 'Fare', 'Ticket', 'Cabin', 'Embarked'] X = X.drop(drop_columns, axis=1) return (X, y)
Data Encoding Version 1 * Pclass and Sex encoded as 1/0
projects/titanic/titanic_helper_code.py
get_Xy_v1
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_Xy_v1(filename='./data/train.csv'): 'Data Encoding\n\n Version 1\n * Pclass and Sex encoded as 1/0\n ' all_data = pd.read_csv(filename) X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) drop_columns = ['PassengerId', 'Name', 'Age', 'SibSp', 'Parch', 'Fare', 'Ticket', 'Cabin', 'Embarked'] X = X.drop(drop_columns, axis=1) return (X, y)
def get_Xy_v1(filename='./data/train.csv'): 'Data Encoding\n\n Version 1\n * Pclass and Sex encoded as 1/0\n ' all_data = pd.read_csv(filename) X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) drop_columns = ['PassengerId', 'Name', 'Age', 'SibSp', 'Parch', 'Fare', 'Ticket', 'Cabin', 'Embarked'] X = X.drop(drop_columns, axis=1) return (X, y)<|docstring|>Data Encoding Version 1 * Pclass and Sex encoded as 1/0<|endoftext|>
c243d257d2399a305851b49414f6fcbe56179ca3586ef4e4d96449a44b65cbe2
def get_Xy_v2(filename='./data/train.csv'): 'Data Encoding\n\n Version 2\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n ' all_data = pd.read_csv(filename) X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Cabin', 'Embarked'] X = X.drop(drop_columns, axis=1) return (X, y)
Data Encoding Version 2 * Pclass and Sex encoded as 1/0 * Age, Fare, SibSp, Parch
projects/titanic/titanic_helper_code.py
get_Xy_v2
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_Xy_v2(filename='./data/train.csv'): 'Data Encoding\n\n Version 2\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n ' all_data = pd.read_csv(filename) X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Cabin', 'Embarked'] X = X.drop(drop_columns, axis=1) return (X, y)
def get_Xy_v2(filename='./data/train.csv'): 'Data Encoding\n\n Version 2\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n ' all_data = pd.read_csv(filename) X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Cabin', 'Embarked'] X = X.drop(drop_columns, axis=1) return (X, y)<|docstring|>Data Encoding Version 2 * Pclass and Sex encoded as 1/0 * Age, Fare, SibSp, Parch<|endoftext|>
cb13bf30819921dd6bfd32df9109151c60ad597daf3ba630a7e1aa0c2e2cb087
def get_Xy_v3(filename='./data/train.csv'): 'Data Encoding\n\n Version 3\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n * family_size, is_cabin_notnull, is_large_family\n * is_child, is_boy, is_sibsp_zero, is_parch_zero\n ' all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() X['is_large_family'] = (X['family_size'] > 4) X['is_sibsp_zero'] = (X['SibSp'] == 0) X['is_parch_zero'] = (X['Parch'] == 0) X['is_child'] = (X['Age'] < 18) X['is_boy'] = ((X['Age'] < 18) & (X['Sex'] == 0)) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
Data Encoding Version 3 * Pclass and Sex encoded as 1/0 * Age, Fare, SibSp, Parch * family_size, is_cabin_notnull, is_large_family * is_child, is_boy, is_sibsp_zero, is_parch_zero
projects/titanic/titanic_helper_code.py
get_Xy_v3
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_Xy_v3(filename='./data/train.csv'): 'Data Encoding\n\n Version 3\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n * family_size, is_cabin_notnull, is_large_family\n * is_child, is_boy, is_sibsp_zero, is_parch_zero\n ' all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() X['is_large_family'] = (X['family_size'] > 4) X['is_sibsp_zero'] = (X['SibSp'] == 0) X['is_parch_zero'] = (X['Parch'] == 0) X['is_child'] = (X['Age'] < 18) X['is_boy'] = ((X['Age'] < 18) & (X['Sex'] == 0)) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
def get_Xy_v3(filename='./data/train.csv'): 'Data Encoding\n\n Version 3\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n * family_size, is_cabin_notnull, is_large_family\n * is_child, is_boy, is_sibsp_zero, is_parch_zero\n ' all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() X['is_large_family'] = (X['family_size'] > 4) X['is_sibsp_zero'] = (X['SibSp'] == 0) X['is_parch_zero'] = (X['Parch'] == 0) X['is_child'] = (X['Age'] < 18) X['is_boy'] = ((X['Age'] < 18) & (X['Sex'] == 0)) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)<|docstring|>Data Encoding Version 3 * Pclass and Sex encoded as 1/0 * Age, Fare, SibSp, Parch * family_size, is_cabin_notnull, is_large_family * is_child, is_boy, is_sibsp_zero, is_parch_zero<|endoftext|>
554475396e7ea05abe7a50c30f759c19bb8103a0bb6ba1889f0b092f70a9d034
def get_Xy_v4(filename='./data/train.csv'): 'Data Encoding\n\n Version 4\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n * family_size, is_cabin_notnull, is_large_family\n * is_child, is_boy, is_sibsp_zero, is_parch_zero\n * extract Title and dummy encode it\n * dummy encode Embarked\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() X['is_large_family'] = (X['family_size'] > 4) X['is_sibsp_zero'] = (X['SibSp'] == 0) X['is_parch_zero'] = (X['Parch'] == 0) X['is_child'] = (X['Age'] < 18) X['is_boy'] = ((X['Age'] < 18) & (X['Sex'] == 0)) title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
Data Encoding Version 4 * Pclass and Sex encoded as 1/0 * Age, Fare, SibSp, Parch * family_size, is_cabin_notnull, is_large_family * is_child, is_boy, is_sibsp_zero, is_parch_zero * extract Title and dummy encode it * dummy encode Embarked
projects/titanic/titanic_helper_code.py
get_Xy_v4
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_Xy_v4(filename='./data/train.csv'): 'Data Encoding\n\n Version 4\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n * family_size, is_cabin_notnull, is_large_family\n * is_child, is_boy, is_sibsp_zero, is_parch_zero\n * extract Title and dummy encode it\n * dummy encode Embarked\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() X['is_large_family'] = (X['family_size'] > 4) X['is_sibsp_zero'] = (X['SibSp'] == 0) X['is_parch_zero'] = (X['Parch'] == 0) X['is_child'] = (X['Age'] < 18) X['is_boy'] = ((X['Age'] < 18) & (X['Sex'] == 0)) title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
def get_Xy_v4(filename='./data/train.csv'): 'Data Encoding\n\n Version 4\n * Pclass and Sex encoded as 1/0\n * Age, Fare, SibSp, Parch\n * family_size, is_cabin_notnull, is_large_family\n * is_child, is_boy, is_sibsp_zero, is_parch_zero\n * extract Title and dummy encode it\n * dummy encode Embarked\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() X['is_large_family'] = (X['family_size'] > 4) X['is_sibsp_zero'] = (X['SibSp'] == 0) X['is_parch_zero'] = (X['Parch'] == 0) X['is_child'] = (X['Age'] < 18) X['is_boy'] = ((X['Age'] < 18) & (X['Sex'] == 0)) title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)<|docstring|>Data Encoding Version 4 * Pclass and Sex encoded as 1/0 * Age, Fare, SibSp, Parch * family_size, is_cabin_notnull, is_large_family * is_child, is_boy, is_sibsp_zero, is_parch_zero * extract Title and dummy encode it * dummy encode Embarked<|endoftext|>
a2997d8231d6a9b2f7e6c1fd68cc11a85b5540bb5788112cdecce919b7cdfc58
def get_Xy_v5(filename='./data/train.csv'): 'Data Encoding\n\n Version 5 -- Reduced set of features\n * Pclass and Sex encoded as 1/0\n * Age, Fare\n * extract Title and dummy encode it\n * dummy encode Embarked\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'SibSp', 'Parch', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
Data Encoding Version 5 -- Reduced set of features * Pclass and Sex encoded as 1/0 * Age, Fare * extract Title and dummy encode it * dummy encode Embarked
projects/titanic/titanic_helper_code.py
get_Xy_v5
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_Xy_v5(filename='./data/train.csv'): 'Data Encoding\n\n Version 5 -- Reduced set of features\n * Pclass and Sex encoded as 1/0\n * Age, Fare\n * extract Title and dummy encode it\n * dummy encode Embarked\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'SibSp', 'Parch', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
def get_Xy_v5(filename='./data/train.csv'): 'Data Encoding\n\n Version 5 -- Reduced set of features\n * Pclass and Sex encoded as 1/0\n * Age, Fare\n * extract Title and dummy encode it\n * dummy encode Embarked\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'SibSp', 'Parch', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)<|docstring|>Data Encoding Version 5 -- Reduced set of features * Pclass and Sex encoded as 1/0 * Age, Fare * extract Title and dummy encode it * dummy encode Embarked<|endoftext|>
1c1f05390b2593916fe96414d32e5012ebcd1f7a98cc49699782c6f92d31e74d
def get_Xy_v6(filename='./data/train.csv'): 'Data Encoding\n\n Version 5\n * same as version 4 except encode 3rd class as the number 4\n * to better reflect the added difficultly of being in 3rd class\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['Pclass'] = X['Pclass'].replace({1: 1, 2: 2, 3: 4}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'SibSp', 'Parch', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
Data Encoding Version 5 * same as version 4 except encode 3rd class as the number 4 * to better reflect the added difficultly of being in 3rd class
projects/titanic/titanic_helper_code.py
get_Xy_v6
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_Xy_v6(filename='./data/train.csv'): 'Data Encoding\n\n Version 5\n * same as version 4 except encode 3rd class as the number 4\n * to better reflect the added difficultly of being in 3rd class\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['Pclass'] = X['Pclass'].replace({1: 1, 2: 2, 3: 4}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'SibSp', 'Parch', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)
def get_Xy_v6(filename='./data/train.csv'): 'Data Encoding\n\n Version 5\n * same as version 4 except encode 3rd class as the number 4\n * to better reflect the added difficultly of being in 3rd class\n ' def extract_title(x): title = x.split(',')[1].split('.')[0].strip() if (title not in ['Mr', 'Miss', 'Mrs', 'Master']): title = 'Other' return title all_data = pd.read_csv('./data/train.csv') X = all_data.drop('Survived', axis=1) y = all_data['Survived'] X['Sex'] = X['Sex'].replace({'female': 1, 'male': 0}) X['Pclass'] = X['Pclass'].replace({1: 1, 2: 2, 3: 4}) X['family_size'] = ((X['SibSp'] + X['Parch']) + 1) X['is_cabin_notnull'] = X['Cabin'].notnull() title = X['Name'].apply(extract_title) dummy_title = pd.get_dummies(title, prefix='Title') dummy_embarked = pd.get_dummies(X['Embarked'], prefix='Port') X = pd.concat([X, dummy_embarked, dummy_title], axis=1) drop_columns = ['PassengerId', 'Name', 'SibSp', 'Parch', 'Ticket', 'Embarked', 'Cabin'] X = X.drop(drop_columns, axis=1) return (X, y)<|docstring|>Data Encoding Version 5 * same as version 4 except encode 3rd class as the number 4 * to better reflect the added difficultly of being in 3rd class<|endoftext|>
141754ab1cbad4c2a168a4c9aa2087d0ce7918e12abf53e8e96aeb263e345947
def get_ct_bycolumn(cols): 'Column Transform for Features\n\n ' ii = WrappedIterativeImputer('Age') ii_pipe = Pipeline([('ii', ii)]) ii_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] if ('Age' in cols): cols = cols.copy() cols.remove('Age') transformers = [('ii_tr', ii_pipe, ii_cols), ('as_is', 'passthrough', cols)] return_cols = (['Age'] + cols) else: transformers = [('as_is', 'passthrough', cols)] return_cols = cols ct = ColumnTransformer(transformers=transformers) return (return_cols, ct)
Column Transform for Features
projects/titanic/titanic_helper_code.py
get_ct_bycolumn
sdiehl28/tutorial-jupyter-notebooks
0
python
def get_ct_bycolumn(cols): '\n\n ' ii = WrappedIterativeImputer('Age') ii_pipe = Pipeline([('ii', ii)]) ii_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] if ('Age' in cols): cols = cols.copy() cols.remove('Age') transformers = [('ii_tr', ii_pipe, ii_cols), ('as_is', 'passthrough', cols)] return_cols = (['Age'] + cols) else: transformers = [('as_is', 'passthrough', cols)] return_cols = cols ct = ColumnTransformer(transformers=transformers) return (return_cols, ct)
def get_ct_bycolumn(cols): '\n\n ' ii = WrappedIterativeImputer('Age') ii_pipe = Pipeline([('ii', ii)]) ii_cols = ['Pclass', 'Sex', 'Age', 'Title_Master', 'Title_Miss', 'Title_Mr', 'Title_Mrs', 'Title_Other'] if ('Age' in cols): cols = cols.copy() cols.remove('Age') transformers = [('ii_tr', ii_pipe, ii_cols), ('as_is', 'passthrough', cols)] return_cols = (['Age'] + cols) else: transformers = [('as_is', 'passthrough', cols)] return_cols = cols ct = ColumnTransformer(transformers=transformers) return (return_cols, ct)<|docstring|>Column Transform for Features<|endoftext|>
1b238618bd053207f4a5af61b085f3855c2c8eb843f78926ba7d0a9332e6fadd
def wma(arg, window): 'WMA: Weighted Moving Average.\n\n Params:\n arg (Series): Time series data such as close prices.\n\n window (int): Moving average window size.\n\n Returns:\n Series: Weighted moving average of arg.\n ' values = arg.values wma = [] bar_count = len(arg) nan_count = (window - 1) if (bar_count < nan_count): nan_count = bar_count for i in range(nan_count): wma.append(np.nan) div = (((window + 1) * window) / 2.0) for i in range((window - 1), bar_count): sum = 0 for j in range(window): sum += (values[(i - j)] * (window - j)) wma.append((sum / div)) return Series(data=wma, name=('wma' + str(window)), index=arg.index)
WMA: Weighted Moving Average. Params: arg (Series): Time series data such as close prices. window (int): Moving average window size. Returns: Series: Weighted moving average of arg.
wma.py
wma
guiyanzhong/pyta
7
python
def wma(arg, window): 'WMA: Weighted Moving Average.\n\n Params:\n arg (Series): Time series data such as close prices.\n\n window (int): Moving average window size.\n\n Returns:\n Series: Weighted moving average of arg.\n ' values = arg.values wma = [] bar_count = len(arg) nan_count = (window - 1) if (bar_count < nan_count): nan_count = bar_count for i in range(nan_count): wma.append(np.nan) div = (((window + 1) * window) / 2.0) for i in range((window - 1), bar_count): sum = 0 for j in range(window): sum += (values[(i - j)] * (window - j)) wma.append((sum / div)) return Series(data=wma, name=('wma' + str(window)), index=arg.index)
def wma(arg, window): 'WMA: Weighted Moving Average.\n\n Params:\n arg (Series): Time series data such as close prices.\n\n window (int): Moving average window size.\n\n Returns:\n Series: Weighted moving average of arg.\n ' values = arg.values wma = [] bar_count = len(arg) nan_count = (window - 1) if (bar_count < nan_count): nan_count = bar_count for i in range(nan_count): wma.append(np.nan) div = (((window + 1) * window) / 2.0) for i in range((window - 1), bar_count): sum = 0 for j in range(window): sum += (values[(i - j)] * (window - j)) wma.append((sum / div)) return Series(data=wma, name=('wma' + str(window)), index=arg.index)<|docstring|>WMA: Weighted Moving Average. Params: arg (Series): Time series data such as close prices. window (int): Moving average window size. Returns: Series: Weighted moving average of arg.<|endoftext|>
93f2d3c1009123ccd5f8ffc2e2f3a75fcf7a72493339827b7528c76461e0fa6c
def test_wma(closes): 'WMA test function.' wma5 = wma(closes, 5) wma10 = wma(closes, 10) data = pd.concat([closes, wma5, wma10], axis=1) data.plot(title='WMA Chart') plt.show()
WMA test function.
wma.py
test_wma
guiyanzhong/pyta
7
python
def test_wma(closes): wma5 = wma(closes, 5) wma10 = wma(closes, 10) data = pd.concat([closes, wma5, wma10], axis=1) data.plot(title='WMA Chart') plt.show()
def test_wma(closes): wma5 = wma(closes, 5) wma10 = wma(closes, 10) data = pd.concat([closes, wma5, wma10], axis=1) data.plot(title='WMA Chart') plt.show()<|docstring|>WMA test function.<|endoftext|>
fe00895ce4cdcd4d4b018f5b9ca41b74865a7a36bd939c014f7c356d20f8b729
def nearest_neighbours(x, y, precision=5): "\n Vector between 2 closest points. If there's a tie (there\n likely will be), then we get the first point with a minimally\n nearest neighbour (and its first such neighbour).\n \n The precision cut-off is needed to deal with floating-point\n imprecision.\n " X = np.array([x, y]).T D = sd.squareform(sd.pdist(X)) D[(D == 0)] = np.inf D = np.round(D, decimals=precision) dmin = np.argmin(D) p = X[(dmin // len(x))] q = X[(dmin % len(x))] return (p - q)
Vector between 2 closest points. If there's a tie (there likely will be), then we get the first point with a minimally nearest neighbour (and its first such neighbour). The precision cut-off is needed to deal with floating-point imprecision.
gio/xy_to_grid.py
nearest_neighbours
agilescientific/gio
1
python
def nearest_neighbours(x, y, precision=5): "\n Vector between 2 closest points. If there's a tie (there\n likely will be), then we get the first point with a minimally\n nearest neighbour (and its first such neighbour).\n \n The precision cut-off is needed to deal with floating-point\n imprecision.\n " X = np.array([x, y]).T D = sd.squareform(sd.pdist(X)) D[(D == 0)] = np.inf D = np.round(D, decimals=precision) dmin = np.argmin(D) p = X[(dmin // len(x))] q = X[(dmin % len(x))] return (p - q)
def nearest_neighbours(x, y, precision=5): "\n Vector between 2 closest points. If there's a tie (there\n likely will be), then we get the first point with a minimally\n nearest neighbour (and its first such neighbour).\n \n The precision cut-off is needed to deal with floating-point\n imprecision.\n " X = np.array([x, y]).T D = sd.squareform(sd.pdist(X)) D[(D == 0)] = np.inf D = np.round(D, decimals=precision) dmin = np.argmin(D) p = X[(dmin // len(x))] q = X[(dmin % len(x))] return (p - q)<|docstring|>Vector between 2 closest points. If there's a tie (there likely will be), then we get the first point with a minimally nearest neighbour (and its first such neighbour). The precision cut-off is needed to deal with floating-point imprecision.<|endoftext|>
b898b0e9011affc283785c3a32f4554e19329175c332d54e6da0e299eae7aa51
def rectify(X, vector): '\n Rotate the geometry X, which should have columns\n (x, y, data). Data can be anything, just ones if\n you only have points to transform.\n ' θ = np.angle(complex(*vector)) A = np.array([[np.cos(θ), (- np.sin(θ)), 0], [np.sin(θ), np.cos(θ), 0], [0, 0, 1]]) return (X @ A)
Rotate the geometry X, which should have columns (x, y, data). Data can be anything, just ones if you only have points to transform.
gio/xy_to_grid.py
rectify
agilescientific/gio
1
python
def rectify(X, vector): '\n Rotate the geometry X, which should have columns\n (x, y, data). Data can be anything, just ones if\n you only have points to transform.\n ' θ = np.angle(complex(*vector)) A = np.array([[np.cos(θ), (- np.sin(θ)), 0], [np.sin(θ), np.cos(θ), 0], [0, 0, 1]]) return (X @ A)
def rectify(X, vector): '\n Rotate the geometry X, which should have columns\n (x, y, data). Data can be anything, just ones if\n you only have points to transform.\n ' θ = np.angle(complex(*vector)) A = np.array([[np.cos(θ), (- np.sin(θ)), 0], [np.sin(θ), np.cos(θ), 0], [0, 0, 1]]) return (X @ A)<|docstring|>Rotate the geometry X, which should have columns (x, y, data). Data can be anything, just ones if you only have points to transform.<|endoftext|>
cef7fe19862edc5433da18f1ee78da5ae8020724b28091616323dd11e6a9c727
def parabolic(f, x): '\n Quadratic interpolation for estimating the true position of an\n inter-sample maximum when nearby samples are known.\n \n f is a vector and x is an index for that vector.\n \n Returns (vx, vy), the coordinates of the vertex of a parabola that goes\n through point x and its two neighbors.\n \n Example:\n Defining a vector f with a local maximum at index 3 (= 6), find local\n maximum if points 2, 3, and 4 actually defined a parabola.\n \n >>> import numpy as np\n >>> f = [2, 3, 1, 6, 4, 2, 3, 1]\n >>> parabolic(f, np.argmax(f))\n (3.2142857142857144, 6.1607142857142856)\n ' xv = ((((1 / 2) * (f[(x - 1)] - f[(x + 1)])) / ((f[(x - 1)] - (2 * f[x])) + f[(x + 1)])) + x) yv = (f[x] - (((1 / 4) * (f[(x - 1)] - f[(x + 1)])) * (xv - x))) return (xv, yv)
Quadratic interpolation for estimating the true position of an inter-sample maximum when nearby samples are known. f is a vector and x is an index for that vector. Returns (vx, vy), the coordinates of the vertex of a parabola that goes through point x and its two neighbors. Example: Defining a vector f with a local maximum at index 3 (= 6), find local maximum if points 2, 3, and 4 actually defined a parabola. >>> import numpy as np >>> f = [2, 3, 1, 6, 4, 2, 3, 1] >>> parabolic(f, np.argmax(f)) (3.2142857142857144, 6.1607142857142856)
gio/xy_to_grid.py
parabolic
agilescientific/gio
1
python
def parabolic(f, x): '\n Quadratic interpolation for estimating the true position of an\n inter-sample maximum when nearby samples are known.\n \n f is a vector and x is an index for that vector.\n \n Returns (vx, vy), the coordinates of the vertex of a parabola that goes\n through point x and its two neighbors.\n \n Example:\n Defining a vector f with a local maximum at index 3 (= 6), find local\n maximum if points 2, 3, and 4 actually defined a parabola.\n \n >>> import numpy as np\n >>> f = [2, 3, 1, 6, 4, 2, 3, 1]\n >>> parabolic(f, np.argmax(f))\n (3.2142857142857144, 6.1607142857142856)\n ' xv = ((((1 / 2) * (f[(x - 1)] - f[(x + 1)])) / ((f[(x - 1)] - (2 * f[x])) + f[(x + 1)])) + x) yv = (f[x] - (((1 / 4) * (f[(x - 1)] - f[(x + 1)])) * (xv - x))) return (xv, yv)
def parabolic(f, x): '\n Quadratic interpolation for estimating the true position of an\n inter-sample maximum when nearby samples are known.\n \n f is a vector and x is an index for that vector.\n \n Returns (vx, vy), the coordinates of the vertex of a parabola that goes\n through point x and its two neighbors.\n \n Example:\n Defining a vector f with a local maximum at index 3 (= 6), find local\n maximum if points 2, 3, and 4 actually defined a parabola.\n \n >>> import numpy as np\n >>> f = [2, 3, 1, 6, 4, 2, 3, 1]\n >>> parabolic(f, np.argmax(f))\n (3.2142857142857144, 6.1607142857142856)\n ' xv = ((((1 / 2) * (f[(x - 1)] - f[(x + 1)])) / ((f[(x - 1)] - (2 * f[x])) + f[(x + 1)])) + x) yv = (f[x] - (((1 / 4) * (f[(x - 1)] - f[(x + 1)])) * (xv - x))) return (xv, yv)<|docstring|>Quadratic interpolation for estimating the true position of an inter-sample maximum when nearby samples are known. f is a vector and x is an index for that vector. Returns (vx, vy), the coordinates of the vertex of a parabola that goes through point x and its two neighbors. Example: Defining a vector f with a local maximum at index 3 (= 6), find local maximum if points 2, 3, and 4 actually defined a parabola. >>> import numpy as np >>> f = [2, 3, 1, 6, 4, 2, 3, 1] >>> parabolic(f, np.argmax(f)) (3.2142857142857144, 6.1607142857142856)<|endoftext|>
eb6f096c915576b514156fc070db90e84084addcf1535ac59647831e69f9c0fb
def get_intervals(x): '\n From an unsorted and possibly sparse collection\n of 1D coordinates, compute the number of bins and\n the distance between bin centres.\n \n Default is nearest-metre precision, which should be\n fine for most seismic surveys.\n ' x = np.asanyarray(x) assert (x.ndim == 1), 'Array must be 1D.' (xmax, xmin) = (x.max(), x.min()) est = np.sqrt(x.size).astype(int) N = max((20 * est), 2500) pad = ((xmax - xmin) / est) kde = gaussian_kde(x, bw_method=0.005) x_eval = np.linspace((xmin - pad), (xmax + pad), N) x_kde = kde.evaluate(x_eval) (peaks, _) = find_peaks(x_kde) n_peaks = len(peaks) (x_pos, _) = parabolic(x_kde, peaks) M = x_eval.size idx = np.linspace(0, (M - 1), M) f = interp1d(idx, x_eval, kind='linear') x_best = f(x_pos) vals = np.sort(np.diff(x_best)) trim = np.mean(vals[(n_peaks // 4):((- n_peaks) // 4)]) minn = vals[0] dx = (trim if ((trim - minn) < 1) else minn) dx = np.round(dx, 2) Nx = (1 + np.round(((np.max(x) - np.min(x)) / dx), decimals=0).astype(int)) return (Nx, dx)
From an unsorted and possibly sparse collection of 1D coordinates, compute the number of bins and the distance between bin centres. Default is nearest-metre precision, which should be fine for most seismic surveys.
gio/xy_to_grid.py
get_intervals
agilescientific/gio
1
python
def get_intervals(x): '\n From an unsorted and possibly sparse collection\n of 1D coordinates, compute the number of bins and\n the distance between bin centres.\n \n Default is nearest-metre precision, which should be\n fine for most seismic surveys.\n ' x = np.asanyarray(x) assert (x.ndim == 1), 'Array must be 1D.' (xmax, xmin) = (x.max(), x.min()) est = np.sqrt(x.size).astype(int) N = max((20 * est), 2500) pad = ((xmax - xmin) / est) kde = gaussian_kde(x, bw_method=0.005) x_eval = np.linspace((xmin - pad), (xmax + pad), N) x_kde = kde.evaluate(x_eval) (peaks, _) = find_peaks(x_kde) n_peaks = len(peaks) (x_pos, _) = parabolic(x_kde, peaks) M = x_eval.size idx = np.linspace(0, (M - 1), M) f = interp1d(idx, x_eval, kind='linear') x_best = f(x_pos) vals = np.sort(np.diff(x_best)) trim = np.mean(vals[(n_peaks // 4):((- n_peaks) // 4)]) minn = vals[0] dx = (trim if ((trim - minn) < 1) else minn) dx = np.round(dx, 2) Nx = (1 + np.round(((np.max(x) - np.min(x)) / dx), decimals=0).astype(int)) return (Nx, dx)
def get_intervals(x): '\n From an unsorted and possibly sparse collection\n of 1D coordinates, compute the number of bins and\n the distance between bin centres.\n \n Default is nearest-metre precision, which should be\n fine for most seismic surveys.\n ' x = np.asanyarray(x) assert (x.ndim == 1), 'Array must be 1D.' (xmax, xmin) = (x.max(), x.min()) est = np.sqrt(x.size).astype(int) N = max((20 * est), 2500) pad = ((xmax - xmin) / est) kde = gaussian_kde(x, bw_method=0.005) x_eval = np.linspace((xmin - pad), (xmax + pad), N) x_kde = kde.evaluate(x_eval) (peaks, _) = find_peaks(x_kde) n_peaks = len(peaks) (x_pos, _) = parabolic(x_kde, peaks) M = x_eval.size idx = np.linspace(0, (M - 1), M) f = interp1d(idx, x_eval, kind='linear') x_best = f(x_pos) vals = np.sort(np.diff(x_best)) trim = np.mean(vals[(n_peaks // 4):((- n_peaks) // 4)]) minn = vals[0] dx = (trim if ((trim - minn) < 1) else minn) dx = np.round(dx, 2) Nx = (1 + np.round(((np.max(x) - np.min(x)) / dx), decimals=0).astype(int)) return (Nx, dx)<|docstring|>From an unsorted and possibly sparse collection of 1D coordinates, compute the number of bins and the distance between bin centres. Default is nearest-metre precision, which should be fine for most seismic surveys.<|endoftext|>
045777fc3f0eb08be4185cdf1567b89cfeac1a3dd11d4a1742b1fc1e18dbb06a
def xy_to_grid(x, y, data, compute_array=False): '\n Bin a bunch of unsorted (x, y) datapoints into a regular grid.\n\n Returns:\n tuple:\n\n - arr (ndarray): The binned data.\n - (dx, dy): The spacing between bins in the x and y directions.\n - (addx, addy): The destination of each data point into the grid;\n n.b. this is given in NumPy (row, col) format.\n ' X = np.vstack([x, y, data]).T v = nearest_neighbours(x, y) p = rectify(X, v) (x_new, y_new, _) = p.T (Nx, dx) = get_intervals(x_new) (Ny, dy) = get_intervals(y_new) xedge = np.linspace((np.min(x_new) - (dx / 2)), (np.max(x_new) + (dx / 2)), (Nx + 1)) yedge = np.linspace((np.min(y_new) - (dy / 2)), (np.max(y_new) + (dy / 2)), (Ny + 1)) assert np.all((data > 0)), 'Data are not strictly positive; see docs.' if compute_array: (arr, *_) = np.histogram2d(y_new, x_new, bins=[yedge, xedge], weights=data) arr[(arr == 0)] = np.nan else: arr = None addx = (np.digitize(x_new, xedge) - 1) addy = (np.digitize(y_new, yedge) - 1) addy = (Ny - addy) return (arr, (dx, dy), (addy, addx))
Bin a bunch of unsorted (x, y) datapoints into a regular grid. Returns: tuple: - arr (ndarray): The binned data. - (dx, dy): The spacing between bins in the x and y directions. - (addx, addy): The destination of each data point into the grid; n.b. this is given in NumPy (row, col) format.
gio/xy_to_grid.py
xy_to_grid
agilescientific/gio
1
python
def xy_to_grid(x, y, data, compute_array=False): '\n Bin a bunch of unsorted (x, y) datapoints into a regular grid.\n\n Returns:\n tuple:\n\n - arr (ndarray): The binned data.\n - (dx, dy): The spacing between bins in the x and y directions.\n - (addx, addy): The destination of each data point into the grid;\n n.b. this is given in NumPy (row, col) format.\n ' X = np.vstack([x, y, data]).T v = nearest_neighbours(x, y) p = rectify(X, v) (x_new, y_new, _) = p.T (Nx, dx) = get_intervals(x_new) (Ny, dy) = get_intervals(y_new) xedge = np.linspace((np.min(x_new) - (dx / 2)), (np.max(x_new) + (dx / 2)), (Nx + 1)) yedge = np.linspace((np.min(y_new) - (dy / 2)), (np.max(y_new) + (dy / 2)), (Ny + 1)) assert np.all((data > 0)), 'Data are not strictly positive; see docs.' if compute_array: (arr, *_) = np.histogram2d(y_new, x_new, bins=[yedge, xedge], weights=data) arr[(arr == 0)] = np.nan else: arr = None addx = (np.digitize(x_new, xedge) - 1) addy = (np.digitize(y_new, yedge) - 1) addy = (Ny - addy) return (arr, (dx, dy), (addy, addx))
def xy_to_grid(x, y, data, compute_array=False): '\n Bin a bunch of unsorted (x, y) datapoints into a regular grid.\n\n Returns:\n tuple:\n\n - arr (ndarray): The binned data.\n - (dx, dy): The spacing between bins in the x and y directions.\n - (addx, addy): The destination of each data point into the grid;\n n.b. this is given in NumPy (row, col) format.\n ' X = np.vstack([x, y, data]).T v = nearest_neighbours(x, y) p = rectify(X, v) (x_new, y_new, _) = p.T (Nx, dx) = get_intervals(x_new) (Ny, dy) = get_intervals(y_new) xedge = np.linspace((np.min(x_new) - (dx / 2)), (np.max(x_new) + (dx / 2)), (Nx + 1)) yedge = np.linspace((np.min(y_new) - (dy / 2)), (np.max(y_new) + (dy / 2)), (Ny + 1)) assert np.all((data > 0)), 'Data are not strictly positive; see docs.' if compute_array: (arr, *_) = np.histogram2d(y_new, x_new, bins=[yedge, xedge], weights=data) arr[(arr == 0)] = np.nan else: arr = None addx = (np.digitize(x_new, xedge) - 1) addy = (np.digitize(y_new, yedge) - 1) addy = (Ny - addy) return (arr, (dx, dy), (addy, addx))<|docstring|>Bin a bunch of unsorted (x, y) datapoints into a regular grid. Returns: tuple: - arr (ndarray): The binned data. - (dx, dy): The spacing between bins in the x and y directions. - (addx, addy): The destination of each data point into the grid; n.b. this is given in NumPy (row, col) format.<|endoftext|>
22891cbb8d723e6e98dc863f2723263d14be41df06e48e6a63f69241ff6f4e54
@ops.RegisterShape('DecodeAudio') def _decode_audio_shape(op): "Computes the shape of a DecodeAudio operation.\n\n Args:\n op: A DecodeAudio operation.\n\n Returns:\n A list of output shapes. There's exactly one output, the sampled audio.\n This is a rank 2 tensor with an unknown number of samples and a\n known number of channels.\n " try: channels = op.get_attr('channel_count') except ValueError: channels = None return [tensor_shape.TensorShape([None, channels])]
Computes the shape of a DecodeAudio operation. Args: op: A DecodeAudio operation. Returns: A list of output shapes. There's exactly one output, the sampled audio. This is a rank 2 tensor with an unknown number of samples and a known number of channels.
tensorflow/contrib/ffmpeg/ffmpeg_ops.py
_decode_audio_shape
fastener/tensorflow
680
python
@ops.RegisterShape('DecodeAudio') def _decode_audio_shape(op): "Computes the shape of a DecodeAudio operation.\n\n Args:\n op: A DecodeAudio operation.\n\n Returns:\n A list of output shapes. There's exactly one output, the sampled audio.\n This is a rank 2 tensor with an unknown number of samples and a\n known number of channels.\n " try: channels = op.get_attr('channel_count') except ValueError: channels = None return [tensor_shape.TensorShape([None, channels])]
@ops.RegisterShape('DecodeAudio') def _decode_audio_shape(op): "Computes the shape of a DecodeAudio operation.\n\n Args:\n op: A DecodeAudio operation.\n\n Returns:\n A list of output shapes. There's exactly one output, the sampled audio.\n This is a rank 2 tensor with an unknown number of samples and a\n known number of channels.\n " try: channels = op.get_attr('channel_count') except ValueError: channels = None return [tensor_shape.TensorShape([None, channels])]<|docstring|>Computes the shape of a DecodeAudio operation. Args: op: A DecodeAudio operation. Returns: A list of output shapes. There's exactly one output, the sampled audio. This is a rank 2 tensor with an unknown number of samples and a known number of channels.<|endoftext|>
40b8e59e4776b44ea0d43585812522451725b2c5ef27dbbbf983a7df605927d6
def decode_audio(contents, file_format=None, samples_per_second=None, channel_count=None): 'Create an op that decodes the contents of an audio file.\n\n Args:\n contents: The binary contents of the audio file to decode. This is a\n scalar.\n file_format: A string specifying which format the contents will conform\n to. This can be mp3, ogg, or wav.\n samples_per_second: The number of samples per second that is assumed.\n In some cases, resampling will occur to generate the correct sample\n rate.\n channel_count: The number of channels that should be created from the\n audio contents. If the contents have more than this number, then\n some channels will be merged or dropped. If contents has fewer than\n this, then additional channels will be created from the existing ones.\n\n Returns:\n A rank 2 tensor that has time along dimension 0 and channels along\n dimension 1. Dimension 0 will be `samples_per_second * length` wide, and\n dimension 1 will be `channel_count` wide. If ffmpeg fails to decode the\n audio then an empty tensor will be returned.\n ' return gen_decode_audio_op_py.decode_audio(contents, file_format=file_format, samples_per_second=samples_per_second, channel_count=channel_count)
Create an op that decodes the contents of an audio file. Args: contents: The binary contents of the audio file to decode. This is a scalar. file_format: A string specifying which format the contents will conform to. This can be mp3, ogg, or wav. samples_per_second: The number of samples per second that is assumed. In some cases, resampling will occur to generate the correct sample rate. channel_count: The number of channels that should be created from the audio contents. If the contents have more than this number, then some channels will be merged or dropped. If contents has fewer than this, then additional channels will be created from the existing ones. Returns: A rank 2 tensor that has time along dimension 0 and channels along dimension 1. Dimension 0 will be `samples_per_second * length` wide, and dimension 1 will be `channel_count` wide. If ffmpeg fails to decode the audio then an empty tensor will be returned.
tensorflow/contrib/ffmpeg/ffmpeg_ops.py
decode_audio
fastener/tensorflow
680
python
def decode_audio(contents, file_format=None, samples_per_second=None, channel_count=None): 'Create an op that decodes the contents of an audio file.\n\n Args:\n contents: The binary contents of the audio file to decode. This is a\n scalar.\n file_format: A string specifying which format the contents will conform\n to. This can be mp3, ogg, or wav.\n samples_per_second: The number of samples per second that is assumed.\n In some cases, resampling will occur to generate the correct sample\n rate.\n channel_count: The number of channels that should be created from the\n audio contents. If the contents have more than this number, then\n some channels will be merged or dropped. If contents has fewer than\n this, then additional channels will be created from the existing ones.\n\n Returns:\n A rank 2 tensor that has time along dimension 0 and channels along\n dimension 1. Dimension 0 will be `samples_per_second * length` wide, and\n dimension 1 will be `channel_count` wide. If ffmpeg fails to decode the\n audio then an empty tensor will be returned.\n ' return gen_decode_audio_op_py.decode_audio(contents, file_format=file_format, samples_per_second=samples_per_second, channel_count=channel_count)
def decode_audio(contents, file_format=None, samples_per_second=None, channel_count=None): 'Create an op that decodes the contents of an audio file.\n\n Args:\n contents: The binary contents of the audio file to decode. This is a\n scalar.\n file_format: A string specifying which format the contents will conform\n to. This can be mp3, ogg, or wav.\n samples_per_second: The number of samples per second that is assumed.\n In some cases, resampling will occur to generate the correct sample\n rate.\n channel_count: The number of channels that should be created from the\n audio contents. If the contents have more than this number, then\n some channels will be merged or dropped. If contents has fewer than\n this, then additional channels will be created from the existing ones.\n\n Returns:\n A rank 2 tensor that has time along dimension 0 and channels along\n dimension 1. Dimension 0 will be `samples_per_second * length` wide, and\n dimension 1 will be `channel_count` wide. If ffmpeg fails to decode the\n audio then an empty tensor will be returned.\n ' return gen_decode_audio_op_py.decode_audio(contents, file_format=file_format, samples_per_second=samples_per_second, channel_count=channel_count)<|docstring|>Create an op that decodes the contents of an audio file. Args: contents: The binary contents of the audio file to decode. This is a scalar. file_format: A string specifying which format the contents will conform to. This can be mp3, ogg, or wav. samples_per_second: The number of samples per second that is assumed. In some cases, resampling will occur to generate the correct sample rate. channel_count: The number of channels that should be created from the audio contents. If the contents have more than this number, then some channels will be merged or dropped. If contents has fewer than this, then additional channels will be created from the existing ones. Returns: A rank 2 tensor that has time along dimension 0 and channels along dimension 1. 
Dimension 0 will be `samples_per_second * length` wide, and dimension 1 will be `channel_count` wide. If ffmpeg fails to decode the audio then an empty tensor will be returned.<|endoftext|>
a2a08116d624c00fc981854c35681720ff97ad89b884f0c2eb242b1798fe5fa1
@ops.RegisterShape('EncodeAudio') def _encode_audio_shape(unused_op): "Computes the shape of an EncodeAudio operation.\n\n Returns:\n A list of output shapes. There's exactly one output, the formatted audio\n file. This is a rank 0 tensor.\n " return [tensor_shape.TensorShape([])]
Computes the shape of an EncodeAudio operation. Returns: A list of output shapes. There's exactly one output, the formatted audio file. This is a rank 0 tensor.
tensorflow/contrib/ffmpeg/ffmpeg_ops.py
_encode_audio_shape
fastener/tensorflow
680
python
@ops.RegisterShape('EncodeAudio') def _encode_audio_shape(unused_op): "Computes the shape of an EncodeAudio operation.\n\n Returns:\n A list of output shapes. There's exactly one output, the formatted audio\n file. This is a rank 0 tensor.\n " return [tensor_shape.TensorShape([])]
@ops.RegisterShape('EncodeAudio') def _encode_audio_shape(unused_op): "Computes the shape of an EncodeAudio operation.\n\n Returns:\n A list of output shapes. There's exactly one output, the formatted audio\n file. This is a rank 0 tensor.\n " return [tensor_shape.TensorShape([])]<|docstring|>Computes the shape of an EncodeAudio operation. Returns: A list of output shapes. There's exactly one output, the formatted audio file. This is a rank 0 tensor.<|endoftext|>
1843695696f87656f0318ad1d79585107756f41c8ca272f56874b11922688007
def encode_audio(audio, file_format=None, samples_per_second=None): 'Creates an op that encodes an audio file using sampled audio from a tensor.\n\n Args:\n audio: A rank 2 tensor that has time along dimension 0 and channels along\n dimension 1. Dimension 0 is `samples_per_second * length` long in\n seconds.\n file_format: The type of file to encode. "wav" is the only supported format.\n samples_per_second: The number of samples in the audio tensor per second of\n audio.\n\n Returns:\n A scalar tensor that contains the encoded audio in the specified file\n format.\n ' return gen_encode_audio_op_py.encode_audio(audio, file_format=file_format, samples_per_second=samples_per_second)
Creates an op that encodes an audio file using sampled audio from a tensor. Args: audio: A rank 2 tensor that has time along dimension 0 and channels along dimension 1. Dimension 0 is `samples_per_second * length` long in seconds. file_format: The type of file to encode. "wav" is the only supported format. samples_per_second: The number of samples in the audio tensor per second of audio. Returns: A scalar tensor that contains the encoded audio in the specified file format.
tensorflow/contrib/ffmpeg/ffmpeg_ops.py
encode_audio
fastener/tensorflow
680
python
def encode_audio(audio, file_format=None, samples_per_second=None): 'Creates an op that encodes an audio file using sampled audio from a tensor.\n\n Args:\n audio: A rank 2 tensor that has time along dimension 0 and channels along\n dimension 1. Dimension 0 is `samples_per_second * length` long in\n seconds.\n file_format: The type of file to encode. "wav" is the only supported format.\n samples_per_second: The number of samples in the audio tensor per second of\n audio.\n\n Returns:\n A scalar tensor that contains the encoded audio in the specified file\n format.\n ' return gen_encode_audio_op_py.encode_audio(audio, file_format=file_format, samples_per_second=samples_per_second)
def encode_audio(audio, file_format=None, samples_per_second=None): 'Creates an op that encodes an audio file using sampled audio from a tensor.\n\n Args:\n audio: A rank 2 tensor that has time along dimension 0 and channels along\n dimension 1. Dimension 0 is `samples_per_second * length` long in\n seconds.\n file_format: The type of file to encode. "wav" is the only supported format.\n samples_per_second: The number of samples in the audio tensor per second of\n audio.\n\n Returns:\n A scalar tensor that contains the encoded audio in the specified file\n format.\n ' return gen_encode_audio_op_py.encode_audio(audio, file_format=file_format, samples_per_second=samples_per_second)<|docstring|>Creates an op that encodes an audio file using sampled audio from a tensor. Args: audio: A rank 2 tensor that has time along dimension 0 and channels along dimension 1. Dimension 0 is `samples_per_second * length` long in seconds. file_format: The type of file to encode. "wav" is the only supported format. samples_per_second: The number of samples in the audio tensor per second of audio. Returns: A scalar tensor that contains the encoded audio in the specified file format.<|endoftext|>
77a00804534688ffab33af863e577eda00d06cc475bad36bc930d5a2970811d0
def _load_library(name, op_list=None): "Loads a .so file containing the specified operators.\n\n Args:\n name: The name of the .so file to load.\n op_list: A list of names of operators that the library should have. If None\n then the .so file's contents will not be verified.\n\n Raises:\n NameError if one of the required ops is missing.\n " try: filename = resource_loader.get_path_to_datafile(name) library = load_library.load_op_library(filename) for expected_op in (op_list or []): for lib_op in library.OP_LIST.op: if (lib_op.name == expected_op): break else: raise NameError(('Could not find operator %s in dynamic library %s' % (expected_op, name))) except errors.NotFoundError: logging.warning('%s file could not be loaded.', name)
Loads a .so file containing the specified operators. Args: name: The name of the .so file to load. op_list: A list of names of operators that the library should have. If None then the .so file's contents will not be verified. Raises: NameError if one of the required ops is missing.
tensorflow/contrib/ffmpeg/ffmpeg_ops.py
_load_library
fastener/tensorflow
680
python
def _load_library(name, op_list=None): "Loads a .so file containing the specified operators.\n\n Args:\n name: The name of the .so file to load.\n op_list: A list of names of operators that the library should have. If None\n then the .so file's contents will not be verified.\n\n Raises:\n NameError if one of the required ops is missing.\n " try: filename = resource_loader.get_path_to_datafile(name) library = load_library.load_op_library(filename) for expected_op in (op_list or []): for lib_op in library.OP_LIST.op: if (lib_op.name == expected_op): break else: raise NameError(('Could not find operator %s in dynamic library %s' % (expected_op, name))) except errors.NotFoundError: logging.warning('%s file could not be loaded.', name)
def _load_library(name, op_list=None): "Loads a .so file containing the specified operators.\n\n Args:\n name: The name of the .so file to load.\n op_list: A list of names of operators that the library should have. If None\n then the .so file's contents will not be verified.\n\n Raises:\n NameError if one of the required ops is missing.\n " try: filename = resource_loader.get_path_to_datafile(name) library = load_library.load_op_library(filename) for expected_op in (op_list or []): for lib_op in library.OP_LIST.op: if (lib_op.name == expected_op): break else: raise NameError(('Could not find operator %s in dynamic library %s' % (expected_op, name))) except errors.NotFoundError: logging.warning('%s file could not be loaded.', name)<|docstring|>Loads a .so file containing the specified operators. Args: name: The name of the .so file to load. op_list: A list of names of operators that the library should have. If None then the .so file's contents will not be verified. Raises: NameError if one of the required ops is missing.<|endoftext|>
117a97ecc7499f9b837b1dc51bf89e5b2626bf5d33e2dc46ee749d5a2da3daa6
def year_calendar(year): 'Get calendar of a given year' try: print(calendar.calendar(year)) except (ValueError, NameError): print('Integer was expected')
Get calendar of a given year
Minor Projects/year_calendar.py
year_calendar
chandthash/nppy
0
python
def year_calendar(year): try: print(calendar.calendar(year)) except (ValueError, NameError): print('Integer was expected')
def year_calendar(year): try: print(calendar.calendar(year)) except (ValueError, NameError): print('Integer was expected')<|docstring|>Get calendar of a given year<|endoftext|>
659a980ece68f388379ae3baec6e014e8f1e463d90fd86d80920eeacda7df785
def __new__(self, coordinates=None): '\n Parameters\n ----------\n coordinates : sequence\n A sequence of (x, y [,z]) numeric coordinate pairs or triples or\n an object that provides the numpy array interface, including\n another instance of LineString.\n\n Example\n -------\n Create a line with two segments\n\n >>> a = LineString([[0, 0], [1, 0], [1, 1]])\n >>> a.length\n 2.0\n ' if (coordinates is None): return shapely.from_wkt('LINESTRING EMPTY') elif isinstance(coordinates, LineString): if (type(coordinates) == LineString): return coordinates else: coordinates = coordinates.coords else: def _coords(o): if isinstance(o, Point): return o.coords[0] else: return o coordinates = [_coords(o) for o in coordinates] if (len(coordinates) == 0): return shapely.from_wkt('LINESTRING EMPTY') geom = shapely.linestrings(coordinates) if (not isinstance(geom, LineString)): raise ValueError('Invalid values passed to LineString constructor') return geom
Parameters ---------- coordinates : sequence A sequence of (x, y [,z]) numeric coordinate pairs or triples or an object that provides the numpy array interface, including another instance of LineString. Example ------- Create a line with two segments >>> a = LineString([[0, 0], [1, 0], [1, 1]]) >>> a.length 2.0
shapely/geometry/linestring.py
__new__
jGaboardi/shapely
189
python
def __new__(self, coordinates=None): '\n Parameters\n ----------\n coordinates : sequence\n A sequence of (x, y [,z]) numeric coordinate pairs or triples or\n an object that provides the numpy array interface, including\n another instance of LineString.\n\n Example\n -------\n Create a line with two segments\n\n >>> a = LineString([[0, 0], [1, 0], [1, 1]])\n >>> a.length\n 2.0\n ' if (coordinates is None): return shapely.from_wkt('LINESTRING EMPTY') elif isinstance(coordinates, LineString): if (type(coordinates) == LineString): return coordinates else: coordinates = coordinates.coords else: def _coords(o): if isinstance(o, Point): return o.coords[0] else: return o coordinates = [_coords(o) for o in coordinates] if (len(coordinates) == 0): return shapely.from_wkt('LINESTRING EMPTY') geom = shapely.linestrings(coordinates) if (not isinstance(geom, LineString)): raise ValueError('Invalid values passed to LineString constructor') return geom
def __new__(self, coordinates=None): '\n Parameters\n ----------\n coordinates : sequence\n A sequence of (x, y [,z]) numeric coordinate pairs or triples or\n an object that provides the numpy array interface, including\n another instance of LineString.\n\n Example\n -------\n Create a line with two segments\n\n >>> a = LineString([[0, 0], [1, 0], [1, 1]])\n >>> a.length\n 2.0\n ' if (coordinates is None): return shapely.from_wkt('LINESTRING EMPTY') elif isinstance(coordinates, LineString): if (type(coordinates) == LineString): return coordinates else: coordinates = coordinates.coords else: def _coords(o): if isinstance(o, Point): return o.coords[0] else: return o coordinates = [_coords(o) for o in coordinates] if (len(coordinates) == 0): return shapely.from_wkt('LINESTRING EMPTY') geom = shapely.linestrings(coordinates) if (not isinstance(geom, LineString)): raise ValueError('Invalid values passed to LineString constructor') return geom<|docstring|>Parameters ---------- coordinates : sequence A sequence of (x, y [,z]) numeric coordinate pairs or triples or an object that provides the numpy array interface, including another instance of LineString. Example ------- Create a line with two segments >>> a = LineString([[0, 0], [1, 0], [1, 1]]) >>> a.length 2.0<|endoftext|>
6fd5eb18845b443e1ef262993ffbb87c092bdfb5728cc5dc641e40a7f3d18789
def svg(self, scale_factor=1.0, stroke_color=None, opacity=None): 'Returns SVG polyline element for the LineString geometry.\n\n Parameters\n ==========\n scale_factor : float\n Multiplication factor for the SVG stroke-width. Default is 1.\n stroke_color : str, optional\n Hex string for stroke color. Default is to use "#66cc99" if\n geometry is valid, and "#ff3333" if invalid.\n opacity : float\n Float number between 0 and 1 for color opacity. Default value is 0.8\n ' if self.is_empty: return '<g />' if (stroke_color is None): stroke_color = ('#66cc99' if self.is_valid else '#ff3333') if (opacity is None): opacity = 0.8 pnt_format = ' '.join(['{},{}'.format(*c) for c in self.coords]) return '<polyline fill="none" stroke="{2}" stroke-width="{1}" points="{0}" opacity="{3}" />'.format(pnt_format, (2.0 * scale_factor), stroke_color, opacity)
Returns SVG polyline element for the LineString geometry. Parameters ========== scale_factor : float Multiplication factor for the SVG stroke-width. Default is 1. stroke_color : str, optional Hex string for stroke color. Default is to use "#66cc99" if geometry is valid, and "#ff3333" if invalid. opacity : float Float number between 0 and 1 for color opacity. Default value is 0.8
shapely/geometry/linestring.py
svg
jGaboardi/shapely
189
python
def svg(self, scale_factor=1.0, stroke_color=None, opacity=None): 'Returns SVG polyline element for the LineString geometry.\n\n Parameters\n ==========\n scale_factor : float\n Multiplication factor for the SVG stroke-width. Default is 1.\n stroke_color : str, optional\n Hex string for stroke color. Default is to use "#66cc99" if\n geometry is valid, and "#ff3333" if invalid.\n opacity : float\n Float number between 0 and 1 for color opacity. Default value is 0.8\n ' if self.is_empty: return '<g />' if (stroke_color is None): stroke_color = ('#66cc99' if self.is_valid else '#ff3333') if (opacity is None): opacity = 0.8 pnt_format = ' '.join(['{},{}'.format(*c) for c in self.coords]) return '<polyline fill="none" stroke="{2}" stroke-width="{1}" points="{0}" opacity="{3}" />'.format(pnt_format, (2.0 * scale_factor), stroke_color, opacity)
def svg(self, scale_factor=1.0, stroke_color=None, opacity=None): 'Returns SVG polyline element for the LineString geometry.\n\n Parameters\n ==========\n scale_factor : float\n Multiplication factor for the SVG stroke-width. Default is 1.\n stroke_color : str, optional\n Hex string for stroke color. Default is to use "#66cc99" if\n geometry is valid, and "#ff3333" if invalid.\n opacity : float\n Float number between 0 and 1 for color opacity. Default value is 0.8\n ' if self.is_empty: return '<g />' if (stroke_color is None): stroke_color = ('#66cc99' if self.is_valid else '#ff3333') if (opacity is None): opacity = 0.8 pnt_format = ' '.join(['{},{}'.format(*c) for c in self.coords]) return '<polyline fill="none" stroke="{2}" stroke-width="{1}" points="{0}" opacity="{3}" />'.format(pnt_format, (2.0 * scale_factor), stroke_color, opacity)<|docstring|>Returns SVG polyline element for the LineString geometry. Parameters ========== scale_factor : float Multiplication factor for the SVG stroke-width. Default is 1. stroke_color : str, optional Hex string for stroke color. Default is to use "#66cc99" if geometry is valid, and "#ff3333" if invalid. opacity : float Float number between 0 and 1 for color opacity. Default value is 0.8<|endoftext|>