_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q263700
scanAllProcessesForCwd
validation
def scanAllProcessesForCwd(searchPortion, isExactMatch=False):
    '''
        scanAllProcessesForCwd - Scans all processes on the system for a given search pattern.

            @param searchPortion <str> - Any portion of directory to search
            @param isExactMatch <bool> Default False - If match should be exact, otherwise a partial match is performed.

            @return - <dict> - A dictionary of pid -> cwdResults for each pid that matched the
              search pattern. For format of "cwdResults", @see scanProcessForCwd
    '''
    pids = getAllRunningPids()
    # Processes can disappear mid-scan, so gather all results as fast as
    # possible first, then assemble the return dictionary afterwards.
    cwdResults = [scanProcessForCwd(pid, searchPortion, isExactMatch) for pid in pids]
    # Idiom fix: pair pids with their results directly instead of indexing
    # both lists via range(len(...)).
    return {pid: result for pid, result in zip(pids, cwdResults) if result is not None}
python
{ "resource": "" }
q263701
scanProcessForMapping
validation
def scanProcessForMapping(pid, searchPortion, isExactMatch=False, ignoreCase=False):
    '''
        scanProcessForMapping - Searches a given pid's mappings for a certain pattern.

            @param pid <int> - A running process ID on this system
            @param searchPortion <str> - A mapping for which to search, example: libc or python or libz.so.1. Give empty string to return all mappings.
            @param isExactMatch <bool> Default False - If match should be exact, otherwise a partial match is performed.
            @param ignoreCase <bool> Default False - If True, search will be performed case-insensitively

            @return <dict> - If result is found, the following dict is returned. If no match found on the given pid, or pid is not found running, None is returned.
                {
                    'searchPortion'   : The passed search pattern
                    'pid'             : The passed pid (as an integer)
                    'owner'           : String of process owner, or uid if no mapping can be found, or "unknown" if neither could be determined.
                    'cmdline'         : Commandline string
                    'matchedMappings' : All mapping lines that matched the given search pattern
                }
    '''
    try:
        try:
            pid = int(pid)
        except ValueError as e:
            sys.stderr.write('Expected an integer, got %s for pid.\n' %(str(type(pid)),))
            raise e
        # Read this pid's full memory-map table from procfs.
        with open('/proc/%d/maps' %(pid,), 'r') as f:
            contents = f.read()
        lines = contents.split('\n')
        matchedMappings = []
        # Select the comparison function once up front, so the per-line loop
        # below does not re-branch on the flags.
        if isExactMatch is True:
            if ignoreCase is False:
                isMatch = lambda searchFor, searchIn : bool(searchFor == searchIn)
            else:
                isMatch = lambda searchFor, searchIn : bool(searchFor.lower() == searchIn.lower())
        else:
            if ignoreCase is False:
                isMatch = lambda searchFor, searchIn : bool(searchFor in searchIn)
            else:
                isMatch = lambda searchFor, searchIn : bool(searchFor.lower() in searchIn.lower())
        for line in lines:
            # Everything after the fifth space-separated field in a maps line
            # is the mapped pathname (which may itself contain spaces).
            portion = ' '.join(line.split(' ')[5:]).lstrip()
            if isMatch(searchPortion, portion):
                matchedMappings.append('\t' + line)
        if len(matchedMappings) == 0:
            return None
        cmdline = getProcessCommandLineStr(pid)
        owner = getProcessOwnerStr(pid)
        return {
            'searchPortion' : searchPortion,
            'pid' : pid,
            'owner' : owner,
            'cmdline' : cmdline,
            'matchedMappings' : matchedMappings,
        }
    # Any of these indicate the process vanished or is not readable by us;
    # treat all of them as "no result".
    except OSError:
        return None
    except IOError:
        return None
    except FileNotFoundError:
        return None
    except PermissionError:
        return None
python
{ "resource": "" }
q263702
scanAllProcessesForMapping
validation
def scanAllProcessesForMapping(searchPortion, isExactMatch=False, ignoreCase=False):
    '''
        scanAllProcessesForMapping - Scans all processes on the system for a given search pattern.

            @param searchPortion <str> - A mapping for which to search, example: libc or python or libz.so.1. Give empty string to return all mappings.
            @param isExactMatch <bool> Default False - If match should be exact, otherwise a partial match is performed.
            @param ignoreCase <bool> Default False - If True, search will be performed case-insensitively

            @return - <dict> - A dictionary of pid -> mappingResults for each pid that matched the search pattern. For format of "mappingResults", @see scanProcessForMapping
    '''
    pids = getAllRunningPids()
    # Since processes could disappear, we run the scan as fast as possible here
    # with a list comprehension, then assemble the return dictionary later.
    mappingResults = [scanProcessForMapping(pid, searchPortion, isExactMatch, ignoreCase) for pid in pids]
    # Idiom fix: zip the parallel lists instead of indexing via range(len(...)).
    return {pid: result for pid, result in zip(pids, mappingResults) if result is not None}
python
{ "resource": "" }
q263703
scanProcessForOpenFile
validation
def scanProcessForOpenFile(pid, searchPortion, isExactMatch=True, ignoreCase=False):
    '''
        scanProcessForOpenFile - Scans open FDs for a given pid to see if any are the provided searchPortion

            @param pid <int> - A running process ID on this system
            @param searchPortion <str> - Filename to check
            @param isExactMatch <bool> Default True - If match should be exact, otherwise a partial match is performed.
            @param ignoreCase <bool> Default False - If True, search will be performed case-insensitively

            @return - If result is found, the following dict is returned. If no match found on the given pid, or the pid is not found running, None is returned.
                {
                    'searchPortion' : The search portion provided
                    'pid'           : The passed pid (as an integer)
                    'owner'         : String of process owner, or "unknown" if one could not be determined
                    'cmdline'       : Commandline string
                    'fds'           : List of file descriptors assigned to this file (could be mapped several times)
                    'filenames'     : List of the filenames matched
                }
    '''
    try:
        try:
            pid = int(pid)
        except ValueError as e:
            sys.stderr.write('Expected an integer, got %s for pid.\n' %(str(type(pid)),))
            raise e
        prefixDir = "/proc/%d/fd" % (pid,)
        # Every entry in /proc/<pid>/fd is a symlink named by fd number.
        processFDs = os.listdir(prefixDir)
        matchedFDs = []
        matchedFilenames = []
        # Select the comparison function once up front, so the per-fd loop
        # below does not re-branch on the flags.
        if isExactMatch is True:
            if ignoreCase is False:
                isMatch = lambda searchFor, totalPath : bool(searchFor == totalPath)
            else:
                isMatch = lambda searchFor, totalPath : bool(searchFor.lower() == totalPath.lower())
        else:
            if ignoreCase is False:
                isMatch = lambda searchFor, totalPath : bool(searchFor in totalPath)
            else:
                isMatch = lambda searchFor, totalPath : bool(searchFor.lower() in totalPath.lower())
        for fd in processFDs:
            # Resolve the symlink to the open file's path.
            fdPath = os.readlink(prefixDir + '/' + fd)
            if isMatch(searchPortion, fdPath):
                matchedFDs.append(fd)
                matchedFilenames.append(fdPath)
        if len(matchedFDs) == 0:
            return None
        cmdline = getProcessCommandLineStr(pid)
        owner = getProcessOwnerStr(pid)
        return {
            'searchPortion' : searchPortion,
            'pid' : pid,
            'owner' : owner,
            'cmdline' : cmdline,
            'fds' : matchedFDs,
            'filenames' : matchedFilenames,
        }
    # Any of these indicate the process vanished or is not readable by us;
    # treat all of them as "no result".
    except OSError:
        return None
    except IOError:
        return None
    except FileNotFoundError:
        return None
    except PermissionError:
        return None
python
{ "resource": "" }
q263704
scanAllProcessesForOpenFile
validation
def scanAllProcessesForOpenFile(searchPortion, isExactMatch=True, ignoreCase=False):
    '''
        scanAllProcessesForOpenFile - Scans all processes on the system for a given filename

            @param searchPortion <str> - Filename to check
            @param isExactMatch <bool> Default True - If match should be exact, otherwise a partial match is performed.
            @param ignoreCase <bool> Default False - If True, search will be performed case-insensitively

            @return - <dict> - A dictionary of pid -> mappingResults for each pid that matched the search pattern. For format of "mappingResults", @see scanProcessForOpenFile
    '''
    pids = getAllRunningPids()
    # Since processes could disappear, we run the scan as fast as possible here
    # with a list comprehension, then assemble the return dictionary later.
    mappingResults = [scanProcessForOpenFile(pid, searchPortion, isExactMatch, ignoreCase) for pid in pids]
    # Idiom fix: zip the parallel lists instead of indexing via range(len(...)).
    return {pid: result for pid, result in zip(pids, mappingResults) if result is not None}
python
{ "resource": "" }
q263705
Hub.connect
validation
def connect(self):
    """Create and connect to socket for TCP communication with hub."""
    try:
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._socket.settimeout(TIMEOUT_SECONDS)
        self._socket.connect((self._ip, self._port))
        _LOGGER.debug("Successfully created Hub at %s:%s :)", self._ip, self._port)
    except socket.error as error:
        _LOGGER.error("Error creating Hub: %s :(", error)
        # NOTE(review): if socket.socket() itself raised, self._socket may not
        # exist yet (first connect) and this close() would raise AttributeError
        # -- confirm callers never hit that path.
        self._socket.close()
python
{ "resource": "" }
q263706
Hub.send_command
validation
def send_command(self, command): """Send TCP command to hub and return response.""" # use lock to make TCP send/receive thread safe with self._lock: try: self._socket.send(command.encode("utf8")) result = self.receive() # hub may send "status"/"new" messages that should be ignored while result.startswith("S") or result.startswith("NEW"): _LOGGER.debug("!Got response: %s", result) result = self.receive() _LOGGER.debug("Received: %s", result) return result except socket.error as error: _LOGGER.error("Error sending command: %s", error) # try re-connecting socket self.connect() return ""
python
{ "resource": "" }
q263707
Hub.receive
validation
def receive(self):
    """Receive TCP response, looping to get whole thing or timeout.

    Returns the first newline-terminated line of the response (without the
    newline), the whole buffer if the peer stops sending before a newline,
    or "" if the very first recv times out.
    """
    try:
        buffer = self._socket.recv(BUFFER_SIZE)
    except socket.timeout as error:
        # Something is wrong, assume it's offline temporarily
        _LOGGER.error("Error receiving: %s", error)
        # self._socket.close()
        return ""
    # Read until a newline or timeout
    buffering = True
    response = ''
    while buffering:
        # NOTE(review): decoding the accumulated buffer on every pass assumes
        # utf8 multi-byte sequences never straddle a recv() boundary --
        # confirm the hub protocol is effectively ASCII.
        if '\n' in buffer.decode("utf8"):
            response = buffer.decode("utf8").split('\n')[0]
            buffering = False
        else:
            try:
                more = self._socket.recv(BUFFER_SIZE)
            except socket.timeout:
                more = None
            if not more:
                # Peer closed or timed out: return whatever has arrived.
                buffering = False
                response = buffer.decode("utf8")
            else:
                buffer += more
    return response
python
{ "resource": "" }
q263708
Hub.get_data
validation
def get_data(self):
    """Get current light data as dictionary with light zids as keys.

    Each value is a list of five ints: [online, red, green, blue, level].
    Returns {} on empty or malformed responses.
    """
    response = self.send_command(GET_LIGHTS_COMMAND)
    _LOGGER.debug("get_data response: %s", repr(response))
    if not response:
        _LOGGER.debug("Empty response: %s", response)
        return {}
    response = response.strip()
    # Check string before splitting (avoid IndexError if malformed)
    if not (response.startswith("GLB") and response.endswith(";")):
        _LOGGER.debug("Invalid response: %s", repr(response))
        return {}
    # deconstruct response string into light data. Example data:
    # GLB 143E,1,1,25,255,255,255,0,0;287B,1,1,22,255,255,255,0,0;\r\n
    # NOTE(review): the [4:-3] slice was written for the raw "...;\r\n" tail,
    # but strip() above has already removed "\r\n", so the slice also trims
    # the final ";" plus the last record's trailing ",0" -- the fields read
    # below (indices 0,2,4-7) survive, but confirm against the hub protocol.
    response = response[4:-3]  # strip start (GLB) and end (;\r\n)
    light_strings = response.split(';')
    light_data_by_id = {}
    for light_string in light_strings:
        values = light_string.split(',')
        try:
            # Keep columns 2,4,5,6,7 per record; columns 1, 3 and 8 are
            # skipped (meaning not determinable from this code alone).
            light_data_by_id[values[0]] = [int(values[2]), int(values[4]),
                                           int(values[5]), int(values[6]),
                                           int(values[7])]
        except ValueError as error:
            _LOGGER.error("Error %s: %s (%s)", error, values, response)
        except IndexError as error:
            _LOGGER.error("Error %s: %s (%s)", error, values, response)
    return light_data_by_id
python
{ "resource": "" }
q263709
Hub.get_lights
validation
def get_lights(self): """Get current light data, set and return as list of Bulb objects.""" # Throttle updates. Use cached data if within UPDATE_INTERVAL_SECONDS now = datetime.datetime.now() if (now - self._last_updated) < datetime.timedelta( seconds=UPDATE_INTERVAL_SECONDS): # _LOGGER.debug("Using cached light data") return self._bulbs else: self._last_updated = now light_data = self.get_data() _LOGGER.debug("got: %s", light_data) if not light_data: return [] if self._bulbs: # Bulbs already created, just update values for bulb in self._bulbs: # use the values for the bulb with the correct ID try: values = light_data[bulb.zid] bulb._online, bulb._red, bulb._green, bulb._blue, \ bulb._level = values except KeyError: pass else: for light_id in light_data: self._bulbs.append(Bulb(self, light_id, *light_data[light_id])) # return a list of Bulb objects return self._bulbs
python
{ "resource": "" }
q263710
Bulb.set_brightness
validation
def set_brightness(self, brightness):
    """Set brightness of bulb, returning the hub's response string."""
    # Empty fields in the command leave the colour channels unchanged.
    cmd = "C {},,,,{},\r\n".format(self._zid, brightness)
    reply = self._hub.send_command(cmd)
    _LOGGER.debug("Set brightness %s: %s", repr(cmd), reply)
    return reply
python
{ "resource": "" }
q263711
Bulb.set_all
validation
def set_all(self, red, green, blue, brightness):
    """Set color and brightness of bulb, returning the hub's response."""
    cmd = "C {},{},{},{},{},\r\n".format(
        self._zid, red, green, blue, brightness)
    reply = self._hub.send_command(cmd)
    _LOGGER.debug("Set all %s: %s", repr(cmd), reply)
    return reply
python
{ "resource": "" }
q263712
Bulb.update
validation
def update(self):
    """Update light objects to their current values; mark offline on failure."""
    # An empty bulb list means the hub could not be reached.
    if not self._hub.get_lights():
        _LOGGER.debug("%s is offline, send command failed", self._zid)
        self._online = False
python
{ "resource": "" }
q263713
retrieve_document
validation
def retrieve_document(file_path, directory='sec_filings'):
    '''
    This function takes a file path beginning with edgar and stores the form
    in a directory. The default directory is sec_filings but can be changed
    through a keyword argument.
    '''
    ftp = FTP('ftp.sec.gov', timeout=None)
    try:
        ftp.login()
        name = file_path.replace('/', '_')
        if not os.path.exists(directory):
            os.makedirs(directory)
        # Spool the download to a temp file first so a partial transfer never
        # leaves a truncated document in the target directory.
        with tempfile.TemporaryFile() as temp:
            ftp.retrbinary('RETR %s' % file_path, temp.write)
            temp.seek(0)
            with open('{}/{}'.format(directory, name), 'w+') as f:
                f.write(temp.read().decode("utf-8"))
    finally:
        # BUG FIX: the original left the FTP connection open on any exception
        # and contained dead statements (`f.closed`, `records = temp`,
        # `retry = False`); close the connection unconditionally instead.
        ftp.close()
python
{ "resource": "" }
q263714
readtxt
validation
def readtxt(filepath):
    """Read the file at *filepath* as-is and return its full text."""
    with open(filepath, 'rt') as handle:
        return handle.read()
python
{ "resource": "" }
q263715
_clean_up
validation
def _clean_up(paths): """ Clean up after ourselves, removing created files. @param {[String]} A list of file paths specifying the files we've created during run. Will all be deleted. @return {None} """ print('Cleaning up') # Iterate over the given paths, unlinking them for path in paths: print('Removing %s' % path) os.unlink(path)
python
{ "resource": "" }
q263716
_create_index_file
validation
def _create_index_file(
        root_dir, location, image_files, dirs, force_no_processing=False):
    """
    Create an index file in the given location, supplying known lists of
    present image files and subdirectories.

    @param {String} root_dir - The root directory of the entire crawl. Used to
        ascertain whether the given location is the top level.
    @param {String} location - The current directory of the crawl. The index
        file will be created here.
    @param {[String]} image_files - A list of image file names in the location.
        These will be displayed in the index file's gallery.
    @param {[String]} dirs - The subdirectories of the location directory.
        These will be displayed as links further down the file structure.
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process thumbnails, PIL images or anything. Simply index
        <img> tags with original file src attributes.
    @return {String} The full path (location plus filename) of the newly
        created index file. Intended for usage cleaning up created files.
    """
    # Put together HTML as a list of the lines we'll want to include
    # Issue #2 exists to do this better than HTML in-code
    header_text = \
        'imageMe: ' + location + ' [' + str(len(image_files)) + ' image(s)]'
    html = [
        '<!DOCTYPE html>',
        '<html>',
        ' <head>',
        # BUG FIX: the original list was missing a comma after the <title>
        # element, which silently merged <title> and <style> into one string
        # via implicit concatenation.
        ' <title>imageMe</title>',
        ' <style>',
        ' html, body {margin: 0;padding: 0;}',
        ' .header {text-align: right;}',
        ' .content {',
        ' padding: 3em;',
        ' padding-left: 4em;',
        ' padding-right: 4em;',
        ' }',
        ' .image {max-width: 100%; border-radius: 0.3em;}',
        ' td {width: ' + str(100.0 / IMAGES_PER_ROW) + '%;}',
        ' </style>',
        ' </head>',
        ' <body>',
        ' <div class="content">',
        ' <h2 class="header">' + header_text + '</h2>'
    ]
    # Populate the present subdirectories - this includes '..' unless we're at
    # the top level
    directories = []
    if root_dir != location:
        directories = ['..']
    directories += dirs
    if len(directories) > 0:
        html.append('<hr>')
    # For each subdirectory, include a link to its index file
    for directory in directories:
        link = directory + '/' + INDEX_FILE_NAME
        html += [
            ' <h3 class="header">',
            ' <a href="' + link + '">' + directory + '</a>',
            ' </h3>'
        ]
    # Populate the image gallery table
    # Counter to cycle down through table rows
    table_row_count = 1
    html += ['<hr>', '<table>']
    # For each image file, potentially create a new <tr> and create a new <td>
    for image_file in image_files:
        if table_row_count == 1:
            html.append('<tr>')
        img_src = _get_thumbnail_src_from_file(
            location, image_file, force_no_processing
        )
        link_target = _get_image_link_target_from_file(
            location, image_file, force_no_processing
        )
        html += [
            ' <td>',
            ' <a href="' + link_target + '">',
            ' <img class="image" src="' + img_src + '">',
            ' </a>',
            ' </td>'
        ]
        if table_row_count == IMAGES_PER_ROW:
            table_row_count = 0
            html.append('</tr>')
        table_row_count += 1
    html += ['</tr>', '</table>']
    html += [
        ' </div>',
        ' </body>',
        '</html>'
    ]
    # Actually create the file, now we've put together the HTML content
    index_file_path = _get_index_file_path(location)
    print('Creating index file %s' % index_file_path)
    # Use a context manager so the handle is closed even if the write fails.
    with open(index_file_path, 'w') as index_file:
        index_file.write('\n'.join(html))
    # Return the path for cleaning up later
    return index_file_path
python
{ "resource": "" }
q263717
_create_index_files
validation
def _create_index_files(root_dir, force_no_processing=False):
    """
    Crawl the root directory downwards, generating an index HTML file in
    each directory on the way down.

    @param {String} root_dir - The top level directory to crawl down from. In
        normal usage, this will be '.'.
    @param {Boolean=False} force_no_processing - If True, do not attempt to
        actually process thumbnails, PIL images or anything. Simply index
        <img> tags with original file src attributes.
    @return {[String]} Full file paths of all created files.
    """
    created_files = []
    for here, dirs, files in os.walk(root_dir):
        print('Processing %s' % here)
        # Subdirectories and matching image files are both indexed in
        # sorted order for a stable page layout.
        subdirs = sorted(dirs)
        images = sorted(f for f in files if re.match(IMAGE_FILE_REGEX, f))
        index_path = _create_index_file(
            root_dir, here, images, subdirs, force_no_processing
        )
        created_files.append(index_path)
    return created_files
python
{ "resource": "" }
q263718
_get_image_from_file
validation
def _get_image_from_file(dir_path, image_file):
    """
    Get an instance of PIL.Image from the given file.

    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @return {PIL.Image} An instance of the image file as a PIL Image, or None
        if the functionality is not available. This could be because PIL is
        not present, or because it can't process the given file type.
    """
    # Without PIL there is nothing to do
    if not PIL_ENABLED:
        return None
    full_path = os.path.join(dir_path, image_file)
    try:
        return Image.open(full_path)
    except IOError as exptn:
        print('Error loading image file %s: %s' % (full_path, exptn))
        return None
python
{ "resource": "" }
q263719
_get_src_from_image
validation
def _get_src_from_image(img, fallback_image_file): """ Get base-64 encoded data as a string for the given image. Fallback to return fallback_image_file if cannot get the image data or img is None. @param {Image} img - The PIL Image to get src data for @param {String} fallback_image_file - The filename of the image file, to be used when image data capture fails @return {String} The base-64 encoded image data string, or path to the file itself if not supported. """ # If the image is None, then we can't process, so we should return the # path to the file itself if img is None: return fallback_image_file # Target format should be the same as the original image format, unless it's # a TIF/TIFF, which can't be displayed by most browsers; we convert these # to jpeg target_format = img.format if target_format.lower() in ['tif', 'tiff']: target_format = 'JPEG' # If we have an actual Image, great - put together the base64 image string try: bytesio = io.BytesIO() img.save(bytesio, target_format) byte_value = bytesio.getvalue() b64 = base64.b64encode(byte_value) return 'data:image/%s;base64,%s' % (target_format.lower(), b64) except IOError as exptn: print('IOError while saving image bytes: %s' % exptn) return fallback_image_file
python
{ "resource": "" }
q263720
_get_thumbnail_image_from_file
validation
def _get_thumbnail_image_from_file(dir_path, image_file):
    """
    Get a PIL.Image from the given image file which has been scaled down to
    THUMBNAIL_WIDTH wide.

    @param {String} dir_path - The directory containing the image file
    @param {String} image_file - The filename of the image file within dir_path
    @return {PIL.Image} An instance of the thumbnail as a PIL Image, or None
        if the functionality is not available. See _get_image_from_file for
        details.
    """
    img = _get_image_from_file(dir_path, image_file)
    # Bail out when the file is unsupported; GIFs are skipped so animation
    # is not lost by thumbnailing.
    if img is None or img.format.lower() == 'gif':
        return None
    img_width, img_height = img.size
    # Scale the height by the same ratio that takes the width down to
    # THUMBNAIL_WIDTH, preserving the aspect ratio.
    scale_ratio = THUMBNAIL_WIDTH / float(img_width)
    target_height = int(scale_ratio * img_height)
    try:
        img.thumbnail((THUMBNAIL_WIDTH, target_height), resample=RESAMPLE)
    except IOError as exptn:
        print('WARNING: IOError when thumbnailing %s/%s: %s' % (
            dir_path, image_file, exptn
        ))
        return None
    return img
python
{ "resource": "" }
q263721
_run_server
validation
def _run_server(): """ Run the image server. This is blocking. Will handle user KeyboardInterrupt and other exceptions appropriately and return control once the server is stopped. @return {None} """ # Get the port to run on port = _get_server_port() # Configure allow_reuse_address to make re-runs of the script less painful - # if this is not True then waiting for the address to be freed after the # last run can block a subsequent run SocketServer.TCPServer.allow_reuse_address = True # Create the server instance server = SocketServer.TCPServer( ('', port), SimpleHTTPServer.SimpleHTTPRequestHandler ) # Print out before actually running the server (cheeky / optimistic, however # you want to look at it) print('Your images are at http://127.0.0.1:%d/%s' % ( port, INDEX_FILE_NAME )) # Try to run the server try: # Run it - this call blocks until the server is killed server.serve_forever() except KeyboardInterrupt: # This is the expected way of the server being killed, since imageMe is # intended for ad-hoc running from command line print('User interrupted, stopping') except Exception as exptn: # Catch everything else - this will handle shutdowns via other signals # and faults actually starting the server in the first place print(exptn) print('Unhandled exception in server, stopping')
python
{ "resource": "" }
q263722
serve_dir
validation
def serve_dir(dir_path):
    """
    Generate indexes and run server from the given directory downwards.

    @param {String} dir_path - The directory path (absolute, or relative to CWD)
    @return {None}
    """
    # First pass with no image processing: fast page generation at the cost
    # of potentially slow serving of large original image files.
    print('Performing first pass index file generation')
    created_files = _create_index_files(dir_path, True)
    if PIL_ENABLED:
        # Re-generate the indexes with thumbnails in the background while the
        # server is already answering requests.
        print('Performing PIL-enchanced optimised index file generation in background')
        background_indexer = BackgroundIndexFileGenerator(dir_path)
        background_indexer.run()
    # Blocks until the server is stopped
    _run_server()
    # Remove the generated index files so the image directories stay clean
    _clean_up(created_files)
python
{ "resource": "" }
q263723
static
validation
def static(**kwargs):
    """
    Attach the given keyword arguments as attributes on the decorated
    function, and bind the function itself to the global name 'static' in its
    module (emulating C-style function-static state).  USE carefully ^^
    """
    def wrap(fn):
        # BUG FIX: fn.func_globals is Python 2 only; on Python 3 the module
        # globals live on fn.__globals__ (this file already uses Python 3
        # features such as FileNotFoundError).
        fn.__globals__['static'] = fn
        fn.__dict__.update(kwargs)
        return fn
    return wrap
python
{ "resource": "" }
q263724
rand_blend_mask
validation
def rand_blend_mask(shape, rand=rand.uniform(-10, 10), **kwargs): """ random blending masks """ # batch, channel = shape[0], shape[3] z = rand(shape[0]) # seed noise = snoise2dz((shape[1], shape[2]), z, **kwargs) return noise
python
{ "resource": "" }
q263725
snoise2d
validation
def snoise2d(size, z=0.0, scale=0.05, octaves=1, persistence=0.25, lacunarity=2.0):
    """ z value as like a seed

    Returns a float32 array of simplex noise rescaled into [0, 1].
    """
    import noise
    data = np.empty(size, dtype='float32')
    for y in range(size[0]):
        for x in range(size[1]):
            v = noise.snoise3(x * scale, y * scale, z, octaves=octaves,
                              persistence=persistence, lacunarity=lacunarity)
            # NOTE(review): loops range over (size[0], size[1]) as (y, x) but
            # the assignment is data[x, y]; this transposes the result and is
            # only in-bounds for square sizes -- confirm intent.
            data[x, y] = v
    # Map simplex output from [-1, 1] into [0, 1].
    data = data * 0.5 + 0.5
    if __debug__:
        assert data.min() >= 0. and data.max() <= 1.0
    return data
python
{ "resource": "" }
q263726
to_permutation_matrix
validation
def to_permutation_matrix(matches):
    """Converts a permutation into a permutation matrix.

    `matches` is a dictionary whose keys are vertices and whose values are
    partners. For each vertex ``u`` and ``v``, entry (``u``, ``v``) in the
    returned matrix will be a ``1`` if and only if ``matches[u] == v``.

    Pre-condition: `matches` must be a permutation on an initial subset of
    the natural numbers.

    Returns a permutation matrix as a square NumPy array.
    """
    n = len(matches)
    P = np.zeros((n, n))
    if n > 0:
        # BUG FIX: the original indexed with a single list of (row, col)
        # pairs, which modern NumPy treats as one fancy index on axis 0
        # rather than per-axis indices. Build explicit row and column index
        # arrays instead, equivalent to:
        #     for (u, v) in matches.items():
        #         P[u, v] = 1
        rows, cols = zip(*matches.items())
        P[list(rows), list(cols)] = 1
    return P
python
{ "resource": "" }
q263727
four_blocks
validation
def four_blocks(topleft, topright, bottomleft, bottomright):
    """Convenience function that creates a block matrix with the specified
    blocks.

    Each argument must be a NumPy matrix. The two top matrices must have the
    same number of rows, as must the two bottom matrices. The two left
    matrices must have the same number of columns, as must the two right
    matrices.
    """
    # NOTE(review): numpy.vstack/hstack take a single sequence argument; this
    # two-positional-argument call shape only works if `vstack`/`hstack` in
    # scope here are project-local wrappers -- confirm which are imported.
    return vstack(hstack(topleft, topright),
                  hstack(bottomleft, bottomright))
python
{ "resource": "" }
q263728
to_bipartite_matrix
validation
def to_bipartite_matrix(A):
    """Returns the adjacency matrix of a bipartite graph whose biadjacency
    matrix is `A`.

    `A` must be a NumPy array. If `A` has **m** rows and **n** columns, then
    the returned matrix has **m + n** rows and columns, laid out as
    [[0, A], [A.T, 0]].
    """
    m, n = A.shape
    # NOTE(review): numpy.zeros takes a shape tuple, not two ints; this
    # zeros(m, m) call shape implies `zeros` here is a project-local helper
    # (as with vstack/hstack in four_blocks) -- confirm the import.
    return four_blocks(zeros(m, m), A,
                       A.T, zeros(n, n))
python
{ "resource": "" }
q263729
to_pattern_matrix
validation
def to_pattern_matrix(D):
    """Returns the Boolean matrix in the same shape as `D` with ones exactly
    where there are nonzero entries in `D`.

    `D` must be a NumPy array.
    """
    pattern = np.zeros_like(D)
    # Single vectorized assignment instead of looping over D.nonzero() pairs.
    pattern[D.nonzero()] = 1
    return pattern
python
{ "resource": "" }
q263730
bump_version
validation
def bump_version(version, which=None):
    """Returns the result of incrementing `version`.

    If `which` is not specified, the "patch" part of the version number will
    be incremented. If `which` is specified, it must be ``'major'``,
    ``'minor'``, or ``'patch'``. If it is one of these three strings, the
    corresponding part of the version number will be incremented instead of
    the patch number.

    Returns a string representing the next version number.

    Example::

        >>> bump_version('2.7.1')
        '2.7.2'
        >>> bump_version('2.7.1', 'minor')
        '2.8.0'
        >>> bump_version('2.7.1', 'major')
        '3.0.0'
    """
    try:
        parts = [int(n) for n in version.split('.')]
    except ValueError:
        fail('Current version is not numeric')
    if len(parts) != 3:
        fail('Current version is not semantic versioning')
    # Determine where to increment the version number
    PARTS = {'major': 0, 'minor': 1, 'patch': 2}
    index = PARTS[which] if which in PARTS else 2
    # Increment the version number at that index and set the subsequent parts
    # to 0.
    # BUG FIX: the original returned the trailing parts unchanged, so
    # bump_version('2.7.1', 'minor') produced '2.8.1' instead of the
    # documented '2.8.0'.
    before, middle, after = parts[:index], parts[index], parts[index + 1:]
    middle += 1
    after = [0] * len(after)
    return '.'.join(str(n) for n in before + [middle] + after)
python
{ "resource": "" }
q263731
get_version
validation
def get_version(filename, pattern):
    """Gets the current version from the specified file.

    This function assumes the file includes a string of the form::

        <pattern> = <version>
    """
    with open(filename) as f:
        # BUG FIX: global inline flags like (?sm) must appear at the start of
        # the pattern; placing them at the end raises re.error on
        # Python 3.11+ (it was only a DeprecationWarning before).
        match = re.search(r"(?sm)^(\s*%s\s*=\s*')(.+?)(')" % pattern, f.read())
    if match:
        before, version, after = match.groups()
        return version
    fail('Could not find {} in {}'.format(pattern, filename))
python
{ "resource": "" }
q263732
fail
validation
def fail(message=None, exit_status=None):
    """Print *message* to stderr and terminate the program.

    Exits with *exit_status* when given, otherwise with status 1.
    """
    print('Error:', message, file=sys.stderr)
    sys.exit(exit_status if exit_status else 1)
python
{ "resource": "" }
q263733
git_tag
validation
def git_tag(tag):
    """Create a signed, annotated git tag for the current version."""
    print('Tagging "{}"'.format(tag))
    message = '"Released version {}"'.format(tag)
    # Block until git finishes so callers see the tag once we return.
    Popen(['git', 'tag', '-s', '-m', message, tag]).wait()
python
{ "resource": "" }
q263734
Renderer.initialize
validation
def initialize(self, templates_path, global_data):
    """Initialize the renderer with a templates directory.

    parameters
      templates_path   str    the position of templates directory
      global_data      dict   data made available to every template
    """
    environment = Environment(loader=FileSystemLoader(templates_path))
    environment.trim_blocks = True
    self.env = environment
    self.global_data = global_data
python
{ "resource": "" }
q263735
Renderer.render
validation
def render(self, template, **data):
    """Render data with template, return html unicodes.

    parameters
      template   str    the template's filename
      data       dict   the data to render
    """
    # Merge call-site data over a copy of the globals so the shared
    # global_data dict is never mutated.
    context = dict(self.global_data)
    context.update(data)
    try:
        return self.env.get_template(template).render(**context)
    except TemplateNotFound:
        raise JinjaTemplateNotFound
python
{ "resource": "" }
q263736
Renderer.render_to
validation
def render_to(self, path, template, **data):
    """Render data with template and then write the result to path."""
    html = self.render(template, **data)
    # BUG FIX: the original opened the file in text mode ('w') but wrote
    # encoded bytes, which raises TypeError on Python 3; write the encoded
    # bytes to a binary handle instead.
    with open(path, 'wb') as f:
        f.write(html.encode(charset))
python
{ "resource": "" }
q263737
render
validation
def render(template, **data):
    """shortcut to render data with `template`. Just add exception
    catch to `renderer.render`"""
    try:
        return renderer.render(template, **data)
    except JinjaTemplateNotFound as not_found:
        # Log which template was missing, then abort with the error's code.
        logger.error(not_found.__doc__ + ', Template: %r' % template)
        sys.exit(not_found.exit_code)
python
{ "resource": "" }
q263738
GenericDataFrameAPIView.get_dataframe
validation
def get_dataframe(self):
    """
    Get the DataFrame for this view.

    Defaults to using `self.dataframe`. This method should always be used
    rather than accessing `self.dataframe` directly, as `self.dataframe`
    gets evaluated only once, and those results are cached for all
    subsequent requests.

    You may want to override this if you need to provide different
    dataframes depending on the incoming request.
    """
    assert self.dataframe is not None, (
        "'%s' should either include a `dataframe` attribute, "
        "or override the `get_dataframe()` method."
        % self.__class__.__name__
    )
    return self.dataframe
python
{ "resource": "" }
q263739
GenericDataFrameAPIView.index_row
validation
def index_row(self, dataframe):
    """
    Indexes the row based on the request parameters, returning it as a
    one-row DataFrame.
    """
    lookup_value = self.kwargs[self.lookup_url_kwarg]
    row = dataframe.loc[lookup_value]
    # .loc returns a Series for a single label; transpose back to a 1-row frame
    return row.to_frame().T
python
{ "resource": "" }
q263740
GenericDataFrameAPIView.get_object
validation
def get_object(self):
    """
    Returns the row the view is displaying.

    You may want to override this if you need to provide non-standard
    queryset lookups, e.g. if rows are referenced using multiple keyword
    arguments in the url conf.
    """
    frame = self.filter_dataframe(self.get_dataframe())

    assert self.lookup_url_kwarg in self.kwargs, (
        'Expected view %s to be called with a URL keyword argument '
        'named "%s". Fix your URL conf, or set the `.lookup_field` '
        'attribute on the view correctly.' %
        (self.__class__.__name__, self.lookup_url_kwarg)
    )

    try:
        row = self.index_row(frame)
    except (IndexError, KeyError, ValueError):
        raise Http404

    # May raise a permission denied
    self.check_object_permissions(self.request, row)
    return row
python
{ "resource": "" }
q263741
GenericDataFrameAPIView.paginator
validation
def paginator(self):
    """
    The paginator instance associated with the view, or `None`.
    """
    # Lazily build the paginator once and memoize it on the instance.
    if not hasattr(self, '_paginator'):
        cls = self.pagination_class
        self._paginator = cls() if cls is not None else None
    return self._paginator
python
{ "resource": "" }
q263742
GenericDataFrameAPIView.paginate_dataframe
validation
def paginate_dataframe(self, dataframe):
    """
    Return a single page of results, or `None` if pagination is disabled.
    """
    pager = self.paginator
    if pager is None:
        return None
    return pager.paginate_dataframe(dataframe, self.request, view=self)
python
{ "resource": "" }
q263743
Config.parse
validation
def parse(self):
    """parse config, return a dict"""
    # Missing config file is treated as an empty document.
    content = ""
    if exists(self.filepath):
        content = open(self.filepath).read().decode(charset)
    try:
        return toml.loads(content)
    except toml.TomlSyntaxError:
        raise ConfigSyntaxError
python
{ "resource": "" }
q263744
render_to
validation
def render_to(path, template, **data):
    """shortcut to render data with `template` and then write to `path`.
    Just add exception catch to `renderer.render_to`"""
    try:
        renderer.render_to(path, template, **data)
    except JinjaTemplateNotFound as e:
        logger.error(e.__doc__ + ', Template: %r' % template)
        sys.exit(e.exit_code)
python
{ "resource": "" }
q263745
Parser.parse
validation
def parse(self, source):
    """Parse ascii post source, return dict"""
    rt, title, title_pic, markdown = libparser.parse(source)

    # Negative return codes signal malformed source.
    if rt == -1:
        raise SeparatorNotFound
    elif rt == -2:
        raise PostTitleNotFound

    # decode each piece to unicode
    title = to_unicode(title)
    title_pic = to_unicode(title_pic)
    markdown = to_unicode(markdown)

    # render full body; the summary renders only the first 200 source chars
    html = self.markdown.render(markdown)
    summary = self.markdown.render(markdown[:200])

    return dict(title=title, markdown=markdown, html=html,
                summary=summary, title_pic=title_pic)
python
{ "resource": "" }
q263746
Parser.parse_filename
validation
def parse_filename(self, filepath):
    """parse post source files name to datetime object"""
    # Strip the directory and the source extension, leaving the date stamp.
    name = os.path.basename(filepath)[:-src_ext_len]
    try:
        dt = datetime.strptime(name, "%Y-%m-%d-%H-%M")
    except ValueError:
        raise PostNameInvalid
    return dict(name=name, datetime=dt, filepath=filepath)
python
{ "resource": "" }
q263747
Server.run_server
validation
def run_server(self, port):
    """run a server binding to port

    :param port: TCP port to bind on all interfaces (0.0.0.0)
    """
    try:
        self.server = MultiThreadedHTTPServer(('0.0.0.0', port), Handler)
    except socket.error as e:  # failed to bind port
        # `except E, e` (the original form) is Python-2-only syntax;
        # `as` is valid on 2.6+ and 3.x with identical semantics.
        logger.error(str(e))
        sys.exit(1)

    logger.info("HTTP serve at http://0.0.0.0:%d (ctrl-c to stop) ..."
                % port)
    try:
        self.server.serve_forever()
    except KeyboardInterrupt:
        logger.info("^C received, shutting down server")
        self.shutdown_server()
python
{ "resource": "" }
q263748
Server.get_files_stat
validation
def get_files_stat(self):
    """get source files' update time"""
    if not exists(Post.src_dir):
        logger.error(SourceDirectoryNotFound.__doc__)
        sys.exit(SourceDirectoryNotFound.exit_code)

    # Collect every post source file under src_dir.
    paths = [join(Post.src_dir, fn)
             for fn in ls(Post.src_dir) if fn.endswith(src_ext)]

    # config.toml participates in change detection too
    if exists(config.filepath):
        paths.append(config.filepath)

    # files: a <filepath to updated time> dict
    return dict((p, stat(p).st_mtime) for p in paths)
python
{ "resource": "" }
q263749
Server.watch_files
validation
def watch_files(self):
    """watch files for changes, if changed, rebuild blog. this thread
    will quit if the main process ends"""
    try:
        while 1:
            sleep(1)  # check every 1s
            try:
                files_stat = self.get_files_stat()
            except SystemExit:
                # get_files_stat sys.exit()s on a missing source dir.
                logger.error("Error occurred, server shut down")
                self.shutdown_server()
            # NOTE(review): if the except branch above returns instead of
            # exiting, files_stat may be unbound here — confirm
            # shutdown_server terminates the process.
            if self.files_stat != files_stat:
                logger.info("Changes detected, start rebuilding..")
                try:
                    generator.re_generate()
                    # publish the regenerated tree to the module-level root
                    global _root
                    _root = generator.root
                except SystemExit:  # catch sys.exit, it means fatal error
                    logger.error("Error occurred, server shut down")
                    self.shutdown_server()
                self.files_stat = files_stat  # update files' stat
    except KeyboardInterrupt:
        # I dont know why, but this exception won't be catched
        # because absolutly each KeyboardInterrupt is catched by
        # the server thread, which will terminate this thread the same time
        logger.info("^C received, shutting down watcher")
        self.shutdown_watcher()
python
{ "resource": "" }
q263750
deploy_blog
validation
def deploy_blog():
    """Deploy new blog to current directory"""
    logger.info(deploy_blog.__doc__)
    # `rsync -aqu path/to/res/* .`
    resources = join(dirname(__file__), 'res', '*')
    call('rsync -aqu ' + resources + ' .', shell=True)
    logger.success('Done')
    logger.info('Please edit config.toml to meet your needs')
python
{ "resource": "" }
q263751
using
validation
def using(context, alias):
    '''
    Temporarily update the context to use the BlockContext for the given
    alias.
    '''
    if alias == '':
        # An empty alias means look in the current widget set.
        yield context
        return

    try:
        widgets = context.render_context[WIDGET_CONTEXT_KEY]
    except KeyError:
        raise template.TemplateSyntaxError('No widget libraries loaded!')

    try:
        block_set = widgets[alias]
    except KeyError:
        raise template.TemplateSyntaxError('No widget library loaded for alias: %r' % alias)

    # Push a fresh render frame holding this alias's blocks, yield, then
    # restore the previous frame.
    context.render_context.push()
    context.render_context[BLOCK_CONTEXT_KEY] = block_set
    context.render_context[WIDGET_CONTEXT_KEY] = widgets

    yield context

    context.render_context.pop()
python
{ "resource": "" }
q263752
find_block
validation
def find_block(context, *names):
    '''
    Find the first matching block in the current block_context
    '''
    blocks = context.render_context[BLOCK_CONTEXT_KEY]
    for name in names:
        candidate = blocks.get_block(name)
        if candidate is not None:
            return candidate
    raise template.TemplateSyntaxError('No widget found for: %r' % (names,))
python
{ "resource": "" }
q263753
load_widgets
validation
def load_widgets(context, **kwargs):
    '''
    Load a series of widget libraries.
    '''
    _soft = kwargs.pop('_soft', False)

    try:
        widgets = context.render_context[WIDGET_CONTEXT_KEY]
    except KeyError:
        widgets = context.render_context[WIDGET_CONTEXT_KEY] = {}

    for alias, template_name in kwargs.items():
        # With _soft, keep any library already registered under this alias.
        if _soft and alias in widgets:
            continue
        with context.render_context.push({BLOCK_CONTEXT_KEY: BlockContext()}):
            widgets[alias] = resolve_blocks(template_name, context)

    return ''
python
{ "resource": "" }
q263754
auto_widget
validation
def auto_widget(field):
    '''Return a list of widget names for the provided field.'''
    # Auto-detect the widget/field classes and the bound field name.
    info = {
        'widget': field.field.widget.__class__.__name__,
        'field': field.field.__class__.__name__,
        'name': field.name,
    }
    # Most specific pattern first, falling back to bare names.
    patterns = (
        '{field}_{widget}_{name}',
        '{field}_{name}',
        '{widget}_{name}',
        '{field}_{widget}',
        '{name}',
        '{widget}',
        '{field}',
    )
    return [pattern.format(**info) for pattern in patterns]
python
{ "resource": "" }
q263755
reuse
validation
def reuse(context, block_list, **kwargs):
    '''
    Allow reuse of a block within a template.

    {% reuse '_myblock' foo=bar %}

    If passed a list of block names, will use the first that matches:

    {% reuse list_of_block_names .... %}
    '''
    try:
        block_context = context.render_context[BLOCK_CONTEXT_KEY]
    except KeyError:
        block_context = BlockContext()

    if not isinstance(block_list, (list, tuple)):
        block_list = [block_list]

    # First truthy match wins; render nothing when none is found.
    found = None
    for name in block_list:
        found = block_context.get_block(name)
        if found:
            break
    if not found:
        return ''

    with context.push(kwargs):
        return found.render(context)
python
{ "resource": "" }
q263756
ChoiceWrapper.display
validation
def display(self):
    """
    When dealing with optgroups, ensure that the value is properly
    force_text'd.
    """
    if self.is_group():
        return ((force_text(k), v) for k, v in self._display)
    return self._display
python
{ "resource": "" }
q263757
RedisBackend.create_message
validation
def create_message(self, level, msg_text, extra_tags='', date=None, url=None):
    """
    Message instances are namedtuples of type `Message`.
    The date field is already serialized in datetime.isoformat ECMA-262
    format
    """
    # Any falsy date (None) defaults to the current time.
    if not date:
        now = timezone.now()
    else:
        now = date
    r = now.isoformat()
    if now.microsecond:
        # Truncate microseconds (6 digits) to milliseconds (3 digits):
        # keep "YYYY-MM-DDTHH:MM:SS.mmm" (23 chars) and re-attach whatever
        # follows the microsecond field (e.g. a "+HH:MM" offset).
        r = r[:23] + r[26:]
    if r.endswith('+00:00'):
        # ECMA-262 spells UTC as a trailing "Z" instead of "+00:00".
        r = r[:-6] + 'Z'

    # The id is a sha256 of serialized-date + text, so the same text at the
    # same (millisecond-truncated) instant yields the same message id.
    fingerprint = r + msg_text
    msg_id = hashlib.sha256(fingerprint.encode('ascii', 'ignore')).hexdigest()
    return Message(id=msg_id, message=msg_text, level=level, tags=extra_tags,
                   date=r, url=url)
python
{ "resource": "" }
q263758
add_message_for
validation
def add_message_for(users, level, message_text, extra_tags='', date=None,
                    url=None, fail_silently=False):
    """
    Send a message to a list of users without passing through
    `django.contrib.messages`

    :param users: an iterable containing the recipients of the messages
    :param level: message level
    :param message_text: the string containing the message
    :param extra_tags: like the Django api, a string containing extra tags
                       for the message
    :param date: a date, different than the default timezone.now
    :param url: an optional url
    :param fail_silently: not used at the moment
    """
    backend = stored_messages_settings.STORAGE_BACKEND()
    message = backend.create_message(level, message_text, extra_tags, date, url)
    # Persist to the archive and drop a copy into each recipient's inbox.
    backend.archive_store(users, message)
    backend.inbox_store(users, message)
python
{ "resource": "" }
q263759
broadcast_message
validation
def broadcast_message(level, message_text, extra_tags='', date=None, url=None,
                      fail_silently=False):
    """
    Send a message to all users aka broadcast.

    :param level: message level
    :param message_text: the string containing the message
    :param extra_tags: like the Django api, a string containing extra tags
                       for the message
    :param date: a date, different than the default timezone.now
    :param url: an optional url
    :param fail_silently: not used at the moment
    """
    from django.contrib.auth import get_user_model
    recipients = get_user_model().objects.all()
    add_message_for(recipients, level, message_text, extra_tags=extra_tags,
                    date=date, url=url, fail_silently=fail_silently)
python
{ "resource": "" }
q263760
mark_read
validation
def mark_read(user, message):
    """
    Mark message instance as read for user.
    Returns True if the message was `unread` and thus actually marked as
    `read` or False in case it is already `read` or it does not exist at all.

    :param user: user instance for the recipient
    :param message: a Message instance to mark as read
    """
    BackendClass = stored_messages_settings.STORAGE_BACKEND
    backend = BackendClass()
    # The docstring promises a True/False result but the original dropped
    # the backend's return value (the function always returned None);
    # propagate it so callers can rely on the documented contract.
    return backend.inbox_delete(user, message)
python
{ "resource": "" }
q263761
mark_all_read
validation
def mark_all_read(user):
    """
    Mark all message instances for a user as read.

    :param user: user instance for the recipient
    """
    backend = stored_messages_settings.STORAGE_BACKEND()
    backend.inbox_purge(user)
python
{ "resource": "" }
q263762
stored_messages_archive
validation
def stored_messages_archive(context, num_elements=10):
    """
    Renders a list of archived messages for the current user
    """
    # Guard clauses: no user in context / anonymous user render nothing.
    if "user" not in context:
        return
    user = context["user"]
    if not user.is_authenticated():
        return

    qs = MessageArchive.objects.select_related("message").filter(user=user)
    return {
        "messages": qs[:num_elements],
        "count": qs.count(),
    }
python
{ "resource": "" }
q263763
StorageMixin._get
validation
def _get(self, *args, **kwargs):
    """
    Retrieve unread messages for current user, both from the inbox and
    from other storages
    """
    messages, all_retrieved = super(StorageMixin, self)._get(*args, **kwargs)
    inbox_messages = []
    if self.user.is_authenticated():
        inbox_messages = self.backend.inbox_list(self.user)
    return messages + inbox_messages, all_retrieved
python
{ "resource": "" }
q263764
StorageMixin.add
validation
def add(self, level, message, extra_tags=''):
    """
    If the message level was configured for being stored and request.user
    is not anonymous, save it to the database. Otherwise, let some other
    class handle the message.

    Notice: controls like checking the message is not empty and the level
    is above the filter need to be performed here, but it could happen
    they'll be performed again later if the message does not need to be
    stored.
    """
    if not message:
        return

    # Check that the message level is not less than the recording level.
    level = int(level)
    if level < self.level:
        return

    # Messages that should not be persisted are delegated to the regular
    # contrib.messages machinery.
    should_persist = (level in stored_messages_settings.STORE_LEVELS
                      and not self.user.is_anonymous())
    if not should_persist:
        return super(StorageMixin, self).add(level, message, extra_tags)

    self.added_new = True
    stored = self.backend.create_message(level, message, extra_tags)
    self.backend.archive_store([self.user], stored)
    self._queued_messages.append(stored)
python
{ "resource": "" }
q263765
StorageMixin._store
validation
def _store(self, messages, response, *args, **kwargs):
    """
    persistent messages are already in the database inside the 'archive',
    so we can say they're already "stored".
    Here we put them in the inbox, or remove from the inbox in case the
    messages were iterated.

    messages contains only new msgs if self.used==True
    else contains both new and unread messages
    """
    fallback = []  # messages the backend cannot handle go to contrib
    if self.user.is_authenticated():
        if not messages:
            # erase inbox
            self.backend.inbox_purge(self.user)
        else:
            for message in messages:
                try:
                    self.backend.inbox_store([self.user], message)
                except MessageTypeNotSupported:
                    fallback.append(message)
    super(StorageMixin, self)._store(fallback, response, *args, **kwargs)
python
{ "resource": "" }
q263766
StorageMixin._prepare_messages
validation
def _prepare_messages(self, messages):
    """
    Like the base class method, prepares a list of messages for storage
    but avoid to do this for `models.Message` instances.
    """
    for message in messages:
        # Backend-handled messages are stored as-is; only the rest need
        # the contrib.messages serialization step.
        if self.backend.can_handle(message):
            continue
        message._prepare()
python
{ "resource": "" }
q263767
jocker
validation
def jocker(test_options=None):
    """Main entry point for script.

    ``test_options`` lets tests inject a parsed options dict and skip the
    docopt command-line parsing.
    """
    current_version = ver_check()
    options = test_options or docopt(__doc__, version=current_version)
    _set_global_verbosity_level(options.get('--verbose'))
    jocker_lgr.debug(options)
    jocker_run(options)
python
{ "resource": "" }
q263768
init
validation
def init(base_level=DEFAULT_BASE_LOGGING_LEVEL,
         verbose_level=DEFAULT_VERBOSE_LOGGING_LEVEL,
         logging_config=None):
    """initializes a base logger

    you can use this to init a logger in any of your files.
    this will use config.py's LOGGER param and logging.dictConfig to configure
    the logger for you.

    :param int|logging.LEVEL base_level: desired base logging level
    :param int|logging.LEVEL verbose_level: desired verbose logging level
        (NOTE(review): accepted but never used in this function — confirm)
    :param dict logging_dict: dictConfig based configuration.
     used to override the default configuration from config.py
    :rtype: `python logger`
    """
    if logging_config is None:
        logging_config = {}
    # An empty dict falls back to the module-level LOGGER config.
    logging_config = logging_config or LOGGER
    # TODO: (IMPRV) only perform file related actions if file handler is
    # TODO: (IMPRV) defined.

    # NOTE(review): the log file path is always read from LOGGER, even when
    # a custom logging_config was passed — confirm this is intended.
    log_file = LOGGER['handlers']['file']['filename']
    log_dir = os.path.dirname(os.path.expanduser(log_file))
    if os.path.isfile(log_dir):
        sys.exit('file {0} exists - log directory cannot be created '
                 'there. please remove the file and try again.'
                 .format(log_dir))
    try:
        if not os.path.exists(log_dir) and not len(log_dir) == 0:
            os.makedirs(log_dir)
        dictconfig.dictConfig(logging_config)
        lgr = logging.getLogger('user')
        lgr.setLevel(base_level)
        return lgr
    except ValueError as e:
        sys.exit('could not initialize logger.'
                 ' verify your logger config'
                 ' and permissions to write to {0} ({1})'
                 .format(log_file, e))
python
{ "resource": "" }
q263769
BaseConfigurator.configure_custom
validation
def configure_custom(self, config):
    """Configure an object with a user-supplied factory."""
    factory = config.pop('()')
    # Python 2 old-style classes (types.ClassType) are not callable-checked
    # the same way; resolve them to a usable factory.
    if not hasattr(factory, '__call__') and \
            hasattr(types, 'ClassType') and isinstance(factory, types.ClassType):
        factory = self.resolve(factory)
    props = config.pop('.', None)
    # Check for valid identifiers
    kwargs = dict((k, config[k]) for k in config if valid_ident(k))
    result = factory(**kwargs)
    if props:
        for name, value in props.items():
            setattr(result, name, value)
    return result
python
{ "resource": "" }
q263770
_set_global_verbosity_level
validation
def _set_global_verbosity_level(is_verbose_output=False):
    """sets the global verbosity level for console and the jocker_lgr logger.

    :param bool is_verbose_output: should be output be verbose
    """
    global verbose_output
    # TODO: (IMPRV) only raise exceptions in verbose mode
    verbose_output = is_verbose_output
    level = logging.DEBUG if verbose_output else logging.INFO
    jocker_lgr.setLevel(level)
python
{ "resource": "" }
q263771
_import_config
validation
def _import_config(config_file):
    """returns a configuration object

    :param string config_file: path to config file
    """
    jocker_lgr.debug('config file is: {0}'.format(config_file))
    try:
        jocker_lgr.debug('importing config...')
        with open(config_file, 'r') as handle:
            raw = handle.read()
        return yaml.safe_load(raw)
    except IOError as ex:
        jocker_lgr.error(str(ex))
        raise RuntimeError('cannot access config file')
    except yaml.parser.ParserError as ex:
        jocker_lgr.error('invalid yaml file: {0}'.format(ex))
        raise RuntimeError('invalid yaml file')
python
{ "resource": "" }
q263772
execute
validation
def execute(varsfile, templatefile, outputfile=None, configfile=None,
            dryrun=False, build=False, push=False, verbose=False):
    """generates a Dockerfile, builds an image and pushes it to DockerHub

    A `Dockerfile` will be generated by Jinja2 according to the `varsfile`
    imported. If build is true, an image will be generated from the
    `outputfile` which is the generated Dockerfile and committed to the
    image:tag string supplied to `build`. If push is true, a build will be
    triggered and the produced image will be pushed to DockerHub upon
    completion.

    :param string varsfile: path to file with variables.
    :param string templatefile: path to template file to use.
    :param string outputfile: path to output Dockerfile.
    :param string configfile: path to yaml file with docker-py config.
    :param bool dryrun: mock run.
    :param build: False or the image:tag to build to.
    :param push: False or the image:tag to build to. (triggers build)
    :param bool verbose: verbose output.
    """
    # A dry run is mutually exclusive with building or pushing.
    if dryrun and (build or push):
        jocker_lgr.error('dryrun requested, cannot build.')
        sys.exit(100)

    _set_global_verbosity_level(verbose)
    jocker = Jocker(varsfile, templatefile, outputfile, configfile, dryrun,
                    build, push)
    formatted_text = jocker.generate()

    if dryrun:
        return jocker.dryrun(formatted_text)
    if build or push:
        jocker.build_image()
        if push:
            jocker.push_image()
python
{ "resource": "" }
q263773
Jocker._parse_dumb_push_output
validation
def _parse_dumb_push_output(self, string): """since the push process outputs a single unicode string consisting of multiple JSON formatted "status" lines, we need to parse it so that it can be read as multiple strings. This will receive the string as an input, count curly braces and ignore any newlines. When the curly braces stack is 0, it will append the entire string it has read up until then to a list and so forth. :param string: the string to parse :rtype: list of JSON's """ stack = 0 json_list = [] tmp_json = '' for char in string: if not char == '\r' and not char == '\n': tmp_json += char if char == '{': stack += 1 elif char == '}': stack -= 1 if stack == 0: if not len(tmp_json) == 0: json_list.append(tmp_json) tmp_json = '' return json_list
python
{ "resource": "" }
q263774
upload_gif
validation
def upload_gif(gif):
    """Uploads an image file to Imgur"""
    client_id = os.environ.get('IMGUR_API_ID')
    client_secret = os.environ.get('IMGUR_API_SECRET')

    # Both credentials must be present before attempting an upload.
    if client_id is None or client_secret is None:
        click.echo('Cannot upload - could not find IMGUR_API_ID or IMGUR_API_SECRET environment variables')
        return

    client = ImgurClient(client_id, client_secret)
    click.echo('Uploading file {}'.format(click.format_filename(gif)))
    response = client.upload_from_path(gif)
    click.echo('File uploaded - see your gif at {}'.format(response['link']))
python
{ "resource": "" }
q263775
is_dot
validation
def is_dot(ip):
    """Return true if the IP address is in dotted decimal notation."""
    octets = str(ip).split('.')
    if len(octets) != 4:
        return False
    for octet in octets:
        try:
            value = int(octet)
        except ValueError:
            return False
        if not 0 <= value <= 255:
            return False
    return True
python
{ "resource": "" }
q263776
is_bin
validation
def is_bin(ip):
    """Return true if the IP address is in binary notation."""
    try:
        text = str(ip)
        if len(text) != 32:
            return False
        value = int(text, 2)
    except (TypeError, ValueError):
        return False
    # 32 bits always fit in [0, 4294967295]; kept for symmetry.
    return 0 <= value <= 4294967295
python
{ "resource": "" }
q263777
is_oct
validation
def is_oct(ip):
    """Return true if the IP address is in octal notation."""
    try:
        value = int(str(ip), 8)
    except (TypeError, ValueError):
        return False
    # 0o37777777777 == 2**32 - 1, the largest 32-bit address.
    return 0 <= value <= 0o37777777777
python
{ "resource": "" }
q263778
is_dec
validation
def is_dec(ip):
    """Return true if the IP address is in decimal notation."""
    try:
        value = int(str(ip))
    except ValueError:
        return False
    return 0 <= value <= 4294967295
python
{ "resource": "" }
q263779
_check_nm
validation
def _check_nm(nm, notation):
    """Function internally used to check if the given netmask
    is of the specified notation."""
    # Convert to decimal, and check if it's in the list of valid netmasks.
    converters = {
        NM_DOT: _dot_to_dec,
        NM_HEX: _hex_to_dec,
        NM_BIN: _bin_to_dec,
        NM_OCT: _oct_to_dec,
        NM_DEC: _dec_to_dec_long,
    }
    try:
        dec = converters[notation](nm, check=True)
    except ValueError:
        return False
    return dec in _NETMASKS_VALUES
python
{ "resource": "" }
q263780
is_bits_nm
validation
def is_bits_nm(nm):
    """Return true if the netmask is in bits notatation."""
    try:
        bits = int(str(nm))
    except ValueError:
        return False
    return 0 <= bits <= 32
python
{ "resource": "" }
q263781
is_wildcard_nm
validation
def is_wildcard_nm(nm):
    """Return true if the netmask is in wildcard bits notatation."""
    try:
        # A wildcard mask is the bitwise complement of a dotted netmask.
        dec = 0xFFFFFFFF - _dot_to_dec(nm, check=True)
    except ValueError:
        return False
    return dec in _NETMASKS_VALUES
python
{ "resource": "" }
q263782
_dot_to_dec
validation
def _dot_to_dec(ip, check=True): """Dotted decimal notation to decimal conversion.""" if check and not is_dot(ip): raise ValueError('_dot_to_dec: invalid IP: "%s"' % ip) octets = str(ip).split('.') dec = 0 dec |= int(octets[0]) << 24 dec |= int(octets[1]) << 16 dec |= int(octets[2]) << 8 dec |= int(octets[3]) return dec
python
{ "resource": "" }
q263783
_dec_to_dot
validation
def _dec_to_dot(ip): """Decimal to dotted decimal notation conversion.""" first = int((ip >> 24) & 255) second = int((ip >> 16) & 255) third = int((ip >> 8) & 255) fourth = int(ip & 255) return '%d.%d.%d.%d' % (first, second, third, fourth)
python
{ "resource": "" }
q263784
_hex_to_dec
validation
def _hex_to_dec(ip, check=True): """Hexadecimal to decimal conversion.""" if check and not is_hex(ip): raise ValueError('_hex_to_dec: invalid IP: "%s"' % ip) if isinstance(ip, int): ip = hex(ip) return int(str(ip), 16)
python
{ "resource": "" }
q263785
_oct_to_dec
validation
def _oct_to_dec(ip, check=True): """Octal to decimal conversion.""" if check and not is_oct(ip): raise ValueError('_oct_to_dec: invalid IP: "%s"' % ip) if isinstance(ip, int): ip = oct(ip) return int(str(ip), 8)
python
{ "resource": "" }
q263786
_bin_to_dec
validation
def _bin_to_dec(ip, check=True): """Binary to decimal conversion.""" if check and not is_bin(ip): raise ValueError('_bin_to_dec: invalid IP: "%s"' % ip) if isinstance(ip, int): ip = str(ip) return int(str(ip), 2)
python
{ "resource": "" }
q263787
_BYTES_TO_BITS
validation
def _BYTES_TO_BITS(): """Generate a table to convert a whole byte to binary. This code was taken from the Python Cookbook, 2nd edition - O'Reilly.""" the_table = 256*[None] bits_per_byte = list(range(7, -1, -1)) for n in range(256): l = n bits = 8*[None] for i in bits_per_byte: bits[i] = '01'[n & 1] n >>= 1 the_table[l] = ''.join(bits) return the_table
python
{ "resource": "" }
q263788
_dec_to_bin
validation
def _dec_to_bin(ip):
    """Decimal to binary conversion."""
    # Emit one precomputed 8-bit chunk per byte, least significant first,
    # then reverse; zero maps to thirty-two '0' characters.
    chunks = []
    while ip:
        chunks.append(_BYTES_TO_BITS[ip & 255])
        ip >>= 8
    return ''.join(reversed(chunks)) or 32 * '0'
python
{ "resource": "" }
q263789
_bits_to_dec
validation
def _bits_to_dec(nm, check=True):
    """Bits to decimal conversion."""
    if check and not is_bits_nm(nm):
        raise ValueError('_bits_to_dec: invalid netmask: "%s"' % nm)
    return VALID_NETMASKS[int(str(nm))]
python
{ "resource": "" }
q263790
_wildcard_to_dec
validation
def _wildcard_to_dec(nm, check=False):
    """Wildcard bits to decimal conversion."""
    if check and not is_wildcard_nm(nm):
        raise ValueError('_wildcard_to_dec: invalid netmask: "%s"' % nm)
    # A wildcard mask is the bitwise complement of the dotted netmask.
    return 0xFFFFFFFF - _dot_to_dec(nm, check=False)
python
{ "resource": "" }
q263791
_detect
validation
def _detect(ip, _isnm):
    """Function internally used to detect the notation of the
    given IP or netmask."""
    ip = str(ip)
    if len(ip) > 1:
        # Prefix-based fast paths: "0x" suggests hex, a leading "0"
        # suggests octal; if the candidate fails validation we fall
        # through to the generic checks below.
        if ip[0:2] == '0x':
            if _CHECK_FUNCT[IP_HEX][_isnm](ip):
                return IP_HEX
        elif ip[0] == '0':
            if _CHECK_FUNCT[IP_OCT][_isnm](ip):
                return IP_OCT
    # Order matters: dotted-quad wins, then (netmasks only) bits
    # notation, then decimal, then (netmasks only) wildcard, then binary.
    if _CHECK_FUNCT[IP_DOT][_isnm](ip):
        return IP_DOT
    elif _isnm and _CHECK_FUNCT[NM_BITS][_isnm](ip):
        return NM_BITS
    elif _CHECK_FUNCT[IP_DEC][_isnm](ip):
        return IP_DEC
    elif _isnm and _CHECK_FUNCT[NM_WILDCARD][_isnm](ip):
        return NM_WILDCARD
    elif _CHECK_FUNCT[IP_BIN][_isnm](ip):
        return IP_BIN
    return IP_UNKNOWN
python
{ "resource": "" }
q263792
_convert
validation
def _convert(ip, notation, inotation, _check, _isnm):
    """Internally used to convert IPs and netmasks to other notations.

    Pipeline: normalize the notation constants, detect the input notation
    if unknown, convert the input to a decimal value, validate netmasks,
    then render the decimal value in the requested output notation.
    """
    inotation_orig = inotation
    notation_orig = notation
    inotation = _get_notation(inotation)
    notation = _get_notation(notation)
    if inotation is None:
        raise ValueError('_convert: unknown input notation: "%s"' % inotation_orig)
    if notation is None:
        raise ValueError('_convert: unknown output notation: "%s"' % notation_orig)
    docheck = _check or False
    if inotation == IP_UNKNOWN:
        inotation = _detect(ip, _isnm)
        if inotation == IP_UNKNOWN:
            raise ValueError('_convert: unable to guess input notation or invalid value')
        if _check is None:
            docheck = True
    # We _always_ check this case later (netmasks are validated against
    # _NETMASKS_VALUES below), so skip the per-notation check here.
    if _isnm:
        docheck = False
    dec = 0
    if inotation == IP_DOT:
        dec = _dot_to_dec(ip, docheck)
    elif inotation == IP_HEX:
        dec = _hex_to_dec(ip, docheck)
    elif inotation == IP_BIN:
        dec = _bin_to_dec(ip, docheck)
    elif inotation == IP_OCT:
        dec = _oct_to_dec(ip, docheck)
    elif inotation == IP_DEC:
        dec = _dec_to_dec_long(ip, docheck)
    elif _isnm and inotation == NM_BITS:
        dec = _bits_to_dec(ip, docheck)
    elif _isnm and inotation == NM_WILDCARD:
        dec = _wildcard_to_dec(ip, docheck)
    else:
        raise ValueError('_convert: unknown IP/netmask notation: "%s"' % inotation_orig)
    # Ensure this is a valid netmask.
    if _isnm and dec not in _NETMASKS_VALUES:
        raise ValueError('_convert: invalid netmask: "%s"' % ip)
    if notation == IP_DOT:
        return _dec_to_dot(dec)
    elif notation == IP_HEX:
        return _dec_to_hex(dec)
    elif notation == IP_BIN:
        return _dec_to_bin(dec)
    elif notation == IP_OCT:
        return _dec_to_oct(dec)
    elif notation == IP_DEC:
        return _dec_to_dec_str(dec)
    elif _isnm and notation == NM_BITS:
        return _dec_to_bits(dec)
    elif _isnm and notation == NM_WILDCARD:
        return _dec_to_wildcard(dec)
    else:
        raise ValueError('convert: unknown notation: "%s"' % notation_orig)
python
{ "resource": "" }
q263793
convert
validation
def convert(ip, notation=IP_DOT, inotation=IP_UNKNOWN, check=True):
    """Convert among IP address notations.

    Given an IP address, this function returns the address
    in another notation.

    @param ip: the IP address.
    @type ip: integers, strings or object with an appropriate __str()__ method.

    @param notation: the notation of the output (default: IP_DOT).
    @type notation: one of the IP_* constants, or the equivalent strings.

    @param inotation: force the input to be considered in the given notation
        (default the notation of the input is autodetected).
    @type inotation: one of the IP_* constants, or the equivalent strings.

    @param check: force the notation check on the input.
    @type check: True force the check, False force not to check and None
        do the check only if the inotation is unknown.

    @return: a string representing the IP in the selected notation.

    @raise ValueError: raised when the input is in unknown notation."""
    # Thin public wrapper over the internal converter with _isnm=False.
    return _convert(ip, notation, inotation,
                    _check=check, _isnm=False)
python
{ "resource": "" }
q263794
convert_nm
validation
def convert_nm(nm, notation=IP_DOT, inotation=IP_UNKNOWN, check=True):
    """Convert a netmask to another notation."""
    # Same as convert(), but with netmask validation enabled.
    return _convert(nm, notation, inotation,
                    _check=check, _isnm=True)
python
{ "resource": "" }
q263795
IPv4Address._add
validation
def _add(self, other): """Sum two IP addresses.""" if isinstance(other, self.__class__): sum_ = self._ip_dec + other._ip_dec elif isinstance(other, int): sum_ = self._ip_dec + other else: other = self.__class__(other) sum_ = self._ip_dec + other._ip_dec return sum_
python
{ "resource": "" }
q263796
IPv4Address._sub
validation
def _sub(self, other): """Subtract two IP addresses.""" if isinstance(other, self.__class__): sub = self._ip_dec - other._ip_dec if isinstance(other, int): sub = self._ip_dec - other else: other = self.__class__(other) sub = self._ip_dec - other._ip_dec return sub
python
{ "resource": "" }
q263797
IPv4NetMask.get_bits
validation
def get_bits(self):
    """Return the bits notation of the netmask."""
    options = dict(notation=NM_BITS, inotation=IP_DOT,
                   _check=False, _isnm=self._isnm)
    return _convert(self._ip, **options)
python
{ "resource": "" }
q263798
IPv4NetMask.get_wildcard
validation
def get_wildcard(self):
    """Return the wildcard bits notation of the netmask."""
    options = dict(notation=NM_WILDCARD, inotation=IP_DOT,
                   _check=False, _isnm=self._isnm)
    return _convert(self._ip, **options)
python
{ "resource": "" }
q263799
CIDR.set
validation
def set(self, ip, netmask=None):
    """Set the IP address and the netmask.

    Accepts either a "a.b.c.d/nm" CIDR string (netmask omitted) or an
    address plus a separate netmask; both may already be wrapped objects.
    Computes the network, broadcast, first and last usable addresses.
    """
    # Split a combined "ip/netmask" string form.
    if isinstance(ip, str) and netmask is None:
        ipnm = ip.split('/')
        if len(ipnm) != 2:
            raise ValueError('set: invalid CIDR: "%s"' % ip)
        ip = ipnm[0]
        netmask = ipnm[1]
    # Wrap raw inputs; already-wrapped objects are kept as-is.
    if isinstance(ip, IPv4Address):
        self._ip = ip
    else:
        self._ip = IPv4Address(ip)
    if isinstance(netmask, IPv4NetMask):
        self._nm = netmask
    else:
        self._nm = IPv4NetMask(netmask)
    ipl = int(self._ip)
    nml = int(self._nm)
    # Network base address and count of usable host addresses.
    base_add = ipl & nml
    self._ip_num = 0xFFFFFFFF - 1 - nml
    # NOTE: quite a mess.
    # This's here to handle /32 (-1) and /31 (0) netmasks.
    # /32 and /31 have no separate network/broadcast addresses: every
    # address in the block is usable, so _net_ip/_bc_ip stay None.
    if self._ip_num in (-1, 0):
        if self._ip_num == -1:
            self._ip_num = 1  # /32: exactly one address
        else:
            self._ip_num = 2  # /31: two addresses (RFC 3021 style)
        self._net_ip = None
        self._bc_ip = None
        self._first_ip_dec = base_add
        self._first_ip = IPv4Address(self._first_ip_dec, notation=IP_DEC)
        if self._ip_num == 1:
            last_ip_dec = self._first_ip_dec
        else:
            last_ip_dec = self._first_ip_dec + 1
        self._last_ip = IPv4Address(last_ip_dec, notation=IP_DEC)
        return
    # Regular case: network at the base, broadcast at the top, hosts
    # occupy everything in between.
    self._net_ip = IPv4Address(base_add, notation=IP_DEC)
    self._bc_ip = IPv4Address(base_add + self._ip_num + 1, notation=IP_DEC)
    self._first_ip_dec = base_add + 1
    self._first_ip = IPv4Address(self._first_ip_dec, notation=IP_DEC)
    self._last_ip = IPv4Address(base_add + self._ip_num, notation=IP_DEC)
python
{ "resource": "" }