docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Convert decimal angle to degrees, minutes and possibly seconds. Args: angle (float): Angle to convert style (str): Return fractional or whole minutes values Returns: tuple of int: Angle converted to degrees, minutes and possibly seconds Raises: ValueError: Unknown value fo...
def to_dms(angle, style='dms'): sign = 1 if angle >= 0 else -1 angle = abs(angle) * 3600 minutes, seconds = divmod(angle, 60) degrees, minutes = divmod(minutes, 60) if style == 'dms': return tuple(sign * abs(i) for i in (int(degrees), int(minutes), ...
1,058,923
Convert degrees, minutes and optionally seconds to decimal angle. Args: degrees (float): Number of degrees minutes (float): Number of minutes seconds (float): Number of seconds Returns: float: Angle converted to decimal degrees
def to_dd(degrees, minutes, seconds=0): sign = -1 if any(i < 0 for i in (degrees, minutes, seconds)) else 1 return sign * (abs(degrees) + abs(minutes) / 60 + abs(seconds) / 3600)
1,058,924
Generate a ``tuple`` of compass direction names. Args: segment (list): Compass segment to generate names for abbr (bool): Names should use single letter abbreviations Returns: bool: Direction names for compass segment
def __chunk(segment, abbr=False): names = ('north', 'east', 'south', 'west', 'north') if not abbr: sjoin = '-' else: names = [s[0].upper() for s in names] sjoin = '' if segment % 2 == 0: return (names[segment].capitalize(), sjoin.join((names[segment]....
1,058,925
Convert angle in to direction name. Args: angle (float): Angle in degrees to convert to direction name segments (int): Number of segments to split compass in to abbr (bool): Whether to return abbreviated direction string Returns: str: Direction name for ``angle``
def angle_to_name(angle, segments=8, abbr=False): if segments == 4: string = COMPASS_NAMES[int((angle + 45) / 90) % 4 * 2] elif segments == 8: string = COMPASS_NAMES[int((angle + 22.5) / 45) % 8 * 2] elif segments == 16: string = COMPASS_NAMES[int((angle + 11.25) / 22.5) % 16] ...
1,058,926
Convert angle in to distance along a great circle. Args: angle (float): Angle in degrees to convert to distance units (str): Unit type to be used for distances Returns: float: Distance in ``units`` Raises: ValueError: Unknown value for ``units``
def angle_to_distance(angle, units='metric'): distance = math.radians(angle) * BODY_RADIUS if units in ('km', 'metric'): return distance elif units in ('sm', 'imperial', 'US customary'): return distance / STATUTE_MILE elif units in ('nm', 'nautical'): return distance / NAUT...
1,058,929
Convert a distance in to an angle along a great circle. Args: distance (float): Distance to convert to degrees units (str): Unit type to be used for distances Returns: float: Angle in degrees Raises: ValueError: Unknown value for ``units``
def distance_to_angle(distance, units='metric'): if units in ('km', 'metric'): pass elif units in ('sm', 'imperial', 'US customary'): distance *= STATUTE_MILE elif units in ('nm', 'nautical'): distance *= NAUTICAL_MILE else: raise ValueError('Unknown units type %r' %...
1,058,930
Calculate geodesic latitude/longitude from Maidenhead locator. Args: locator (str): Maidenhead locator string Returns: tuple of float: Geodesic latitude and longitude values Raises: ValueError: Incorrect grid locator length ValueError: Invalid values in locator string
def from_grid_locator(locator): if not len(locator) in (4, 6, 8): raise ValueError('Locator must be 4, 6 or 8 characters long %r' % locator) # Convert the locator string to a list, because we need it to be mutable to # munge the values locator = list(locator) ...
1,058,931
Calculate Maidenhead locator from latitude and longitude. Args: latitude (float): Position's latitude longitude (float): Position's longitude precision (str): Precision with which generate locator string Returns: str: Maidenhead locator for latitude and longitude Raise: ...
def to_grid_locator(latitude, longitude, precision='square'): if precision not in ('square', 'subsquare', 'extsquare'): raise ValueError('Unsupported precision value %r' % precision) if not -90 <= latitude <= 90: raise ValueError('Invalid latitude value %r' % latitude) if not -180 <= l...
1,058,932
Parse latitude and longitude from string location. Args: location (str): String to parse Returns: tuple of float: Latitude and longitude of location
def parse_location(location): def split_dms(text, hemisphere): out = [] sect = [] for i in text: if i.isdigit(): sect.append(i) else: out.append(sect) sect = [] d, m, s = [float(''.join(i)) for i in...
1,058,933
Initialise a new ``FileFormatError`` object. Args: site (str): Remote site name to display in error message
def __init__(self, site=None): super(FileFormatError, self).__init__() self.site = site
1,058,938
Parse an ISO 8601 formatted time stamp. Args: timestamp (str): Timestamp to parse Returns: Timestamp: Parsed timestamp
def parse_isoformat(timestamp): if len(timestamp) == 20: zone = TzOffset('+00:00') timestamp = timestamp[:-1] elif len(timestamp) == 24: zone = TzOffset('%s:%s' % (timestamp[-5:-2], timestamp[-2:])) timestamp = timestamp[:-5] elif len(time...
1,058,940
Update the locator, and trigger a latitude and longitude update. Args: value (str): New Maidenhead locator string
def locator(self, value): self._locator = value self._latitude, self._longitude = utils.from_grid_locator(value)
1,059,006
Pretty printed location string. Args: mode (str): Coordinate formatting system to use Returns: str: Human readable string representation of ``Baken`` object
def __str__(self): text = super(Baken, self).__format__('dms') if self._locator: text = '%s (%s)' % (self._locator, text) return text
1,059,007
Search for funds matching a search term. Args: term (str): Fund id to search on field (str): The field to search on. Options are title, amount, org_name and type. kwargs (dict): additional keywords passed into requests.session.get params keyword.
def funds(self, term, field=None, **kwargs): params = kwargs params['q'] = term if field: params['f'] = field else: params['f'] = 'fu.org.n' baseuri = self._BASE_URI + 'funds' res = self.session.get(baseuri, params=params) self.han...
1,059,025
Returns a GDAL virtual filesystem prefixed path. Arguments: path -- file path as str
def vsiprefix(path): vpath = path.lower() scheme = VSI_SCHEMES.get(urlparse(vpath).scheme, '') for ext in VSI_TYPES: if ext in vpath: filesys = VSI_TYPES[ext] break else: filesys = '' if filesys and scheme: filesys = filesys[:-1] return ''.joi...
1,059,028
Extended pretty printing for location strings. Args: format_spec (str): Coordinate formatting system to use Returns: str: Human readable string representation of ``Point`` object Raises: ValueError: Unknown value for ``format_spec``
def __format__(self, format_spec='dd'): text = super(Location.__base__, self).__format__(format_spec) if self.alt_names: return '%s (%s - %s)' % (self.name, ', '.join(self.alt_names), text) else: return '%s (%s)' % (self.name...
1,059,090
Return the corresponding latitude Args: line (int): Line number Returns: Correponding latitude in degree
def lat_id(self, line): if self.grid == 'WAC': lat = ((1 + self.LINE_PROJECTION_OFFSET - line) * self.MAP_SCALE * 1e-3 / self.A_AXIS_RADIUS) return lat * 180 / np.pi else: lat = float(self.CENTER_LATITUDE) - \ (line - float(...
1,059,148
Return the corresponding longitude Args: sample (int): sample number on a line Returns: Correponding longidude in degree
def long_id(self, sample): if self.grid == 'WAC': lon = self.CENTER_LONGITUDE + (sample - self.SAMPLE_PROJECTION_OFFSET - 1)\ * self.MAP_SCALE * 1e-3 / (self.A_AXIS_RADIUS * np.cos(self.CENTER_LATITUDE * np.pi / 180.0)) return lon * 180 / np.pi else: ...
1,059,149
Return the corresponding sample Args: lon (int): longidute in degree Returns: Correponding sample
def sample_id(self, lon): if self.grid == 'WAC': sample = np.rint(float(self.SAMPLE_PROJECTION_OFFSET) + 1.0 + (lon * np.pi / 180.0 - float(self.CENTER_LONGITUDE)) * self.A_AXIS_RADIUS * np.cos(self.CENTE...
1,059,151
Return the corresponding line Args: lat (int): latitude in degree Returns: Correponding line
def line_id(self, lat): if self.grid == 'WAC': line = np.rint(1.0 + self.LINE_PROJECTION_OFFSET - self.A_AXIS_RADIUS * np.pi * lat / (self.MAP_SCALE * 1e-3 * 180)) else: line = np.rint(float(self.LINE_PROJECTION_OFFSET) - float(self.MAP_RESOLUT...
1,059,153
Read part of the binary file Args: size_chunk (int) : Size of the chunk to read start (int): Starting byte bytesize (int): Ending byte Returns: (np.array): array of the corresponding values
def array(self, size_chunk, start, bytesize): with open(self.img, 'rb') as f1: f1.seek(self.start_byte + start * self.bytesize) data = f1.read(size_chunk * self.bytesize) Z = np.fromstring(data, dtype=self.dtype, count=size_chunk) if self.grid == 'LOLA':...
1,059,154
Change the region of interest Args: size_window (float): Radius of the region of interest (km) Notes: Change the attributes ``size_window`` and ``window`` to correspond to the new region of interest.
def change_window(self, size_window): self.size_window = size_window self.window = self.lambert_window( self.size_window, self.lat0, self.lon0)
1,059,234
Return arrays the region of interest Args: type_img (str): Either lola or wac. Returns: A tupple of three arrays ``(X,Y,Z)`` with ``X`` contains the longitudes, ``Y`` contains the latitude and ``Z`` the values extracted for the region of interest. ...
def get_arrays(self, type_img): if type_img.lower() == 'lola': return LolaMap(self.ppdlola, *self.window, path_pdsfile=self.path_pdsfiles).image() elif type_img.lower() == 'wac': return WacMap(self.ppdwac, *self.window, path_pdsfile=self.path_pdsfiles).image() e...
1,059,237
desc: Contains the URIs for this service args: - name: routes type: list subtypes: [Route] desc: a mapping of URIs to Route instances ctor: pymarshal.api_docs.routes.Route.__init__
def __init__( self, routes, ): self.routes = type_assert_iter( routes, Route, ) check_dups(routes)
1,059,399
Attempt to load config from paths, in order. Args: paths (List[string]): list of paths to python files Return: Config: loaded config
def load_config(paths=DEFAULT_CONFIG_PATHS): config = Config() for path in paths: if os.path.isfile(path): config.load_pyfile(path) return config
1,059,489
Exception to be raised if pased file is invalid. Args: file_path (string): path to bad config cause (string): reason of failure, i.e. what exactly was the problem while parsing
def __init__(self, file_path, cause): message = six.text_type("Malformed config at {}: {}").format( file_path, cause ) super(MalformedConfig, self).__init__(message)
1,059,490
Load python file as config. Args: path (string): path to the python file
def load_pyfile(self, path): with open(path) as config_file: contents = config_file.read() try: exec(compile(contents, path, 'exec'), self) except Exception as e: raise MalformedConfig(path, six.text_type(e))
1,059,491
Search for a project by id. Args: term (str): Term to search for. kwargs (dict): additional keywords passed into requests.session.get params keyword.
def project(self, term, **kwargs): params = kwargs baseuri = self._BASE_URI + 'projects/' + term res = self.session.get(baseuri, params=params) self.handle_http_error(res) return res
1,059,562
Search for projects. Defaults to project_title. Other fields are: project_reference project_abstract Args: term (str): Term to search for. kwargs (dict): additional keywords passed into requests.session.get params keyword.
def projects(self, term, field=None, **kwargs): params = kwargs params['q'] = term if field: params['f'] = self._FIELD_MAP[field] else: params['f'] = 'pro.t' baseuri = self._BASE_URI + 'projects' res = self.session.get(baseuri, params=para...
1,059,563
Recursively marshal a Python object to a JSON-compatible dict that can be passed to json.{dump,dumps}, a web client, or a web server, etc... Args: obj: object, It's members can be nested Python objects which will be converted to dictionaries types: tuple-of-types...
def marshal_json( obj, types=JSON_TYPES, fields=None, ): return marshal_dict( obj, types, fields=fields, )
1,059,782
Parse OSM XML element for generic data. Args: element (etree.Element): Element to parse Returns: tuple: Generic OSM data for object instantiation
def _parse_flags(element): visible = True if element.get('visible') else False user = element.get('user') timestamp = element.get('timestamp') if timestamp: timestamp = utils.Timestamp.parse_isoformat(timestamp) tags = {} try: for tag in element['tag']: key = tag...
1,059,980
Create element independent flags output. Args: osm_obj (Node): Object with OSM-style metadata Returns: list: Human readable flags output
def _get_flags(osm_obj): flags = [] if osm_obj.visible: flags.append('visible') if osm_obj.user: flags.append('user: %s' % osm_obj.user) if osm_obj.timestamp: flags.append('timestamp: %s' % osm_obj.timestamp.isoformat()) if osm_obj.tags: flags.append(', '.join('%...
1,059,981
Initialise a new ``Node`` object. Args: ident (int): Unique identifier for the node latitude (float): Nodes's latitude longitude (float): Node's longitude visible (bool): Whether the node is visible user (str): User who logged the node tim...
def __init__(self, ident, latitude, longitude, visible=False, user=None, timestamp=None, tags=None): super(Node, self).__init__(latitude, longitude) self.ident = ident self.visible = visible self.user = user self.timestamp = timestamp self.tags ...
1,059,983
Parse a OSM node XML element. Args: element (etree.Element): XML Element to parse Returns: Node: Object representing parsed element
def parse_elem(element): ident = int(element.get('id')) latitude = element.get('lat') longitude = element.get('lon') flags = _parse_flags(element) return Node(ident, latitude, longitude, *flags)
1,059,986
Return the Luhn check digit for the given string. Args: base(str): string for which to calculate the check digit num_only(bool): allow only digits in `base` (default: False) allow_lower_case(bool): allow lower case letters in `base` (default: False) Returns: int: Lu...
def luhn(base, num_only=False, allow_lower_case=False): if num_only: alphabet = _ALPHABET[:10] else: alphabet = _ALPHABET if allow_lower_case: base = base.upper() try: pre_calc = (_PRE_CALC[alphabet.index(c)] for c in reversed(base)) cum = 0 parity = ...
1,060,293
Note that type_assert can't be used because it would create a circular dependency. Args: cls, type, The type that was attempted to unmarshal into diff: dict, The extra arguments that were passed to @cls
def __init__( self, cls, diff, ): msg = "\n".join([ "", # Newline to make the output cleaner "ctor: {}".format(cls), "extras: {}".format(diff) ]) Exception.__init__(self, msg) self.type = str( type(self)...
1,060,351
Note that type_assert can't be used because it would create a circular dependency. Args: cls, type-or-static-method, The type or constructor that was attempted to unmarshal into cls_args: list, The arguments of @cls kwargs: dict, The ...
def __init__( self, cls, cls_args, kwargs, ex, ): msg = "\n".join([ "", # Newline to make the output cleaner "module: {}".format(cls.__module__), "ctor: {}".format(cls), "ctor_args: {}".format(cls_args), ...
1,060,352
Initialise a new ``Placemark`` object. Args: latitude (float): Placemarks's latitude longitude (float): Placemark's longitude altitude (float): Placemark's altitude name (str): Name for placemark description (str): Placemark's description
def __init__(self, latitude, longitude, altitude=None, name=None, description=None): super(Placemark, self).__init__(latitude, longitude, altitude, name) if altitude: self.altitude = float(altitude) self.description = description
1,060,500
Makes sure the request has a valid authorization jwt before calling the wrapped function. It does this by checking the timestamp of the last jwt and if > 10 minutes have elapsed, it refreshes it's existing jwt from the server. Args: f: Function to wrap Returns: F...
def _auth(f): @wraps(f) def method(self, *args, **kwargs): if not self._auth_token or datetime.utcnow() >= self._last_auth + timedelta(minutes=10): # Need to get new jwt self.auth_refresh() return f(self, *args, **kwargs) return m...
1,060,536
Checks if the expected response code matches the actual response code. If they're not equal, raises the appropriate exception Args: response: (int) Actual status code expected: (int) Expected status code
def _check_response(response, expected): response_code = response.status_code if expected == response_code: return if response_code < 400: raise ex.UnexpectedResponseCodeException(response.text) elif response_code == 401: raise ex.Unauthori...
1,060,542
Take a string + filename, return a (tarinfo, stringbuf) tuple for insertion. Args: bytes (bstring): Bytestring representation of the filedata. filename (string): Filepath relative to tarfile root. Returns: tuple: (tarfile.TarInfo,io.BytesIO). This can be passed directly to T...
def bytestring_to_tar_tuple(filename, bytes): info = tarfile.TarInfo(filename) info.size = len(bytes) return info, BytesIO(bytes)
1,060,723
Iterate over voevent models / dbrows and write to bz'd tarball. Args: voevents (iterable): An iterable (e.g. list) of e.g. Voevent db-rows, with access to the 'ivorn' and 'xml' attributes. filepath (string): Path to the new tarball to create. Typically of form '/path/to/foo....
def write_tarball(voevents, filepath): tuple_gen = ( (v.ivorn, v.xml) for v in voevents) return write_tarball_from_ivorn_xml_tuples(tuple_gen, filepath)
1,060,724
Iterate over a series of ivorn / xml bstring tuples and write to bz'd tarball. Args: ivorn_xml_tuples (iterable): [(ivorn,xml)] An iterable (e.g. list) of tuples containing two entries - an ivorn string and an xml bytestring. filepath (string): Path to the new tarball to cre...
def write_tarball_from_ivorn_xml_tuples(ivorn_xml_tuples, filepath): out = tarfile.open(filepath, mode='w:bz2') logger.info("Writing packets to tarball at " + filepath) packet_count = 0 try: for (ivorn, xml) in ivorn_xml_tuples: out.addfile(*bytestring_to_tar_tuple( ...
1,060,725
Initialise a new ``Xearth`` object. Args: latitude (float): Location's latitude longitude (float): Location's longitude comment (str): Comment for location
def __init__(self, latitude, longitude, comment=None): super(Xearth, self).__init__(latitude, longitude) self.comment = comment
1,060,735
Swap the keys in a dictionary Args: d: dict, The dict to swap keys in cls: class, If the class has a staticly defined _marshal_key_swap and/or _unmarshal_key_swap dict, the keys will be swapped. Otherwise @d is ret...
def key_swap( d, cls, marshal ): dname = '_{}marshal_key_swap'.format("" if marshal else "un") if hasattr(cls, dname): key_swap = getattr(cls, dname) return { key_swap[k] if k in key_swap else k: v for k, v in d.items() } else: return ...
1,060,991
Return the matching score of 2 given lists of authors. Args: x_authors (list(dict)): first schema-compliant list of authors. y_authors (list(dict)): second schema-compliant list of authors. Returns: float: matching score of authors.
def compute_author_match_score(x_authors, y_authors): if not x_authors or not y_authors: return 0.0 matches = get_number_of_author_matches(x_authors, y_authors) max_length = max(len(x_authors), len(y_authors)) return matches / float(max_length)
1,061,103
Return the Jaccard similarity coefficient of 2 given sets. Args: x_set (set): first set. y_set (set): second set. Returns: float: Jaccard similarity coefficient.
def compute_jaccard_index(x_set, y_set): if not x_set or not y_set: return 0.0 intersection_cardinal = len(x_set & y_set) union_cardinal = len(x_set | y_set) return intersection_cardinal / float(union_cardinal)
1,061,104
Calculate a NMEA 0183 checksum for the given sentence. NMEA checksums are a simple XOR of all the characters in the sentence between the leading "$" symbol, and the "*" checksum separator. Args: sentence (str): NMEA 0183 formatted sentence
def calc_checksum(sentence): if sentence.startswith('$'): sentence = sentence[1:] sentence = sentence.split('*')[0] return reduce(xor, map(ord, sentence))
1,061,167
Parse a NMEA-formatted latitude pair. Args: latitude (str): Latitude in DDMM.MMMM hemisphere (str): North or South Returns: float: Decimal representation of latitude
def parse_latitude(latitude, hemisphere): latitude = int(latitude[:2]) + float(latitude[2:]) / 60 if hemisphere == 'S': latitude = -latitude elif not hemisphere == 'N': raise ValueError('Incorrect North/South value %r' % hemisphere) return latitude
1,061,168
Parse a NMEA-formatted longitude pair. Args: longitude (str): Longitude in DDDMM.MMMM hemisphere (str): East or West Returns: float: Decimal representation of longitude
def parse_longitude(longitude, hemisphere): longitude = int(longitude[:3]) + float(longitude[3:]) / 60 if hemisphere == 'W': longitude = -longitude elif not hemisphere == 'E': raise ValueError('Incorrect North/South value %r' % hemisphere) return longitude
1,061,169
Initialise a new ``LoranPosition`` object. Args: latitude (float): Fix's latitude longitude (float): Fix's longitude time (datetime.time): Time the fix was taken status (bool): Whether the data is active mode (str): Type of reading
def __init__(self, latitude, longitude, time, status, mode=None): super(LoranPosition, self).__init__(latitude, longitude) self.time = time self.status = status self.mode = mode
1,061,170
Pretty printed position string. Args: talker (str): Talker ID Returns: str: Human readable string representation of ``Position`` object
def __str__(self, talker='GP'): if not len(talker) == 2: raise ValueError('Talker ID must be two characters %r' % talker) data = ['%sGLL' % talker] data.extend(nmea_latitude(self.latitude)) data.extend(nmea_longitude(self.longitude)) data.append('%s.%02i' % (...
1,061,171
Parse position data elements. Args: elements (list): Data values for fix Returns: Fix: Fix object representing data
def parse_elements(elements): if not len(elements) in (6, 7): raise ValueError('Invalid GLL position data') # Latitude and longitude are checked for validity during Fix # instantiation latitude = parse_latitude(elements[0], elements[1]) longitude = parse_long...
1,061,172
Initialise a new ``Position`` object. Args: time (datetime.time): Time the fix was taken status (bool): Whether the data is active latitude (float): Fix's latitude longitude (float): Fix's longitude speed (float): Ground speed track (float...
def __init__(self, time, status, latitude, longitude, speed, track, date, variation, mode=None): super(Position, self).__init__(latitude, longitude) self.time = time self.status = status self.speed = speed self.track = track self.date = date ...
1,061,173
Parse position data elements. Args: elements (list): Data values for position Returns: Position: Position object representing data
def parse_elements(elements): if not len(elements) in (11, 12): raise ValueError('Invalid RMC position data') time = datetime.time(*[int(elements[0][i:i + 2]) for i in range(0, 6, 2)]) active = True if elements[1] == 'A' else False # La...
1,061,175
Parse essential fix's data elements. Args: elements (list): Data values for fix Returns: Fix: Fix object representing data
def parse_elements(elements): if not len(elements) in (14, 15): raise ValueError('Invalid GGA fix data') time = datetime.time(*[int(elements[0][i:i + 2]) for i in range(0, 6, 2)]) # Latitude and longitude are checked for validity during Fix ...
1,061,178
Initialise a new ``Waypoint`` object. Args: latitude (float): Waypoint's latitude longitude (float): Waypoint's longitude name (str): Comment for waypoint
def __init__(self, latitude, longitude, name): super(Waypoint, self).__init__(latitude, longitude) self.name = name.upper()
1,061,179
Parse waypoint data elements. Args: elements (list): Data values for fix Returns: nmea.Waypoint: Object representing data
def parse_elements(elements): if not len(elements) == 5: raise ValueError('Invalid WPL waypoint data') # Latitude and longitude are checked for validity during Fix # instantiation latitude = parse_latitude(elements[0], elements[1]) longitude = parse_longitude...
1,061,181
Initialise a new ``Trigpoint`` object. Args: latitude (float): Location's latitude longitude (float): Location's longitude altitude (float): Location's altitude name (str): Name for location identity (int): Database identifier, if known
def __init__(self, latitude, longitude, altitude, name=None, identity=None): super(Trigpoint, self).__init__(latitude, longitude) self.altitude = altitude self.name = name self.identity = identity
1,061,325
Extended pretty printing for location strings. Args: format_spec (str): Coordinate formatting system to use Returns: str: Human readable string representation of ``Trigpoint`` object Raises: ValueError: Unknown value for ``format_spec``
def __format__(self, format_spec='dms'): location = [super(Trigpoint, self).__format__(format_spec), ] if self.altitude: location.append('alt %im' % self.altitude) if self.name: return '%s (%s)' % (self.name, ' '.join(location)) else: return ...
1,061,326
Returns the gdal.Driver for a path or None based on the file extension. Arguments: path -- file path as str with a GDAL supported file extension
def driver_for_path(path, drivers=None): ext = (os.path.splitext(path)[1][1:] or path).lower() drivers = drivers or ImageDriver.registry if ext else {} for name, meta in drivers.items(): if ext == meta.get('DMD_EXTENSION', '').lower(): return ImageDriver(name) return None
1,061,421
Converts an OGR polygon to a 2D NumPy array. Arguments: geom -- OGR Geometry size -- array size in pixels as a tuple of (width, height) affine -- AffineTransform
def geom_to_array(geom, size, affine): driver = ImageDriver('MEM') rast = driver.raster(driver.ShortName, size) rast.affine = affine rast.sref = geom.GetSpatialReference() with MemoryLayer.from_records([(1, geom)]) as ml: status = gdal.RasterizeLayer(rast.ds, (1,), ml.layer, burn_values...
1,061,422
Returns a Raster from layer features. Arguments: layer -- Layer to rasterize rast -- Raster with target affine, size, and sref
def rasterize(layer, rast): driver = ImageDriver('MEM') r2 = driver.raster(driver.ShortName, rast.size) r2.affine = rast.affine sref = rast.sref if not sref.srid: sref = SpatialReference(4326) r2.sref = sref ml = MemoryLayer(sref, layer.GetGeomType()) ml.load(layer) stat...
1,061,423
Returns a Raster instance. Arguments: path -- local or remote path as str or file-like object Keyword args: mode -- gdal constant representing access mode
def open(path, mode=gdalconst.GA_ReadOnly): path = getattr(path, 'name', path) try: return Raster(vsiprefix(path), mode) except AttributeError: try: imgdata = path.read() except AttributeError: raise TypeError('Not a file-like object providing read()') ...
1,061,424
Returns an in-memory raster initialized from a pixel buffer. Arguments: data -- byte buffer of raw pixel data size -- two or three-tuple of (xsize, ysize, bandcount) bandtype -- band data type
def frombytes(data, size, bandtype=gdal.GDT_Byte): r = ImageDriver('MEM').raster('', size, bandtype) r.frombytes(data) return r
1,061,425
Generally this will be initialized from a six-element tuple in the format returned by gdal.Dataset.GetGeoTransform(). Arguments: xorigin -- top left corner x coordinate xscale -- x scaling rx -- x rotation yorigin -- top left corner y coordinate ry -- y rotation ...
def __init__(self, xorigin, xscale, rx, yorigin, ry, yscale): # Origin coordinate in projected space. self.origin = (xorigin, yorigin) self.scale = (xscale, yscale) # Rotation in X and Y directions. (0, 0) is north up. self.rotation = (rx, ry) # Avoid repeated ca...
1,061,426
Convert image pixel/line coordinates to georeferenced x/y, return a generator of two-tuples. Arguments: coords -- input coordinates as iterable containing two-tuples/lists such as ((0, 0), (10, 10))
def project(self, coords): geotransform = self.tuple for x, y in coords: geo_x = geotransform[0] + geotransform[1] * x + geotransform[2] * y geo_y = geotransform[3] + geotransform[4] * x + geotransform[5] * y # Move the coordinate to the center of the pixel. ...
1,061,427
Transform from projection coordinates (Xp,Yp) space to pixel/line (P,L) raster space, based on the provided geotransformation. Arguments: coords -- input coordinates as iterable containing two-tuples/lists such as ((-120, 38), (-121, 39))
def transform(self, coords): # Use local vars for better performance here. origin_x, origin_y = self.origin sx, sy = self.scale return [(int(math.floor((x - origin_x) / sx)), int(math.floor((y - origin_y) / sy))) for x, y in coords]
1,061,428
Returns a copied Raster instance. Arguments: source -- the source Raster instance or filepath as str dest -- destination filepath as str
def copy(self, source, dest): if not self.copyable: raise IOError('Driver does not support raster copying') if not isinstance(source, Raster): source = Raster(source) should_close = True else: should_close = False if source.name ==...
1,061,431
Returns a new Raster instance. gdal.Driver.Create() does not support all formats. Arguments: path -- file object or path as str size -- two or three-tuple of (xsize, ysize, bandcount) bandtype -- GDAL pixel data type
def raster(self, path, size, bandtype=gdal.GDT_Byte): path = getattr(path, 'name', path) try: is_multiband = len(size) > 2 nx, ny, nbands = size if is_multiband else size + (1,) except (TypeError, ValueError) as exc: exc.args = ('Size must be 2 or 3-i...
1,061,435
Initialize a Raster data set from a path or file Arguments: path -- path as str, file object, or gdal.Dataset Keyword args: mode -- gdal constant representing access mode
def __init__(self, path, mode=gdalconst.GA_ReadOnly): if path and not isinstance(path, gdal.Dataset): # Get the name if we have a file-like object. dataset = gdal.Open(getattr(path, 'name', path), mode) else: dataset = path if not dataset: ...
1,061,436
Sets the affine transformation. Intercepts the gdal.Dataset call to ensure use as a property setter. Arguments: affine -- AffineTransform or six-tuple of geotransformation values
def SetGeoTransform(self, affine): if isinstance(affine, collections.Sequence): affine = AffineTransform(*affine) self._affine = affine self.ds.SetGeoTransform(affine)
1,061,440
Returns a 4-tuple pixel window (x_offset, y_offset, x_size, y_size). Arguments: envelope -- coordinate extent tuple or Envelope
def get_offset(self, envelope): if isinstance(envelope, collections.Sequence): envelope = Envelope(envelope) if not (self.envelope.contains(envelope) or self.envelope.intersects(envelope)): raise ValueError('Envelope does not intersect with this extent') ...
1,061,444
Returns a new instance resampled to provided size. Arguments: size -- tuple of x,y image dimensions
def resample(self, size, interpolation=gdalconst.GRA_NearestNeighbour): # Find the scaling factor for pixel size. factors = (size[0] / float(self.RasterXSize), size[1] / float(self.RasterYSize)) affine = AffineTransform(*tuple(self.affine)) affine.scale = (aff...
1,061,452
Save this instance to the path and format provided. Arguments: to -- output path as str, file, or MemFileIO instance Keyword args: driver -- GDAL driver name as string or ImageDriver
def save(self, to, driver=None): path = getattr(to, 'name', to) if not driver and hasattr(path, 'encode'): driver = driver_for_path(path, self.driver.filter_copyable()) elif hasattr(driver, 'encode'): driver = ImageDriver(driver) if driver is None or not ...
1,061,453
Sets the spatial reference. Intercepts the gdal.Dataset call to ensure use as a property setter. Arguments: sref -- SpatialReference or any format supported by the constructor
def SetProjection(self, sref): if not hasattr(sref, 'ExportToWkt'): sref = SpatialReference(sref) self._sref = sref self.ds.SetProjection(sref.ExportToWkt())
1,061,454
Returns a new reprojected instance. Arguments: to_sref -- spatial reference as a proj4 or wkt string, or a SpatialReference Keyword args: dest -- filepath as str interpolation -- GDAL interpolation type
def warp(self, to_sref, dest=None, interpolation=gdalconst.GRA_NearestNeighbour): if not hasattr(to_sref, 'ExportToWkt'): to_sref = SpatialReference(to_sref) dest_wkt = to_sref.ExportToWkt() dtype = self[0].DataType err_thresh = 0.125 # Determine new values f...
1,061,456
Generate name variations for a given name. Args: name (six.text_type): The name whose variations are to be generated. Returns: list: All the name variations for the given name. Notes: Uses `unidecode` for doing unicode characters transliteration to ASCII ones. This was chosen so t...
def generate_name_variations(name): def _update_name_variations_with_product(set_a, set_b): name_variations.update([ unidecode((names_variation[0] + separator + names_variation[1]).strip(''.join(_LASTNAME_NON_LASTNAME_SEPARATORS))).lower() ...
1,061,685
Create a ParsedName instance. Args: name (Union[str, HumanName]): The name to be parsed (must be non empty nor None). constants (:class:`nameparser.config.Constants`): Configuration for `HumanName` instantiation. (Can be None, if provided it overwrites the default one ge...
def __init__(self, name, constants=None): if not constants: constants = ParsedName.constants if isinstance(name, HumanName): self._parsed_name = name else: self._parsed_name = HumanName(name, constants=constants) self._parsed_name.capital...
1,061,686
Filter down obj based on marks, presuming keys should be kept/deleted. Args: obj: The object to be filtered. Filtering is done in-place. marks: An object mapping id(obj) --> {DELETE,KEEP} These values apply to the entire subtree, unless inverted. presumption: The default acti...
def filter_object(obj, marks, presumption=DELETE): if isinstance(obj, list): keys = reversed(range(0, len(obj))) else: keys = obj.keys() for k in keys: v = obj[k] m = marks.get(id(v), UNSPECIFIED) if m == DELETE: del obj[k] # an explicit deletion is...
1,061,823
Return the earliest among the schema-compliant dates. This is a convenience wrapper around :ref:`PartialDate`, which should be used instead if more features are needed. Args: dates(list): List of dates from which oldest/earliest one will be returned full_date(bool): Adds month and/or day a...
def earliest_date(dates, full_date=False):
    """Return the earliest among the schema-compliant dates.

    This is a convenience wrapper around :ref:`PartialDate`, which should
    be used instead if more features are needed.

    Args:
        dates (list): dates from which the oldest/earliest one is returned.
        full_date (bool): pad a missing month and/or day with 1 so that the
            result is always a complete date.

    Returns:
        str: the earliest date, serialized via ``PartialDate.dumps``.
    """
    earliest = min(PartialDate.loads(date) for date in dates)
    if full_date:
        # Fill in missing components so the dumped date is complete.
        earliest.month = earliest.month or 1
        earliest.day = earliest.day or 1
    return earliest.dumps()
1,062,172
Adds a scheme to a url if not present. Args: url (string): a url, assumed to start with netloc default_scheme (string): a scheme to be added Returns: string: URL with a scheme
def ensure_scheme(url, default_scheme='http'): parsed = urlsplit(url, scheme=default_scheme) if not parsed.netloc: parsed = SplitResult( scheme=parsed.scheme, netloc=parsed.path, path='', query=parsed.query, fragment=parsed.fragment ...
1,062,186
Return the __init__ args (minus 'self') for @cls Args: cls: class, instance or callable Returns: list of str, the arguments minus 'self'
def init_args(cls): # This looks insanely goofy, but seems to literally be the # only thing that actually works. Your obvious ways to # accomplish this task do not apply here. try: # Assume it's a factory function, static method, or other callable argspec = getargspec(cls) exce...
1,062,210
Convert metadata from WA-KAT to Dublin core dictionary like structure, which may be easily converted to xml using :mod:`xmltodict` module. Args: data (dict): Nested WA-KAT data. See tests for example. Returns: dict: Dict in dublin core format.
def _convert_metadata(data): def compose(val, arguments=None): if val is None: return None if not arguments: return val arguments["#text"] = val return arguments conspect = data.get("conspect", {}) author_name = data.get("author", {})....
1,062,417
Convert WA-KAT `data` to Dublin core XML. Args: data (dict): Nested WA-KAT data. See tests for example. Returns: unicode: XML with dublin core.
def to_dc(data): root = odict[ "metadata": odict[ "@xmlns:xsi": "http://www.w3.org/2001/XMLSchema-instance", "@xmlns:dc": "http://purl.org/dc/elements/1.1/", "@xmlns:dcterms": "http://purl.org/dc/terms/", "@xmlns:rdf": "http://www.w3.org/1999/02/22-rdf-sy...
1,062,419
Initialization of instances: Args: section (str): invalid section name. Attributes: section (str): invalid section name.
def __init__(self, section):
    """Initialize the exception for an invalid section name.

    Args:
        section (str): invalid section name.

    Attributes:
        section (str): invalid section name.
    """
    message = 'invalid section name: {}'.format(section)
    super().__init__(message)
    self.section = section
1,062,513
Initialization of instances: Args: option (str): invalid option name. Attributes: option (str): invalid option name.
def __init__(self, option):
    """Initialize the exception for an invalid option name.

    Args:
        option (str): invalid option name.

    Attributes:
        option (str): invalid option name.
    """
    message = 'invalid option name: {}'.format(option)
    super().__init__(message)
    self.option = option
1,062,514
Gets escape-codes for flag combinations. Arguments: combination (int): Either a single integer-convertible flag or an OR'd flag-combination. Returns: A semi-colon-delimited string of appropriate escape sequences. Raises: errors.FlagError if the combination is out-of-range.
def codify(combination):
    """Get escape-codes for flag combinations.

    Arguments:
        combination (int): Either a single integer-convertible flag or an
            OR'd flag-combination.

    Returns:
        A semi-colon-delimited string of appropriate escape sequences.

    Raises:
        errors.FlagError if the combination is out-of-range.
    """
    # Range-check only applies to plain integers; flag objects pass through.
    if isinstance(combination, int) and not 0 <= combination < LIMIT:
        raise errors.FlagError("Out-of-range flag-combination!")
    codes = [
        str(flag)
        for enum in (Style, Color, Fill)
        for flag in enum
        if combination & flag
    ]
    return ";".join(codes)
1,062,703
Look into the database and return :class:`RequestInfo` if the `url` was already analyzed, or create and return new instance, if not. If the `new` is set to True, always create new instance. Args: url (str): URL of the analyzed resource. new (bool, default False): Force new instance? R...
def get_cached_or_new(url, new=False): garbage_collection() old_req = DATABASE.get(url) if old_req and not new: return old_req if not (url.startswith("http://") or url.startswith("https://")): raise ValueError("Invalid URL `%s`!" % url) req = RequestInfo(url=url) DATABAS...
1,062,990
Collect and remove all :class:`.RequestInfo` objects older than `time_limit` (in seconds). Args: time_limit (float, default YEAR / 12): Collect objects older than this limit.
def garbage_collection(time_limit=YEAR/12.0):
    """Collect and remove all :class:`.RequestInfo` objects older than
    `time_limit` (in seconds).

    Args:
        time_limit (float, default YEAR / 12): Collect objects older than
            this limit.
    """
    now = time.time()
    # Materialize the expired entries into a list first: deleting from
    # DATABASE while lazily iterating a generator over DATABASE.values()
    # raises RuntimeError ("dictionary changed size during iteration") on
    # Python 3 dicts.  `now` is also sampled once for a consistent cutoff.
    expired_request_infos = [
        ri for ri in DATABASE.values()
        if ri.creation_ts + time_limit <= now
    ]
    for ri in expired_request_infos:
        del DATABASE[ri.url]
1,062,991
Go over all attributes in `model` and add :class:`SourceString` to them. Args: model (obj): :class:`Model` instance. Returns: obj: :class:`Model` instance with :class:`SourceString` descriptors.
def _add_source(model): ignored_keys = {"author_tags", "original_xml", "additional_info"} # convert all values to source strings source = "Aleph" for key, val in model.get_mapping().iteritems(): if key in ignored_keys: continue if type(val) in [list, tuple]: ...
1,063,054
Query aleph for records with given `issn`. The lookup is directed to the NTK's Aleph. Args: issn (str): ISSN of the periodical. Returns: obj: :class:`Model` instances for each record.
def by_issn(issn): # monkeypatched to allow search in NTK's Aleph old_url = aleph.ALEPH_URL aleph.ALEPH_URL = NTK_ALEPH_URL records = aleph.getISSNsXML(issn, base="STK02") aleph.ALEPH_URL = old_url # process all records for record in records: marc = MARCXMLRecord(record) ...
1,063,055
Parse author from `marc` data. Args: marc (obj): :class:`.MARCXMLRecord` instance. See module :mod:`.marcxml_parser` for details. Returns: obj: :class:`Author`.
def parse_author(cls, marc): name = None code = None linked_forms = None is_corporation = None record = None # parse informations from the record if marc["100a"]: # persons name = _first_or_none(marc["100a"]) code = _first_or_non...
1,063,056
Look for author in NK Aleph authority base by `name`. Args: name (str): Author's name. Yields: obj: :class:`Author` instances.
def search_by_name(cls, name):
    """Look for author in NK Aleph authority base by `name`.

    Args:
        name (str): Author's name.

    Yields:
        obj: :class:`Author` instances.
    """
    query = aleph.searchInAleph("aut", name, False, "wau")
    for record in aleph.downloadRecords(query):
        marc_record = MARCXMLRecord(record)
        author = cls.parse_author(marc_record)
        if author:
            yield author
1,063,057
Use this function to automatically filter all the entries defined for a given rule. Params: conflicts_list(List[Conflict]): the list of conflicts to filter. fields(List[str]): fields to filter out, using an accessor syntax of the form ``field.subfield.subsubfield``. Return: ...
def filter_conflicts(conflicts_list, fields):
    """Automatically filter all the entries defined for a given rule.

    Params:
        conflicts_list (List[Conflict]): the list of conflicts to filter.
        fields (List[str]): fields to filter out, using an accessor syntax
            of the form ``field.subfield.subsubfield``.

    Return:
        List[Conflict]: the remaining conflicts after every field filter
        has been applied in turn.
    """
    filtered = conflicts_list
    for field_path in fields:
        filtered = filter_conflicts_by_path(filtered, field_path)
    return filtered
1,063,109
Convert MRC data format to MARC XML. Args: mrc (str): MRC as string. Returns: str: XML with MARC.
def mrc_to_marc(mrc): # ignore blank lines lines = [ line for line in mrc.splitlines() if line.strip() ] def split_to_parts(lines): for line in lines: first_part, second_part = line.split(" L ", 1) yield line, first_part, second_part.lstrip(...
1,063,190
Convert `dicts` under `code` to MRC. This is used to compose some of the data from user's input to MRC template, which is then converted to MARC XML / Dublin core. Args: code (str): Code of the aleph field, which should be used. dicts (dict): Dict with aleph fields (i1/i2, a..z, 0..9) and ...
def dicts_to_mrc(code, dicts): def _dict_to_mrc(code, d): i1 = d.get("i1", d.get("ind1")) i2 = d.get("i2", d.get("ind2")) one_chars = [k for k in d.keys() if len(k) == 1] out = "%s%s%s L " % (code, i1, i2) for key in resorted(one_chars): for item in d[key]: ...
1,063,191
Convert `val` to MRC, whether it is dict or string. Args: code (str): Code of the field. val (str or dict): Value of the field. Returns: list: MRC lines for output template.
def item_to_mrc(code, val):
    """Convert `val` to MRC, whether it is dict or string.

    Args:
        code (str): Code of the field.
        val (str or dict): Value of the field.

    Returns:
        list: MRC lines for output template.
    """
    # Plain strings take the single-value path.
    if isinstance(val, basestring):
        return [val_to_mrc(code, val)]
    # A lone dict is wrapped so dicts_to_mrc always receives a sequence.
    items = [val] if isinstance(val, dict) else val
    return dicts_to_mrc(code, items)
1,063,193
Create a material stress table. Args: temperatures: A sequence of temperatures. materials: A mapping of material names to sequences of stress values which correspond to the temperatures.
def __init__(self, temperatures, materials):
    """Create a material stress table.

    Args:
        temperatures: A sequence of temperatures.
        materials: A mapping of material names to sequences of stress
            values which correspond to the temperatures.
    """
    # Temperatures become the column keys; each material maps to one row.
    self._table = Table(column_keys=temperatures, rows_mapping=materials)
1,063,605
Filter each tuple according to visibility. Args: key_tuples: A sequence of tuples of equal length (i.e. rectangular) visibilities: A sequence of booleans equal in length to the tuples contained in key_tuples. Returns: A sequence equal in length to key_tuples where the items are tuples ...
def strip_hidden(key_tuples, visibilities): result = [] for key_tuple in key_tuples: if len(key_tuple) != len(visibilities): raise ValueError( "length of key tuple {} is not equal to length of visibilities {}".format( key_tuple, visibilities ...
1,063,812