code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def check_player_collision(self): player_tiles = r.TileMapManager.active_map.grab_collisions(self.char.coords) enemy_tiles = r.TileMapManager.active_map.grab_collisions(self.coords) for ptile in player_tiles: for etile in enemy_tiles: if r.TileMapManager.active_map.pi...
Check to see if we are colliding with the player.
def _hijack_target(self): if self._target.is_class_or_module(): setattr(self._target.obj, self._method_name, self) elif self._attr.kind == 'property': proxy_property = ProxyProperty( double_name(self._method_name), self._original_method, ...
Replaces the target method on the target object with the proxy method.
def scoped_session_decorator(func):
    """Run the decorated worker inside a scoped DB session, with debug logging."""
    @wraps(func)
    def inner(*args, **kwargs):
        with sessions_scope(session):
            logger.debug("Running worker %s in scoped DB session", func.__name__)
            return func(*args, **kwargs)
    return inner
Manage contexts and add debugging to db sessions.
def gps_message_arrived(self, m): gps_week = getattr(m, 'Week', None) gps_timems = getattr(m, 'TimeMS', None) if gps_week is None: gps_week = getattr(m, 'GWk', None) gps_timems = getattr(m, 'GMS', None) if gps_week is None: if getattr(m, 'GPSTi...
adjust time base from GPS message
def benchmark_command(cmd, progress): full_cmd = '/usr/bin/time --format="%U %M" {0}'.format(cmd) print '{0:6.2f}% Running {1}'.format(100.0 * progress, full_cmd) (_, err) = subprocess.Popen( ['/bin/sh', '-c', full_cmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=s...
Benchmark one command execution
def _unpaginate(self, domain, initial_op, *, on_properties): request = initial_op(domain) while request is not None: result = self._retry_on_reset(request, request.execute) for on_property in on_properties: items = result.get(on_property) if items ...
Iterate through the request pages until all items have been processed.
def failed_extra_capabilities(self):
    """Return the capabilities from `extra_capability_checks` whose check functions fail."""
    return [
        capability
        for capability, checker_name in self.extra_capability_checks.items()
        if not getattr(self, checker_name)()
    ]
Check to see if instance passes its `extra_capability_checks`.
def interpolation_change_cb(self, setting, value):
    """Apply a new interpolation value to the canvas image and trigger a full redraw."""
    image = self.get_canvas_image()
    image.interpolation = value
    image.reset_optimize()
    self.redraw(whence=0)
Handle callback related to changes in interpolation.
def _sim_texture(r1, r2):
    """Sum of the histogram intersection between the texture histograms of r1 and r2."""
    total = 0
    for a, b in zip(r1["hist_t"], r2["hist_t"]):
        total += min(a, b)
    return total
Calculate the sum of the histogram intersection of the texture histograms.
def check_nonstandard_section_name(self): std_sections = ['.text', '.bss', '.rdata', '.data', '.rsrc', '.edata', '.idata', '.pdata', '.debug', '.reloc', '.stab', '.stabstr', '.tls', '.crt', '.gnu_deb', '.eh_fram', '.exptbl', '.rodata'] for i in range(200):...
Check for a non-standard section name.
def inputtemplate(self, template_id):
    """Return the input template with the given ID; raise if no profile defines it."""
    for profile in self.profiles:
        match = next((t for t in profile.input if t.id == template_id), None)
        if match is not None:
            return match
    raise Exception("No such input template: " + repr(template_id))
Return the inputtemplate with the specified ID. This is used to resolve an inputtemplate ID to an InputTemplate object instance.
def create_graph():
    """Load the serialized GraphDef from FLAGS.model_dir and import it into the default graph."""
    graph_path = os.path.join(FLAGS.model_dir, 'classify_image_graph_def.pb')
    with tf.gfile.FastGFile(graph_path, 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        tf.import_graph_def(graph_def, name='')
Creates a graph from saved GraphDef file and returns a saver.
def write(self, data):
    """Write one molecule record (molblock, metadata fields, record terminator) to the file."""
    out = self._file
    out.write(self._format_mol(*self._convert_structure(data)))
    out.write('M END\n')
    for key, value in data.meta.items():
        out.write(f'> <{key}>\n{value}\n')
    out.write('$$$$\n')
write single molecule into file
def link(source_path): if not os.path.isfile(source_path): raise SourceNotFound(source_path) with open(source_path, 'r') as f: content = f.read() block_map = BlockMap() all_block = convert_lines_to_block( content.splitlines(), block_map, LinkStack(source_path), source_path) ...
Links the content found at source_path and represents a Block that represents the content.
def _ExportFileContent(self, aff4_object, result): if self.options.export_files_contents: try: result.content = aff4_object.Read(self.MAX_CONTENT_SIZE) result.content_sha256 = hashlib.sha256(result.content).hexdigest() except (IOError, AttributeError) as e: logging.warning("Can't...
Add file content from aff4_object to result.
def polish_model(model):
    """Validate an ONNX model, strip doc strings, infer shapes, optimize, and re-validate."""
    onnx.checker.check_model(model)
    onnx.helper.strip_doc_string(model)
    inferred = onnx.shape_inference.infer_shapes(model)
    optimized = onnx.optimizer.optimize(inferred)
    onnx.checker.check_model(optimized)
    return optimized
This function combines several useful utility functions together.
def _get_insert_commands(self, rows, cols): insert_queries = {} for table in tqdm(list(rows.keys()), total=len(list(rows.keys())), desc='Getting insert rows queries'): insert_queries[table] = {} _rows = rows.pop(table) _cols = cols.pop(table) if len(_rows)...
Retrieve dictionary of insert statements to be executed.
def gdbgui(): interpreter = "lldb" if app.config["LLDB"] else "gdb" gdbpid = request.args.get("gdbpid", 0) initial_gdb_user_command = request.args.get("initial_gdb_user_command", "") add_csrf_token_to_session() THEMES = ["monokai", "light"] initial_data = { "csrf_token": session["csrf_to...
Render the main gdbgui interface
def start_write(self, frame, node=None):
    """Emit either a `yield ` prefix or open a buffer `.append(` call for the frame."""
    prefix = 'yield ' if frame.buffer is None else '%s.append(' % frame.buffer
    self.writeline(prefix, node)
Yield or write into the frame buffer.
def delete(self, id):
    """Delete the tenant identified by *id* and display the API response."""
    response = self.client.accounts.delete(id)
    self.display(response)
Delete a tenant by id.
def action(self, *action_names): def action_wrapper(decorated): @functools.wraps(decorated) def wrapper(argv): kwargs = dict(arg.split('=') for arg in argv) try: return decorated(**kwargs) except TypeError as e: ...
Decorator that registers the wrapped functions under the given action names.
def datetime_at_loc(self, loc):
    """Return the timestamp at integer location *loc* as a pandas Timestamp."""
    java_dt = self._jdt_index.dateTimeAtLoc(loc)
    return pd.Timestamp(self._zdt_to_nanos(java_dt))
Returns the timestamp at the given integer location as a Pandas Timestamp.
def _check_patch_type_mismatch(self, patched_item, existing_item): def raise_mismatch_error(patched_item, existing_item, data_type_name): error_msg = ('Type mismatch. Patch {} corresponds to pre-existing ' 'data_type {} ({}:{}) that has type other than {}.') raise Invalid...
Enforces that each patch has a corresponding, already-defined data type.
def chunker(ensemble_list, ncpu):
    """Yield successive chunks of *ensemble_list*, roughly one batch per CPU.

    Fix: when len(ensemble_list) < ncpu the integer chunk length was 0 and
    range() raised "arg 3 must not be zero"; clamp the length to at least 1.

    :param ensemble_list: sequence to split
    :param ncpu: desired number of chunks (must be > 0)
    """
    length = max(1, len(ensemble_list) // ncpu)
    for start in range(0, len(ensemble_list), length):
        yield ensemble_list[start:start + length]
Generate successive chunks of ensemble_list.
def _post_init(self): self._led_type_code = self.manager.get_typecode('LED') self.device_path = os.path.realpath(os.path.join(self.path, 'device')) if '::' in self.name: chardev, code_name = self.name.split('::') if code_name in self.manager.codes['LED_type_codes']: ...
Set up the device path and type code.
def restore_type(self, type): mapping = { ARRAY: 'array', sa.Boolean: 'boolean', sa.Date: 'date', sa.DateTime: 'datetime', sa.Float: 'number', sa.Integer: 'integer', JSONB: 'object', JSON: 'object', sa.Nu...
Restore type from SQL
def ls(dataset_uri):
    """Print the overlay names of the dataset at *dataset_uri*, one per line."""
    dataset = dtoolcore.DataSet.from_uri(dataset_uri)
    for name in dataset.list_overlay_names():
        click.secho(name)
List the overlays in the dataset.
def backend_to_retrieve(self, namespace, stream): if namespace not in self.namespaces: raise NamespaceMissing('`{}` namespace is not configured' .format(namespace)) stream_prefix = self.get_matching_prefix(namespace, stream) read_backend = self.prefix_read_backends[namespa...
Return backend enabled for reading for `stream`.
def find_postaggs_for(postagg_names, metrics_dict): postagg_metrics = [ metrics_dict[name] for name in postagg_names if metrics_dict[name].metric_type == POST_AGG_TYPE ] for postagg in postagg_metrics: postagg_names.remove(postagg.metric_name) return p...
Return a list of metrics that are post aggregations
def list_custom_images(call=None): if call != 'function': raise SaltCloudSystemExit( 'The list_vlans function must be called with -f or --function.' ) ret = {} conn = get_conn('SoftLayer_Account') response = conn.getBlockDeviceTemplateGroups() for image in response: ...
Return a dict of all custom VM images on the cloud provider.
def extract_error_message(cls, e):
    """Best-effort human-readable message for exception *e*.

    Prefers e.args[1] when a second positional arg exists; falls back to str(e).
    """
    try:
        if isinstance(e.args, tuple) and len(e.args) > 1:
            return e.args[1]
    except Exception:
        # args may be missing or odd on exotic exception objects; ignore
        pass
    return str(e)
Extract error message for queries
def release(self):
    """Return this resource to its pool, or delete it from the pool if it has errored."""
    handler = self.pool.delete_resource if self.errored else self.pool.release
    handler(self)
Releases this resource back to the pool it came from.
def _diff_bounds(bounds, coord):
    """Grid spacing: upper minus lower bound for each cell."""
    try:
        # 2-D (cell, bound) layout: subtract the bound columns directly
        return bounds[:, 1] - bounds[:, 0]
    except IndexError:
        # 1-D bounds: difference along axis 0, wrapped on coord's dims/coords
        spacing = np.diff(bounds, axis=0)
        return xr.DataArray(spacing, dims=coord.dims, coords=coord.coords)
Get grid spacing by subtracting upper and lower bounds.
def position(self):
    """1-based position of this post within its topic, ordered by creation time."""
    earlier_or_self = Q(created__lt=self.created) | Q(id=self.id)
    return self.topic.posts.filter(earlier_or_self).count()
Returns an integer corresponding to the position of the post in the topic.
def _populate(self, soup):
    """Populate the list from *soup*, dispatching on the first data row's width."""
    tables = soup.select('table[rules=all]')
    if not tables:
        return
    rows = tables[0].select('tr')[1:]  # skip the header row
    handler = (self._populate_small_table if len(rows[0]) == 5
               else self._populate_large_table)
    handler(rows)
Populate the list, assuming ``soup`` is a ``BeautifulSoup`` object.
def open_organisation_logo_path(self): file_name, __ = QFileDialog.getOpenFileName( self, self.tr('Set organisation logo file'), self.organisation_logo_path_line_edit.text(), self.tr( 'Portable Network Graphics files (*.png *.PNG);;' ...
Open File dialog to choose the organisation logo path.
def _get_solar_flux(self, band):
    """Per-pixel solar flux for *band*, looked up through the detector index."""
    flux_per_detector = self.cal['solar_flux'].isel(bands=band).values
    detector_idx = self.cal['detector_index'].fillna(0).astype(int)
    return da.map_blocks(self._get_items, detector_idx.data,
                         solar_flux=flux_per_detector,
                         dtype=flux_per_detector.dtype)
Get the solar flux for the band.
def register_warning_code(code, exception_type, domain='core'):
    """Register a new warning code.

    Maps *code* to (exception_type, domain) in Logger's registry and adds the
    code to that domain's code set.
    NOTE(review): assumes Logger._domain_codes[domain] already yields a set
    (e.g. a defaultdict(set)) — confirm against Logger's definition.
    """
    Logger._warning_code_to_exception[code] = (exception_type, domain)
    Logger._domain_codes[domain].add(code)
Register a new warning code
def could_collide_ver(self, vpos, adsb_pkt): if adsb_pkt.emitter_type < 100 or adsb_pkt.emitter_type > 104: return True margin = self.asterix_settings.filter_dist_z vtype = adsb_pkt.emitter_type - 100 valt = vpos.alt aalt1 = adsb_pkt.altitude * 0.001 if vtype ...
return true if vehicle could come within filter_dist_z meters of adsb vehicle in timeout seconds
def _save_json(self, filename):
    """Persist self._sensors as indented JSON, flushing and fsyncing to disk."""
    with open(filename, 'w') as fp:
        json.dump(self._sensors, fp, cls=MySensorsJSONEncoder, indent=4)
        fp.flush()
        # fsync so the sensor state survives a crash right after saving
        os.fsync(fp.fileno())
Save sensors to json file.
def close(self):
    """Close the transport of the current connection, if any, and drop the reference."""
    conn = self._local.conn
    if conn:
        conn.transport.close()
        self._local.conn = None
If a connection is open, close its transport.
def _send_periodic_internal(self, msg, period, duration=None): if self._scheduler is None: self._scheduler = HANDLE() _canlib.canSchedulerOpen(self._device_handle, self.channel, self._scheduler) caps = structures.CANCAPABILITIES() ...
Send a message using built-in cyclic transmit list functionality.
def _verify_page(self):
    """True when every token of self.date (month truncated to 3 chars) appears in the page title's date."""
    title_date = self._get_date_in_title().lower()
    tokens = self.date.lower().split()
    tokens[0] = tokens[0][:3]  # month abbreviation matches full month names too
    return all(token in title_date for token in tokens)
Verify the ratings page matches the correct date
def parse_float_literal(ast, _variables=None):
    """Parse a float (or int) value node in the AST into a Python float; otherwise INVALID."""
    if not isinstance(ast, (FloatValueNode, IntValueNode)):
        return INVALID
    return float(ast.value)
Parse a float value node in the AST.
def check_existing_filename (filename, onlyfiles=True): if not os.path.exists(filename): raise PatoolError("file `%s' was not found" % filename) if not os.access(filename, os.R_OK): raise PatoolError("file `%s' is not readable" % filename) if onlyfiles and not os.path.isfile(filename): ...
Ensure that given filename is a valid, existing file.
def evaluate_tour_M(self, tour):
    """Score *tour* using the Cythonized evaluator against matrix self.M.

    Imported lazily so the compiled extension is only required when scoring.
    """
    from .chic import score_evaluate_M
    return score_evaluate_M(tour, self.active_sizes, self.M)
Use Cythonized version to evaluate the score of a current tour
def enforce_filetype_file(form, field): if form._fields.get('filetype').data != RESOURCE_FILETYPE_FILE: return domain = urlparse(field.data).netloc allowed_domains = current_app.config['RESOURCES_FILE_ALLOWED_DOMAINS'] allowed_domains += [current_app.config.get('SERVER_NAME')] if current_app...
Only allowed domains in resource.url when filetype is file
def close(self):
    """Close any open HTTP connection; errors propagate only when fail_silently is off."""
    try:
        self.connection.close()
        self.connection = None
    except Exception:
        if self.fail_silently:
            return
        raise
Close any open HTTP connections to the API server.
def _construct_functions(self, coefs, **kwargs):
    """Build one basis function per coefficient set in *coefs*."""
    factory = self.basis_functions.functions_factory
    return [factory(coef, **kwargs) for coef in coefs]
Return a list of functions given a list of coefficients.
def cdk_module_matches_env(env_name, env_config, env_vars): if env_config.get(env_name): current_env_config = env_config[env_name] if isinstance(current_env_config, type(True)) and current_env_config: return True if isinstance(current_env_config, six.string_types): (a...
Return bool on whether cdk command should continue in current env.
def restrict_to_parent(self, target, parent):
    """Clamp *target* to the parent's open (start, end) interval.

    Any value outside the interval (including the endpoints) snaps to
    parent['end'], matching the original boundary behavior.
    """
    inside = parent['start'] < target < parent['end']
    return target if inside else parent['end']
Restrict target to parent structure boundaries.
def single_read(self, register): comm_reg = (0b00010 << 3) + register if register == self.AD7730_STATUS_REG: bytes_num = 1 elif register == self.AD7730_DATA_REG: bytes_num = 3 elif register == self.AD7730_MODE_REG: bytes_num = 2 elif register =...
Reads data from desired register only once.
def state_names():
    """Return the set of all US state names from resources/States.csv.

    Fix: the 'rU' (universal-newlines) open mode was removed in Python 3.11;
    csv input should instead be opened with newline='' per the csv docs.
    """
    names = set()
    fname = pkg_resources.resource_filename(__name__, 'resources/States.csv')
    with open(fname, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',')
        for row in reader:
            names.add(row[0])  # first column holds the state name
    return names
Get the set of all US state names
def SelfReferenceProperty(label=None, collection_name=None, **attrs):
    """Create a reference property that points back to its own model class.

    Passing reference_class is a configuration error: the class is implied.
    """
    if 'reference_class' not in attrs:
        return ReferenceProperty(_SELF_REFERENCE, label, collection_name, **attrs)
    raise ConfigurationError(
        'Do not provide reference_class to self-reference.')
Create a self reference.
def getPropagationBit(self, t, p): try: return self.validPropagations[t][p]['BITS'] except KeyError: raise CommandExecutionError(( 'No propagation type of "{0}". It should be one of the following: {1}' ).format(p, ', '.join(self.validPropagations...
returns the propagation bit of a text value
def encompassed_by(self, span):
    """Whether *span* encompasses this span; for a list, return the encompassing members."""
    if not isinstance(span, list):
        return span.encompasses(self)
    return [candidate for candidate in span if candidate.encompasses(self)]
Returns true if the given span encompasses this span.
def interface_ip(iface):
    """Return the IPv4 address of *iface*, '' if it has none, or the lookup error."""
    info, error = _get_iface_info(iface)
    if error is not False:
        return error
    inet = info.get(iface, {}).get('inet', None)
    return inet[0].get('address', '') if inet else ''
Return `iface` IPv4 addr or an error if `iface` does not exist
def commitAndCloseEditor(self):
    """Commit data from the signalling editor (when supported), then close it with NoHint."""
    widget = self.sender()
    try:
        self.commitData.emit(widget)
    except AttributeError:
        # sender may not support commitData; closing still proceeds
        pass
    self.closeEditor.emit(widget, QAbstractItemDelegate.NoHint)
Commit and close editor
def from_socket(controller, host=None, port=None, track_path=None, log_level=logging.ERROR): rocket = Rocket(controller, track_path=track_path, log_level=log_level) rocket.connector = SocketConnector(controller=controller, tracks=rocket.tracks, ...
Create rocket instance using socket connector
def iscm_md_update_dict(self, keypath, data):
    """Merge *data* into the metadata dict at dotted *keypath*.

    Fix: `string.split(keypath, ".")` and `dict.has_key()` are Python-2-only
    and raise on Python 3; use `str.split` and the `in` operator instead.
    Intermediate dicts along the keypath are created on demand.
    """
    current = self.metadata
    for key in keypath.split("."):
        if key not in current:
            current[key] = {}
        current = current[key]
    current.update(data)
Update a metadata dictionary entry
def _delete_doc_from_index(index_writer, docid):
    """Remove the document whose docid matches from the index."""
    index_writer.delete_by_query(whoosh.query.Term("docid", docid))
Remove a document from the index
def _unstructure_mapping(self, mapping):
    """Unstructure every key and value of *mapping* into primitives, keeping the mapping type."""
    dispatch = self._unstructure_func.dispatch
    pairs = (
        (dispatch(key.__class__)(key), dispatch(value.__class__)(value))
        for key, value in mapping.items()
    )
    return mapping.__class__(pairs)
Convert a mapping of attr classes to primitive equivalents.
def superclass(self, klass):
    """True if this Class is a superclass of the given one.

    Delegates to the native EnvSuperclassP predicate on the wrapped
    environment handle and coerces its result to a Python bool.
    """
    return bool(lib.EnvSuperclassP(self._env, self._cls, klass._cls))
True if the Class is a superclass of the given one.
def setdummies(self,e): v0,v1 = e.v r0,r1 = self.grx[v0].rank,self.grx[v1].rank if r0>r1: assert e in self.alt_e v0,v1 = v1,v0 r0,r1 = r1,r0 if (r1-r0)>1: ctrl=self.ctrls[e]={} ctrl[r0]=v0 ctrl[r1]=v1 for...
creates and defines all needed dummy vertices for edge e.
def split_docstring(self, block): try: first_line, rest_of_lines = block.split("\n", 1) except ValueError: pass else: raw_first_line = split_leading_trailing_indent(rem_comment(first_line))[1] if match_in(self.just_a_string, raw_first_line): ...
Split a code block into a docstring and a body.
def search_fields(self):
    """Field names to search against, decoded from the request's JSON payload."""
    raw = self.request.get("search_fields", None)
    if not raw:
        return []
    return json.loads(_u(raw))
Returns the object field names to search against
def model_saved(sender, instance, created, raw, using, **kwargs): opts = get_opts(instance) model = '.'.join([opts.app_label, opts.object_name]) action = 'created' if created else 'updated' distill_model_even...
Automatically triggers "created" and "updated" actions.
def takeoff(self):
    """Send the takeoff command: a REF packet with the `start` input bit set."""
    self.send(at.REF(at.REF.input.start))
Sends the takeoff command.
def freeze(self):
    """Make this SchemaElement's in/out connection collections immutable."""
    for attr in ("in_connections", "out_connections"):
        setattr(self, attr, frozenset(getattr(self, attr)))
Make the SchemaElement's connections immutable.
def wait(self):
    """Block until the greenlet watcher disappears, polling faster while stopping."""
    while self.greenlet_watch:
        gevent.sleep(0.1 if self.stopping else 1)
Waits for the pool to be fully stopped
def __load_section(self, section_key): if self._sections[section_key] is not None: return articles = [] for page in count(1): if page > 50: raise Exception('Last page detection is probably broken') url = '{domain}{section}&iMenuID=1&iSubMenuID={page}'.form...
Reads the set of article links for a section if they are not cached.
def _fill(self): types_to_exclude = ['module', 'function', 'builtin_function_or_method', 'instance', '_Feature', 'type', 'ufunc'] values = self.namespace.who_ls() def eval(expr): return self.namespace.shell.ev(expr) var = [(v, type(...
Fill self with variable information.
def plot_lc(calc_id, aid=None):
    """Plot aggregate loss curves for the given calculation id.

    Per-asset plotting (aid) is not implemented and exits with a message.
    """
    dstore = util.read(calc_id)
    curves = dstore['agg_curves-rlzs']
    if aid is not None:
        sys.exit('Not implemented yet')
    plt = make_figure(curves.attrs['return_periods'], curves.value)
    plt.show()
Plot loss curves given a calculation id and an asset ordinal.
def install_signal_handlers(self): self.graceful_stop = False def request_shutdown_now(): self.shutdown_now() def request_shutdown_graceful(): if self.graceful_stop: self.shutdown_now() else: self.graceful_stop = True ...
Handle events like Ctrl-C from the command line.
def put_file(client, source_file, destination_file): try: sftp_client = client.open_sftp() sftp_client.put(source_file, destination_file) except Exception as error: raise IpaUtilsException( 'Error copying file to instance: {0}.'.format(error) ) finally: wi...
Copy file to instance using Paramiko client connection.
def _retrieve_grains_cache(proxy=None): global GRAINS_CACHE if not GRAINS_CACHE: if proxy and salt.utils.napalm.is_proxy(__opts__): GRAINS_CACHE = proxy['napalm.get_grains']() elif not proxy and salt.utils.napalm.is_minion(__opts__): GRAINS_CACHE = salt.utils.napalm.call(...
Retrieves the grains from the network device if not cached already.
def getLevel(self):
    """Nesting depth of this port: the number of LPort ancestors above it."""
    depth = 0
    node = self.parent
    while isinstance(node, LPort):
        depth += 1
        node = node.parent
    return depth
Get nest-level of this port
def user_pass(self, func=None, location=None, **rkwargs): def wrapper(view): view = to_coroutine(view) @functools.wraps(view) async def handler(request, *args, **kwargs): await self.check_user(request, func, location, **rkwargs) return await vi...
Decorator ensures that user pass the given func.
def domain_unblock(self, domain=None):
    """Remove a domain block for the logged-in user.

    NOTE: __generate_params(locals()) derives the request parameters from this
    function's local variables, so no new locals may be introduced before
    that call.
    """
    params = self.__generate_params(locals())
    self.__api_request('DELETE', '/api/v1/domain_blocks', params)
Remove a domain block for the logged-in user.
async def _get_cdn_client(self, cdn_redirect): raise NotImplementedError session = self._exported_sessions.get(cdn_redirect.dc_id) if not session: dc = await self._get_dc(cdn_redirect.dc_id, cdn=True) session = self.session.clone() await session.set_dc(dc.id, ...
Similar to ._borrow_exported_client, but for CDNs
def single_node_env(num_gpus=1): import tensorflow as tf if 'HADOOP_PREFIX' in os.environ and 'TFOS_CLASSPATH_UPDATED' not in os.environ: classpath = os.environ['CLASSPATH'] hadoop_path = os.path.join(os.environ['HADOOP_PREFIX'], 'bin', 'hadoop') hadoop_classpath = subprocess.check_output([hadoop_...
Setup environment variables for Hadoop compatibility and GPU allocation
def queue_purge(self, queue, **kwargs):
    """Discard all pending messages and return how many were dropped."""
    pending = mqueue.qsize()
    mqueue.queue.clear()
    return pending
Discard all messages in the queue.
def create_kernel_instance(self, kernel_options, params, verbose): instance_string = util.get_instance_string(params) grid_div = (kernel_options.grid_div_x, kernel_options.grid_div_y, kernel_options.grid_div_z) if not kernel_options.block_size_names: kernel_options.block_size_names =...
create kernel instance from kernel source, parameters, problem size, grid divisors, and so on
def clip_image(image, clip_min, clip_max):
    """Clip an image, or an image batch, elementwise to [clip_min, clip_max].

    np.clip is the idiomatic single-call equivalent of the previous
    minimum(maximum(...)) composition and behaves identically (including
    broadcasting and the clip_min > clip_max corner case).
    """
    return np.clip(image, clip_min, clip_max)
Clip an image, or an image batch, with upper and lower threshold.
def prepare_date(data, schema):
    """Serialize a datetime.date as a DAYS_SHIFT-shifted ordinal int; pass anything else through."""
    if not isinstance(data, datetime.date):
        return data
    return data.toordinal() - DAYS_SHIFT
Converts datetime.date to int timestamp
def segment_volumes(neurites, neurite_type=NeuriteType.all):
    """Volumes of all segments in a collection of neurites."""
    def _section_segment_volumes(section):
        # consecutive point pairs form the section's segments
        points = section.points
        return [morphmath.segment_volume(pair)
                for pair in zip(points[:-1], points[1:])]
    return map_segments(_section_segment_volumes, neurites, neurite_type)
Volumes of the segments in a collection of neurites
def load_reg():
    """Load the register from its msgpack file, re-raising on failure.

    Fix: the failure log claimed a *write* to __opts__['outdir'], but this
    function *reads* the register file; log the file actually being read.
    """
    reg_dir = _reg_dir()
    regfile = os.path.join(reg_dir, 'register')
    try:
        with salt.utils.files.fopen(regfile, 'r') as fh_:
            return salt.utils.msgpack.load(fh_)
    except Exception:
        log.error('Could not read msgpack file %s', regfile)
        raise
Load the register from msgpack files
def time_between_updates(self):
    """Delta between the current `last_updated` and the previously recorded one (0 if none)."""
    if 'last_updated' not in self._original:
        return 0
    previous = self._original['last_updated']
    return self.last_updated - previous
Time between current `last_updated` and previous `last_updated`
def create_weights(nodes, dist):
    """Create weights for the Laja method from Stieltjes polynomials evaluated at *nodes*."""
    raw_polys = chaospy.quad.generate_stieltjes(dist, len(nodes) - 1, retall=True)[0]
    flat_polys = chaospy.poly.flatten(chaospy.poly.Poly(raw_polys))
    evaluation = flat_polys(nodes)
    return numpy.linalg.inv(evaluation)[:, 0]
Create weights for the Laja method.
def call(function):
    """Decorator that records each invocation of *function* via _collect."""
    message = 'call:%s.%s' % (function.__module__, function.__name__)
    @functools.wraps(function)
    def counted(*args, **kwargs):
        _collect(message)
        return function(*args, **kwargs)
    return counted
decorator that collect function call count.
def delete_all_renditions(self):
    """Delete every stored rendition file and clear the rendition map."""
    if not self.renditions:
        return
    for rendition in self.renditions.values():
        default_storage.delete(rendition)
    self.renditions = {}
delete all renditions and rendition dict
def __set_values(self, values):
    """Write a 2-D iterable of cleaned values into the target cell range."""
    cleaned = tuple(
        tuple(self._clean_value(cell) for cell in row)
        for row in values
    )
    self._get_target().setDataArray(cleaned)
Sets values in this cell range from an iterable of iterables.
def as_list(self, decode=False):
    """Return all items of the backing list, optionally passed through _decode."""
    raw = self.database.lrange(self.key, 0, -1)
    if not decode:
        return raw
    return [_decode(item) for item in raw]
Return a list containing all the items in the list.
def display_grid_scores(grid_scores, top=None): grid_scores = sorted(grid_scores, key=lambda x: x[1], reverse=True) if top is not None: grid_scores = grid_scores[:top] _, best_mean, best_scores = grid_scores[0] threshold = best_mean - 2 * sem(best_scores) for params, mean_score, scores in gr...
Helper function to format a report on a grid of scores
def json_decode(data):
    """Decode JSON from *data*, accepting either str or UTF-8 bytes.

    Fix/modernization: `six.binary_type` is just `bytes` on Python 3 (this
    file already uses Python-3-only f-strings elsewhere), so the `six` shim
    is unnecessary.
    """
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    return json.loads(data)
Decodes the given JSON as primitives
def pattern_logic_aeidon():
    """Choose subtitle-search patterns: pattern files first, then regex, then plain terms."""
    if Config.options.pattern_files:
        return prep_patterns(Config.options.pattern_files)
    if Config.options.regex:
        return Config.REGEX
    return Config.TERMS
Return patterns to be used for searching subtitles via aeidon.
def path(args):
    """Write fully formed paths (or stems) for the given file ids; return a shell exit code."""
    from .query import Database
    db = Database()
    output = sys.stdout
    if args.selftest:
        # selftest mode discards output
        from bob.db.utils import null
        output = null()
    results = db.paths(args.id, prefix=args.directory, suffix=args.extension)
    for entry in results:
        output.write('%s\n' % entry)
    return 0 if results else 1
Returns a list of fully formed paths or stems given some file id
def _copy_update(sourcepath, destname):
    """Copy *sourcepath* to *destname* only if the source is newer.

    Fix: both paths were interpolated into the shell command inside bare
    single quotes, which breaks (and permits argument injection) for names
    containing quotes; quote them properly with shlex.quote.
    Returns the os.system() exit status.
    """
    import shlex
    src = shlex.quote(sourcepath)
    dst = shlex.quote(destname)
    if sys.platform.startswith('linux'):
        return os.system("/bin/cp -ua %s %s" % (src, dst))
    return os.system("rsync -ua %s %s" % (src, dst))
Copy source to dest only if source is newer.
def getReferenceSetByName(self, name):
    """Return the reference set registered under *name*; raise if unknown."""
    if name in self._referenceSetNameMap:
        return self._referenceSetNameMap[name]
    raise exceptions.ReferenceSetNameNotFoundException(name)
Returns the reference set with the specified name.
def dePeriod(arr):
    """Unwrap periodic angles along axis 1 so they increase linearly.

    Backward jumps larger than ~2*pi (diff < -6) accumulate a +_TWOPI
    correction for all subsequent columns.
    """
    jumps = arr - nu.roll(arr, 1, axis=1)
    wrapped = jumps < -6.
    corrections = nu.cumsum(wrapped.astype(int), axis=1)
    return arr + _TWOPI * corrections
make an array of periodic angles increase linearly