def on_backward_begin(self, last_loss:Rank0Tensor, last_input:Tensor, **kwargs):
    "Apply AR and TAR to `last_loss`."
    if self.alpha != 0.:
        last_loss += self.alpha * self.out[-1].float().pow(2).mean()
    if self.beta != 0.:
        h = self.raw_out[-1]
        if len(h) > 1:
            last_loss += self.b...
Apply AR and TAR to `last_loss`.
def _lockstep_fcn(values):
    numrequired, fcn, args = values
    with _process_lock:
        _numdone.value += 1
    while 1:
        if _numdone.value == numrequired:
            return fcn(args)
Wrapper to ensure that all processes execute together
async def _unwatch(self, conn):
    "Unwatches all previously specified keys"
    await conn.send_command('UNWATCH')
    res = await conn.read_response()
    return self.watching and res or True
Unwatches all previously specified keys
def fits(self, jobShape):
    return jobShape.memory <= self.shape.memory and \
        jobShape.cores <= self.shape.cores and \
        jobShape.disk <= self.shape.disk and \
        (jobShape.preemptable or not self.shape.preemptable)
Check if a job shape's resource requirements will fit within this allocation.
def random(magnitude=1):
    theta = random.uniform(0, 2 * math.pi)
    return magnitude * Vector(math.cos(theta), math.sin(theta))
Create a unit vector pointing in a random direction.
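A runnable sketch of the idea, using a minimal hypothetical stand-in for the Vector class (the real Vector type is not shown in this snippet): sampling a uniform angle and taking (cos theta, sin theta) always gives a direction of length 1, so scaling by magnitude yields the requested length.

import math
import random as _random

class Vector:  # hypothetical stand-in for the snippet's Vector type
    def __init__(self, x, y):
        self.x, self.y = x, y
    def __rmul__(self, k):
        return Vector(k * self.x, k * self.y)

theta = _random.uniform(0, 2 * math.pi)
v = 2.5 * Vector(math.cos(theta), math.sin(theta))
print(math.hypot(v.x, v.y))  # always ~2.5, since cos(theta)**2 + sin(theta)**2 == 1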
def lookup_full_hashes(self, hash_values):
    q = output = []
    with self.get_cursor() as dbc:
        placeholders = ','.join(['?'] * len(hash_values))
        dbc.execute(q.format(placeholders), [sqlite3.Binary(hv) for hv in hash_values])
        for h in dbc.fetchall():
            ...
Query DB to see if hash is blacklisted
def state_to_wavefunction(state: State) -> pyquil.Wavefunction:
    amplitudes = state.vec.asarray()
    amplitudes = amplitudes.transpose()
    amplitudes = amplitudes.reshape([amplitudes.size])
    return pyquil.Wavefunction(amplitudes)
Convert a QuantumFlow state to a pyQuil Wavefunction
def copy_uri_options(hosts, mongodb_uri):
    if "?" in mongodb_uri:
        options = mongodb_uri.split("?", 1)[1]
    else:
        options = None
    uri = "mongodb://" + hosts
    if options:
        uri += "/?" + options
    return uri
Returns a MongoDB URI to hosts with the options from mongodb_uri.
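A quick behavioral check of copy_uri_options as defined above: everything after the '?' is carried over to the new host list, and URIs without options pass through unchanged.

print(copy_uri_options("h1:27017,h2:27017", "mongodb://old-host/?replicaSet=rs0"))
# mongodb://h1:27017,h2:27017/?replicaSet=rs0
print(copy_uri_options("h1:27017", "mongodb://old-host"))
# mongodb://h1:27017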
def _random_cycle(adj, random_state):
    n = random_state.randint(len(adj))
    for idx, v in enumerate(adj):
        if idx == n:
            break
    start = v
    walk = [start]
    visited = {start: 0}
    while True:
        if len(walk) > 1:
            previous = walk[-2]
        neighbors = [u for u in ad...
Find a cycle using a random graph walk.
def init_celery(self, celery):
    count = next(self.counter)

    def task_with_hub(f, **opts):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            return f(self, *args, **kwargs)
        wrapper.__name__ = wrapper.__name__ + '_' + str(count)
        return celery.t...
Registers the celery tasks on the hub object.
def _list_element_starts_with(items, needle):
    for item in items:
        if item.startswith(needle):
            return True
    return False
True if any of the list elements starts with needle
def _from_dict(cls, _dict):
    args = {}
    xtra = _dict.copy()
    if 'log_messages' in _dict:
        args['log_messages'] = [
            LogMessage._from_dict(x) for x in (_dict.get('log_messages'))
        ]
        del xtra['log_messages']
    else:
        raise ValueError(
            ...
Initialize an OutputData object from a JSON dictionary.
def post(self, url, data, params=None):
    r = self.session.post(url, data=data, params=params)
    return self._response_parser(r, expect_json=False)
Initiate a POST request
def register_annotype_converter(cls, types, is_array=False, is_mapping=False):
    if not isinstance(types, Sequence):
        types = [types]

    def decorator(subclass):
        for typ in types:
            cls._annotype_lookup[(typ, is_array, is_mapping)] = su...
Register this class as a converter for Anno instances
def administration(self):
    url = self._url
    res = search("/rest/", url).span()
    addText = "admin/"
    part1 = url[:res[1]]
    part2 = url[res[1]:]
    adminURL = "%s%s%s" % (part1, addText, part2)
    res = AdminFeatureService(url=url, securityHandler=s...
returns the hostservice object to manage the back-end functions
def fixPath(path):
    path = os.path.abspath(os.path.expanduser(path))
    if path.startswith("\\"):
        return "C:" + path
    return path
Ensures paths are correct for linux and windows
def trace(self, n):
    "Restore the position in the history of individual v's nodes"
    trace_map = {}
    self._trace(n, trace_map)
    s = list(trace_map.keys())
    s.sort()
    return s
Restore the position in the history of individual v's nodes
def add_link(self):
    "Create a new internal link"
    n = len(self.links) + 1
    self.links[n] = (0, 0)
    return n
Create a new internal link
def spawn_batch_jobs(job, shared_ids, input_args):
    samples = []
    config = input_args['config']
    with open(config, 'r') as f_in:
        for line in f_in:
            line = line.strip().split(',')
            uuid = line[0]
            urls = line[1:]
            samples.append((uuid, urls))
    for sample in...
Spawns an alignment job for every sample in the input configuration file
def _list_items(queue):
    con = _conn(queue)
    with con:
        cur = con.cursor()
        cmd = 'SELECT name FROM {0}'.format(queue)
        log.debug('SQL Query: %s', cmd)
        cur.execute(cmd)
        contents = cur.fetchall()
    return contents
Private function to list contents of a queue
def dhcp_request(iface=None, **kargs):
    if conf.checkIPaddr != 0:
        warning("conf.checkIPaddr is not 0, I may not be able to match the answer")
    if iface is None:
        iface = conf.iface
    fam, hw = get_if_raw_hwaddr(iface)
    return srp1(Ether(dst="ff:ff:ff:ff:ff:ff") / IP(src="0.0.0.0", dst="255.255...
Send a DHCP discover request and return the answer
@contextlib.contextmanager
def disablingBuidCache(self):
    # swap in a zero-sized cache for the duration of the block,
    # then restore a normally-sized one
    self.buidcache = s_cache.LruDict(0)
    yield
    self.buidcache = s_cache.LruDict(BUID_CACHE_SIZE)
Disable and invalidate the layer buid cache for migration
def view_count(self):
    views = self.list_views()
    counts = defaultdict(lambda: 0)
    for view in views:
        counts[view.name] += 1
    return dict(counts)
Return the number of opened views.
def map_services(self):
    self.get_simple_devices_info()
    j = self.data_request({'id': 'status', 'output_format': 'json'}).json()
    service_map = {}
    items = j.get('devices')
    for item in items:
        service_map[item.get('id')] = item.get('states')
    self.device_services_ma...
Get full Vera device service info.
def to_integer(value, ctx):
    if isinstance(value, bool):
        return 1 if value else 0
    elif isinstance(value, int):
        return value
    elif isinstance(value, Decimal):
        try:
            val = int(value.to_integral_exact(ROUND_HALF_UP))
            if isinstance(val, int):
                return v...
Tries conversion of any value to an integer
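A worked example of the Decimal branch: to_integral_exact with ROUND_HALF_UP rounds halves away from zero (standard-library behavior; the snippet above imports ROUND_HALF_UP directly).

from decimal import Decimal, ROUND_HALF_UP

print(int(Decimal("2.5").to_integral_exact(rounding=ROUND_HALF_UP)))   # 3
print(int(Decimal("-2.5").to_integral_exact(rounding=ROUND_HALF_UP)))  # -3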
def loads(s, cls=BinaryQuadraticModel, vartype=None):
    return load(s.split('\n'), cls=cls, vartype=vartype)
Load a COOrdinate formatted binary quadratic model from a string.
def layer_to_solr(self, layer):
    success = True
    message = 'Synced layer id %s to Solr' % layer.id
    layer_dict, message = layer2dict(layer)
    if not layer_dict:
        success = False
    else:
        layer_json = json.dumps(layer_dict)
        try:
            url_solr_...
Sync a layer in Solr.
def main():
    parser = argparse.ArgumentParser(description='AFTV Server')
    parser.add_argument('-p', '--port', type=int, help='listen port', default=5556)
    parser.add_argument('-d', '--default', help='default Amazon Fire TV host', nargs='?')
    parser.add_argument('-c', '--config', type=str, help='Path to conf...
Set up the server.
def update_cursor(self, dc, grid, row, col):
    old_row, old_col = self.old_cursor_row_col
    bgcolor = get_color(config["background_color"])
    self._draw_cursor(dc, grid, old_row, old_col,
                      pen=wx.Pen(bgcolor), brush=wx.Brush(bgcolor))
    self._draw_cursor(dc, grid, row, c...
Whites out the old cursor and draws the new one
def line(self, node, coords, close=False, **kwargs):
    line_len = len(coords)
    if len([c for c in coords if c[1] is not None]) < 2:
        return
    root = 'M%s L%s Z' if close else 'M%s L%s'
    origin_index = 0
    while origin_index < line_len and None in coords[origin_index]:
        ...
Draw an SVG line
def load_lsa_information(self):
    if not (49 < int(self.clustering_parameter) < 101):
        raise Exception('Only LSA dimensionalities in the range 50-100' +
                        ' are supported.')
    if not self.quiet:
        print "Loading LSA term vectors..."
    with open(os.path.jo...
Loads a dictionary from disk that maps permissible words to their LSA term vectors.
def actually_mount(self, client):
    a_obj = self.config.copy()
    if 'description' in a_obj:
        del a_obj['description']
    try:
        m_fun = getattr(client, self.mount_fun)
        if self.description and a_obj:
            m_fun(self.backend, mount_point=s...
Actually mount something in Vault
def project_name_changed(self, widget, data=None):
    if widget.get_text() != "":
        self.run_btn.set_sensitive(True)
    else:
        self.run_btn.set_sensitive(False)
    self.update_full_label()
Controls whether the run button is enabled, based on the project name.
def disk_check_size(ctx, param, value):
    if value:
        if isinstance(value, tuple):
            val = value[1]
        else:
            val = value
        if val % 1024:
            raise click.ClickException('Size must be a multiple of 1024.')
    return value
Validation callback for disk size parameter.
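A hedged sketch of how such a callback is typically wired into a click option (the option name here is hypothetical); click passes (ctx, param, value) to the callback before the command body runs.

import click

@click.command()
@click.option('--size', type=int, callback=disk_check_size)
def cli(size):
    click.echo(size)

if __name__ == '__main__':
    cli()  # `--size 2048` passes; `--size 1000` raises the ClickException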
def irreducible_causes(self):
    return tuple(link for link in self if link.direction is Direction.CAUSE)
The set of irreducible causes in this |Account|.
def isHandlerPresent(self, event_name):
    if event_name not in self.handlers:
        raise ValueError('{} is not a valid event'.format(event_name))
    return self.handlers[event_name] is not None
Check if an event has a handler.
def main():
    alarm = XBeeAlarm('/dev/ttyUSB0', '\x56\x78')
    routine = SimpleWakeupRoutine(alarm)
    from time import sleep
    while True:
        try:
            print "Waiting 5 seconds..."
            sleep(5)
            print "Firing"
            routine.trigger()
        except KeyboardInterrupt:
            ...
Run through simple demonstration of alarm concept
def _get_filtered_study_ids(shard, include_aliases=False):
    from peyotl.phylesystem.helper import DIGIT_PATTERN
    k = shard.get_doc_ids()
    if shard.has_aliases and (not include_aliases):
        x = []
        for i in k:
            if DIGIT_PATTERN.match(i) or ((len(i) > 1) and (i[-2] == '_')):
                ...
Optionally filters out aliases from standard doc-id list
def split_pdf(pdf_path):
    pdf = PdfFileReader(pdf_path)
    pdf_list = []
    for page_num in range(pdf.numPages):
        page = pdf.getPage(page_num)
        pdf_writer = PdfFileWriter()
        pdf_writer.addPage(page)
        with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
            pdf_writer.write...
Splits a multi-page pdf into a list of single page pdfs
def _get_id(self):
    return ''.join(map(str, filter(is_not_None, [self.Prefix, self.Name])))
Construct and return the identifier
def __try_parse_number(self, string):
    try:
        return int(string)
    except ValueError:
        try:
            return float(string)
        except ValueError:
            return False
Try to parse a string to a number, else return False.
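The same fallback chain shown standalone (the original is a name-mangled method), with the three possible outcomes:

def try_parse_number(string):
    try:
        return int(string)
    except ValueError:
        try:
            return float(string)
        except ValueError:
            return False

print(try_parse_number("42"), try_parse_number("4.2"), try_parse_number("n/a"))
# 42 4.2 False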
def __add(self, token):
    self.__symbols.append(token)
    text = token.text
    if text is not None and text not in self.__mapping:
        self.__mapping[text] = token
Unconditionally adds a token to the table.
def getProvStack(self, iden: bytes):
    retn = self.slab.get(iden, db=self.db)
    if retn is None:
        return None
    return s_msgpack.un(retn)
Returns the provenance stack for the given iden
def bootstrap_app():
    from salt.netapi.rest_cherrypy import app
    import salt.config

    __opts__ = salt.config.client_config(
        os.environ.get('SALT_MASTER_CONFIG', '/etc/salt/master'))
    return app.get_app(__opts__)
Grab the opts dict of the master config by trying to import Salt
def load(provider, config_location=DEFAULT_CONFIG_DIR):
    auth = None
    auth_file = None
    try:
        config_dir = os.path.join(config_location, NOIPY_CONFIG)
        print("Loading stored auth info [%s]... " % config_dir, end="")
        auth_file = os.path.join(config_dir, provider)
        with open(auth_fil...
Load provider specific auth info from file
def validate(self):
    valids = [getattr(self, valid)
              for valid in sorted(dir(self.__class__))
              if valid.startswith('is_valid_')]
    for is_valid in valids:
        if not is_valid():
            docstring = '\n'.join(
                line.strip() for line in is_...
Apply the `is_valid` methods to self and possibly raise a ValueError.
def next_batch(self, n=1):
    if len(self.queue) == 0:
        return []
    batch = list(reversed(self.queue[-n:]))
    self.queue = self.queue[:-n]
    return batch
Return the next requests that should be dispatched.
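The slicing makes the queue LIFO; a minimal trace of the two expressions:

queue = [1, 2, 3, 4, 5]
n = 2
batch = list(reversed(queue[-n:]))  # [5, 4] -- newest requests first
queue = queue[:-n]                  # [1, 2, 3]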
def form_valid(self, form):
    response = super(FormAjaxMixin, self).form_valid(form)
    if self.request.is_ajax():
        return self.json_to_response()
    return response
If the form is valid, return a JSON response for AJAX requests, otherwise the default response
def assume_role_credentials(self, arn):
    log.info("Assuming role as %s", arn)
    for name in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']:
        if name in os.environ and not os.environ[name]:
            del os.environ[name]
    sts = self.amazon.s...
Return the environment variables for an assumed role
def visit_UnaryOp(self, node: AST, dfltChaining: bool = True) -> str:
    op = node.op
    with self.op_man(op):
        return self.visit(op) + self.visit(node.operand)
Return representation of `node`'s operator and operand.
def iter_callback_properties(self):
    for name in dir(self):
        if self.is_callback_property(name):
            yield name, getattr(type(self), name)
Iterator to loop over all callback properties.
def input_file(self, _container):
    p = local.path(_container)
    if set_input_container(p, CFG):
        return
    p = find_hash(CFG["container"]["known"].value, _container)
    if set_input_container(p, CFG):
        return
    raise ValueError("The path '{0}' does not exist.".format(p)...
Find the input path of a uchroot container.
def extract_secs(self, tx, tx_in_idx):
    sc = tx.SolutionChecker(tx)
    tx_context = sc.tx_context_for_idx(tx_in_idx)
    solution_stack = []
    for puzzle_script, solution_stack, flags, sighash_f in sc.puzzle_and_solution_iterator(tx_context):
        for opcode, data, pc, new_pc in self._scrip...
For a given script solution, yield its sec blobs
def modifie_many(self, dic: dict):
    for i, v in dic.items():
        self.modifie(i, v)
Convenience function which calls modifie on each element of dic
def _parse_datetime_value(value):
    if value.endswith('Z'):
        value = value[:-1] + '+00:00'
    return arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime
Deserialize a DateTime object from its proper ISO-8601 representation.
def add_ignore(self, depend):
    try:
        self._add_child(self.ignore, self.ignore_set, depend)
    except TypeError as e:
        e = e.args[0]
        if SCons.Util.is_List(e):
            s = list(map(str, e))
        else:
            s = str(e)
        raise SCons.Errors.Us...
Adds dependencies to ignore.
def call_name_is(siter, name):
    return (
        isinstance(siter, ast.Call) and
        hasattr(siter.func, 'attr') and
        siter.func.attr == name
    )
Checks the function call name
def query_all():
    recs = TabPost2Tag.select(
        TabPost2Tag,
        TabTag.kind.alias('tag_kind'),
    ).join(
        TabTag,
        on=(TabPost2Tag.tag_id == TabTag.uid)
    )
    return recs
Query all the records from TabPost2Tag.
def relative_path(self, key):
    key = str(key)
    key = key.replace(':', '/')
    key = key[1:]
    if not self.case_sensitive:
        key = key.lower()
    return os.path.normpath(key)
Returns the relative path for given `key`
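An illustration of the key-to-path mapping, assuming case_sensitive is False:

import os

key = ":Category:News:2024"
key = key.replace(':', '/')[1:].lower()
print(os.path.normpath(key))  # category/news/2024 (backslash separators on Windows)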
def get(self, uri, disable_proxy=False, stream=False):
    response = requests.get(
        uri,
        headers=self.headers,
        allow_redirects=True,
        cookies={},
        stream=stream,
        proxies=self.proxy if not disable_proxy else False
    )
    if response.sta...
Return Requests response to GET request.
def promote_s3app(self):
    utils.banner("Promoting S3 App")
    primary_region = self.configs['pipeline']['primary_region']
    s3obj = s3.S3Deployment(
        app=self.app,
        env=self.env,
        region=self.region,
        prop_path=self.json_path,
        artifact_path=self....
promotes S3 deployment to LATEST
def insert_after(self, target):
    if not target.parent:
        return
    target.parent.insert(target.parent.sprites.index(target) + 1, self)
insert this widget into the target's parent container after the target
def flatten_container(self, container):
    for names in ARG_MAP.values():
        if names[TransformationTypes.CHRONOS.value]['name'] and \
                '.' in names[TransformationTypes.CHRONOS.value]['name']:
            chronos_dotted_name = names[TransformationTypes.CHRONOS.value]['name']...
Accepts a chronos container and pulls out the nested values into the top level
def _check_gle_response(result):
    _check_command_response(result)
    if result.get("wtimeout", False):
        raise WTimeoutError(result.get("errmsg", result.get("err")),
                            result.get("code"),
                            result)
    error_msg = result.get("err", "")
    if error_msg is No...
Return getlasterror response as a dict, or raise OperationFailure.
def _bg_combine(self, bgs):
    out = np.ones(self.h5["raw"].shape, dtype=float)
    for bg in bgs:
        out *= bg[:]
    return out
Combine several background amplitude images
def delete(self):
    if self._sheet.readonly:
        raise ReadOnlyException
    gd_client = self._sheet.client
    assert gd_client is not None
    return gd_client.DeleteRow(self._entry)
Delete the row from the spreadsheet
def setAttributesJson(self, attributesJson):
    try:
        self._attributes = json.loads(attributesJson)
    except:
        raise exceptions.InvalidJsonException(attributesJson)
    return self
Sets the attributes dictionary from a JSON string.
def __wrap_docker_exec(func):
    def call(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except OSError as e:
            if e.errno == errno.ENOENT:
                raise DockerExecuteError('Failed to execute docker. Is it installed?')
            raise
    return call
Wrap a function to raise DockerExecuteError on ENOENT
def copy_resources(self):
    if not os.path.isdir('resources'):
        os.mkdir('resources')
    resource_dir = os.path.join(os.getcwd(), 'resources', '')
    copied_resources = []
    for resource in self.resources:
        src = os.path.join(EULER_DATA, 'resources', resource)
        if ...
Copies the relevant resources to a resources subdirectory
def add_url(self, name: str, pattern: str, application: Callable) -> None:
    self.urlmapper.add(name, self.prefix + pattern)
    self.register_app(name, application)
add url pattern dispatching to application
def summary_plotting_engine(**kwargs):
    logging.debug(f"Using {prms.Batch.backend} for plotting")
    experiments = kwargs["experiments"]
    farms = kwargs["farms"]
    barn = None
    logging.debug(" - summary_plot_engine")
    farms = _preparing_data_and_plotting(
        experiments=experiments,
        farms...
creates plots of summary data.
def dragend(self, event):
    x_range = [self.begin_drag.x//self.col_width, event.x//self.col_width]
    y_range = [self.begin_drag.y//self.row_height, event.y//self.row_height]
    for i in range(2):
        for ls in [x_range, y_range]:
            if ls[i] < 0:
                ...
Handles the end of a drag action.
def create_geotiff(name, Array, driver, ndv, xsize, ysize, geot, projection, datatype, band=1):
    if isinstance(datatype, np.int) == False:
        if datatype.startswith('gdal.GDT_') == False:
            datatype = eval('gdal.GDT_'+datatype)
    newfilename = name+'.tif'
    Array[np.isnan(Array)] = ndv
    DataSet...
Creates new geotiff from array
def subcommand(self, name, help):
    if self._subcommands is None:
        self._subcommands = self.add_subparsers(help='commands')
    return self._subcommands.add_parser(name, description=help, help=help)
Creates a parser for a sub-command.
def handle_token(cls, parser, token):
    tag_error = "Accepted formats {%% %(tagname)s %(args)s %%} or " \
                "{%% %(tagname)s %(args)s as [var] %%}"
    bits = token.split_contents()
    args_count = len(bits) - 1
    if args_count >= 2 and bits[-2] == 'as':
        as_var = bits[...
Class method to parse and return a Node.
def _create_default_config_file(self):
    logger.info('Initialize Maya launcher, creating config file...\n')
    self.add_section(self.DEFAULTS)
    self.add_section(self.PATTERNS)
    self.add_section(self.ENVIRONMENTS)
    self.add_section(self.EXECUTABLES)
    self.set(self.DEFAULTS, '...
If the config file does not exist, create it and set default values.
def all_states(self) -> Tuple[State, ...]:
    return tuple(self._transform_list_of_states_to_state(states)
                 for states in self._cartesian_product_of_every_states_of_each_genes())
Return all the possible states of this influence graph.
def _average_called_depth(in_file):
    import cyvcf2
    depths = []
    for rec in cyvcf2.VCF(str(in_file)):
        d = rec.INFO.get("DP")
        if d is not None:
            depths.append(int(d))
    if len(depths) > 0:
        return int(math.ceil(numpy.mean(depths)))
    else:
        return 0
Retrieve the average depth of called reads in the provided VCF.
def start_subscribe(self):
    if not self.conn:
        raise ValueError('Not connected')
    elif not self.pubsub_conn:
        raise ValueError('PubSub not enabled')
    return Subscription(self)
Create a new Subscription context manager.
def check_dependencies(self):
    deadlocks = []
    for task in self.iflat_tasks():
        for dep in task.deps:
            if dep.node.depends_on(task):
                deadlocks.append((task, dep.node))
    if deadlocks:
        lines = ["Detect wrong list of dependecies that will lead ...
Test the dependencies of the nodes for possible deadlocks.
def qstd(x, quant=0.05, top=False, bottom=False):
    s = np.sort(x)
    n = np.size(x)
    lo = s[int(n*quant)]
    hi = s[int(n*(1-quant))]
    if top:
        w = np.where(x >= lo)
    elif bottom:
        w = np.where(x <= hi)
    else:
        w = np.where((x >= lo) & (x <= hi))
    return np.std(x[w])
returns std, ignoring outer 'quant' pctiles
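A worked example with qstd as defined above: a single extreme outlier inflates the plain standard deviation but falls outside the kept quantile range and is excluded from the trimmed estimate.

import numpy as np

rng = np.random.default_rng(0)
x = np.concatenate([rng.normal(0, 1, 1000), [100.0]])
print(np.std(x))      # roughly 3.3, inflated by the single outlier
print(qstd(x, 0.05))  # close to the core scatter; the outlier is dropped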
def bound_symbols(self):
    if self._bound_symbols is None:
        res = set.union(
            set([]),
            *[_bound_symbols(val) for val in self.kwargs.values()])
        res.update(
            set([]),
            *[_bound_symbols(arg) for arg in self.args])
        self._b...
Set of bound SymPy symbols in the expression
def _decoder(self, obj):
    if '__class__' in obj:
        elem = eval(obj['__class__'])()
        elem.ident = obj['ident']
        elem.group = str(obj['group'])
        elem.name = str(obj['name'])
        elem.ctype = str(obj['ctype'])
        elem.pytype = str(obj['pytype'])
        ...
Decode a toc element leaf-node
def load(self, callback):
    if callback is None:
        def callb():
            pass
        callback = callb
    if len(self._loaded_callbacks) == 0:
        self._request_module_status()
        self._request_channel_name()
    else:
        print("+++++++++++++++++++++++++++++...
Retrieve names of channels
def cleanPolyline(elem, options):
    pts = parseListOfPoints(elem.getAttribute('points'))
    elem.setAttribute('points', scourCoordinates(pts, options, True))
Scour the polyline points attribute
def send_ack(self):
    if self.last_ack == self.proto.max_id:
        return
    LOGGER.debug("ack (%d)", self.proto.max_id)
    self.last_ack = self.proto.max_id
    self.send_message(f"4{to_json([self.proto.max_id])}")
Send an ack message
def pack(content):
    if isinstance(content, six.text_type):
        content = content.encode("utf-8")
    return struct.pack('i', len(content)) + content
Length-prefix `content`; str input is encoded to UTF-8 bytes first
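A round-trip sketch of the length-prefix framing; 'i' is a native int, so the header size comes from struct.calcsize rather than a hard-coded 4.

import struct

payload = "héllo".encode("utf-8")
frame = struct.pack('i', len(payload)) + payload
hdr = struct.calcsize('i')
(n,) = struct.unpack('i', frame[:hdr])
assert frame[hdr:hdr + n].decode("utf-8") == "héllo"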
def redirect(self, url, status=None):
    self.status_code = 302 if status is None else status
    self.headers = Headers([('location', url)])
    self.message = ''
    self.end()
Redirect to the specified url; the optional status code defaults to 302.
def _start_thread(self):
    self._stopping_event = Event()
    self._enqueueing_thread = Thread(target=self._enqueue_batches,
                                     args=(self._stopping_event,))
    self._enqueueing_thread.start()
Start an enqueueing thread.
def cprint(msg, *args, **kw):
    if len(args) or len(kw):
        msg = msg.format(*args, **kw)
    print(fmt('{}<0>'.format(msg)))
Print colored message to stdout.
def existing(self):
    catalog = api.portal.get_tool('portal_catalog')
    results = []
    layout_path = self._get_layout_path(
        self.request.form.get('layout', '')
    )
    for brain in catalog(layout=layout_path):
        results.append({
            'title': brain.Title,
            ...
find existing content assigned to this layout
def union(a: Iterable[Any], b: Iterable[Any]) -> List[Any]:
    u = []
    for item in a:
        if item not in u:
            u.append(item)
    for item in b:
        if item not in u:
            u.append(item)
    return u
Return a list of items that are in `a` or `b`
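Behavioral note on union as defined above: first-seen order is preserved and duplicates are dropped; the `item not in u` list lookups make it O(n*m), which is fine for small inputs.

print(union([3, 1, 2], [2, 4, 1]))  # [3, 1, 2, 4]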
def setup(self, environ):
    json_handler = Root().putSubHandler('calc', Calculator())
    middleware = wsgi.Router('/', post=json_handler,
                            accept_content_types=JSON_CONTENT_TYPES)
    response = [wsgi.GZipMiddleware(200)]
    return wsgi.WsgiHandler(middleware=[wsgi.wa...
Called once to setup the list of wsgi middleware.
def init(lang, domain):
    translations_dir = _get_translations_dir()
    domain = _get_translations_domain(domain)
    pot = os.path.join(translations_dir, f'{domain}.pot')
    return _run(f'init -i {pot} -d {translations_dir} -l {lang} --domain={domain}')
Initialize translations for a language code.
def partition_version_classifiers(
        classifiers: t.Sequence[str],
        version_prefix: str = 'Programming Language :: Python :: ',
        only_suffix: str = ' :: Only') -> t.Tuple[t.List[str], t.List[str]]:
    versions_min, versions_only = [], []
    for classifier in classifiers:
        version = classifier.repla...
Find version number classifiers in given list and partition them into 2 groups.
def main(args):
    random.seed()
    temp_dir = tempfile.mkdtemp()
    logging.info('Created temporary directory: %s', temp_dir)
    validator = SubmissionValidator(
        source_dir=args.source_dir,
        target_dir=args.target_dir,
        temp_dir=temp_dir,
        do_copy=args.copy,
        use_gpu=args.use_gpu,
        container...
Validate all submissions and copy them into place
def sentence_texts(self):
    if not self.is_tagged(SENTENCES):
        self.tokenize_sentences()
    return self.texts(SENTENCES)
The list of texts representing ``sentences`` layer elements.
def _wipe_www_page(self, slug):
    wd = os.path.join(self._dirs['www'], slug)
    if os.path.isdir(wd):
        shutil.rmtree(wd)
Remove all data in www about the page identified by slug.
def stylize(txt, bold=False, underline=False):
    setting = ''
    setting += _SET_BOLD if bold is True else ''
    setting += _SET_UNDERLINE if underline is True else ''
    return setting + str(txt) + _STYLE_RESET
Changes style of the text.
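A usage sketch, assuming ANSI escape constants along these lines (the real values are not shown in the snippet):

_SET_BOLD = '\033[1m'
_SET_UNDERLINE = '\033[4m'
_STYLE_RESET = '\033[0m'

print(stylize("warning", bold=True, underline=True))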
def _calculate(self, field):
    base_offset = 0
    if self.base_field is not None:
        base_offset = self.base_field.offset
    target_offset = self._field.offset
    if (target_offset is None) or (base_offset is None):
        return 0
    return target_offset - base_offset
Return the offset relative to the base field; if either offset is unknown, return 0