code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def process_response(self, request, response): if COOKIE_KEY not in request.COOKIES: response.set_cookie(COOKIE_KEY, COOKIE_PASS, httponly=True, expires=datetime.now()+timedelta(days=30)) if DJANGOSPAM_LOG: logger.log("PASS RESPONSE", reque...
Sets "Ok" cookie on unknown users.
def _handle_response(self, response): status = response.status_code if status == 400: msg = u"bad request" raise exceptions.BadRequest(status, msg) elif status == 401: msg = u"authorization failed user:%s" % (self.sk_user) raise exceptions.Unauthorized(sta...
internal method to throw the correct exception if something went wrong
def _get_parallel_regions(data): callable_regions = tz.get_in(["config", "algorithm", "callable_regions"], data) if not callable_regions: raise ValueError("Did not find any callable regions for sample: %s\n" "Check 'align/%s/*-callableblocks.bed' and 'regions' to examine call...
Retrieve regions to run in parallel, putting longest intervals first.
def _column_reduction(self): i1, j = np.unique(np.argmin(self.c, axis=0), return_index=True) self._x[i1] = j if len(i1) == self.n: return False self._y[j] = i1 self._v = np.min(self.c, axis=0) tempc = self.c.copy() tempc[i1, j] = np.inf mu = np...
Column reduction and reduction transfer steps from LAPJV algorithm
def create_bottleneck_file(bottleneck_path, image_lists, label_name, index, image_dir, category, sess, jpeg_data_tensor, decoded_image_tensor, resized_input_tensor, bottleneck_tensor): tf.logging.debug('Creating bottleneck at ' + bottlen...
Create a single bottleneck file.
def mnist(training): if training: data_filename = 'train-images-idx3-ubyte.gz' labels_filename = 'train-labels-idx1-ubyte.gz' count = 60000 else: data_filename = 't10k-images-idx3-ubyte.gz' labels_filename = 't10k-labels-idx1-ubyte.gz' count = 10000 data_filename = maybe_download(MNIST_URL...
Downloads MNIST and loads it into numpy arrays.
def destroy(self, request, pk=None):
    """Handle DELETE by archiving the organization instead of deleting it."""
    organization = self.get_object()
    organization.archived = True
    organization.save()
    return Response(status=status.HTTP_204_NO_CONTENT)
For DELETE actions, archive the organization, don't delete.
def open_file(self, fname, length=None, offset=None, swap=None, block=None, peek=None): if length is None: length = self.length if offset is None: offset = self.offset if swap is None: swap = self.swap_size return binwalk.core.common.BlockFile(fname, ...
Opens the specified file with all pertinent configuration settings.
def load (filename, simConfig=None, output=False, instantiate=True, createNEURONObj=True): from .. import sim sim.initialize() sim.cfg.createNEURONObj = createNEURONObj sim.loadAll(filename, instantiate=instantiate, createNEURONObj=createNEURONObj) if simConfig: sim.setSimCfg(simConfig) if len(s...
Sequence of commands load, simulate and analyse network
def alloc_vpcid(nexus_ips): LOG.debug("alloc_vpc() called") vpc_id = 0 intersect = _get_free_vpcids_on_switches(nexus_ips) for intersect_tuple in intersect: try: update_vpc_entry(nexus_ips, intersect_tuple.vpc_id, False, True) vpc_id = interse...
Allocate a vpc id for the given list of switch_ips.
def send(self, commands):
    """Ship commands to the daemon.

    A trailing newline is appended when missing, since the daemon's
    protocol is line-oriented.  Uses sendall() rather than send() so the
    whole payload is transmitted even if the kernel buffer accepts only
    part of it on the first call.
    """
    if not commands.endswith("\n"):
        commands += "\n"
    self.sock.sendall(commands)
Ship commands to the daemon.
def unquote_string(code):
    """Evaluate *code*, the repr of a string literal, back to the string.

    A single-character prefix (e.g. b'' or u'') before the opening quote
    is accepted; surrounding whitespace is ignored.
    """
    stripped = code.strip()
    assert stripped[-1] in ("'", '"')
    assert stripped[0] in ("'", '"') or stripped[1] in ("'", '"')
    return ast.literal_eval(stripped)
Returns a string from code that contains a repr of the string
def _update(self): initial_time = time.time() self._updateHiddenStateTrajectories() self._updateEmissionProbabilities() self._updateTransitionMatrix() final_time = time.time() elapsed_time = final_time - initial_time logger().info("BHMM update iteration took %.3f ...
Update the current model using one round of Gibbs sampling.
def column(self, key):
    """Yield row[key] for every row in self.rows that contains *key*.

    Rows lacking the key are skipped rather than raising.
    """
    for record in self.rows:
        if key not in record:
            continue
        yield record[key]
Iterator over a given column, skipping steps that don't have that key
def getKnownLadders(reset=False): if not ladderCache or reset: jsonFiles = os.path.join(c.LADDER_FOLDER, "*.json") for ladderFilepath in glob.glob(jsonFiles): filename = os.path.basename(ladderFilepath) name = re.search("^ladder_(.*?).json$", filename).groups()[0] ...
identify all of the currently defined ladders
def _iris_obj_to_attrs(obj): attrs = {'standard_name': obj.standard_name, 'long_name': obj.long_name} if obj.units.calendar: attrs['calendar'] = obj.units.calendar if obj.units.origin != '1' and not obj.units.is_unknown(): attrs['units'] = obj.units.origin attrs.update(obj.a...
Return a dictionary of attrs when given a Iris object
def build_base_image_cmd(self, force): check_permissions() basetag = self.conf.basetag basedir = self.conf.basedir verbose = self.conf.verbose if self.image_exists(tag=basetag): if not force: echo("Image with tag '{0}' already exists".format(basetag)) ...
Build the glusterbase image
def post_process_images(self, doctree): super(AbstractSlideBuilder, self).post_process_images(doctree) relative_base = ( ['..'] * doctree.attributes.get('source')[len(self.srcdir) + 1:].count('/') ) for node in doctree.traverse(nodes.image): if node.ge...
Pick the best candidate for all image URIs.
def create_transaction(self, *args: Any, **kwargs: Any) -> BaseTransaction:
    """Instantiate a signed transaction for this VM.

    All arguments are forwarded unchanged to the VM's transaction class.
    """
    transaction_cls = self.get_transaction_class()
    return transaction_cls(*args, **kwargs)
Proxy for instantiating a signed transaction for this VM.
def _find_unused_partition() -> RootPartitions:
    """Ask the ot-unused-partition helper which root partition is inactive.

    The helper prints '2' or '3'; anything else raises KeyError.
    """
    partition_map = {b'2': RootPartitions.TWO, b'3': RootPartitions.THREE}
    output = subprocess.check_output(['ot-unused-partition']).strip()
    return partition_map[output]
Find the currently-unused root partition to write to
def _build_model_factories(store): result = {} for schemaname in store: schema = None try: schema = store[schemaname]['schema'] except KeyError: schemata_log("No schema found for ", schemaname, lvl=critical, exc=True) try: result[schemaname] = ...
Generate factories to construct objects from schemata
def collect(self): instances = {} for device in os.listdir('/dev/'): instances.update(self.match_device(device, '/dev/')) for device_id in os.listdir('/dev/disk/by-id/'): instances.update(self.match_device(device, '/dev/disk/by-id/')) metrics = {} for devi...
Collect and publish disk temperatures
def remote_resolve_reference(self, ref, remote='origin'):
    """Resolve *ref* to a revision on *remote* (default 'origin').

    Thin wrapper delegating to git_remote_resolve_reference with this
    repository's working directory.
    """
    return git_remote_resolve_reference(self.repo_dir, ref, remote=remote)
Resolve a reference to a remote revision.
def cas2tas(Vcas, H):
    """Convert calibrated airspeed to true airspeed at altitude H.

    Uses the compressible impact-pressure relation with sea-level
    constants p0/rho0 and the module's atmos() atmosphere model.
    """
    p, rho, T = atmos(H)
    # Impact pressure implied by the calibrated airspeed at sea level.
    qdyn = p0 * ((1.0 + rho0 * Vcas * Vcas / (7.0 * p0)) ** 3.5 - 1.0)
    # Invert the same relation at local static conditions to recover TAS.
    return np.sqrt(7.0 * p / rho * ((1.0 + qdyn / p) ** (2.0 / 7.0) - 1.0))
Calibrated Airspeed to True Airspeed
def fill_attrs(self, attrs): for trname, attrname in self.transitions_at.items(): implem = self.implementations[trname] if attrname in attrs: conflicting = attrs[attrname] if not self._may_override(implem, conflicting): raise ValueError...
Update the 'attrs' dict with generated ImplementationProperty.
def summarize_crud_mutation(method, model, isAsync=False): action_type = get_crud_action(method=method, model=model) name = crud_mutation_name(model=model, action=method) input_map = { 'create': create_mutation_inputs, 'update': update_mutation_inputs, 'delete': delete_mutation_input...
This function provides the standard form for crud mutations.
def _get_platform_name(ncattr):
    """Map a 'G-<number>' attribute string to a spacecraft name.

    Returns None when the attribute does not match or the number is
    unknown to SPACECRAFTS.
    """
    matched = re.match(r'G-(\d+)', ncattr)
    if not matched:
        return None
    return SPACECRAFTS.get(int(matched.group(1)))
Determine name of the platform
def addRnaQuantification(self): self._openRepo() dataset = self._repo.getDatasetByName(self._args.datasetName) biosampleId = "" if self._args.biosampleName: biosample = dataset.getBiosampleByName(self._args.biosampleName) biosampleId = biosample.getId() if...
Adds an rnaQuantification into this repo
def _from_dict(cls, _dict): args = {} if 'count' in _dict: args['count'] = _dict.get('count') if 'relevance' in _dict: args['relevance'] = _dict.get('relevance') if 'text' in _dict: args['text'] = _dict.get('text') if 'emotion' in _dict: ...
Initialize a KeywordsResult object from a json dictionary.
def print_datetime_object(dt):
    """Print a datetime plus its ctime, timetuple, ordinal and Y/M/D fields."""
    print(dt)
    rows = (
        ('ctime :', dt.ctime()),
        ('tuple :', dt.timetuple()),
        ('ordinal:', dt.toordinal()),
        ('Year :', dt.year),
        ('Mon :', dt.month),
        ('Day :', dt.day),
    )
    for label, value in rows:
        print(label, value)
prints a date-object
def A(*a):
    """Convert each argument to a numpy array.

    A single argument yields one array; several yield a list of arrays.
    """
    if len(a) == 1:
        return np.array(a[0])
    return [np.array(item) for item in a]
convert iterable object into numpy array
def float_input(self, question, message='Invalid entry', default=None, required=True): float_result = None requiredFlag = True while (float_result is None and requiredFlag): result = input('%s: ' % question) if not result and not required: float_result = N...
Method for floating point inputs with optionally specifiable error message.
def AvgPool3D(a, k, strides, padding):
    """Average 3D pooling op.

    Returns a 1-tuple containing the pooled array (the trailing comma in
    the original preserved the multi-output op convention).
    """
    patches = _pool_patches(a, k, strides, padding.decode("ascii"))
    pooled = np.average(patches, axis=tuple(range(-len(k), 0)))
    return (pooled,)
Average 3D pooling op.
def start_simple_server():
    """Run a debugging mail server on the configured port until stopped."""
    args = _get_args()
    # Instantiation registers the server with the asyncore loop.
    DebuggingServer(('', args.port), None)
    asyncore.loop()
A simple mail server that sends a simple response
def instance(self, skip_exist_test=False):
    """Return the instance of the related object linked by the field.

    With skip_exist_test=True the model's lazy_connect is used, so no
    existence check is performed.
    """
    model = self.database._models[self.related_to]
    factory = model.lazy_connect if skip_exist_test else model
    return factory(self.proxy_get())
Returns the instance of the related object linked by the field.
def MemoryExceeded(self):
    """Return True if resident memory (in MiB) exceeds the configured cap."""
    rss_mib = self.proc.memory_info().rss // (1024 * 1024)
    return rss_mib > config.CONFIG["Client.rss_max"]
Returns True if our memory footprint is too large.
def instrs_to_body(instrs, context):
    """Convert a list of Instruction objects into a list of AST body nodes.

    Raises DecompilationError when decompilation leaves operands on the
    virtual stack.
    """
    stack, body = [], []
    process_instrs(instrs, stack, body, context)
    if stack:
        raise DecompilationError(
            "Non-empty stack at the end of instrs_to_body(): %s." % stack
        )
    return body
Convert a list of Instruction objects to a list of AST body nodes.
def main(flags):
    """Download every sheet configured in settings.GOOGLE_SHEET_SYNC."""
    downloader = SheetDownloader(flags)
    downloader.init()
    for entry in settings.GOOGLE_SHEET_SYNC['files']:
        print('Downloading {}'.format(entry['path']))
        downloader.download_sheet(
            entry['path'],
            entry['sheet'],
            entry['range'],
        )
Download all sheets as configured.
def load_template_help(builtin): help_file = "templates/%s-help.yml" % builtin help_file = resource_filename(__name__, help_file) help_obj = {} if os.path.exists(help_file): help_data = yaml.safe_load(open(help_file)) if 'name' in help_data: help_obj['name'] = help_data['name...
Loads the help for a given template
def copy(self):
    """Return a copy of this LabeledTree.

    Note: the children list itself is copied (a new list) but the child
    nodes and the parent reference are shared, not cloned.  Uses an
    identity check against None instead of `!=`, which could invoke a
    custom __ne__.
    """
    return LabeledTree(
        udepth=self.udepth,
        depth=self.depth,
        text=self.text,
        label=self.label,
        children=self.children.copy() if self.children is not None else [],
        parent=self.parent)
Deep Copy of a LabeledTree
def load(self, filename=None): assert not self.__flag_loaded, "File can be loaded only once" if filename is None: filename = self.default_filename assert filename is not None, \ "{0!s} class has no default filename".format(self.__class__.__name__) size = os.path.g...
Loads file and registers filename as attribute.
def _make_client(self):
    """Create a watchman client over the local UNIX socket.

    The client starts with the configured startup timeout.
    """
    self._logger.debug('setting initial watchman timeout to %s', self._startup_timeout)
    return StreamableWatchmanClient(
        sockpath=self.socket,
        transport='local',
        timeout=self._startup_timeout,
    )
Create a new watchman client using the BSER protocol over a UNIX socket.
def update(self, storagemodel:object, modeldefinition = None, hide = 0) -> StorageQueueModel: if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None): try: content = storagemodel.getmessage() ...
update the message in queue
def remove_sample_prep_workflow(portal): logger.info("Removing 'sample_prep' related states and transitions ...") workflow_ids = ["bika_sample_workflow", "bika_ar_workflow", "bika_analysis_workflow"] to_remove = ["sample_prep", "sample_prep_complete"] wf_tool = ap...
Removes sample_prep and sample_prep_complete transitions
def write_output(self):
    """Write all stored output entries to storage via create_output."""
    for entry in self.output_data.values():
        self.create_output(entry.get('key'), entry.get('value'), entry.get('type'))
Write all stored output data to storage.
def fetch_closed_orders(self, limit: int) -> List[Order]:
    """Fetch the latest closed orders, returning at most *limit* of them.

    Delegates to _fetch_orders_limit over the cached _closed_orders.
    """
    return self._fetch_orders_limit(self._closed_orders, limit)
Fetch latest closed orders, must provide a limit.
def should_log(self, logger_name: str, level: str) -> bool: if (logger_name, level) not in self._should_log: log_level_per_rule = self._get_log_level(logger_name) log_level_per_rule_numeric = getattr(logging, log_level_per_rule.upper(), 10) log_level_event_numeric = getattr(l...
Returns if a message for the logger should be logged.
def assign(self, subject): if not isinstance(subject, (Publisher, Subscriber)): raise TypeError('Assignee has to be Publisher or Subscriber') if self._subject is not None: raise SubscriptionError('Topic %r already assigned' % self._path) self._subject = subject if...
Assigns the given subject to the topic
def _get_requested_databases(self): requested_databases = [] if ((self._requested_namespaces is not None) and (self._requested_namespaces != [])): for requested_namespace in self._requested_namespaces: if requested_namespace[0] is '*': retu...
Returns a list of databases requested, not including ignored dbs
def method(func):
    """Mark *func* as an abstract interface method.

    Applies abc.abstractmethod and tags the result with __imethod__ so
    interface machinery can recognize it.
    """
    wrapped = abc.abstractmethod(func)
    wrapped.__imethod__ = True
    return wrapped
Wrap a function as a method.
def make_random_xml_file(fname, num_elements=200, depth=3): with open(fname, 'w') as f: f.write('<?xml version="1.0" ?>\n<random>\n') for dep_num, _ in enumerate(range(1,depth)): f.write(' <depth>\n <content>\n') for num, _ in enumerate(range(1, num_elements)): ...
makes a random xml file mainly for testing the xml_split
def del_aldb(self, mem_addr: int):
    """Delete the All-Link Database record at *mem_addr*.

    Also registers the loaded callback so completion of the subsequent
    reload is reported.
    """
    self._aldb.del_record(mem_addr)
    self._aldb.add_loaded_callback(self._aldb_loaded_callback)
Delete an All-Link Database record.
def on_status(self, status):
    """Write one status as a JSON line and count it.

    Returns False (stop streaming) once the terminate flag is set.
    """
    line = json.dumps(status)
    self.out.write(line)
    self.out.write(os.linesep)
    self.received += 1
    return not self.terminate
Print out some tweets
def top(self):
    """Return the first child that is an AddrmapNode.

    Raises RuntimeError when no addrmap child exists.
    """
    for child in self.children(skip_not_present=False):
        if isinstance(child, AddrmapNode):
            return child
    raise RuntimeError
Returns the top-level addrmap node
def enable(ctx):
    """Enable (activate) an existing user selected via --username."""
    username = ctx.obj['username']
    if username is None:
        log('Specify the username with "iso db user --username ..."')
        return
    change_user = ctx.obj['db'].objectmodels['user'].find_one({'name': username})
    change_user.active = True
    change_user.save()
    log('Done')
Enable an existing user
def parseCustom(self, xbrl, ignore_errors=0): custom_obj = Custom() custom_data = xbrl.find_all(re.compile('^((?!(us-gaap|dei|xbrll|xbrldi)).)*:\s*', re.IGNORECASE | re.MULTILINE)) elements = {} for data in custom_data: if X...
Parse company custom entities from XBRL and return a Custom object.
def queue_context_entry(exchange, queue_name, routing=None): if routing is None: routing = queue_name queue_entry = QueueContextEntry(mq_queue=queue_name, mq_exchange=exchange, mq_rout...
forms queue's context entry
def format_address(address):
    """Render an ArgumentAddress as 'name' or 'name[key]' for human reading."""
    if address.kind != ArgumentKind.regular:
        return "{0}[{1}]".format(address.name, address.key)
    return address.name
Formats an ArgumentAddress for human reading.
def _process(self, name): if self.token.nature == name: self.token = self.lexer.next_token() else: self._error()
Process the current token.
def structparser(token): m = STRUCT_PACK_RE.match(token) if not m: return [token] else: endian = m.group('endian') if endian is None: return [token] formatlist = re.findall(STRUCT_SPLIT_RE, m.group('fmt')) fmt = ''.join([f[-1] * int(f[:-1]) if len(f) != 1 ...
Parse struct-like format string token into sub-token list.
def _handle_api(self, handler, handler_args, handler_kwargs): try: status_code, return_value = handler(*handler_args, **handler_kwargs) except APIError as error: return error.send() web.ctx.status = _convert_http_status(status_code) return _api_convert_output(retu...
Handle call to subclasses and convert the output to an appropriate value
def parse_substitution_from_list(list_rep): if type(list_rep) is not list: raise SyntaxError('Substitution must be a list') if len(list_rep) < 2: raise SyntaxError('Substitution must be a list of size 2') pattern = list_rep[0] replacement = list_rep[1] is_multiline = False if (le...
Parse a substitution from the list representation in the config file.
def _find_function(name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) for funcs in __utils__['boto3.paged_call'](conn.list_functions): for func in funcs['Functions']: if func['FunctionName'] == nam...
Given function name, find and return matching Lambda information.
def getData(self, events): if self.interval not in ["1d", "1wk", "1mo"]: raise ValueError("Incorrect interval: valid intervals are 1d, 1wk, 1mo") url = self.api_url % (self.ticker, self.start, self.end, self.interval, events, self.crumb) data = requests.get(url, cookies={'B':self.coo...
Returns a list of historical data from Yahoo Finance
def _directory (self): if self._filename is None: return os.path.join(self._ROOT_DIR, 'config') else: return os.path.dirname(self._filename)
The directory for this AitConfig.
def allocate(self):
    """Allocate the environment (builds the context and the hooks).

    Delegates the actual work to _allocate, logging before and after.
    """
    self.logger.debug("Allocating environment.")
    self._allocate()
    self.logger.debug("Environment successfully allocated.")
Builds the context and the Hooks.
def _setup_packages(self, sc): packages = self.py_packages if not packages: return for package in packages: mod = importlib.import_module(package) try: mod_path = mod.__path__[0] except AttributeError: mod_path = mod...
This method compresses and uploads packages to the cluster
def _force_close(self): if self._sock: try: self._sock.close() except: pass self._sock = None self._rfile = None
Close connection without QUIT message
def shell_out_ignore_exitcode(cmd, stderr=STDOUT, cwd=None):
    """Same as shell_out, but a bad exit returns the cleaned output instead of raising."""
    try:
        return shell_out(cmd, stderr=stderr, cwd=cwd)
    except CalledProcessError as err:
        return _clean_output(err.output)
Same as shell_out but doesn't raise if the cmd exits badly.
def folder_name(self):
    """The name of the build folders containing this recipe.

    Prefers site_packages_name, falling back to the recipe's name.
    """
    chosen = self.site_packages_name
    return self.name if chosen is None else chosen
The name of the build folders containing this recipe.
def gen_smul(src1, src2, dst):
    """Return a SMUL (signed multiply) REIL instruction.

    Source operand sizes must match; *dst* receives the product.
    """
    assert src1.size == src2.size
    return ReilBuilder.build(ReilMnemonic.SMUL, src1, src2, dst)
Return a SMUL instruction.
def string(self, *args):
    """Return the UTF-8 string stored in the node, or None when absent.

    Trailing NUL padding is stripped before decoding.
    """
    raw = self.bytes(*args)
    if raw is None:
        return None
    return raw.rstrip(b"\x00").decode('utf-8')
return string stored in node
def update_image(self, data): if 1 in data.shape: data = data.squeeze() if self.conf.contrast_level is not None: clevels = [self.conf.contrast_level, 100.0-self.conf.contrast_level] imin, imax = np.percentile(data, clevels) data = np.clip((data - imin)/(im...
update image on panel, as quickly as possible
def fragment(pkt, fragsize=1480): fragsize = (fragsize + 7) // 8 * 8 lst = [] for p in pkt: s = bytes(p[IP].payload) nb = (len(s) + fragsize - 1) // fragsize for i in range(nb): q = p.copy() del q[IP].payload del q[IP].chksum del q[IP]....
Fragment a big IP datagram
def update(self, new_data: IntentDict):
    """Merge per-locale data from *new_data* into self.dict.

    Missing locales are created; existing ones are updated in place.
    """
    for locale, payload in new_data.items():
        self.dict.setdefault(locale, {}).update(payload)
Receive an update from the loaders.
def ifar(self, coinc_stat):
    """Inverse FAR (in Julian years) for the given coincident statistic.

    Counts background events louder than coinc_stat; the +1 makes the
    estimate conservative when no louder event exists.
    """
    louder = self.coincs.num_greater(coinc_stat)
    return self.background_time / lal.YRJUL_SI / (louder + 1)
Return the far that would be associated with the coincident given.
def unmarshal(self, value, bind_client=None): if not isinstance(value, self.type): o = self.type() if bind_client is not None and hasattr(o.__class__, 'bind_client'): o.bind_client = bind_client if isinstance(value, dict): for (k, v) in value.i...
Cast the specified value to the entity type.
async def handle_client_get_queue(self, client_addr, _: ClientGetQueue): jobs_running = list() for backend_job_id, content in self._job_running.items(): jobs_running.append((content[1].job_id, backend_job_id[0] == client_addr, self._registered_agents[content[0]], ...
Handles a ClientGetQueue message. Send back info about the job queue
def recv(self, filename, dest_file, timeout=None):
    """Stream *filename* from the device into the file-like *dest_file*."""
    transport = DataFilesyncTransport(self.stream)
    transport.write_data('RECV', filename, timeout)
    for chunk in transport.read_until_done('DATA', timeout):
        dest_file.write(chunk.data)
Retrieve a file from the device into the file-like dest_file.
def prettify_xml(xml_root):
    """Return *xml_root* serialized as a pretty-printed unicode string."""
    raw = etree.tostring(
        xml_root,
        encoding="utf-8",
        xml_declaration=True,
        pretty_print=True,
    )
    return get_unicode_str(raw)
Returns pretty-printed string representation of element tree.
def download(args): downloader = Downloader(server_index_url = args.server_index_url) if args.packages: for pkg_id in args.packages: rv = downloader.download(info_or_id=unicode(pkg_id), download_dir=args.dir, quiet=args.quiet, force=args.force, ...
Download polyglot packages and models.
def defaultBuilder(value, nt): if callable(value): def logbuilder(V): try: value(V) except: _log.exception("Error in Builder") raise return logbuilder def builder(V): try: if isinstance(value, Value): ...
Reasonably sensible default handling of put builder
def block_ip(ip_address): if not ip_address: return if config.DISABLE_IP_LOCKOUT: return key = get_ip_blocked_cache_key(ip_address) if config.COOLOFF_TIME: REDIS_SERVER.set(key, 'blocked', config.COOLOFF_TIME) else: REDIS_SERVER.set(key, 'blocked') send_ip_block_s...
given the ip, block it
def finish(self, status, response):
    """Mark the end of a recorded RPC.

    Stores the status, the response as a hex string, and the elapsed
    runtime since _start_time.
    """
    self.status = status
    self.response = binascii.hexlify(response).decode('utf-8')
    self.runtime = monotonic() - self._start_time
Mark the end of a recorded RPC.
def center_image(self, img):
    """Set the image's anchor point to its center (integer division)."""
    img.anchor_x = img.width // 2
    img.anchor_y = img.height // 2
Sets an image's anchor point to its center
def promptyn(msg, default=None): while True: yes = "Y" if default else "y" if default or default is None: no = "n" else: no = "N" confirm = prompt("%s [%s/%s]" % (msg, yes, no), "").lower() if confirm in ("y", "yes"): return True el...
Display a blocking prompt until the user confirms
def check_denovo_input(inputfile, params): background = params["background"] input_type = determine_file_type(inputfile) if input_type == "fasta": valid_bg = FA_VALID_BGS elif input_type in ["bed", "narrowpeak"]: genome = params["genome"] valid_bg = BED_VALID_BGS ...
Check if an input file is valid, which means BED, narrowPeak or FASTA
def _gen_full_path(self, filename, file_system=None): if file_system is None: return '{}/{}'.format(self.dest_file_system, filename) else: if ":" not in file_system: raise ValueError("Invalid file_system specified: {}".format(file_system)) return '{}/{...
Generate full file path on remote device.
def strip_accents(string):
    """Strip all accents from *string*.

    Decomposes to NFD and drops combining-mark ('Mn') characters.
    """
    decomposed = unicodedata.normalize('NFD', string)
    return u''.join(
        char for char in decomposed
        if unicodedata.category(char) != 'Mn'
    )
Strip all the accents from the string
def vectorize_dialogues(self, dialogues):
    """Vectorize every dialogue in *dialogues* and stack into a numpy array.

    Relies on self.vectorize_dialogue per dialogue; presumably all
    vectors share a shape so np.array stacks them — TODO confirm.
    """
    return np.array([self.vectorize_dialogue(d) for d in dialogues])
Take in a list of dialogues and vectorize them all
def _handle_poll(self, relpath, params): request = json.loads(params.get('q')[0]) ret = {} for poll in request: _id = poll.get('id', None) path = poll.get('path', None) pos = poll.get('pos', 0) if path: abspath = os.path.normpath(os.path.join(self._root, path)) if os....
Handle poll requests for raw file contents.
def needsattached(func):
    """Decorator: raise PositionError when used while not attached to a process."""
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if not self.attached:
            raise PositionError('Not attached to any process.')
        return func(self, *args, **kwargs)
    return wrapper
Decorator to prevent commands from being used when not attached.
def prompt_with_prob(self, orig_response=None, prob=None): if self.load_error: return 'Failed to read guidance config file' if hasattr(self.assignment, 'is_test'): log.info("Skipping prompt due to test mode") return "Test response" if prob is None: ...
Ask for rationale with a specific level of probability.
def replace_in_file(self, file_path, old_exp, new_exp): self.term.print_info(u"Making replacement into {}" .format(self.term.text_in_color(file_path, TERM_GREEN))) tmp_file = tempfile.NamedTemporaryFile(mod...
In the given file, replace all 'old_exp' by 'new_exp'.
def make_dist(name, version, **kwargs):
    """Convenience constructor for a Distribution from a name and version.

    Remaining keyword args feed Metadata; an empty or missing summary is
    replaced with a placeholder.
    """
    summary = kwargs.pop('summary', 'Placeholder for summary')
    metadata = Metadata(**kwargs)
    metadata.name = name
    metadata.version = version
    metadata.summary = summary or 'Placeholder for summary'
    return Distribution(metadata)
A convenience method for making a dist given just a name and version.
def wp_status(self):
    """Print how many waypoints have been received.

    Includes the expected total when it can be read; if anything in the
    first print raises (e.g. expected_count unavailable), fall back to
    printing only the received count.
    """
    try:
        print("Have %u of %u waypoints" % (self.wploader.count()+len(self.wp_received), self.wploader.expected_count))
    except Exception:
        print("Have %u waypoints" % (self.wploader.count()+len(self.wp_received)))
show status of wp download
def _handle_getconfig(self,cfg_file,*args,**options): if args: raise CommandError("supervisor getconfig takes no arguments") print cfg_file.read() return 0
Command 'supervisor getconfig' prints merged config to stdout.
def the_one(cls):
    """Get the single global HelpUrlExpert object, creating it on first use."""
    singleton = cls.THE_ONE
    if singleton is None:
        singleton = cls(settings.HELP_TOKENS_INI_FILE)
        cls.THE_ONE = singleton
    return singleton
Get the single global HelpUrlExpert object.
def _refresh_aldb_records(self, linkcode, address, group): if self.aldb.status in [ALDBStatus.LOADED, ALDBStatus.PARTIAL]: for mem_addr in self.aldb: rec = self.aldb[mem_addr] if linkcode in [0, 1, 3]: if rec.control_flags.is_high_water_mark: ...
Refresh the IM and device ALDB records.
def _get_num_similar_objects(self, obj):
    """Count StatementLines that duplicate *obj* (same date, amount, description)."""
    duplicates = StatementLine.objects.filter(
        date=obj.date,
        amount=obj.amount,
        description=obj.description,
    )
    return duplicates.count()
Get any statement lines which would be considered a duplicate of obj