repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
pudo/jsongraph
jsongraph/common.py
GraphOperations.get
def get(self, id, depth=3, schema=None):
    """Construct a single object based on its ID.

    If no schema is given, probe the graph for an ``rdf:type`` triple on
    the node and use the first matching schema known to the parent
    context.
    """
    uri = URIRef(id)
    if schema is not None:
        schema = self.parent.get_schema(schema)
    else:
        # Infer the schema from the node's rdf:type statements.
        for type_obj in self.graph.objects(subject=uri, predicate=RDF.type):
            schema = self.parent.get_schema(str(type_obj))
            if schema is not None:
                break
    binding = self.get_binding(schema, None)
    return self._objectify(uri, binding, depth=depth, path=set())
python
def get(self, id, depth=3, schema=None): """ Construct a single object based on its ID. """ uri = URIRef(id) if schema is None: for o in self.graph.objects(subject=uri, predicate=RDF.type): schema = self.parent.get_schema(str(o)) if schema is not None: break else: schema = self.parent.get_schema(schema) binding = self.get_binding(schema, None) return self._objectify(uri, binding, depth=depth, path=set())
[ "def", "get", "(", "self", ",", "id", ",", "depth", "=", "3", ",", "schema", "=", "None", ")", ":", "uri", "=", "URIRef", "(", "id", ")", "if", "schema", "is", "None", ":", "for", "o", "in", "self", ".", "graph", ".", "objects", "(", "subject",...
Construct a single object based on its ID.
[ "Construct", "a", "single", "object", "based", "on", "its", "ID", "." ]
train
https://github.com/pudo/jsongraph/blob/35e4f397dbe69cd5553cf9cb9ab98859c3620f03/jsongraph/common.py#L17-L28
pudo/jsongraph
jsongraph/common.py
GraphOperations._objectify
def _objectify(self, node, binding, depth, path):
    """Given an RDF node URI (and it's associated schema), return an
    object from the ``graph`` that represents the information available
    about this node.
    """
    if binding.is_object:
        result = {'$schema': binding.path}
        for (_, pred, obj) in self.graph.triples((node, None, None)):
            prop = binding.get_property(pred)
            # Skip unmapped predicates, exhausted depth budget and cycles.
            if prop is None or depth <= 1 or obj in path:
                continue
            # This is slightly odd but yield purty objects: drop nested
            # containers just before the depth budget runs out.
            if depth <= 2 and (prop.is_array or prop.is_object):
                continue
            value = self._objectify(obj, prop, depth - 1,
                                    path.union([node]))
            if prop.is_array and prop.name in result:
                result[prop.name].extend(value)
            else:
                result[prop.name] = value
        return result
    if binding.is_array:
        # Only the first item binding is ever used.
        for item in binding.items:
            return [self._objectify(node, item, depth, path)]
    else:
        # Plain literal node: convert to a native Python value.
        return node.toPython()
python
def _objectify(self, node, binding, depth, path): """ Given an RDF node URI (and it's associated schema), return an object from the ``graph`` that represents the information available about this node. """ if binding.is_object: obj = {'$schema': binding.path} for (s, p, o) in self.graph.triples((node, None, None)): prop = binding.get_property(p) if prop is None or depth <= 1 or o in path: continue # This is slightly odd but yield purty objects: if depth <= 2 and (prop.is_array or prop.is_object): continue sub_path = path.union([node]) value = self._objectify(o, prop, depth - 1, sub_path) if prop.is_array and prop.name in obj: obj[prop.name].extend(value) else: obj[prop.name] = value return obj elif binding.is_array: for item in binding.items: return [self._objectify(node, item, depth, path)] else: return node.toPython()
[ "def", "_objectify", "(", "self", ",", "node", ",", "binding", ",", "depth", ",", "path", ")", ":", "if", "binding", ".", "is_object", ":", "obj", "=", "{", "'$schema'", ":", "binding", ".", "path", "}", "for", "(", "s", ",", "p", ",", "o", ")", ...
Given an RDF node URI (and it's associated schema), return an object from the ``graph`` that represents the information available about this node.
[ "Given", "an", "RDF", "node", "URI", "(", "and", "it", "s", "associated", "schema", ")", "return", "an", "object", "from", "the", "graph", "that", "represents", "the", "information", "available", "about", "this", "node", "." ]
train
https://github.com/pudo/jsongraph/blob/35e4f397dbe69cd5553cf9cb9ab98859c3620f03/jsongraph/common.py#L39-L63
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseOAIRELoader.get_text_node
def get_text_node(self, tree, xpath_str):
    """Return a text node from given XML tree given an lxml XPath."""
    nodes = tree.xpath(xpath_str, namespaces=self.namespaces)
    try:
        text = nodes[0].text
    except IndexError:  # pragma: nocover
        # XPath matched nothing: treat as an empty text node.
        return ''
    return text_type(text) if text else ''
python
def get_text_node(self, tree, xpath_str): """Return a text node from given XML tree given an lxml XPath.""" try: text = tree.xpath(xpath_str, namespaces=self.namespaces)[0].text return text_type(text) if text else '' except IndexError: # pragma: nocover return ''
[ "def", "get_text_node", "(", "self", ",", "tree", ",", "xpath_str", ")", ":", "try", ":", "text", "=", "tree", ".", "xpath", "(", "xpath_str", ",", "namespaces", "=", "self", ".", "namespaces", ")", "[", "0", "]", ".", "text", "return", "text_type", ...
Return a text node from given XML tree given an lxml XPath.
[ "Return", "a", "text", "node", "from", "given", "XML", "tree", "given", "an", "lxml", "XPath", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L112-L118
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseOAIRELoader.get_subtree
def get_subtree(self, tree, xpath_str):
    """Return a subtree given an lxml XPath."""
    # Delegate to lxml, applying the loader's namespace map.
    ns_map = self.namespaces
    return tree.xpath(xpath_str, namespaces=ns_map)
python
def get_subtree(self, tree, xpath_str): """Return a subtree given an lxml XPath.""" return tree.xpath(xpath_str, namespaces=self.namespaces)
[ "def", "get_subtree", "(", "self", ",", "tree", ",", "xpath_str", ")", ":", "return", "tree", ".", "xpath", "(", "xpath_str", ",", "namespaces", "=", "self", ".", "namespaces", ")" ]
Return a subtree given an lxml XPath.
[ "Return", "a", "subtree", "given", "an", "lxml", "XPath", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L120-L122
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseOAIRELoader.fundertree2json
def fundertree2json(self, tree, oai_id):
    """Convert OpenAIRE's funder XML to JSON.

    :param tree: lxml element containing (or already being) a
        ``fundingtree`` node.
    :param oai_id: OAI identifier of the record; used as a resolution
        fallback when the funding tree carries no resolvable funder id.
    :returns: dict with ``doi``, ``url``, ``name`` and ``program`` keys.
    :raises FunderNotFoundError: if neither subfunder id, funder id nor
        the OAI id resolves to a FundRef DOI.
    :raises OAIRELoadingError: if the funder name must be looked up but
        the funder record has not been loaded yet.
    """
    try:
        tree = self.get_subtree(tree, 'fundingtree')[0]
    except IndexError:  # pragma: nocover
        pass
    funder_node = self.get_subtree(tree, 'funder')
    subfunder_node = self.get_subtree(tree, '//funding_level_0')
    funder_id = self.get_text_node(funder_node[0], './id') \
        if funder_node else None
    subfunder_id = self.get_text_node(subfunder_node[0], './id') \
        if subfunder_node else None
    funder_name = self.get_text_node(funder_node[0], './shortname') \
        if funder_node else ""
    subfunder_name = self.get_text_node(subfunder_node[0], './name') \
        if subfunder_node else ""
    # Try to resolve the subfunder first, on failure try to resolve the
    # main funder, on failure try the OAI record id, then give up.
    funder_doi_url = None
    if subfunder_id:
        funder_doi_url = self.funder_resolver.resolve_by_id(subfunder_id)
    if not funder_doi_url and funder_id:
        funder_doi_url = self.funder_resolver.resolve_by_id(funder_id)
    if not funder_doi_url:
        funder_doi_url = self.funder_resolver.resolve_by_oai_id(oai_id)
    if not funder_doi_url:
        raise FunderNotFoundError(oai_id, funder_id, subfunder_id)
    funder_doi = FundRefDOIResolver.strip_doi_host(funder_doi_url)
    if not funder_name:
        # Grab name from FundRef record.
        resolver = Resolver(
            pid_type='frdoi', object_type='rec', getter=Record.get_record)
        try:
            dummy_pid, funder_rec = resolver.resolve(funder_doi)
            funder_name = funder_rec['acronyms'][0]
        except PersistentIdentifierError:
            # BUG FIX: the two fragments previously concatenated to
            # "...prior toloading grants..." — the trailing space was
            # missing from the first fragment.
            raise OAIRELoadingError(
                "Please ensure that funders have been loaded prior to "
                "loading grants. Could not resolve funder {0}".format(
                    funder_doi))
    return dict(
        doi=funder_doi,
        url=funder_doi_url,
        name=funder_name,
        program=subfunder_name,
    )
python
def fundertree2json(self, tree, oai_id): """Convert OpenAIRE's funder XML to JSON.""" try: tree = self.get_subtree(tree, 'fundingtree')[0] except IndexError: # pragma: nocover pass funder_node = self.get_subtree(tree, 'funder') subfunder_node = self.get_subtree(tree, '//funding_level_0') funder_id = self.get_text_node(funder_node[0], './id') \ if funder_node else None subfunder_id = self.get_text_node(subfunder_node[0], './id') \ if subfunder_node else None funder_name = self.get_text_node(funder_node[0], './shortname') \ if funder_node else "" subfunder_name = self.get_text_node(subfunder_node[0], './name') \ if subfunder_node else "" # Try to resolve the subfunder first, on failure try to resolve the # main funder, on failure raise an error. funder_doi_url = None if subfunder_id: funder_doi_url = self.funder_resolver.resolve_by_id(subfunder_id) if not funder_doi_url: if funder_id: funder_doi_url = self.funder_resolver.resolve_by_id(funder_id) if not funder_doi_url: funder_doi_url = self.funder_resolver.resolve_by_oai_id(oai_id) if not funder_doi_url: raise FunderNotFoundError(oai_id, funder_id, subfunder_id) funder_doi = FundRefDOIResolver.strip_doi_host(funder_doi_url) if not funder_name: # Grab name from FundRef record. resolver = Resolver( pid_type='frdoi', object_type='rec', getter=Record.get_record) try: dummy_pid, funder_rec = resolver.resolve(funder_doi) funder_name = funder_rec['acronyms'][0] except PersistentIdentifierError: raise OAIRELoadingError( "Please ensure that funders have been loaded prior to" "loading grants. Could not resolve funder {0}".format( funder_doi)) return dict( doi=funder_doi, url=funder_doi_url, name=funder_name, program=subfunder_name, )
[ "def", "fundertree2json", "(", "self", ",", "tree", ",", "oai_id", ")", ":", "try", ":", "tree", "=", "self", ".", "get_subtree", "(", "tree", ",", "'fundingtree'", ")", "[", "0", "]", "except", "IndexError", ":", "# pragma: nocover", "pass", "funder_node"...
Convert OpenAIRE's funder XML to JSON.
[ "Convert", "OpenAIRE", "s", "funder", "XML", "to", "JSON", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L124-L175
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseOAIRELoader.grantxml2json
def grantxml2json(self, grant_xml):
    """Convert OpenAIRE grant XML into JSON."""
    tree = etree.fromstring(grant_xml)
    # XML harvested from OAI-PMH has a different format/structure
    if tree.prefix == 'oai':
        project_xp = '/oai:record/oai:metadata/oaf:entity/oaf:project'
        header_xp = '/oai:record/oai:header'
        id_xp, modified_xp = 'oai:identifier', 'oai:datestamp'
    else:
        project_xp = '/record/result/metadata/oaf:entity/oaf:project'
        header_xp = '/record/result/header'
        id_xp, modified_xp = 'dri:objIdentifier', 'dri:dateOfTransformation'
    ptree = self.get_subtree(tree, project_xp)[0]
    header = self.get_subtree(tree, header_xp)[0]
    oai_id = self.get_text_node(header, id_xp)
    modified = self.get_text_node(header, modified_xp)
    url = self.get_text_node(ptree, 'websiteurl')
    code = self.get_text_node(ptree, 'code')
    title = self.get_text_node(ptree, 'title')
    acronym = self.get_text_node(ptree, 'acronym')
    startdate = self.get_text_node(ptree, 'startdate')
    enddate = self.get_text_node(ptree, 'enddate')
    funder = self.fundertree2json(ptree, oai_id)
    internal_id = "{0}::{1}".format(funder['doi'], code)
    eurepo_id = \
        "info:eu-repo/grantAgreement/{funder}/{program}/{code}/".format(
            funder=quote_plus(funder['name'].encode('utf8')),
            program=quote_plus(funder['program'].encode('utf8')),
            code=quote_plus(code.encode('utf8')),
        )
    return {
        '$schema': self.schema_formatter.schema_url,
        'internal_id': internal_id,
        'identifiers': {
            'oaf': oai_id,
            'eurepo': eurepo_id,
            'purl': url if url.startswith("http://purl.org/") else None,
        },
        'code': code,
        'title': title,
        'acronym': acronym,
        'startdate': startdate,
        'enddate': enddate,
        'funder': {'$ref': funder['url']},
        'program': funder['program'],
        'url': url,
        'remote_modified': modified,
    }
python
def grantxml2json(self, grant_xml): """Convert OpenAIRE grant XML into JSON.""" tree = etree.fromstring(grant_xml) # XML harvested from OAI-PMH has a different format/structure if tree.prefix == 'oai': ptree = self.get_subtree( tree, '/oai:record/oai:metadata/oaf:entity/oaf:project')[0] header = self.get_subtree(tree, '/oai:record/oai:header')[0] oai_id = self.get_text_node(header, 'oai:identifier') modified = self.get_text_node(header, 'oai:datestamp') else: ptree = self.get_subtree( tree, '/record/result/metadata/oaf:entity/oaf:project')[0] header = self.get_subtree(tree, '/record/result/header')[0] oai_id = self.get_text_node(header, 'dri:objIdentifier') modified = self.get_text_node(header, 'dri:dateOfTransformation') url = self.get_text_node(ptree, 'websiteurl') code = self.get_text_node(ptree, 'code') title = self.get_text_node(ptree, 'title') acronym = self.get_text_node(ptree, 'acronym') startdate = self.get_text_node(ptree, 'startdate') enddate = self.get_text_node(ptree, 'enddate') funder = self.fundertree2json(ptree, oai_id) internal_id = "{0}::{1}".format(funder['doi'], code) eurepo_id = \ "info:eu-repo/grantAgreement/{funder}/{program}/{code}/".format( funder=quote_plus(funder['name'].encode('utf8')), program=quote_plus(funder['program'].encode('utf8')), code=quote_plus(code.encode('utf8')), ) ret_json = { '$schema': self.schema_formatter.schema_url, 'internal_id': internal_id, 'identifiers': { 'oaf': oai_id, 'eurepo': eurepo_id, 'purl': url if url.startswith("http://purl.org/") else None, }, 'code': code, 'title': title, 'acronym': acronym, 'startdate': startdate, 'enddate': enddate, 'funder': {'$ref': funder['url']}, 'program': funder['program'], 'url': url, 'remote_modified': modified, } return ret_json
[ "def", "grantxml2json", "(", "self", ",", "grant_xml", ")", ":", "tree", "=", "etree", ".", "fromstring", "(", "grant_xml", ")", "# XML harvested from OAI-PMH has a different format/structure", "if", "tree", ".", "prefix", "==", "'oai'", ":", "ptree", "=", "self",...
Convert OpenAIRE grant XML into JSON.
[ "Convert", "OpenAIRE", "grant", "XML", "into", "JSON", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L177-L228
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
LocalOAIRELoader.iter_grants
def iter_grants(self, as_json=True):
    """Fetch records from the SQLite database."""
    self._connect()
    rows = self.db_connection.cursor().execute(
        "SELECT data, format FROM grants"
    )
    for data, data_format in rows:
        if not as_json:
            # XML output requested: JSON rows cannot be converted back.
            if data_format == 'json':
                raise Exception("Cannot convert JSON source to XML output.")
        elif data_format == 'xml':
            data = self.grantxml2json(data)
        elif data_format == 'json':
            data = json.loads(data)
        yield data
    self._disconnect()
python
def iter_grants(self, as_json=True): """Fetch records from the SQLite database.""" self._connect() result = self.db_connection.cursor().execute( "SELECT data, format FROM grants" ) for data, data_format in result: if (not as_json) and data_format == 'json': raise Exception("Cannot convert JSON source to XML output.") elif as_json and data_format == 'xml': data = self.grantxml2json(data) elif as_json and data_format == 'json': data = json.loads(data) yield data self._disconnect()
[ "def", "iter_grants", "(", "self", ",", "as_json", "=", "True", ")", ":", "self", ".", "_connect", "(", ")", "result", "=", "self", ".", "db_connection", ".", "cursor", "(", ")", ".", "execute", "(", "\"SELECT data, format FROM grants\"", ")", "for", "data...
Fetch records from the SQLite database.
[ "Fetch", "records", "from", "the", "SQLite", "database", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L279-L293
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
RemoteOAIRELoader.iter_grants
def iter_grants(self, as_json=True):
    """Fetch grants from a remote OAI-PMH endpoint.

    Return the Sickle-provided generator object.
    """
    records = self.client.ListRecords(metadataPrefix='oaf',
                                      set=self.setspec)
    for record in records:
        try:
            payload = record.raw  # record.raw is the harvested XML
            if as_json:
                payload = self.grantxml2json(payload)
            yield payload
        except FunderNotFoundError as exc:
            # Skip grants whose funder is unknown instead of aborting.
            current_app.logger.warning("Funder '{0}' not found.".format(
                exc.funder_id))
python
def iter_grants(self, as_json=True): """Fetch grants from a remote OAI-PMH endpoint. Return the Sickle-provided generator object. """ records = self.client.ListRecords(metadataPrefix='oaf', set=self.setspec) for rec in records: try: grant_out = rec.raw # rec.raw is XML if as_json: grant_out = self.grantxml2json(grant_out) yield grant_out except FunderNotFoundError as e: current_app.logger.warning("Funder '{0}' not found.".format( e.funder_id))
[ "def", "iter_grants", "(", "self", ",", "as_json", "=", "True", ")", ":", "records", "=", "self", ".", "client", ".", "ListRecords", "(", "metadataPrefix", "=", "'oaf'", ",", "set", "=", "self", ".", "setspec", ")", "for", "rec", "in", "records", ":", ...
Fetch grants from a remote OAI-PMH endpoint. Return the Sickle-provided generator object.
[ "Fetch", "grants", "from", "a", "remote", "OAI", "-", "PMH", "endpoint", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L311-L326
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
OAIREDumper.dump
def dump(self, as_json=True, commit_batch_size=100):
    """Dump the grant information to a local storage.

    :param as_json: Convert XML to JSON before saving (default: True).
    :param commit_batch_size: Number of inserted records between
        intermediate commits (default: 100).
    """
    connection = sqlite3.connect(self.destination)
    try:
        format_ = 'json' if as_json else 'xml'
        if not self._db_exists(connection):
            connection.execute(
                "CREATE TABLE grants (data text, format text)")
        # This will call the RemoteOAIRELoader.iter_grants and fetch
        # records from remote location.
        grants_iterator = self.loader.iter_grants(as_json=as_json)
        for idx, grant_data in enumerate(grants_iterator, 1):
            if as_json:
                grant_data = json.dumps(grant_data, indent=2)
            connection.execute(
                "INSERT INTO grants VALUES (?, ?)", (grant_data, format_))
            # Commit to database every N records
            if idx % commit_batch_size == 0:
                connection.commit()
        connection.commit()
    finally:
        # BUG FIX: always release the SQLite handle — previously the
        # connection leaked when harvesting or an insert raised.
        connection.close()
python
def dump(self, as_json=True, commit_batch_size=100): """ Dump the grant information to a local storage. :param as_json: Convert XML to JSON before saving (default: True). """ connection = sqlite3.connect(self.destination) format_ = 'json' if as_json else 'xml' if not self._db_exists(connection): connection.execute( "CREATE TABLE grants (data text, format text)") # This will call the RemoteOAIRELoader.iter_grants and fetch # records from remote location. grants_iterator = self.loader.iter_grants(as_json=as_json) for idx, grant_data in enumerate(grants_iterator, 1): if as_json: grant_data = json.dumps(grant_data, indent=2) connection.execute( "INSERT INTO grants VALUES (?, ?)", (grant_data, format_)) # Commit to database every N records if idx % commit_batch_size == 0: connection.commit() connection.commit() connection.close()
[ "def", "dump", "(", "self", ",", "as_json", "=", "True", ",", "commit_batch_size", "=", "100", ")", ":", "connection", "=", "sqlite3", ".", "connect", "(", "self", ".", "destination", ")", "format_", "=", "'json'", "if", "as_json", "else", "'xml'", "if",...
Dump the grant information to a local storage. :param as_json: Convert XML to JSON before saving (default: True).
[ "Dump", "the", "grant", "information", "to", "a", "local", "storage", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L357-L382
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseFundRefLoader.get_attrib
def get_attrib(self, et_node, prefixed_attrib):
    """Get a prefixed attribute like 'rdf:resource' from ET node."""
    prefix, attrib = prefixed_attrib.split(':')
    # Expand the prefix into ElementTree's '{namespace}local' form.
    qualified = '{{{0}}}{1}'.format(self.namespaces[prefix], attrib)
    return et_node.get(qualified)
python
def get_attrib(self, et_node, prefixed_attrib): """Get a prefixed attribute like 'rdf:resource' from ET node.""" prefix, attrib = prefixed_attrib.split(':') return et_node.get('{{{0}}}{1}'.format(self.namespaces[prefix], attrib))
[ "def", "get_attrib", "(", "self", ",", "et_node", ",", "prefixed_attrib", ")", ":", "prefix", ",", "attrib", "=", "prefixed_attrib", ".", "split", "(", "':'", ")", "return", "et_node", ".", "get", "(", "'{{{0}}}{1}'", ".", "format", "(", "self", ".", "na...
Get a prefixed attribute like 'rdf:resource' from ET node.
[ "Get", "a", "prefixed", "attribute", "like", "rdf", ":", "resource", "from", "ET", "node", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L437-L441
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseFundRefLoader.fundrefxml2json
def fundrefxml2json(self, node):
    """Convert a FundRef 'skos:Concept' node into JSON.

    :param node: ElementTree node for one 'skos:Concept' funder entry.
    :returns: dict matching the funder JSON schema (doi, identifiers,
        name, acronyms, parent, country, type, subtype, remote_created,
        remote_modified).
    """
    doi = FundRefDOIResolver.strip_doi_host(self.get_attrib(node,
                                                            'rdf:about'))
    oaf_id = FundRefDOIResolver().resolve_by_doi(
        "http://dx.doi.org/" + doi)
    name = node.find('./skosxl:prefLabel/skosxl:Label/skosxl:literalForm',
                     namespaces=self.namespaces).text
    # Extract acronyms: altLabels flagged with the 'abbrevName' usage.
    acronyms = []
    for n in node.findall('./skosxl:altLabel/skosxl:Label',
                          namespaces=self.namespaces):
        usagenode = n.find('./fref:usageFlag', namespaces=self.namespaces)
        if usagenode is not None:
            if self.get_attrib(usagenode, 'rdf:resource') == \
                    ('http://data.crossref.org/fundingdata'
                     '/vocabulary/abbrevName'):
                label = n.find('./skosxl:literalForm',
                               namespaces=self.namespaces)
                if label is not None:
                    acronyms.append(label.text)
    parent_node = node.find('./skos:broader', namespaces=self.namespaces)
    if parent_node is None:
        parent = {}
    else:
        parent = {
            "$ref": self.get_attrib(parent_node, 'rdf:resource'),
        }
    country_elem = node.find('./svf:country', namespaces=self.namespaces)
    country_url = self.get_attrib(country_elem, 'rdf:resource')
    country_code = self.cc_resolver.cc_from_url(country_url)
    type_ = node.find('./svf:fundingBodyType',
                      namespaces=self.namespaces).text
    subtype = node.find('./svf:fundingBodySubType',
                        namespaces=self.namespaces).text
    # FIX: removed a second, unused './svf:country' lookup that
    # pointlessly re-queried the tree here.
    modified_elem = node.find('./dct:modified', namespaces=self.namespaces)
    created_elem = node.find('./dct:created', namespaces=self.namespaces)
    json_dict = {
        '$schema': self.schema_formatter.schema_url,
        'doi': doi,
        'identifiers': {
            'oaf': oaf_id,
        },
        'name': name,
        'acronyms': acronyms,
        'parent': parent,
        'country': country_code,
        'type': type_,
        'subtype': subtype.lower(),
        'remote_created': (created_elem.text
                           if created_elem is not None else None),
        'remote_modified': (modified_elem.text
                            if modified_elem is not None else None),
    }
    return json_dict
python
def fundrefxml2json(self, node): """Convert a FundRef 'skos:Concept' node into JSON.""" doi = FundRefDOIResolver.strip_doi_host(self.get_attrib(node, 'rdf:about')) oaf_id = FundRefDOIResolver().resolve_by_doi( "http://dx.doi.org/" + doi) name = node.find('./skosxl:prefLabel/skosxl:Label/skosxl:literalForm', namespaces=self.namespaces).text # Extract acronyms acronyms = [] for n in node.findall('./skosxl:altLabel/skosxl:Label', namespaces=self.namespaces): usagenode = n.find('./fref:usageFlag', namespaces=self.namespaces) if usagenode is not None: if self.get_attrib(usagenode, 'rdf:resource') == \ ('http://data.crossref.org/fundingdata' '/vocabulary/abbrevName'): label = n.find('./skosxl:literalForm', namespaces=self.namespaces) if label is not None: acronyms.append(label.text) parent_node = node.find('./skos:broader', namespaces=self.namespaces) if parent_node is None: parent = {} else: parent = { "$ref": self.get_attrib(parent_node, 'rdf:resource'), } country_elem = node.find('./svf:country', namespaces=self.namespaces) country_url = self.get_attrib(country_elem, 'rdf:resource') country_code = self.cc_resolver.cc_from_url(country_url) type_ = node.find('./svf:fundingBodyType', namespaces=self.namespaces).text subtype = node.find('./svf:fundingBodySubType', namespaces=self.namespaces).text country_elem = node.find('./svf:country', namespaces=self.namespaces) modified_elem = node.find('./dct:modified', namespaces=self.namespaces) created_elem = node.find('./dct:created', namespaces=self.namespaces) json_dict = { '$schema': self.schema_formatter.schema_url, 'doi': doi, 'identifiers': { 'oaf': oaf_id, }, 'name': name, 'acronyms': acronyms, 'parent': parent, 'country': country_code, 'type': type_, 'subtype': subtype.lower(), 'remote_created': (created_elem.text if created_elem is not None else None), 'remote_modified': (modified_elem.text if modified_elem is not None else None), } return json_dict
[ "def", "fundrefxml2json", "(", "self", ",", "node", ")", ":", "doi", "=", "FundRefDOIResolver", ".", "strip_doi_host", "(", "self", ".", "get_attrib", "(", "node", ",", "'rdf:about'", ")", ")", "oaf_id", "=", "FundRefDOIResolver", "(", ")", ".", "resolve_by_...
Convert a FundRef 'skos:Concept' node into JSON.
[ "Convert", "a", "FundRef", "skos", ":", "Concept", "node", "into", "JSON", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L443-L501
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
BaseFundRefLoader.iter_funders
def iter_funders(self):
    """Get a converted list of Funders as JSON dict."""
    # Each top-level 'skos:Concept' element is one funder record.
    concepts = self.doc_root.findall('./skos:Concept',
                                     namespaces=self.namespaces)
    for concept in concepts:
        yield self.fundrefxml2json(concept)
python
def iter_funders(self): """Get a converted list of Funders as JSON dict.""" root = self.doc_root funders = root.findall('./skos:Concept', namespaces=self.namespaces) for funder in funders: funder_json = self.fundrefxml2json(funder) yield funder_json
[ "def", "iter_funders", "(", "self", ")", ":", "root", "=", "self", ".", "doc_root", "funders", "=", "root", ".", "findall", "(", "'./skos:Concept'", ",", "namespaces", "=", "self", ".", "namespaces", ")", "for", "funder", "in", "funders", ":", "funder_json...
Get a converted list of Funders as JSON dict.
[ "Get", "a", "converted", "list", "of", "Funders", "as", "JSON", "dict", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L503-L509
inveniosoftware/invenio-openaire
invenio_openaire/loaders.py
FundRefDOIResolver.resolve_by_oai_id
def resolve_by_oai_id(self, oai_id):
    """Resolve the funder from the OpenAIRE OAI record id.

    Hack for when funder is not provided in OpenAIRE.
    """
    marker = 'oai:dnet:'
    if oai_id.startswith(marker):
        oai_id = oai_id[len(marker):]
    prefix = oai_id.split("::")[0]
    # Build the lookup key, e.g. 'ec_fp7' -> 'ec_fp7::ECFP7'.
    key = "{0}::{1}".format(prefix, prefix.replace("_", "").upper())
    return self.data.get(key)
python
def resolve_by_oai_id(self, oai_id): """Resolve the funder from the OpenAIRE OAI record id. Hack for when funder is not provided in OpenAIRE. """ if oai_id.startswith('oai:dnet:'): oai_id = oai_id[len('oai:dnet:'):] prefix = oai_id.split("::")[0] suffix = prefix.replace("_", "").upper() oaf = "{0}::{1}".format(prefix, suffix) return self.data.get(oaf)
[ "def", "resolve_by_oai_id", "(", "self", ",", "oai_id", ")", ":", "if", "oai_id", ".", "startswith", "(", "'oai:dnet:'", ")", ":", "oai_id", "=", "oai_id", "[", "len", "(", "'oai:dnet:'", ")", ":", "]", "prefix", "=", "oai_id", ".", "split", "(", "\"::...
Resolve the funder from the OpenAIRE OAI record id. Hack for when funder is not provided in OpenAIRE.
[ "Resolve", "the", "funder", "from", "the", "OpenAIRE", "OAI", "record", "id", "." ]
train
https://github.com/inveniosoftware/invenio-openaire/blob/71860effff6abe7f658d3a11894e064202ef1c36/invenio_openaire/loaders.py#L578-L588
rande/python-simple-ioc
ioc/locator.py
split_resource_path
def split_resource_path(resource):
    """Split a path into segments and perform a sanity check.

    If it detects '..' in the path it will raise a `TemplateNotFound`
    error.
    """
    segments = []
    for segment in resource.split('/'):
        # Reject OS separators and parent-directory traversal.
        unsafe = (path.sep in segment
                  or (path.altsep and path.altsep in segment)
                  or segment == path.pardir)
        if unsafe:
            raise ResourceNotFound(resource)
        if segment and segment != '.':
            segments.append(segment)
    return segments
python
def split_resource_path(resource): """Split a path into segments and perform a sanity check. If it detects '..' in the path it will raise a `TemplateNotFound` error. """ pieces = [] for piece in resource.split('/'): if path.sep in piece \ or (path.altsep and path.altsep in piece) or \ piece == path.pardir: raise ResourceNotFound(resource) elif piece and piece != '.': pieces.append(piece) return pieces
[ "def", "split_resource_path", "(", "resource", ")", ":", "pieces", "=", "[", "]", "for", "piece", "in", "resource", ".", "split", "(", "'/'", ")", ":", "if", "path", ".", "sep", "in", "piece", "or", "(", "path", ".", "altsep", "and", "path", ".", "...
Split a path into segments and perform a sanity check. If it detects '..' in the path it will raise a `TemplateNotFound` error.
[ "Split", "a", "path", "into", "segments", "and", "perform", "a", "sanity", "check", ".", "If", "it", "detects", "..", "in", "the", "path", "it", "will", "raise", "a", "TemplateNotFound", "error", "." ]
train
https://github.com/rande/python-simple-ioc/blob/36ddf667c1213a07a53cd4cdd708d02494e5190b/ioc/locator.py#L26-L38
pudo/jsongraph
jsongraph/reflect.py
predicates
def predicates(graph):
    """Return a listing of all known predicates in the registered
    schemata, including the schema path they associate with, their name
    and allowed types.

    Yields ``(schema_path, property_name, types_tuple)`` triples, each
    at most once.
    """
    seen = set()

    def _traverse(binding):
        # Guard against schema cycles via the shared ``seen`` set.
        if binding.path in seen:
            return
        seen.add(binding.path)
        if binding.is_object:
            for prop in binding.properties:
                yield (binding.path, prop.name, tuple(prop.types))
                for pred in _traverse(prop):
                    yield pred
        elif binding.is_array:
            for item in binding.items:
                for pred in _traverse(item):
                    yield pred

    # BUG FIX: the combined schema list was built but never iterated —
    # the loop below previously re-read only ``graph.aliases.values()``,
    # silently skipping resolver-store schemata; also ``dict.values()``
    # has no ``.extend()`` under Python 3.
    schemas = list(graph.aliases.values())
    schemas.extend(graph.resolver.store)
    for schema_uri in schemas:
        binding = graph.get_binding(schema_uri, None)
        for pred in _traverse(binding):
            if pred not in seen:
                yield pred
                seen.add(pred)
python
def predicates(graph): """ Return a listing of all known predicates in the registered schemata, including the schema path they associate with, their name and allowed types. """ seen = set() def _traverse(binding): if binding.path in seen: return seen.add(binding.path) if binding.is_object: for prop in binding.properties: yield (binding.path, prop.name, tuple(prop.types)) for pred in _traverse(prop): yield pred elif binding.is_array: for item in binding.items: for pred in _traverse(item): yield pred schemas = graph.aliases.values() schemas.extend(graph.resolver.store) for schema_uri in graph.aliases.values(): binding = graph.get_binding(schema_uri, None) for pred in _traverse(binding): if pred not in seen: yield pred seen.add(pred)
[ "def", "predicates", "(", "graph", ")", ":", "seen", "=", "set", "(", ")", "def", "_traverse", "(", "binding", ")", ":", "if", "binding", ".", "path", "in", "seen", ":", "return", "seen", ".", "add", "(", "binding", ".", "path", ")", "if", "binding...
Return a listing of all known predicates in the registered schemata, including the schema path they associate with, their name and allowed types.
[ "Return", "a", "listing", "of", "all", "known", "predicates", "in", "the", "registered", "schemata", "including", "the", "schema", "path", "they", "associate", "with", "their", "name", "and", "allowed", "types", "." ]
train
https://github.com/pudo/jsongraph/blob/35e4f397dbe69cd5553cf9cb9ab98859c3620f03/jsongraph/reflect.py#L2-L30
twisted/mantissa
xmantissa/signup.py
passwordReset1to2
def passwordReset1to2(old): """ Power down and delete the item """ new = old.upgradeVersion(old.typeName, 1, 2, installedOn=None) for iface in new.store.interfacesFor(new): new.store.powerDown(new, iface) new.deleteFromStore()
python
def passwordReset1to2(old): """ Power down and delete the item """ new = old.upgradeVersion(old.typeName, 1, 2, installedOn=None) for iface in new.store.interfacesFor(new): new.store.powerDown(new, iface) new.deleteFromStore()
[ "def", "passwordReset1to2", "(", "old", ")", ":", "new", "=", "old", ".", "upgradeVersion", "(", "old", ".", "typeName", ",", "1", ",", "2", ",", "installedOn", "=", "None", ")", "for", "iface", "in", "new", ".", "store", ".", "interfacesFor", "(", "...
Power down and delete the item
[ "Power", "down", "and", "delete", "the", "item" ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L319-L326
twisted/mantissa
xmantissa/signup.py
upgradeUserInfo1to2
def upgradeUserInfo1to2(oldUserInfo): """ Concatenate the I{firstName} and I{lastName} attributes from the old user info item and set the result as the I{realName} attribute of the upgraded item. """ newUserInfo = oldUserInfo.upgradeVersion( UserInfo.typeName, 1, 2, realName=oldUserInfo.firstName + u" " + oldUserInfo.lastName) return newUserInfo
python
def upgradeUserInfo1to2(oldUserInfo): """ Concatenate the I{firstName} and I{lastName} attributes from the old user info item and set the result as the I{realName} attribute of the upgraded item. """ newUserInfo = oldUserInfo.upgradeVersion( UserInfo.typeName, 1, 2, realName=oldUserInfo.firstName + u" " + oldUserInfo.lastName) return newUserInfo
[ "def", "upgradeUserInfo1to2", "(", "oldUserInfo", ")", ":", "newUserInfo", "=", "oldUserInfo", ".", "upgradeVersion", "(", "UserInfo", ".", "typeName", ",", "1", ",", "2", ",", "realName", "=", "oldUserInfo", ".", "firstName", "+", "u\" \"", "+", "oldUserInfo"...
Concatenate the I{firstName} and I{lastName} attributes from the old user info item and set the result as the I{realName} attribute of the upgraded item.
[ "Concatenate", "the", "I", "{", "firstName", "}", "and", "I", "{", "lastName", "}", "attributes", "from", "the", "old", "user", "info", "item", "and", "set", "the", "result", "as", "the", "I", "{", "realName", "}", "attribute", "of", "the", "upgraded", ...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L695-L704
twisted/mantissa
xmantissa/signup.py
ticket1to2
def ticket1to2(old): """ change Ticket to refer to Products and not benefactor factories. """ if isinstance(old.benefactor, Multifactor): types = list(chain(*[b.powerupNames for b in old.benefactor.benefactors('ascending')])) elif isinstance(old.benefactor, InitializerBenefactor): #oh man what a mess types = list(chain(*[b.powerupNames for b in old.benefactor.realBenefactor.benefactors('ascending')])) newProduct = old.store.findOrCreate(Product, types=types) if old.issuer is None: issuer = old.store.findOrCreate(TicketBooth) else: issuer = old.issuer t = old.upgradeVersion(Ticket.typeName, 1, 2, product = newProduct, issuer = issuer, booth = old.booth, avatar = old.avatar, claimed = old.claimed, email = old.email, nonce = old.nonce)
python
def ticket1to2(old): """ change Ticket to refer to Products and not benefactor factories. """ if isinstance(old.benefactor, Multifactor): types = list(chain(*[b.powerupNames for b in old.benefactor.benefactors('ascending')])) elif isinstance(old.benefactor, InitializerBenefactor): #oh man what a mess types = list(chain(*[b.powerupNames for b in old.benefactor.realBenefactor.benefactors('ascending')])) newProduct = old.store.findOrCreate(Product, types=types) if old.issuer is None: issuer = old.store.findOrCreate(TicketBooth) else: issuer = old.issuer t = old.upgradeVersion(Ticket.typeName, 1, 2, product = newProduct, issuer = issuer, booth = old.booth, avatar = old.avatar, claimed = old.claimed, email = old.email, nonce = old.nonce)
[ "def", "ticket1to2", "(", "old", ")", ":", "if", "isinstance", "(", "old", ".", "benefactor", ",", "Multifactor", ")", ":", "types", "=", "list", "(", "chain", "(", "*", "[", "b", ".", "powerupNames", "for", "b", "in", "old", ".", "benefactor", ".", ...
change Ticket to refer to Products and not benefactor factories.
[ "change", "Ticket", "to", "refer", "to", "Products", "and", "not", "benefactor", "factories", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L982-L1008
twisted/mantissa
xmantissa/signup.py
_getPublicSignupInfo
def _getPublicSignupInfo(siteStore): """ Get information about public web-based signup mechanisms. @param siteStore: a store with some signups installed on it (as indicated by _SignupTracker instances). @return: a generator which yields 2-tuples of (prompt, url) where 'prompt' is unicode briefly describing the signup mechanism (e.g. "Sign Up"), and 'url' is a (unicode) local URL linking to a page where an anonymous user can access it. """ # Note the underscore; this _should_ be a public API but it is currently an # unfortunate hack; there should be a different powerup interface that # requires prompt and prefixURL attributes rather than _SignupTracker. # -glyph for tr in siteStore.query(_SignupTracker): si = tr.signupItem p = getattr(si, 'prompt', None) u = getattr(si, 'prefixURL', None) if p is not None and u is not None: yield (p, u'/'+u)
python
def _getPublicSignupInfo(siteStore): """ Get information about public web-based signup mechanisms. @param siteStore: a store with some signups installed on it (as indicated by _SignupTracker instances). @return: a generator which yields 2-tuples of (prompt, url) where 'prompt' is unicode briefly describing the signup mechanism (e.g. "Sign Up"), and 'url' is a (unicode) local URL linking to a page where an anonymous user can access it. """ # Note the underscore; this _should_ be a public API but it is currently an # unfortunate hack; there should be a different powerup interface that # requires prompt and prefixURL attributes rather than _SignupTracker. # -glyph for tr in siteStore.query(_SignupTracker): si = tr.signupItem p = getattr(si, 'prompt', None) u = getattr(si, 'prefixURL', None) if p is not None and u is not None: yield (p, u'/'+u)
[ "def", "_getPublicSignupInfo", "(", "siteStore", ")", ":", "# Note the underscore; this _should_ be a public API but it is currently an", "# unfortunate hack; there should be a different powerup interface that", "# requires prompt and prefixURL attributes rather than _SignupTracker.", "# -glyph", ...
Get information about public web-based signup mechanisms. @param siteStore: a store with some signups installed on it (as indicated by _SignupTracker instances). @return: a generator which yields 2-tuples of (prompt, url) where 'prompt' is unicode briefly describing the signup mechanism (e.g. "Sign Up"), and 'url' is a (unicode) local URL linking to a page where an anonymous user can access it.
[ "Get", "information", "about", "public", "web", "-", "based", "signup", "mechanisms", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L1052-L1075
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.locateChild
def locateChild(self, ctx, segments): """ Initialize self with the given key's L{_PasswordResetAttempt}, if any. @param segments: a L{_PasswordResetAttempt} key (hopefully) @return: C{(self, ())} with C{self.attempt} initialized, or L{NotFound} @see: L{attemptByKey} """ if len(segments) == 1: attempt = self.attemptByKey(unicode(segments[0])) if attempt is not None: self.attempt = attempt return (self, ()) return NotFound
python
def locateChild(self, ctx, segments): """ Initialize self with the given key's L{_PasswordResetAttempt}, if any. @param segments: a L{_PasswordResetAttempt} key (hopefully) @return: C{(self, ())} with C{self.attempt} initialized, or L{NotFound} @see: L{attemptByKey} """ if len(segments) == 1: attempt = self.attemptByKey(unicode(segments[0])) if attempt is not None: self.attempt = attempt return (self, ()) return NotFound
[ "def", "locateChild", "(", "self", ",", "ctx", ",", "segments", ")", ":", "if", "len", "(", "segments", ")", "==", "1", ":", "attempt", "=", "self", ".", "attemptByKey", "(", "unicode", "(", "segments", "[", "0", "]", ")", ")", "if", "attempt", "is...
Initialize self with the given key's L{_PasswordResetAttempt}, if any. @param segments: a L{_PasswordResetAttempt} key (hopefully) @return: C{(self, ())} with C{self.attempt} initialized, or L{NotFound} @see: L{attemptByKey}
[ "Initialize", "self", "with", "the", "given", "key", "s", "L", "{", "_PasswordResetAttempt", "}", "if", "any", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L109-L122
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.renderHTTP
def renderHTTP(self, ctx): """ Handle the password reset form. The following exchange describes the process: S: Render C{reset} C: POST C{username} or C{email} S: L{handleRequestForUser}, render C{reset-check-email} (User follows the emailed reset link) S: Render C{reset-step-two} C: POST C{password1} S: L{resetPassword}, render C{reset-done} """ req = inevow.IRequest(ctx) if req.method == 'POST': if req.args.get('username', [''])[0]: user = unicode(usernameFromRequest(req), 'ascii') self.handleRequestForUser(user, URL.fromContext(ctx)) self.fragment = self.templateResolver.getDocFactory( 'reset-check-email') elif req.args.get('email', [''])[0]: email = req.args['email'][0].decode('ascii') acct = self.accountByAddress(email) if acct is not None: username = '@'.join( userbase.getAccountNames(acct.avatars.open()).next()) self.handleRequestForUser(username, URL.fromContext(ctx)) self.fragment = self.templateResolver.getDocFactory('reset-check-email') elif 'password1' in req.args: (password,) = req.args['password1'] self.resetPassword(self.attempt, unicode(password)) self.fragment = self.templateResolver.getDocFactory('reset-done') else: # Empty submit; redirect back to self return URL.fromContext(ctx) elif self.attempt: self.fragment = self.templateResolver.getDocFactory('reset-step-two') return PublicPage.renderHTTP(self, ctx)
python
def renderHTTP(self, ctx): """ Handle the password reset form. The following exchange describes the process: S: Render C{reset} C: POST C{username} or C{email} S: L{handleRequestForUser}, render C{reset-check-email} (User follows the emailed reset link) S: Render C{reset-step-two} C: POST C{password1} S: L{resetPassword}, render C{reset-done} """ req = inevow.IRequest(ctx) if req.method == 'POST': if req.args.get('username', [''])[0]: user = unicode(usernameFromRequest(req), 'ascii') self.handleRequestForUser(user, URL.fromContext(ctx)) self.fragment = self.templateResolver.getDocFactory( 'reset-check-email') elif req.args.get('email', [''])[0]: email = req.args['email'][0].decode('ascii') acct = self.accountByAddress(email) if acct is not None: username = '@'.join( userbase.getAccountNames(acct.avatars.open()).next()) self.handleRequestForUser(username, URL.fromContext(ctx)) self.fragment = self.templateResolver.getDocFactory('reset-check-email') elif 'password1' in req.args: (password,) = req.args['password1'] self.resetPassword(self.attempt, unicode(password)) self.fragment = self.templateResolver.getDocFactory('reset-done') else: # Empty submit; redirect back to self return URL.fromContext(ctx) elif self.attempt: self.fragment = self.templateResolver.getDocFactory('reset-step-two') return PublicPage.renderHTTP(self, ctx)
[ "def", "renderHTTP", "(", "self", ",", "ctx", ")", ":", "req", "=", "inevow", ".", "IRequest", "(", "ctx", ")", "if", "req", ".", "method", "==", "'POST'", ":", "if", "req", ".", "args", ".", "get", "(", "'username'", ",", "[", "''", "]", ")", ...
Handle the password reset form. The following exchange describes the process: S: Render C{reset} C: POST C{username} or C{email} S: L{handleRequestForUser}, render C{reset-check-email} (User follows the emailed reset link) S: Render C{reset-step-two} C: POST C{password1} S: L{resetPassword}, render C{reset-done}
[ "Handle", "the", "password", "reset", "form", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L125-L167
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.handleRequestForUser
def handleRequestForUser(self, username, url): """ User C{username} wants to reset their password. Create an attempt item, and send them an email if the username is valid """ attempt = self.newAttemptForUser(username) account = self.accountByAddress(username) if account is None: # do we want to disclose this to the user? return email = self.getExternalEmail(account) if email is not None: self.sendEmail(url, attempt, email)
python
def handleRequestForUser(self, username, url): """ User C{username} wants to reset their password. Create an attempt item, and send them an email if the username is valid """ attempt = self.newAttemptForUser(username) account = self.accountByAddress(username) if account is None: # do we want to disclose this to the user? return email = self.getExternalEmail(account) if email is not None: self.sendEmail(url, attempt, email)
[ "def", "handleRequestForUser", "(", "self", ",", "username", ",", "url", ")", ":", "attempt", "=", "self", ".", "newAttemptForUser", "(", "username", ")", "account", "=", "self", ".", "accountByAddress", "(", "username", ")", "if", "account", "is", "None", ...
User C{username} wants to reset their password. Create an attempt item, and send them an email if the username is valid
[ "User", "C", "{", "username", "}", "wants", "to", "reset", "their", "password", ".", "Create", "an", "attempt", "item", "and", "send", "them", "an", "email", "if", "the", "username", "is", "valid" ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L170-L182
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.sendEmail
def sendEmail(self, url, attempt, email, _sendEmail=_sendEmail): """ Send an email for the given L{_PasswordResetAttempt}. @type url: L{URL} @param url: The URL of the password reset page. @type attempt: L{_PasswordResetAttempt} @param attempt: An L{Item} representing a particular user's attempt to reset their password. @type email: C{str} @param email: The email will be sent to this address. """ host = url.netloc.split(':', 1)[0] from_ = 'reset@' + host body = file(sibpath(__file__, 'reset.rfc2822')).read() body %= {'from': from_, 'to': email, 'date': rfc822.formatdate(), 'message-id': smtp.messageid(), 'link': url.child(attempt.key)} _sendEmail(from_, email, body)
python
def sendEmail(self, url, attempt, email, _sendEmail=_sendEmail): """ Send an email for the given L{_PasswordResetAttempt}. @type url: L{URL} @param url: The URL of the password reset page. @type attempt: L{_PasswordResetAttempt} @param attempt: An L{Item} representing a particular user's attempt to reset their password. @type email: C{str} @param email: The email will be sent to this address. """ host = url.netloc.split(':', 1)[0] from_ = 'reset@' + host body = file(sibpath(__file__, 'reset.rfc2822')).read() body %= {'from': from_, 'to': email, 'date': rfc822.formatdate(), 'message-id': smtp.messageid(), 'link': url.child(attempt.key)} _sendEmail(from_, email, body)
[ "def", "sendEmail", "(", "self", ",", "url", ",", "attempt", ",", "email", ",", "_sendEmail", "=", "_sendEmail", ")", ":", "host", "=", "url", ".", "netloc", ".", "split", "(", "':'", ",", "1", ")", "[", "0", "]", "from_", "=", "'reset@'", "+", "...
Send an email for the given L{_PasswordResetAttempt}. @type url: L{URL} @param url: The URL of the password reset page. @type attempt: L{_PasswordResetAttempt} @param attempt: An L{Item} representing a particular user's attempt to reset their password. @type email: C{str} @param email: The email will be sent to this address.
[ "Send", "an", "email", "for", "the", "given", "L", "{", "_PasswordResetAttempt", "}", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L185-L210
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.attemptByKey
def attemptByKey(self, key): """ Locate the L{_PasswordResetAttempt} that corresponds to C{key} """ return self.store.findUnique(_PasswordResetAttempt, _PasswordResetAttempt.key == key, default=None)
python
def attemptByKey(self, key): """ Locate the L{_PasswordResetAttempt} that corresponds to C{key} """ return self.store.findUnique(_PasswordResetAttempt, _PasswordResetAttempt.key == key, default=None)
[ "def", "attemptByKey", "(", "self", ",", "key", ")", ":", "return", "self", ".", "store", ".", "findUnique", "(", "_PasswordResetAttempt", ",", "_PasswordResetAttempt", ".", "key", "==", "key", ",", "default", "=", "None", ")" ]
Locate the L{_PasswordResetAttempt} that corresponds to C{key}
[ "Locate", "the", "L", "{", "_PasswordResetAttempt", "}", "that", "corresponds", "to", "C", "{", "key", "}" ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L213-L220
twisted/mantissa
xmantissa/signup.py
PasswordResetResource._makeKey
def _makeKey(self, usern): """ Make a new, probably unique key. This key will be sent in an email to the user and is used to access the password change form. """ return unicode(hashlib.md5(str((usern, time.time(), random.random()))).hexdigest())
python
def _makeKey(self, usern): """ Make a new, probably unique key. This key will be sent in an email to the user and is used to access the password change form. """ return unicode(hashlib.md5(str((usern, time.time(), random.random()))).hexdigest())
[ "def", "_makeKey", "(", "self", ",", "usern", ")", ":", "return", "unicode", "(", "hashlib", ".", "md5", "(", "str", "(", "(", "usern", ",", "time", ".", "time", "(", ")", ",", "random", ".", "random", "(", ")", ")", ")", ")", ".", "hexdigest", ...
Make a new, probably unique key. This key will be sent in an email to the user and is used to access the password change form.
[ "Make", "a", "new", "probably", "unique", "key", ".", "This", "key", "will", "be", "sent", "in", "an", "email", "to", "the", "user", "and", "is", "used", "to", "access", "the", "password", "change", "form", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L223-L228
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.newAttemptForUser
def newAttemptForUser(self, user): """ Create an L{_PasswordResetAttempt} for the user whose username is C{user} @param user: C{unicode} username """ # we could query for other attempts by the same # user within some timeframe and raise an exception, # if we wanted return _PasswordResetAttempt(store=self.store, username=user, timestamp=extime.Time(), key=self._makeKey(user))
python
def newAttemptForUser(self, user): """ Create an L{_PasswordResetAttempt} for the user whose username is C{user} @param user: C{unicode} username """ # we could query for other attempts by the same # user within some timeframe and raise an exception, # if we wanted return _PasswordResetAttempt(store=self.store, username=user, timestamp=extime.Time(), key=self._makeKey(user))
[ "def", "newAttemptForUser", "(", "self", ",", "user", ")", ":", "# we could query for other attempts by the same", "# user within some timeframe and raise an exception,", "# if we wanted", "return", "_PasswordResetAttempt", "(", "store", "=", "self", ".", "store", ",", "usern...
Create an L{_PasswordResetAttempt} for the user whose username is C{user} @param user: C{unicode} username
[ "Create", "an", "L", "{", "_PasswordResetAttempt", "}", "for", "the", "user", "whose", "username", "is", "C", "{", "user", "}" ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L231-L242
twisted/mantissa
xmantissa/signup.py
PasswordResetResource.resetPassword
def resetPassword(self, attempt, newPassword): """ @param attempt: L{_PasswordResetAttempt} reset the password of the user who initiated C{attempt} to C{newPassword}, and afterward, delete the attempt and any persistent sessions that belong to the user """ self.accountByAddress(attempt.username).password = newPassword self.store.query( PersistentSession, PersistentSession.authenticatedAs == str(attempt.username) ).deleteFromStore() attempt.deleteFromStore()
python
def resetPassword(self, attempt, newPassword): """ @param attempt: L{_PasswordResetAttempt} reset the password of the user who initiated C{attempt} to C{newPassword}, and afterward, delete the attempt and any persistent sessions that belong to the user """ self.accountByAddress(attempt.username).password = newPassword self.store.query( PersistentSession, PersistentSession.authenticatedAs == str(attempt.username) ).deleteFromStore() attempt.deleteFromStore()
[ "def", "resetPassword", "(", "self", ",", "attempt", ",", "newPassword", ")", ":", "self", ".", "accountByAddress", "(", "attempt", ".", "username", ")", ".", "password", "=", "newPassword", "self", ".", "store", ".", "query", "(", "PersistentSession", ",", ...
@param attempt: L{_PasswordResetAttempt} reset the password of the user who initiated C{attempt} to C{newPassword}, and afterward, delete the attempt and any persistent sessions that belong to the user
[ "@param", "attempt", ":", "L", "{", "_PasswordResetAttempt", "}" ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L271-L287
twisted/mantissa
xmantissa/signup.py
TicketBooth.issueViaEmail
def issueViaEmail(self, issuer, email, product, templateData, domainName, httpPort=80): """ Send a ticket via email to the supplied address, which, when claimed, will create an avatar and allow the given product to endow it with things. @param issuer: An object, preferably a user, to track who issued this ticket. @param email: a str, formatted as an rfc2821 email address (user@domain) -- source routes not allowed. @param product: an instance of L{Product} @param domainName: a domain name, used as the domain part of the sender's address, and as the web server to generate a link to within the email. @param httpPort: a port number for the web server running on domainName @param templateData: A string containing an rfc2822-format email message, which will have several python values interpolated into it dictwise: %(from)s: To be used for the From: header; will contain an rfc2822-format address. %(to)s: the address that we are going to send to. %(date)s: an rfc2822-format date. %(message-id)s: an rfc2822 message-id %(link)s: an HTTP URL that we are generating a link to. """ ticket = self.createTicket(issuer, unicode(email, 'ascii'), product) nonce = ticket.nonce signupInfo = {'from': 'signup@'+domainName, 'to': email, 'date': rfc822.formatdate(), 'message-id': smtp.messageid(), 'link': self.ticketLink(domainName, httpPort, nonce)} msg = templateData % signupInfo return ticket, _sendEmail(signupInfo['from'], email, msg)
python
def issueViaEmail(self, issuer, email, product, templateData, domainName, httpPort=80): """ Send a ticket via email to the supplied address, which, when claimed, will create an avatar and allow the given product to endow it with things. @param issuer: An object, preferably a user, to track who issued this ticket. @param email: a str, formatted as an rfc2821 email address (user@domain) -- source routes not allowed. @param product: an instance of L{Product} @param domainName: a domain name, used as the domain part of the sender's address, and as the web server to generate a link to within the email. @param httpPort: a port number for the web server running on domainName @param templateData: A string containing an rfc2822-format email message, which will have several python values interpolated into it dictwise: %(from)s: To be used for the From: header; will contain an rfc2822-format address. %(to)s: the address that we are going to send to. %(date)s: an rfc2822-format date. %(message-id)s: an rfc2822 message-id %(link)s: an HTTP URL that we are generating a link to. """ ticket = self.createTicket(issuer, unicode(email, 'ascii'), product) nonce = ticket.nonce signupInfo = {'from': 'signup@'+domainName, 'to': email, 'date': rfc822.formatdate(), 'message-id': smtp.messageid(), 'link': self.ticketLink(domainName, httpPort, nonce)} msg = templateData % signupInfo return ticket, _sendEmail(signupInfo['from'], email, msg)
[ "def", "issueViaEmail", "(", "self", ",", "issuer", ",", "email", ",", "product", ",", "templateData", ",", "domainName", ",", "httpPort", "=", "80", ")", ":", "ticket", "=", "self", ".", "createTicket", "(", "issuer", ",", "unicode", "(", "email", ",", ...
Send a ticket via email to the supplied address, which, when claimed, will create an avatar and allow the given product to endow it with things. @param issuer: An object, preferably a user, to track who issued this ticket. @param email: a str, formatted as an rfc2821 email address (user@domain) -- source routes not allowed. @param product: an instance of L{Product} @param domainName: a domain name, used as the domain part of the sender's address, and as the web server to generate a link to within the email. @param httpPort: a port number for the web server running on domainName @param templateData: A string containing an rfc2822-format email message, which will have several python values interpolated into it dictwise: %(from)s: To be used for the From: header; will contain an rfc2822-format address. %(to)s: the address that we are going to send to. %(date)s: an rfc2822-format date. %(message-id)s: an rfc2822 message-id %(link)s: an HTTP URL that we are generating a link to.
[ "Send", "a", "ticket", "via", "email", "to", "the", "supplied", "address", "which", "when", "claimed", "will", "create", "an", "avatar", "and", "allow", "the", "given", "product", "to", "endow", "it", "with", "things", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L400-L451
twisted/mantissa
xmantissa/signup.py
UserInfoSignup.usernameAvailable
def usernameAvailable(self, username, domain): """ Check to see if a username is available for the user to select. """ if len(username) < 2: return [False, u"Username too short"] for char in u"[ ,:;<>@()!\"'%&\\|\t\b": if char in username: return [False, u"Username contains invalid character: '%s'" % char] # The localpart is acceptable if it can be parsed as the local part # of an RFC 2821 address. try: parseAddress("<%s@example.com>" % (username,)) except ArgumentError: return [False, u"Username fails to parse"] # The domain is acceptable if it is one which we actually host. if domain not in self.getAvailableDomains(): return [False, u"Domain not allowed"] query = self.store.query(userbase.LoginMethod, AND(userbase.LoginMethod.localpart == username, userbase.LoginMethod.domain == domain)) return [not bool(query.count()), u"Username already taken"]
python
def usernameAvailable(self, username, domain): """ Check to see if a username is available for the user to select. """ if len(username) < 2: return [False, u"Username too short"] for char in u"[ ,:;<>@()!\"'%&\\|\t\b": if char in username: return [False, u"Username contains invalid character: '%s'" % char] # The localpart is acceptable if it can be parsed as the local part # of an RFC 2821 address. try: parseAddress("<%s@example.com>" % (username,)) except ArgumentError: return [False, u"Username fails to parse"] # The domain is acceptable if it is one which we actually host. if domain not in self.getAvailableDomains(): return [False, u"Domain not allowed"] query = self.store.query(userbase.LoginMethod, AND(userbase.LoginMethod.localpart == username, userbase.LoginMethod.domain == domain)) return [not bool(query.count()), u"Username already taken"]
[ "def", "usernameAvailable", "(", "self", ",", "username", ",", "domain", ")", ":", "if", "len", "(", "username", ")", "<", "2", ":", "return", "[", "False", ",", "u\"Username too short\"", "]", "for", "char", "in", "u\"[ ,:;<>@()!\\\"'%&\\\\|\\t\\b\"", ":", ...
Check to see if a username is available for the user to select.
[ "Check", "to", "see", "if", "a", "username", "is", "available", "for", "the", "user", "to", "select", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L750-L775
twisted/mantissa
xmantissa/signup.py
UserInfoSignup.createUser
def createUser(self, realName, username, domain, password, emailAddress): """ Create a user, storing some associated metadata in the user's store, i.e. their first and last names (as a L{UserInfo} item), and a L{axiom.userbase.LoginMethod} allowing them to login with their email address. @param realName: the real name of the user. @type realName: C{unicode} @param username: the user's username. they will be able to login with this. @type username: C{unicode} @param domain: the local domain - used internally to turn C{username} into a localpart@domain style string . @type domain: C{unicode} @param password: the password to be used for the user's account. @type password: C{unicode} @param emailAddress: the user's external email address. they will be able to login with this also. @type emailAddress: C{unicode} @rtype: C{NoneType} """ # XXX This method should be called in a transaction, it shouldn't # start a transaction itself. def _(): loginsystem = self.store.findUnique(userbase.LoginSystem) # Create an account with the credentials they specified, # making it internal since it belongs to us. acct = loginsystem.addAccount(username, domain, password, verified=True, internal=True) # Create an external login method associated with the email # address they supplied, as well. This creates an association # between that external address and their account object, # allowing password reset emails to be sent and letting them log # in to this account using that address as a username. emailPart, emailDomain = emailAddress.split("@") acct.addLoginMethod(emailPart, emailDomain, protocol=u"email", verified=False, internal=False) substore = IBeneficiary(acct) # Record some of that signup information in case application # objects are interested in it. UserInfo(store=substore, realName=realName) self.product.installProductOn(substore) self.store.transact(_)
python
def createUser(self, realName, username, domain, password, emailAddress): """ Create a user, storing some associated metadata in the user's store, i.e. their first and last names (as a L{UserInfo} item), and a L{axiom.userbase.LoginMethod} allowing them to login with their email address. @param realName: the real name of the user. @type realName: C{unicode} @param username: the user's username. they will be able to login with this. @type username: C{unicode} @param domain: the local domain - used internally to turn C{username} into a localpart@domain style string . @type domain: C{unicode} @param password: the password to be used for the user's account. @type password: C{unicode} @param emailAddress: the user's external email address. they will be able to login with this also. @type emailAddress: C{unicode} @rtype: C{NoneType} """ # XXX This method should be called in a transaction, it shouldn't # start a transaction itself. def _(): loginsystem = self.store.findUnique(userbase.LoginSystem) # Create an account with the credentials they specified, # making it internal since it belongs to us. acct = loginsystem.addAccount(username, domain, password, verified=True, internal=True) # Create an external login method associated with the email # address they supplied, as well. This creates an association # between that external address and their account object, # allowing password reset emails to be sent and letting them log # in to this account using that address as a username. emailPart, emailDomain = emailAddress.split("@") acct.addLoginMethod(emailPart, emailDomain, protocol=u"email", verified=False, internal=False) substore = IBeneficiary(acct) # Record some of that signup information in case application # objects are interested in it. UserInfo(store=substore, realName=realName) self.product.installProductOn(substore) self.store.transact(_)
[ "def", "createUser", "(", "self", ",", "realName", ",", "username", ",", "domain", ",", "password", ",", "emailAddress", ")", ":", "# XXX This method should be called in a transaction, it shouldn't", "# start a transaction itself.", "def", "_", "(", ")", ":", "loginsyst...
Create a user, storing some associated metadata in the user's store, i.e. their first and last names (as a L{UserInfo} item), and a L{axiom.userbase.LoginMethod} allowing them to login with their email address. @param realName: the real name of the user. @type realName: C{unicode} @param username: the user's username. they will be able to login with this. @type username: C{unicode} @param domain: the local domain - used internally to turn C{username} into a localpart@domain style string . @type domain: C{unicode} @param password: the password to be used for the user's account. @type password: C{unicode} @param emailAddress: the user's external email address. they will be able to login with this also. @type emailAddress: C{unicode} @rtype: C{NoneType}
[ "Create", "a", "user", "storing", "some", "associated", "metadata", "in", "the", "user", "s", "store", "i", ".", "e", ".", "their", "first", "and", "last", "names", "(", "as", "a", "L", "{", "UserInfo", "}", "item", ")", "and", "a", "L", "{", "axio...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L778-L828
twisted/mantissa
xmantissa/signup.py
SignupConfiguration.createSignup
def createSignup(self, creator, signupClass, signupConf, product, emailTemplate, prompt): """ Create a new signup facility in the site store's database. @param creator: a unicode string describing the creator of the new signup mechanism, for auditing purposes. @param signupClass: the item type of the signup mechanism to create. @param signupConf: a dictionary of keyword arguments for L{signupClass}'s constructor. @param product: A Product instance, describing the powerups to be installed with this signup. @param emailTemplate: a unicode string which contains some text that will be sent in confirmation emails generated by this signup mechanism (if any) @param prompt: a short unicode string describing this signup mechanism, as distinct from others. For example: "Student Sign Up", or "Faculty Sign Up" @return: a newly-created, database-resident instance of signupClass. """ siteStore = self.store.parent booth = siteStore.findOrCreate(TicketBooth, lambda booth: installOn(booth, siteStore)) signupItem = signupClass( store=siteStore, booth=booth, product=product, emailTemplate=emailTemplate, prompt=prompt, **signupConf) siteStore.powerUp(signupItem) _SignupTracker(store=siteStore, signupItem=signupItem, createdOn=extime.Time(), createdBy=creator) return signupItem
python
def createSignup(self, creator, signupClass, signupConf, product, emailTemplate, prompt): """ Create a new signup facility in the site store's database. @param creator: a unicode string describing the creator of the new signup mechanism, for auditing purposes. @param signupClass: the item type of the signup mechanism to create. @param signupConf: a dictionary of keyword arguments for L{signupClass}'s constructor. @param product: A Product instance, describing the powerups to be installed with this signup. @param emailTemplate: a unicode string which contains some text that will be sent in confirmation emails generated by this signup mechanism (if any) @param prompt: a short unicode string describing this signup mechanism, as distinct from others. For example: "Student Sign Up", or "Faculty Sign Up" @return: a newly-created, database-resident instance of signupClass. """ siteStore = self.store.parent booth = siteStore.findOrCreate(TicketBooth, lambda booth: installOn(booth, siteStore)) signupItem = signupClass( store=siteStore, booth=booth, product=product, emailTemplate=emailTemplate, prompt=prompt, **signupConf) siteStore.powerUp(signupItem) _SignupTracker(store=siteStore, signupItem=signupItem, createdOn=extime.Time(), createdBy=creator) return signupItem
[ "def", "createSignup", "(", "self", ",", "creator", ",", "signupClass", ",", "signupConf", ",", "product", ",", "emailTemplate", ",", "prompt", ")", ":", "siteStore", "=", "self", ".", "store", ".", "parent", "booth", "=", "siteStore", ".", "findOrCreate", ...
Create a new signup facility in the site store's database. @param creator: a unicode string describing the creator of the new signup mechanism, for auditing purposes. @param signupClass: the item type of the signup mechanism to create. @param signupConf: a dictionary of keyword arguments for L{signupClass}'s constructor. @param product: A Product instance, describing the powerups to be installed with this signup. @param emailTemplate: a unicode string which contains some text that will be sent in confirmation emails generated by this signup mechanism (if any) @param prompt: a short unicode string describing this signup mechanism, as distinct from others. For example: "Student Sign Up", or "Faculty Sign Up" @return: a newly-created, database-resident instance of signupClass.
[ "Create", "a", "new", "signup", "facility", "in", "the", "site", "store", "s", "database", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L1099-L1142
twisted/mantissa
xmantissa/signup.py
ProductFormMixin.makeProductPicker
def makeProductPicker(self): """ Make a LiveForm with radio buttons for each Product in the store. """ productPicker = liveform.LiveForm( self.coerceProduct, [liveform.Parameter( str(id(product)), liveform.FORM_INPUT, liveform.LiveForm( lambda selectedProduct, product=product: selectedProduct and product, [liveform.Parameter( 'selectedProduct', liveform.RADIO_INPUT, bool, repr(product))] )) for product in self.original.store.parent.query(Product)], u"Product to Install") return productPicker
python
def makeProductPicker(self): """ Make a LiveForm with radio buttons for each Product in the store. """ productPicker = liveform.LiveForm( self.coerceProduct, [liveform.Parameter( str(id(product)), liveform.FORM_INPUT, liveform.LiveForm( lambda selectedProduct, product=product: selectedProduct and product, [liveform.Parameter( 'selectedProduct', liveform.RADIO_INPUT, bool, repr(product))] )) for product in self.original.store.parent.query(Product)], u"Product to Install") return productPicker
[ "def", "makeProductPicker", "(", "self", ")", ":", "productPicker", "=", "liveform", ".", "LiveForm", "(", "self", ".", "coerceProduct", ",", "[", "liveform", ".", "Parameter", "(", "str", "(", "id", "(", "product", ")", ")", ",", "liveform", ".", "FORM_...
Make a LiveForm with radio buttons for each Product in the store.
[ "Make", "a", "LiveForm", "with", "radio", "buttons", "for", "each", "Product", "in", "the", "store", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L1148-L1168
twisted/mantissa
xmantissa/signup.py
SignupFragment._deleteTrackers
def _deleteTrackers(self, trackers): """ Delete the given signup trackers and their associated signup resources. @param trackers: sequence of L{_SignupTrackers} """ for tracker in trackers: if tracker.store is None: # we're not updating the list of live signups client side, so # we might get a signup that has already been deleted continue sig = tracker.signupItem # XXX the only reason we're doing this here is that we're afraid to # add a whenDeleted=CASCADE to powerups because it's inefficient, # however, this is arguably the archetypical use of # whenDeleted=CASCADE. Soon we need to figure out a real solution # (but I have no idea what it is). -glyph for iface in sig.store.interfacesFor(sig): sig.store.powerDown(sig, iface) tracker.deleteFromStore() sig.deleteFromStore()
python
def _deleteTrackers(self, trackers): """ Delete the given signup trackers and their associated signup resources. @param trackers: sequence of L{_SignupTrackers} """ for tracker in trackers: if tracker.store is None: # we're not updating the list of live signups client side, so # we might get a signup that has already been deleted continue sig = tracker.signupItem # XXX the only reason we're doing this here is that we're afraid to # add a whenDeleted=CASCADE to powerups because it's inefficient, # however, this is arguably the archetypical use of # whenDeleted=CASCADE. Soon we need to figure out a real solution # (but I have no idea what it is). -glyph for iface in sig.store.interfacesFor(sig): sig.store.powerDown(sig, iface) tracker.deleteFromStore() sig.deleteFromStore()
[ "def", "_deleteTrackers", "(", "self", ",", "trackers", ")", ":", "for", "tracker", "in", "trackers", ":", "if", "tracker", ".", "store", "is", "None", ":", "# we're not updating the list of live signups client side, so", "# we might get a signup that has already been delet...
Delete the given signup trackers and their associated signup resources. @param trackers: sequence of L{_SignupTrackers}
[ "Delete", "the", "given", "signup", "trackers", "and", "their", "associated", "signup", "resources", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/signup.py#L1335-L1359
jwodder/doapi
doapi/image.py
Image.fetch
def fetch(self): """ Fetch & return a new `Image` object representing the image's current state :rtype: Image :raises DOAPIError: if the API endpoint replies with an error (e.g., if the image no longer exists) """ api = self.doapi_manager return api._image(api.request(self.url)["image"])
python
def fetch(self): """ Fetch & return a new `Image` object representing the image's current state :rtype: Image :raises DOAPIError: if the API endpoint replies with an error (e.g., if the image no longer exists) """ api = self.doapi_manager return api._image(api.request(self.url)["image"])
[ "def", "fetch", "(", "self", ")", ":", "api", "=", "self", ".", "doapi_manager", "return", "api", ".", "_image", "(", "api", ".", "request", "(", "self", ".", "url", ")", "[", "\"image\"", "]", ")" ]
Fetch & return a new `Image` object representing the image's current state :rtype: Image :raises DOAPIError: if the API endpoint replies with an error (e.g., if the image no longer exists)
[ "Fetch", "&", "return", "a", "new", "Image", "object", "representing", "the", "image", "s", "current", "state" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/image.py#L69-L79
jwodder/doapi
doapi/image.py
Image.update_image
def update_image(self, name): # The `_image` is to avoid conflicts with MutableMapping.update. """ Update (i.e., rename) the image :param str name: the new name for the image :return: an updated `Image` object :rtype: Image :raises DOAPIError: if the API endpoint replies with an error """ api = self.doapi_manager return api._image(api.request(self.url, method='PUT', data={"name": name})["image"])
python
def update_image(self, name): # The `_image` is to avoid conflicts with MutableMapping.update. """ Update (i.e., rename) the image :param str name: the new name for the image :return: an updated `Image` object :rtype: Image :raises DOAPIError: if the API endpoint replies with an error """ api = self.doapi_manager return api._image(api.request(self.url, method='PUT', data={"name": name})["image"])
[ "def", "update_image", "(", "self", ",", "name", ")", ":", "# The `_image` is to avoid conflicts with MutableMapping.update.", "api", "=", "self", ".", "doapi_manager", "return", "api", ".", "_image", "(", "api", ".", "request", "(", "self", ".", "url", ",", "me...
Update (i.e., rename) the image :param str name: the new name for the image :return: an updated `Image` object :rtype: Image :raises DOAPIError: if the API endpoint replies with an error
[ "Update", "(", "i", ".", "e", ".", "rename", ")", "the", "image" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/image.py#L81-L93
fitnr/censusname
censusname/censusname.py
Censusname.generate
def generate(self, nameformat=None, capitalize=None, formatters=None, **kwargs): '''Pick a random name form a specified list of name parts''' nameformat = nameformat or self.nameformat capitalize = capitalize or self.capitalize formatters = formatters or {} lines = self._get_lines(kwargs) names = dict((k, v['name']) for k, v in list(lines.items())) if capitalize: names = dict((k, n.capitalize()) for k, n in list(names.items())) merged_formatters = dict() try: merged_formatters = dict( (k, self.formatters.get(k, []) + formatters.get(k, [])) for k in set(list(self.formatters.keys()) + list(formatters.keys())) ) except AttributeError: raise TypeError("keyword argument 'formatters' for Censusname.generate() must be a dict") if merged_formatters: for key, functions in list(merged_formatters.items()): # 'surname', [func_a, func_b] for func in functions: # names['surname'] = func_a(name['surname']) names[key] = func(names[key]) return nameformat.format(**names)
python
def generate(self, nameformat=None, capitalize=None, formatters=None, **kwargs): '''Pick a random name form a specified list of name parts''' nameformat = nameformat or self.nameformat capitalize = capitalize or self.capitalize formatters = formatters or {} lines = self._get_lines(kwargs) names = dict((k, v['name']) for k, v in list(lines.items())) if capitalize: names = dict((k, n.capitalize()) for k, n in list(names.items())) merged_formatters = dict() try: merged_formatters = dict( (k, self.formatters.get(k, []) + formatters.get(k, [])) for k in set(list(self.formatters.keys()) + list(formatters.keys())) ) except AttributeError: raise TypeError("keyword argument 'formatters' for Censusname.generate() must be a dict") if merged_formatters: for key, functions in list(merged_formatters.items()): # 'surname', [func_a, func_b] for func in functions: # names['surname'] = func_a(name['surname']) names[key] = func(names[key]) return nameformat.format(**names)
[ "def", "generate", "(", "self", ",", "nameformat", "=", "None", ",", "capitalize", "=", "None", ",", "formatters", "=", "None", ",", "*", "*", "kwargs", ")", ":", "nameformat", "=", "nameformat", "or", "self", ".", "nameformat", "capitalize", "=", "capit...
Pick a random name form a specified list of name parts
[ "Pick", "a", "random", "name", "form", "a", "specified", "list", "of", "name", "parts" ]
train
https://github.com/fitnr/censusname/blob/df845a60bab4f1be20eca33bcd2cd0f747ec0c0f/censusname/censusname.py#L100-L129
fitnr/censusname
censusname/censusname.py
Censusname.pick_frequency_line
def pick_frequency_line(self, filename, frequency, cumulativefield='cumulative_frequency'): '''Given a numeric frequency, pick a line from a csv with a cumulative frequency field''' if resource_exists('censusname', filename): with closing(resource_stream('censusname', filename)) as b: g = codecs.iterdecode(b, 'ascii') return self._pick_frequency_line(g, frequency, cumulativefield) else: with open(filename, encoding='ascii') as g: return self._pick_frequency_line(g, frequency, cumulativefield)
python
def pick_frequency_line(self, filename, frequency, cumulativefield='cumulative_frequency'): '''Given a numeric frequency, pick a line from a csv with a cumulative frequency field''' if resource_exists('censusname', filename): with closing(resource_stream('censusname', filename)) as b: g = codecs.iterdecode(b, 'ascii') return self._pick_frequency_line(g, frequency, cumulativefield) else: with open(filename, encoding='ascii') as g: return self._pick_frequency_line(g, frequency, cumulativefield)
[ "def", "pick_frequency_line", "(", "self", ",", "filename", ",", "frequency", ",", "cumulativefield", "=", "'cumulative_frequency'", ")", ":", "if", "resource_exists", "(", "'censusname'", ",", "filename", ")", ":", "with", "closing", "(", "resource_stream", "(", ...
Given a numeric frequency, pick a line from a csv with a cumulative frequency field
[ "Given", "a", "numeric", "frequency", "pick", "a", "line", "from", "a", "csv", "with", "a", "cumulative", "frequency", "field" ]
train
https://github.com/fitnr/censusname/blob/df845a60bab4f1be20eca33bcd2cd0f747ec0c0f/censusname/censusname.py#L157-L165
richardliaw/track
track/autodetect.py
dfl_local_dir
def dfl_local_dir(): """ Infers a default local directory, which is DFL_DIR_PARENT/<project name>, where the project name is guessed according to the following rules. If we detect we're in a repository, the project name is the repository name (git only for now). If we're not in a repository, and the script file sys.argv[0] is non-null, then that is used. Otherwise, we just say it's "unknown" """ project_name = git_repo() if not project_name and sys.argv: project_name = sys.argv[0] if not project_name: project_name = "unknown" dirpath = os.path.join(DFL_DIR_PARENT, project_name) return os.path.expanduser(dirpath)
python
def dfl_local_dir(): """ Infers a default local directory, which is DFL_DIR_PARENT/<project name>, where the project name is guessed according to the following rules. If we detect we're in a repository, the project name is the repository name (git only for now). If we're not in a repository, and the script file sys.argv[0] is non-null, then that is used. Otherwise, we just say it's "unknown" """ project_name = git_repo() if not project_name and sys.argv: project_name = sys.argv[0] if not project_name: project_name = "unknown" dirpath = os.path.join(DFL_DIR_PARENT, project_name) return os.path.expanduser(dirpath)
[ "def", "dfl_local_dir", "(", ")", ":", "project_name", "=", "git_repo", "(", ")", "if", "not", "project_name", "and", "sys", ".", "argv", ":", "project_name", "=", "sys", ".", "argv", "[", "0", "]", "if", "not", "project_name", ":", "project_name", "=", ...
Infers a default local directory, which is DFL_DIR_PARENT/<project name>, where the project name is guessed according to the following rules. If we detect we're in a repository, the project name is the repository name (git only for now). If we're not in a repository, and the script file sys.argv[0] is non-null, then that is used. Otherwise, we just say it's "unknown"
[ "Infers", "a", "default", "local", "directory", "which", "is", "DFL_DIR_PARENT", "/", "<project", "name", ">", "where", "the", "project", "name", "is", "guessed", "according", "to", "the", "following", "rules", "." ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/autodetect.py#L11-L30
richardliaw/track
track/autodetect.py
git_repo
def git_repo(): """ Returns the git repository root if the cwd is in a repo, else None """ try: reldir = subprocess.check_output( ["git", "rev-parse", "--git-dir"]) reldir = reldir.decode("utf-8") return os.path.basename(os.path.dirname(os.path.abspath(reldir))) except subprocess.CalledProcessError: return None
python
def git_repo(): """ Returns the git repository root if the cwd is in a repo, else None """ try: reldir = subprocess.check_output( ["git", "rev-parse", "--git-dir"]) reldir = reldir.decode("utf-8") return os.path.basename(os.path.dirname(os.path.abspath(reldir))) except subprocess.CalledProcessError: return None
[ "def", "git_repo", "(", ")", ":", "try", ":", "reldir", "=", "subprocess", ".", "check_output", "(", "[", "\"git\"", ",", "\"rev-parse\"", ",", "\"--git-dir\"", "]", ")", "reldir", "=", "reldir", ".", "decode", "(", "\"utf-8\"", ")", "return", "os", ".",...
Returns the git repository root if the cwd is in a repo, else None
[ "Returns", "the", "git", "repository", "root", "if", "the", "cwd", "is", "in", "a", "repo", "else", "None" ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/autodetect.py#L32-L42
richardliaw/track
track/autodetect.py
git_hash
def git_hash(): """returns the current git hash or unknown if not in git repo""" if git_repo() is None: return "unknown" git_hash = subprocess.check_output( ["git", "rev-parse", "HEAD"]) # git_hash is a byte string; we want a string. git_hash = git_hash.decode("utf-8") # git_hash also comes with an extra \n at the end, which we remove. git_hash = git_hash.strip() return git_hash
python
def git_hash(): """returns the current git hash or unknown if not in git repo""" if git_repo() is None: return "unknown" git_hash = subprocess.check_output( ["git", "rev-parse", "HEAD"]) # git_hash is a byte string; we want a string. git_hash = git_hash.decode("utf-8") # git_hash also comes with an extra \n at the end, which we remove. git_hash = git_hash.strip() return git_hash
[ "def", "git_hash", "(", ")", ":", "if", "git_repo", "(", ")", "is", "None", ":", "return", "\"unknown\"", "git_hash", "=", "subprocess", ".", "check_output", "(", "[", "\"git\"", ",", "\"rev-parse\"", ",", "\"HEAD\"", "]", ")", "# git_hash is a byte string; we...
returns the current git hash or unknown if not in git repo
[ "returns", "the", "current", "git", "hash", "or", "unknown", "if", "not", "in", "git", "repo" ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/autodetect.py#L45-L55
richardliaw/track
track/autodetect.py
git_pretty
def git_pretty(): """returns a pretty summary of the commit or unkown if not in git repo""" if git_repo() is None: return "unknown" pretty = subprocess.check_output( ["git", "log", "--pretty=format:%h %s", "-n", "1"]) pretty = pretty.decode("utf-8") pretty = pretty.strip() return pretty
python
def git_pretty(): """returns a pretty summary of the commit or unkown if not in git repo""" if git_repo() is None: return "unknown" pretty = subprocess.check_output( ["git", "log", "--pretty=format:%h %s", "-n", "1"]) pretty = pretty.decode("utf-8") pretty = pretty.strip() return pretty
[ "def", "git_pretty", "(", ")", ":", "if", "git_repo", "(", ")", "is", "None", ":", "return", "\"unknown\"", "pretty", "=", "subprocess", ".", "check_output", "(", "[", "\"git\"", ",", "\"log\"", ",", "\"--pretty=format:%h %s\"", ",", "\"-n\"", ",", "\"1\"", ...
returns a pretty summary of the commit or unkown if not in git repo
[ "returns", "a", "pretty", "summary", "of", "the", "commit", "or", "unkown", "if", "not", "in", "git", "repo" ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/autodetect.py#L57-L65
richardliaw/track
track/autodetect.py
invocation
def invocation(): """reconstructs the invocation for this python program""" cmdargs = [sys.executable] + sys.argv[:] invocation = " ".join(shlex.quote(s) for s in cmdargs) return invocation
python
def invocation(): """reconstructs the invocation for this python program""" cmdargs = [sys.executable] + sys.argv[:] invocation = " ".join(shlex.quote(s) for s in cmdargs) return invocation
[ "def", "invocation", "(", ")", ":", "cmdargs", "=", "[", "sys", ".", "executable", "]", "+", "sys", ".", "argv", "[", ":", "]", "invocation", "=", "\" \"", ".", "join", "(", "shlex", ".", "quote", "(", "s", ")", "for", "s", "in", "cmdargs", ")", ...
reconstructs the invocation for this python program
[ "reconstructs", "the", "invocation", "for", "this", "python", "program" ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/autodetect.py#L67-L71
laurivosandi/butterknife
host/butterknife/api.py
serialize
def serialize(func): """ Falcon response serialization """ def wrapped(instance, req, resp, **kwargs): assert not req.get_param("unicode") or req.get_param("unicode") == u"✓", "Unicode sanity check failed" resp.set_header("Cache-Control", "no-cache, no-store, must-revalidate"); resp.set_header("Pragma", "no-cache"); resp.set_header("Expires", "0"); r = func(instance, req, resp, **kwargs) if not resp.body: if not req.client_accepts_json: raise falcon.HTTPUnsupportedMediaType( 'This API only supports the JSON media type.', href='http://docs.examples.com/api/json') resp.set_header('Content-Type', 'application/json') resp.body = json.dumps(r, cls=MyEncoder) return r return wrapped
python
def serialize(func): """ Falcon response serialization """ def wrapped(instance, req, resp, **kwargs): assert not req.get_param("unicode") or req.get_param("unicode") == u"✓", "Unicode sanity check failed" resp.set_header("Cache-Control", "no-cache, no-store, must-revalidate"); resp.set_header("Pragma", "no-cache"); resp.set_header("Expires", "0"); r = func(instance, req, resp, **kwargs) if not resp.body: if not req.client_accepts_json: raise falcon.HTTPUnsupportedMediaType( 'This API only supports the JSON media type.', href='http://docs.examples.com/api/json') resp.set_header('Content-Type', 'application/json') resp.body = json.dumps(r, cls=MyEncoder) return r return wrapped
[ "def", "serialize", "(", "func", ")", ":", "def", "wrapped", "(", "instance", ",", "req", ",", "resp", ",", "*", "*", "kwargs", ")", ":", "assert", "not", "req", ".", "get_param", "(", "\"unicode\"", ")", "or", "req", ".", "get_param", "(", "\"unicod...
Falcon response serialization
[ "Falcon", "response", "serialization" ]
train
https://github.com/laurivosandi/butterknife/blob/076ddabd66dcc1cedda7eba27ddca2a9ebed309e/host/butterknife/api.py#L27-L45
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelLib.readRow
def readRow(self, row, startCol=0, endCol=-1): ''' read row ''' return self.__operation.readRow(row, startCol, endCol)
python
def readRow(self, row, startCol=0, endCol=-1): ''' read row ''' return self.__operation.readRow(row, startCol, endCol)
[ "def", "readRow", "(", "self", ",", "row", ",", "startCol", "=", "0", ",", "endCol", "=", "-", "1", ")", ":", "return", "self", ".", "__operation", ".", "readRow", "(", "row", ",", "startCol", ",", "endCol", ")" ]
read row
[ "read", "row" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L45-L47
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelLib.readCol
def readCol(self, col, startRow=0, endRow=-1): ''' read col ''' return self.__operation.readCol(col, startRow, endRow)
python
def readCol(self, col, startRow=0, endRow=-1): ''' read col ''' return self.__operation.readCol(col, startRow, endRow)
[ "def", "readCol", "(", "self", ",", "col", ",", "startRow", "=", "0", ",", "endRow", "=", "-", "1", ")", ":", "return", "self", ".", "__operation", ".", "readCol", "(", "col", ",", "startRow", ",", "endRow", ")" ]
read col
[ "read", "col" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L49-L51
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelLib.writeCell
def writeCell(self, row, col, value): ''' write cell''' self.__operation.writeCell(row, col, value)
python
def writeCell(self, row, col, value): ''' write cell''' self.__operation.writeCell(row, col, value)
[ "def", "writeCell", "(", "self", ",", "row", ",", "col", ",", "value", ")", ":", "self", ".", "__operation", ".", "writeCell", "(", "row", ",", "col", ",", "value", ")" ]
write cell
[ "write", "cell" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L61-L63
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelWrite.openSheet
def openSheet(self, name): ''' set a sheet to write ''' if name not in self.__sheetNameDict: sheet = self.__workbook.add_sheet(name) self.__sheetNameDict[name] = sheet self.__sheet = self.__sheetNameDict[name]
python
def openSheet(self, name): ''' set a sheet to write ''' if name not in self.__sheetNameDict: sheet = self.__workbook.add_sheet(name) self.__sheetNameDict[name] = sheet self.__sheet = self.__sheetNameDict[name]
[ "def", "openSheet", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "__sheetNameDict", ":", "sheet", "=", "self", ".", "__workbook", ".", "add_sheet", "(", "name", ")", "self", ".", "__sheetNameDict", "[", "name", "]", "=", ...
set a sheet to write
[ "set", "a", "sheet", "to", "write" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L116-L122
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelWrite.__getSheet
def __getSheet(self, name): ''' get a sheet by name ''' if not self.sheetExsit(name): raise UfException(Errors.SHEET_NAME_INVALID, "Can't find a sheet named %s" % name) return self.__sheetNameDict[name]
python
def __getSheet(self, name): ''' get a sheet by name ''' if not self.sheetExsit(name): raise UfException(Errors.SHEET_NAME_INVALID, "Can't find a sheet named %s" % name) return self.__sheetNameDict[name]
[ "def", "__getSheet", "(", "self", ",", "name", ")", ":", "if", "not", "self", ".", "sheetExsit", "(", "name", ")", ":", "raise", "UfException", "(", "Errors", ".", "SHEET_NAME_INVALID", ",", "\"Can't find a sheet named %s\"", "%", "name", ")", "return", "sel...
get a sheet by name
[ "get", "a", "sheet", "by", "name" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L124-L129
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelWrite.writeCell
def writeCell(self, row, col, value): ''' write a cell ''' if self.__sheet is None: self.openSheet(super(ExcelWrite, self).DEFAULT_SHEET) self.__sheet.write(row, col, value)
python
def writeCell(self, row, col, value): ''' write a cell ''' if self.__sheet is None: self.openSheet(super(ExcelWrite, self).DEFAULT_SHEET) self.__sheet.write(row, col, value)
[ "def", "writeCell", "(", "self", ",", "row", ",", "col", ",", "value", ")", ":", "if", "self", ".", "__sheet", "is", "None", ":", "self", ".", "openSheet", "(", "super", "(", "ExcelWrite", ",", "self", ")", ".", "DEFAULT_SHEET", ")", "self", ".", "...
write a cell
[ "write", "a", "cell" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L135-L140
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelWrite.writeRow
def writeRow(self, row, values): ''' write a row Not sure whether xlwt support write the same cell multiple times ''' if self.__sheet is None: self.openSheet(super(ExcelWrite, self).DEFAULT_SHEET) for index, value in enumerate(values): self.__sheet.write(row, index, value)
python
def writeRow(self, row, values): ''' write a row Not sure whether xlwt support write the same cell multiple times ''' if self.__sheet is None: self.openSheet(super(ExcelWrite, self).DEFAULT_SHEET) for index, value in enumerate(values): self.__sheet.write(row, index, value)
[ "def", "writeRow", "(", "self", ",", "row", ",", "values", ")", ":", "if", "self", ".", "__sheet", "is", "None", ":", "self", ".", "openSheet", "(", "super", "(", "ExcelWrite", ",", "self", ")", ".", "DEFAULT_SHEET", ")", "for", "index", ",", "value"...
write a row Not sure whether xlwt support write the same cell multiple times
[ "write", "a", "row", "Not", "sure", "whether", "xlwt", "support", "write", "the", "same", "cell", "multiple", "times" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L142-L151
llazzaro/analyzerdam
analyzerdam/excelLib.py
ExcelRead.readCell
def readCell(self, row, col): ''' read a cell''' try: if self.__sheet is None: self.openSheet(super(ExcelRead, self).DEFAULT_SHEET) return self.__sheet.cell(row, col).value except BaseException as excp: raise UfException(Errors.UNKNOWN_ERROR, "Unknown Error in Excellib.readCell %s" % excp)
python
def readCell(self, row, col): ''' read a cell''' try: if self.__sheet is None: self.openSheet(super(ExcelRead, self).DEFAULT_SHEET) return self.__sheet.cell(row, col).value except BaseException as excp: raise UfException(Errors.UNKNOWN_ERROR, "Unknown Error in Excellib.readCell %s" % excp)
[ "def", "readCell", "(", "self", ",", "row", ",", "col", ")", ":", "try", ":", "if", "self", ".", "__sheet", "is", "None", ":", "self", ".", "openSheet", "(", "super", "(", "ExcelRead", ",", "self", ")", ".", "DEFAULT_SHEET", ")", "return", "self", ...
read a cell
[ "read", "a", "cell" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelLib.py#L207-L215
openvax/datacache
datacache/cache.py
Cache.delete_url
def delete_url(self, url): """ Delete local files downloaded from given URL """ # file may exist locally in compressed and decompressed states # delete both for decompress in [False, True]: key = (url, decompress) if key in self._local_paths: path = self._local_paths[key] remove(path) del self._local_paths[key] # possible that file was downloaded via the download module without # using the Cache object, this wouldn't end up in the local_paths # but should still be deleted path = self.local_path( url, decompress=decompress, download=False) if exists(path): remove(path)
python
def delete_url(self, url): """ Delete local files downloaded from given URL """ # file may exist locally in compressed and decompressed states # delete both for decompress in [False, True]: key = (url, decompress) if key in self._local_paths: path = self._local_paths[key] remove(path) del self._local_paths[key] # possible that file was downloaded via the download module without # using the Cache object, this wouldn't end up in the local_paths # but should still be deleted path = self.local_path( url, decompress=decompress, download=False) if exists(path): remove(path)
[ "def", "delete_url", "(", "self", ",", "url", ")", ":", "# file may exist locally in compressed and decompressed states", "# delete both", "for", "decompress", "in", "[", "False", ",", "True", "]", ":", "key", "=", "(", "url", ",", "decompress", ")", "if", "key"...
Delete local files downloaded from given URL
[ "Delete", "local", "files", "downloaded", "from", "given", "URL" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/cache.py#L36-L56
openvax/datacache
datacache/cache.py
Cache.exists
def exists(self, url, filename=None, decompress=False): """ Return True if a local file corresponding to these arguments exists. """ return download.file_exists( url, filename=filename, decompress=decompress, subdir=self.subdir)
python
def exists(self, url, filename=None, decompress=False): """ Return True if a local file corresponding to these arguments exists. """ return download.file_exists( url, filename=filename, decompress=decompress, subdir=self.subdir)
[ "def", "exists", "(", "self", ",", "url", ",", "filename", "=", "None", ",", "decompress", "=", "False", ")", ":", "return", "download", ".", "file_exists", "(", "url", ",", "filename", "=", "filename", ",", "decompress", "=", "decompress", ",", "subdir"...
Return True if a local file corresponding to these arguments exists.
[ "Return", "True", "if", "a", "local", "file", "corresponding", "to", "these", "arguments", "exists", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/cache.py#L63-L72
openvax/datacache
datacache/cache.py
Cache.fetch
def fetch( self, url, filename=None, decompress=False, force=False, timeout=None, use_wget_if_available=True): """ Return the local path to the downloaded copy of a given URL. Don't download the file again if it's already present, unless `force` is True. """ key = (url, decompress) if not force and key in self._local_paths: path = self._local_paths[key] if exists(path): return path else: del self._local_paths[key] path = download.fetch_file( url, filename=filename, decompress=decompress, subdir=self.subdir, force=force, timeout=timeout, use_wget_if_available=use_wget_if_available) self._local_paths[key] = path return path
python
def fetch( self, url, filename=None, decompress=False, force=False, timeout=None, use_wget_if_available=True): """ Return the local path to the downloaded copy of a given URL. Don't download the file again if it's already present, unless `force` is True. """ key = (url, decompress) if not force and key in self._local_paths: path = self._local_paths[key] if exists(path): return path else: del self._local_paths[key] path = download.fetch_file( url, filename=filename, decompress=decompress, subdir=self.subdir, force=force, timeout=timeout, use_wget_if_available=use_wget_if_available) self._local_paths[key] = path return path
[ "def", "fetch", "(", "self", ",", "url", ",", "filename", "=", "None", ",", "decompress", "=", "False", ",", "force", "=", "False", ",", "timeout", "=", "None", ",", "use_wget_if_available", "=", "True", ")", ":", "key", "=", "(", "url", ",", "decomp...
Return the local path to the downloaded copy of a given URL. Don't download the file again if it's already present, unless `force` is True.
[ "Return", "the", "local", "path", "to", "the", "downloaded", "copy", "of", "a", "given", "URL", ".", "Don", "t", "download", "the", "file", "again", "if", "it", "s", "already", "present", "unless", "force", "is", "True", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/cache.py#L74-L104
openvax/datacache
datacache/cache.py
Cache.local_filename
def local_filename( self, url=None, filename=None, decompress=False): """ What local filename will we use within the cache directory for the given URL/filename/decompress options. """ return common.build_local_filename(url, filename, decompress)
python
def local_filename( self, url=None, filename=None, decompress=False): """ What local filename will we use within the cache directory for the given URL/filename/decompress options. """ return common.build_local_filename(url, filename, decompress)
[ "def", "local_filename", "(", "self", ",", "url", "=", "None", ",", "filename", "=", "None", ",", "decompress", "=", "False", ")", ":", "return", "common", ".", "build_local_filename", "(", "url", ",", "filename", ",", "decompress", ")" ]
What local filename will we use within the cache directory for the given URL/filename/decompress options.
[ "What", "local", "filename", "will", "we", "use", "within", "the", "cache", "directory", "for", "the", "given", "URL", "/", "filename", "/", "decompress", "options", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/cache.py#L106-L115
openvax/datacache
datacache/cache.py
Cache.local_path
def local_path(self, url, filename=None, decompress=False, download=False): """ What will the full local path be if we download the given file? """ if download: return self.fetch(url=url, filename=filename, decompress=decompress) else: filename = self.local_filename(url, filename, decompress) return join(self.cache_directory_path, filename)
python
def local_path(self, url, filename=None, decompress=False, download=False): """ What will the full local path be if we download the given file? """ if download: return self.fetch(url=url, filename=filename, decompress=decompress) else: filename = self.local_filename(url, filename, decompress) return join(self.cache_directory_path, filename)
[ "def", "local_path", "(", "self", ",", "url", ",", "filename", "=", "None", ",", "decompress", "=", "False", ",", "download", "=", "False", ")", ":", "if", "download", ":", "return", "self", ".", "fetch", "(", "url", "=", "url", ",", "filename", "=",...
What will the full local path be if we download the given file?
[ "What", "will", "the", "full", "local", "path", "be", "if", "we", "download", "the", "given", "file?" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/cache.py#L117-L125
72squared/redpipe
redpipe/connections.py
connect_redis
def connect_redis(redis_client, name=None, transaction=False): """ Connect your redis-py instance to redpipe. Example: .. code:: python redpipe.connect_redis(redis.StrictRedis(), name='users') Do this during your application bootstrapping. You can also pass a redis-py-cluster instance to this method. .. code:: python redpipe.connect_redis(rediscluster.StrictRedisCluster(), name='users') You are allowed to pass in either the strict or regular instance. .. code:: python redpipe.connect_redis(redis.StrictRedis(), name='a') redpipe.connect_redis(redis.Redis(), name='b') redpipe.connect_redis(rediscluster.StrictRedisCluster(...), name='c') redpipe.connect_redis(rediscluster.RedisCluster(...), name='d') :param redis_client: :param name: nickname you want to give to your connection. :param transaction: :return: """ return ConnectionManager.connect_redis( redis_client=redis_client, name=name, transaction=transaction)
python
def connect_redis(redis_client, name=None, transaction=False): """ Connect your redis-py instance to redpipe. Example: .. code:: python redpipe.connect_redis(redis.StrictRedis(), name='users') Do this during your application bootstrapping. You can also pass a redis-py-cluster instance to this method. .. code:: python redpipe.connect_redis(rediscluster.StrictRedisCluster(), name='users') You are allowed to pass in either the strict or regular instance. .. code:: python redpipe.connect_redis(redis.StrictRedis(), name='a') redpipe.connect_redis(redis.Redis(), name='b') redpipe.connect_redis(rediscluster.StrictRedisCluster(...), name='c') redpipe.connect_redis(rediscluster.RedisCluster(...), name='d') :param redis_client: :param name: nickname you want to give to your connection. :param transaction: :return: """ return ConnectionManager.connect_redis( redis_client=redis_client, name=name, transaction=transaction)
[ "def", "connect_redis", "(", "redis_client", ",", "name", "=", "None", ",", "transaction", "=", "False", ")", ":", "return", "ConnectionManager", ".", "connect_redis", "(", "redis_client", "=", "redis_client", ",", "name", "=", "name", ",", "transaction", "=",...
Connect your redis-py instance to redpipe. Example: .. code:: python redpipe.connect_redis(redis.StrictRedis(), name='users') Do this during your application bootstrapping. You can also pass a redis-py-cluster instance to this method. .. code:: python redpipe.connect_redis(rediscluster.StrictRedisCluster(), name='users') You are allowed to pass in either the strict or regular instance. .. code:: python redpipe.connect_redis(redis.StrictRedis(), name='a') redpipe.connect_redis(redis.Redis(), name='b') redpipe.connect_redis(rediscluster.StrictRedisCluster(...), name='c') redpipe.connect_redis(rediscluster.RedisCluster(...), name='d') :param redis_client: :param name: nickname you want to give to your connection. :param transaction: :return:
[ "Connect", "your", "redis", "-", "py", "instance", "to", "redpipe", "." ]
train
https://github.com/72squared/redpipe/blob/e6ee518bc9f3e2fee323c8c53d08997799bd9b1b/redpipe/connections.py#L156-L191
72squared/redpipe
redpipe/connections.py
ConnectionManager.connect
def connect(cls, pipeline_method, name=None): """ Low level logic to bind a callable method to a name. Don't call this directly unless you know what you are doing. :param pipeline_method: callable :param name: str optional :return: None """ new_pool = pipeline_method().connection_pool try: if cls.get(name).connection_pool != new_pool: raise AlreadyConnected("can't change connection for %s" % name) except InvalidPipeline: pass cls.connections[name] = pipeline_method
python
def connect(cls, pipeline_method, name=None): """ Low level logic to bind a callable method to a name. Don't call this directly unless you know what you are doing. :param pipeline_method: callable :param name: str optional :return: None """ new_pool = pipeline_method().connection_pool try: if cls.get(name).connection_pool != new_pool: raise AlreadyConnected("can't change connection for %s" % name) except InvalidPipeline: pass cls.connections[name] = pipeline_method
[ "def", "connect", "(", "cls", ",", "pipeline_method", ",", "name", "=", "None", ")", ":", "new_pool", "=", "pipeline_method", "(", ")", ".", "connection_pool", "try", ":", "if", "cls", ".", "get", "(", "name", ")", ".", "connection_pool", "!=", "new_pool...
Low level logic to bind a callable method to a name. Don't call this directly unless you know what you are doing. :param pipeline_method: callable :param name: str optional :return: None
[ "Low", "level", "logic", "to", "bind", "a", "callable", "method", "to", "a", "name", ".", "Don", "t", "call", "this", "directly", "unless", "you", "know", "what", "you", "are", "doing", "." ]
train
https://github.com/72squared/redpipe/blob/e6ee518bc9f3e2fee323c8c53d08997799bd9b1b/redpipe/connections.py#L56-L72
72squared/redpipe
redpipe/connections.py
ConnectionManager.connect_redis
def connect_redis(cls, redis_client, name=None, transaction=False): """ Store the redis connection in our connector instance. Do this during your application bootstrapping. We call the pipeline method of the redis client. The ``redis_client`` can be either a redis or rediscluster client. We use the interface, not the actual class. That means we can handle either one identically. It doesn't matter if you pass in `Redis` or `StrictRedis`. the interface for direct redis commands will behave indentically. Keyspaces will work with either, but it presents the same interface that the Redis class does, not StrictRedis. The transaction flag is a boolean value we hold on to and pass to the invocation of something equivalent to: .. code-block:: python redis_client.pipeline(transaction=transation) Unlike redis-py, this flag defaults to False. You can configure it to always use the MULTI/EXEC flags, but I don't see much point. If you need transactional support I recommend using a LUA script. **RedPipe** is about improving network round-trip efficiency. :param redis_client: redis.StrictRedis() or redis.Redis() :param name: identifier for the connection, optional :param transaction: bool, defaults to False :return: None """ connection_pool = redis_client.connection_pool if connection_pool.connection_kwargs.get('decode_responses', False): raise InvalidPipeline('decode_responses set to True') def pipeline_method(): """ A closure wrapping the pipeline. :return: pipeline object """ return redis_client.pipeline(transaction=transaction) # set up the connection. cls.connect(pipeline_method=pipeline_method, name=name)
python
def connect_redis(cls, redis_client, name=None, transaction=False): """ Store the redis connection in our connector instance. Do this during your application bootstrapping. We call the pipeline method of the redis client. The ``redis_client`` can be either a redis or rediscluster client. We use the interface, not the actual class. That means we can handle either one identically. It doesn't matter if you pass in `Redis` or `StrictRedis`. the interface for direct redis commands will behave indentically. Keyspaces will work with either, but it presents the same interface that the Redis class does, not StrictRedis. The transaction flag is a boolean value we hold on to and pass to the invocation of something equivalent to: .. code-block:: python redis_client.pipeline(transaction=transation) Unlike redis-py, this flag defaults to False. You can configure it to always use the MULTI/EXEC flags, but I don't see much point. If you need transactional support I recommend using a LUA script. **RedPipe** is about improving network round-trip efficiency. :param redis_client: redis.StrictRedis() or redis.Redis() :param name: identifier for the connection, optional :param transaction: bool, defaults to False :return: None """ connection_pool = redis_client.connection_pool if connection_pool.connection_kwargs.get('decode_responses', False): raise InvalidPipeline('decode_responses set to True') def pipeline_method(): """ A closure wrapping the pipeline. :return: pipeline object """ return redis_client.pipeline(transaction=transaction) # set up the connection. cls.connect(pipeline_method=pipeline_method, name=name)
[ "def", "connect_redis", "(", "cls", ",", "redis_client", ",", "name", "=", "None", ",", "transaction", "=", "False", ")", ":", "connection_pool", "=", "redis_client", ".", "connection_pool", "if", "connection_pool", ".", "connection_kwargs", ".", "get", "(", "...
Store the redis connection in our connector instance. Do this during your application bootstrapping. We call the pipeline method of the redis client. The ``redis_client`` can be either a redis or rediscluster client. We use the interface, not the actual class. That means we can handle either one identically. It doesn't matter if you pass in `Redis` or `StrictRedis`. the interface for direct redis commands will behave indentically. Keyspaces will work with either, but it presents the same interface that the Redis class does, not StrictRedis. The transaction flag is a boolean value we hold on to and pass to the invocation of something equivalent to: .. code-block:: python redis_client.pipeline(transaction=transation) Unlike redis-py, this flag defaults to False. You can configure it to always use the MULTI/EXEC flags, but I don't see much point. If you need transactional support I recommend using a LUA script. **RedPipe** is about improving network round-trip efficiency. :param redis_client: redis.StrictRedis() or redis.Redis() :param name: identifier for the connection, optional :param transaction: bool, defaults to False :return: None
[ "Store", "the", "redis", "connection", "in", "our", "connector", "instance", "." ]
train
https://github.com/72squared/redpipe/blob/e6ee518bc9f3e2fee323c8c53d08997799bd9b1b/redpipe/connections.py#L75-L127
twisted/mantissa
xmantissa/fulltext.py
remoteIndexer1to2
def remoteIndexer1to2(oldIndexer): """ Previously external application code was responsible for adding a RemoteListener to a batch work source as a reliable listener. This precluded the possibility of the RemoteListener resetting itself unilaterally. With version 2, RemoteListener takes control of adding itself as a reliable listener and keeps track of the sources with which it is associated. This upgrader creates that tracking state. """ newIndexer = oldIndexer.upgradeVersion( oldIndexer.typeName, 1, 2, indexCount=oldIndexer.indexCount, installedOn=oldIndexer.installedOn, indexDirectory=oldIndexer.indexDirectory) listeners = newIndexer.store.query( batch._ReliableListener, batch._ReliableListener.listener == newIndexer) for listener in listeners: _IndexerInputSource( store=newIndexer.store, indexer=newIndexer, source=listener.processor) return newIndexer
python
def remoteIndexer1to2(oldIndexer): """ Previously external application code was responsible for adding a RemoteListener to a batch work source as a reliable listener. This precluded the possibility of the RemoteListener resetting itself unilaterally. With version 2, RemoteListener takes control of adding itself as a reliable listener and keeps track of the sources with which it is associated. This upgrader creates that tracking state. """ newIndexer = oldIndexer.upgradeVersion( oldIndexer.typeName, 1, 2, indexCount=oldIndexer.indexCount, installedOn=oldIndexer.installedOn, indexDirectory=oldIndexer.indexDirectory) listeners = newIndexer.store.query( batch._ReliableListener, batch._ReliableListener.listener == newIndexer) for listener in listeners: _IndexerInputSource( store=newIndexer.store, indexer=newIndexer, source=listener.processor) return newIndexer
[ "def", "remoteIndexer1to2", "(", "oldIndexer", ")", ":", "newIndexer", "=", "oldIndexer", ".", "upgradeVersion", "(", "oldIndexer", ".", "typeName", ",", "1", ",", "2", ",", "indexCount", "=", "oldIndexer", ".", "indexCount", ",", "installedOn", "=", "oldIndex...
Previously external application code was responsible for adding a RemoteListener to a batch work source as a reliable listener. This precluded the possibility of the RemoteListener resetting itself unilaterally. With version 2, RemoteListener takes control of adding itself as a reliable listener and keeps track of the sources with which it is associated. This upgrader creates that tracking state.
[ "Previously", "external", "application", "code", "was", "responsible", "for", "adding", "a", "RemoteListener", "to", "a", "batch", "work", "source", "as", "a", "reliable", "listener", ".", "This", "precluded", "the", "possibility", "of", "the", "RemoteListener", ...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L660-L685
twisted/mantissa
xmantissa/fulltext.py
remoteIndexer2to3
def remoteIndexer2to3(oldIndexer): """ The documentType keyword was added to all indexable items. Indexes need to be regenerated for this to take effect. Also, PyLucene no longer stores the text of messages it indexes, so deleting and re-creating the indexes will make them much smaller. """ newIndexer = oldIndexer.upgradeVersion( oldIndexer.typeName, 2, 3, indexCount=oldIndexer.indexCount, installedOn=oldIndexer.installedOn, indexDirectory=oldIndexer.indexDirectory) # the 3->4 upgrader for PyLuceneIndexer calls reset(), so don't do it # here. also, it won't work because it's a DummyItem if oldIndexer.typeName != PyLuceneIndexer.typeName: newIndexer.reset() return newIndexer
python
def remoteIndexer2to3(oldIndexer): """ The documentType keyword was added to all indexable items. Indexes need to be regenerated for this to take effect. Also, PyLucene no longer stores the text of messages it indexes, so deleting and re-creating the indexes will make them much smaller. """ newIndexer = oldIndexer.upgradeVersion( oldIndexer.typeName, 2, 3, indexCount=oldIndexer.indexCount, installedOn=oldIndexer.installedOn, indexDirectory=oldIndexer.indexDirectory) # the 3->4 upgrader for PyLuceneIndexer calls reset(), so don't do it # here. also, it won't work because it's a DummyItem if oldIndexer.typeName != PyLuceneIndexer.typeName: newIndexer.reset() return newIndexer
[ "def", "remoteIndexer2to3", "(", "oldIndexer", ")", ":", "newIndexer", "=", "oldIndexer", ".", "upgradeVersion", "(", "oldIndexer", ".", "typeName", ",", "2", ",", "3", ",", "indexCount", "=", "oldIndexer", ".", "indexCount", ",", "installedOn", "=", "oldIndex...
The documentType keyword was added to all indexable items. Indexes need to be regenerated for this to take effect. Also, PyLucene no longer stores the text of messages it indexes, so deleting and re-creating the indexes will make them much smaller.
[ "The", "documentType", "keyword", "was", "added", "to", "all", "indexable", "items", ".", "Indexes", "need", "to", "be", "regenerated", "for", "this", "to", "take", "effect", ".", "Also", "PyLucene", "no", "longer", "stores", "the", "text", "of", "messages",...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L687-L703
twisted/mantissa
xmantissa/fulltext.py
pyLuceneIndexer4to5
def pyLuceneIndexer4to5(old): """ Copy attributes, reset index due because information about deleted documents has been lost, and power up for IFulltextIndexer so other code can find this item. """ new = old.upgradeVersion(PyLuceneIndexer.typeName, 4, 5, indexCount=old.indexCount, installedOn=old.installedOn, indexDirectory=old.indexDirectory) new.reset() new.store.powerUp(new, ixmantissa.IFulltextIndexer) return new
python
def pyLuceneIndexer4to5(old): """ Copy attributes, reset index due because information about deleted documents has been lost, and power up for IFulltextIndexer so other code can find this item. """ new = old.upgradeVersion(PyLuceneIndexer.typeName, 4, 5, indexCount=old.indexCount, installedOn=old.installedOn, indexDirectory=old.indexDirectory) new.reset() new.store.powerUp(new, ixmantissa.IFulltextIndexer) return new
[ "def", "pyLuceneIndexer4to5", "(", "old", ")", ":", "new", "=", "old", ".", "upgradeVersion", "(", "PyLuceneIndexer", ".", "typeName", ",", "4", ",", "5", ",", "indexCount", "=", "old", ".", "indexCount", ",", "installedOn", "=", "old", ".", "installedOn",...
Copy attributes, reset index due because information about deleted documents has been lost, and power up for IFulltextIndexer so other code can find this item.
[ "Copy", "attributes", "reset", "index", "due", "because", "information", "about", "deleted", "documents", "has", "been", "lost", "and", "power", "up", "for", "IFulltextIndexer", "so", "other", "code", "can", "find", "this", "item", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L735-L747
twisted/mantissa
xmantissa/fulltext.py
RemoteIndexer.addSource
def addSource(self, itemSource): """ Add the given L{IBatchProcessor} as a source of input for this indexer. """ _IndexerInputSource(store=self.store, indexer=self, source=itemSource) itemSource.addReliableListener(self, style=iaxiom.REMOTE)
python
def addSource(self, itemSource): """ Add the given L{IBatchProcessor} as a source of input for this indexer. """ _IndexerInputSource(store=self.store, indexer=self, source=itemSource) itemSource.addReliableListener(self, style=iaxiom.REMOTE)
[ "def", "addSource", "(", "self", ",", "itemSource", ")", ":", "_IndexerInputSource", "(", "store", "=", "self", ".", "store", ",", "indexer", "=", "self", ",", "source", "=", "itemSource", ")", "itemSource", ".", "addReliableListener", "(", "self", ",", "s...
Add the given L{IBatchProcessor} as a source of input for this indexer.
[ "Add", "the", "given", "L", "{", "IBatchProcessor", "}", "as", "a", "source", "of", "input", "for", "this", "indexer", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L122-L127
twisted/mantissa
xmantissa/fulltext.py
RemoteIndexer.reset
def reset(self): """ Process everything all over again. """ self.indexCount = 0 indexDir = self.store.newDirectory(self.indexDirectory) if indexDir.exists(): indexDir.remove() for src in self.getSources(): src.removeReliableListener(self) src.addReliableListener(self, style=iaxiom.REMOTE)
python
def reset(self): """ Process everything all over again. """ self.indexCount = 0 indexDir = self.store.newDirectory(self.indexDirectory) if indexDir.exists(): indexDir.remove() for src in self.getSources(): src.removeReliableListener(self) src.addReliableListener(self, style=iaxiom.REMOTE)
[ "def", "reset", "(", "self", ")", ":", "self", ".", "indexCount", "=", "0", "indexDir", "=", "self", ".", "store", ".", "newDirectory", "(", "self", ".", "indexDirectory", ")", "if", "indexDir", ".", "exists", "(", ")", ":", "indexDir", ".", "remove", ...
Process everything all over again.
[ "Process", "everything", "all", "over", "again", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L134-L144
twisted/mantissa
xmantissa/fulltext.py
RemoteIndexer._flush
def _flush(self): """ Deal with pending result-affecting things. This should always be called before issuing a search. """ remove = self.store.query(_RemoveDocument) documentIdentifiers = list(remove.getColumn("documentIdentifier")) if VERBOSE: log.msg("%s/%d removing %r" % (self.store, self.storeID, documentIdentifiers)) reader = self.openReadIndex() map(reader.remove, documentIdentifiers) reader.close() remove.deleteFromStore()
python
def _flush(self): """ Deal with pending result-affecting things. This should always be called before issuing a search. """ remove = self.store.query(_RemoveDocument) documentIdentifiers = list(remove.getColumn("documentIdentifier")) if VERBOSE: log.msg("%s/%d removing %r" % (self.store, self.storeID, documentIdentifiers)) reader = self.openReadIndex() map(reader.remove, documentIdentifiers) reader.close() remove.deleteFromStore()
[ "def", "_flush", "(", "self", ")", ":", "remove", "=", "self", ".", "store", ".", "query", "(", "_RemoveDocument", ")", "documentIdentifiers", "=", "list", "(", "remove", ".", "getColumn", "(", "\"documentIdentifier\"", ")", ")", "if", "VERBOSE", ":", "log...
Deal with pending result-affecting things. This should always be called before issuing a search.
[ "Deal", "with", "pending", "result", "-", "affecting", "things", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L185-L198
twisted/mantissa
xmantissa/fulltext.py
PyLuceneIndexer.reset
def reset(self): """ In addition to the behavior of the superclass, delete any dangling lockfiles which may prevent this index from being opened. With the tested version of PyLucene (something pre-2.0), this appears to not actually be necessary: deleting the entire index directory but leaving the lockfile in place seems to still allow the index to be recreated (perhaps because when the directory does not exist, we pass True as the create flag when opening the FSDirectory, I am uncertain). Nevertheless, do this anyway for now. """ RemoteIndexer.reset(self) if hasattr(self, '_lockfile'): os.remove(self._lockfile) del self._lockfile
python
def reset(self): """ In addition to the behavior of the superclass, delete any dangling lockfiles which may prevent this index from being opened. With the tested version of PyLucene (something pre-2.0), this appears to not actually be necessary: deleting the entire index directory but leaving the lockfile in place seems to still allow the index to be recreated (perhaps because when the directory does not exist, we pass True as the create flag when opening the FSDirectory, I am uncertain). Nevertheless, do this anyway for now. """ RemoteIndexer.reset(self) if hasattr(self, '_lockfile'): os.remove(self._lockfile) del self._lockfile
[ "def", "reset", "(", "self", ")", ":", "RemoteIndexer", ".", "reset", "(", "self", ")", "if", "hasattr", "(", "self", ",", "'_lockfile'", ")", ":", "os", ".", "remove", "(", "self", ".", "_lockfile", ")", "del", "self", ".", "_lockfile" ]
In addition to the behavior of the superclass, delete any dangling lockfiles which may prevent this index from being opened. With the tested version of PyLucene (something pre-2.0), this appears to not actually be necessary: deleting the entire index directory but leaving the lockfile in place seems to still allow the index to be recreated (perhaps because when the directory does not exist, we pass True as the create flag when opening the FSDirectory, I am uncertain). Nevertheless, do this anyway for now.
[ "In", "addition", "to", "the", "behavior", "of", "the", "superclass", "delete", "any", "dangling", "lockfiles", "which", "may", "prevent", "this", "index", "from", "being", "opened", ".", "With", "the", "tested", "version", "of", "PyLucene", "(", "something", ...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L592-L606
twisted/mantissa
xmantissa/fulltext.py
_SQLiteIndex.add
def add(self, document): """ Add a document to the database. """ docid = int(document.uniqueIdentifier()) text = u' '.join(document.textParts()) self.store.executeSQL(self.addSQL, (docid, text))
python
def add(self, document): """ Add a document to the database. """ docid = int(document.uniqueIdentifier()) text = u' '.join(document.textParts()) self.store.executeSQL(self.addSQL, (docid, text))
[ "def", "add", "(", "self", ",", "document", ")", ":", "docid", "=", "int", "(", "document", ".", "uniqueIdentifier", "(", ")", ")", "text", "=", "u' '", ".", "join", "(", "document", ".", "textParts", "(", ")", ")", "self", ".", "store", ".", "exec...
Add a document to the database.
[ "Add", "a", "document", "to", "the", "database", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L787-L794
twisted/mantissa
xmantissa/fulltext.py
_SQLiteIndex.remove
def remove(self, docid): """ Remove a document from the database. """ docid = int(docid) self.store.executeSQL(self.removeSQL, (docid,))
python
def remove(self, docid): """ Remove a document from the database. """ docid = int(docid) self.store.executeSQL(self.removeSQL, (docid,))
[ "def", "remove", "(", "self", ",", "docid", ")", ":", "docid", "=", "int", "(", "docid", ")", "self", ".", "store", ".", "executeSQL", "(", "self", ".", "removeSQL", ",", "(", "docid", ",", ")", ")" ]
Remove a document from the database.
[ "Remove", "a", "document", "from", "the", "database", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L797-L802
twisted/mantissa
xmantissa/fulltext.py
_SQLiteIndex.search
def search(self, term, keywords=None, sortAscending=True): """ Search the database. """ if sortAscending: direction = 'ASC' else: direction = 'DESC' return [_SQLiteResultWrapper(r[0]) for r in self.store.querySQL(self.searchSQL % (direction,), (term,))]
python
def search(self, term, keywords=None, sortAscending=True): """ Search the database. """ if sortAscending: direction = 'ASC' else: direction = 'DESC' return [_SQLiteResultWrapper(r[0]) for r in self.store.querySQL(self.searchSQL % (direction,), (term,))]
[ "def", "search", "(", "self", ",", "term", ",", "keywords", "=", "None", ",", "sortAscending", "=", "True", ")", ":", "if", "sortAscending", ":", "direction", "=", "'ASC'", "else", ":", "direction", "=", "'DESC'", "return", "[", "_SQLiteResultWrapper", "("...
Search the database.
[ "Search", "the", "database", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L805-L815
twisted/mantissa
xmantissa/fulltext.py
SQLiteIndexer._getStore
def _getStore(self): """ Get the Store used for FTS. If it does not exist, it is created and initialised. """ storeDir = self.store.newDirectory(self.indexDirectory) if not storeDir.exists(): store = Store(storeDir) self._initStore(store) return store else: return Store(storeDir)
python
def _getStore(self): """ Get the Store used for FTS. If it does not exist, it is created and initialised. """ storeDir = self.store.newDirectory(self.indexDirectory) if not storeDir.exists(): store = Store(storeDir) self._initStore(store) return store else: return Store(storeDir)
[ "def", "_getStore", "(", "self", ")", ":", "storeDir", "=", "self", ".", "store", ".", "newDirectory", "(", "self", ".", "indexDirectory", ")", "if", "not", "storeDir", ".", "exists", "(", ")", ":", "store", "=", "Store", "(", "storeDir", ")", "self", ...
Get the Store used for FTS. If it does not exist, it is created and initialised.
[ "Get", "the", "Store", "used", "for", "FTS", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/fulltext.py#L835-L847
richardliaw/track
track/log.py
init
def init(track_log_handler): """ (Re)initialize track's file handler for track package logger. Adds a stdout-printing handler automatically. """ logger = logging.getLogger(__package__) # TODO (just document prominently) # assume only one trial can run at once right now # multi-concurrent-trial support will require complex filter logic # based on the currently-running trial (maybe we shouldn't allow multiple # trials on different python threads, that's dumb) to_rm = [h for h in logger.handlers if isinstance(h, TrackLogHandler)] for h in to_rm: logger.removeHandler(h) if not any(isinstance(h, StdoutHandler) for h in logger.handlers): handler = StdoutHandler() handler.setFormatter(_FORMATTER) logger.addHandler(handler) track_log_handler.setFormatter(_FORMATTER) logger.addHandler(track_log_handler) logger.propagate = False logger.setLevel(logging.DEBUG)
python
def init(track_log_handler): """ (Re)initialize track's file handler for track package logger. Adds a stdout-printing handler automatically. """ logger = logging.getLogger(__package__) # TODO (just document prominently) # assume only one trial can run at once right now # multi-concurrent-trial support will require complex filter logic # based on the currently-running trial (maybe we shouldn't allow multiple # trials on different python threads, that's dumb) to_rm = [h for h in logger.handlers if isinstance(h, TrackLogHandler)] for h in to_rm: logger.removeHandler(h) if not any(isinstance(h, StdoutHandler) for h in logger.handlers): handler = StdoutHandler() handler.setFormatter(_FORMATTER) logger.addHandler(handler) track_log_handler.setFormatter(_FORMATTER) logger.addHandler(track_log_handler) logger.propagate = False logger.setLevel(logging.DEBUG)
[ "def", "init", "(", "track_log_handler", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "__package__", ")", "# TODO (just document prominently)", "# assume only one trial can run at once right now", "# multi-concurrent-trial support will require complex filter logic", "...
(Re)initialize track's file handler for track package logger. Adds a stdout-printing handler automatically.
[ "(", "Re", ")", "initialize", "track", "s", "file", "handler", "for", "track", "package", "logger", "." ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/log.py#L30-L57
richardliaw/track
track/log.py
debug
def debug(s, *args): """debug(s, x1, ..., xn) logs s.format(x1, ..., xn).""" # Get the path name and line number of the function which called us. previous_frame = inspect.currentframe().f_back try: pathname, lineno, _, _, _ = inspect.getframeinfo(previous_frame) # if path is in cwd, simplify it cwd = os.path.abspath(os.getcwd()) pathname = os.path.abspath(pathname) if os.path.commonprefix([cwd, pathname]) == cwd: pathname = os.path.relpath(pathname, cwd) except Exception: # pylint: disable=broad-except pathname = '<UNKNOWN-FILE>.py' lineno = 0 if _FORMATTER: # log could have not been initialized. _FORMATTER.pathname = pathname _FORMATTER.lineno = lineno logger = logging.getLogger(__package__) logger.debug(s.format(*args))
python
def debug(s, *args): """debug(s, x1, ..., xn) logs s.format(x1, ..., xn).""" # Get the path name and line number of the function which called us. previous_frame = inspect.currentframe().f_back try: pathname, lineno, _, _, _ = inspect.getframeinfo(previous_frame) # if path is in cwd, simplify it cwd = os.path.abspath(os.getcwd()) pathname = os.path.abspath(pathname) if os.path.commonprefix([cwd, pathname]) == cwd: pathname = os.path.relpath(pathname, cwd) except Exception: # pylint: disable=broad-except pathname = '<UNKNOWN-FILE>.py' lineno = 0 if _FORMATTER: # log could have not been initialized. _FORMATTER.pathname = pathname _FORMATTER.lineno = lineno logger = logging.getLogger(__package__) logger.debug(s.format(*args))
[ "def", "debug", "(", "s", ",", "*", "args", ")", ":", "# Get the path name and line number of the function which called us.", "previous_frame", "=", "inspect", ".", "currentframe", "(", ")", ".", "f_back", "try", ":", "pathname", ",", "lineno", ",", "_", ",", "_...
debug(s, x1, ..., xn) logs s.format(x1, ..., xn).
[ "debug", "(", "s", "x1", "...", "xn", ")", "logs", "s", ".", "format", "(", "x1", "...", "xn", ")", "." ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/log.py#L59-L77
p3trus/slave
slave/types.py
SingleType.dump
def dump(self, value): """Dumps the value to string. :returns: Returns the stringified version of the value. :raises: TypeError, ValueError """ value = self.__convert__(value) self.__validate__(value) return self.__serialize__(value)
python
def dump(self, value): """Dumps the value to string. :returns: Returns the stringified version of the value. :raises: TypeError, ValueError """ value = self.__convert__(value) self.__validate__(value) return self.__serialize__(value)
[ "def", "dump", "(", "self", ",", "value", ")", ":", "value", "=", "self", ".", "__convert__", "(", "value", ")", "self", ".", "__validate__", "(", "value", ")", "return", "self", ".", "__serialize__", "(", "value", ")" ]
Dumps the value to string. :returns: Returns the stringified version of the value. :raises: TypeError, ValueError
[ "Dumps", "the", "value", "to", "string", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/types.py#L114-L123
p3trus/slave
slave/types.py
Integer.simulate
def simulate(self): """Generates a random integer in the available range.""" min_ = (-sys.maxsize - 1) if self._min is None else self._min max_ = sys.maxsize if self._max is None else self._max return random.randint(min_, max_)
python
def simulate(self): """Generates a random integer in the available range.""" min_ = (-sys.maxsize - 1) if self._min is None else self._min max_ = sys.maxsize if self._max is None else self._max return random.randint(min_, max_)
[ "def", "simulate", "(", "self", ")", ":", "min_", "=", "(", "-", "sys", ".", "maxsize", "-", "1", ")", "if", "self", ".", "_min", "is", "None", "else", "self", ".", "_min", "max_", "=", "sys", ".", "maxsize", "if", "self", ".", "_max", "is", "N...
Generates a random integer in the available range.
[ "Generates", "a", "random", "integer", "in", "the", "available", "range", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/types.py#L191-L195
p3trus/slave
slave/types.py
String.simulate
def simulate(self): """Returns a randomly constructed string. Simulate randomly constructs a string with a length between min and max. If min is not present, a minimum length of 1 is assumed, if max is not present a maximum length of 10 is used. """ min_ = 1 if self._min is None else self._min max_ = 10 if self._max is None else self._max n = min_ if (min_ >= max_) else random.randint(min_, max_) chars = string.ascii_letters + string.digits return ''.join(random.choice(chars) for x in range(n))
python
def simulate(self): """Returns a randomly constructed string. Simulate randomly constructs a string with a length between min and max. If min is not present, a minimum length of 1 is assumed, if max is not present a maximum length of 10 is used. """ min_ = 1 if self._min is None else self._min max_ = 10 if self._max is None else self._max n = min_ if (min_ >= max_) else random.randint(min_, max_) chars = string.ascii_letters + string.digits return ''.join(random.choice(chars) for x in range(n))
[ "def", "simulate", "(", "self", ")", ":", "min_", "=", "1", "if", "self", ".", "_min", "is", "None", "else", "self", ".", "_min", "max_", "=", "10", "if", "self", ".", "_max", "is", "None", "else", "self", ".", "_max", "n", "=", "min_", "if", "...
Returns a randomly constructed string. Simulate randomly constructs a string with a length between min and max. If min is not present, a minimum length of 1 is assumed, if max is not present a maximum length of 10 is used.
[ "Returns", "a", "randomly", "constructed", "string", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/types.py#L233-L244
p3trus/slave
slave/types.py
Stream.simulate
def simulate(self): """Simulates a stream of types.""" # Simulates zero to 10 types return [t.simulate() for t in itertools.islice(self, random.choice(range(10)))]
python
def simulate(self): """Simulates a stream of types.""" # Simulates zero to 10 types return [t.simulate() for t in itertools.islice(self, random.choice(range(10)))]
[ "def", "simulate", "(", "self", ")", ":", "# Simulates zero to 10 types", "return", "[", "t", ".", "simulate", "(", ")", "for", "t", "in", "itertools", ".", "islice", "(", "self", ",", "random", ".", "choice", "(", "range", "(", "10", ")", ")", ")", ...
Simulates a stream of types.
[ "Simulates", "a", "stream", "of", "types", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/types.py#L381-L384
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.__findRange
def __findRange(self, excelLib, start, end): ''' return low and high as excel range ''' inc = 1 low = 0 high = 0 dates = excelLib.readCol(0, 1) for index, date in enumerate(dates): if int(start) <= int(date): low = index + inc break if low: for index, date in reversed(list(enumerate(dates))): if int(date) <= int(end): high = index + inc break return low, high
python
def __findRange(self, excelLib, start, end): ''' return low and high as excel range ''' inc = 1 low = 0 high = 0 dates = excelLib.readCol(0, 1) for index, date in enumerate(dates): if int(start) <= int(date): low = index + inc break if low: for index, date in reversed(list(enumerate(dates))): if int(date) <= int(end): high = index + inc break return low, high
[ "def", "__findRange", "(", "self", ",", "excelLib", ",", "start", ",", "end", ")", ":", "inc", "=", "1", "low", "=", "0", "high", "=", "0", "dates", "=", "excelLib", ".", "readCol", "(", "0", ",", "1", ")", "for", "index", ",", "date", "in", "e...
return low and high as excel range
[ "return", "low", "and", "high", "as", "excel", "range" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L29-L47
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.__readData
def __readData(self, targetPath, start, end): ''' read data ''' ret = [] if not path.exists(targetPath): LOG.error("Target file doesn't exist: %s" % path.abspath(targetPath) ) return ret with ExcelLib(fileName = targetPath, mode = ExcelLib.READ_MODE) as excel: low, high = self.__findRange(excel, start, end) for index in range(low, high + 1): ret.append(excel.readRow(index)) return ret
python
def __readData(self, targetPath, start, end): ''' read data ''' ret = [] if not path.exists(targetPath): LOG.error("Target file doesn't exist: %s" % path.abspath(targetPath) ) return ret with ExcelLib(fileName = targetPath, mode = ExcelLib.READ_MODE) as excel: low, high = self.__findRange(excel, start, end) for index in range(low, high + 1): ret.append(excel.readRow(index)) return ret
[ "def", "__readData", "(", "self", ",", "targetPath", ",", "start", ",", "end", ")", ":", "ret", "=", "[", "]", "if", "not", "path", ".", "exists", "(", "targetPath", ")", ":", "LOG", ".", "error", "(", "\"Target file doesn't exist: %s\"", "%", "path", ...
read data
[ "read", "data" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L49-L62
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.__writeData
def __writeData(self, targetPath, fields, rows): ''' write data ''' if path.exists(targetPath): LOG.error("Target file exists: %s" % path.abspath(targetPath) ) raise UfException(Errors.FILE_EXIST, "can't write to a existing file") #because xlwt doesn't support it with ExcelLib(fileName = targetPath, mode = ExcelLib.WRITE_MODE) as excel: excel.writeRow(0, fields) for index, row in enumerate(rows): excel.writeRow(index+1, row)
python
def __writeData(self, targetPath, fields, rows): ''' write data ''' if path.exists(targetPath): LOG.error("Target file exists: %s" % path.abspath(targetPath) ) raise UfException(Errors.FILE_EXIST, "can't write to a existing file") #because xlwt doesn't support it with ExcelLib(fileName = targetPath, mode = ExcelLib.WRITE_MODE) as excel: excel.writeRow(0, fields) for index, row in enumerate(rows): excel.writeRow(index+1, row)
[ "def", "__writeData", "(", "self", ",", "targetPath", ",", "fields", ",", "rows", ")", ":", "if", "path", ".", "exists", "(", "targetPath", ")", ":", "LOG", ".", "error", "(", "\"Target file exists: %s\"", "%", "path", ".", "abspath", "(", "targetPath", ...
write data
[ "write", "data" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L64-L73
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.readQuotes
def readQuotes(self, start, end): ''' read quotes ''' quotes = self.__readData(self.targetPath(ExcelDAM.QUOTE), start, end) return [Quote(*quote) for quote in quotes]
python
def readQuotes(self, start, end): ''' read quotes ''' quotes = self.__readData(self.targetPath(ExcelDAM.QUOTE), start, end) return [Quote(*quote) for quote in quotes]
[ "def", "readQuotes", "(", "self", ",", "start", ",", "end", ")", ":", "quotes", "=", "self", ".", "__readData", "(", "self", ".", "targetPath", "(", "ExcelDAM", ".", "QUOTE", ")", ",", "start", ",", "end", ")", "return", "[", "Quote", "(", "*", "qu...
read quotes
[ "read", "quotes" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L75-L78
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.writeQuotes
def writeQuotes(self, quotes): ''' write quotes ''' self.__writeData(self.targetPath(ExcelDAM.QUOTE), QUOTE_FIELDS, [[getattr(quote, field) for field in QUOTE_FIELDS] for quote in quotes])
python
def writeQuotes(self, quotes): ''' write quotes ''' self.__writeData(self.targetPath(ExcelDAM.QUOTE), QUOTE_FIELDS, [[getattr(quote, field) for field in QUOTE_FIELDS] for quote in quotes])
[ "def", "writeQuotes", "(", "self", ",", "quotes", ")", ":", "self", ".", "__writeData", "(", "self", ".", "targetPath", "(", "ExcelDAM", ".", "QUOTE", ")", ",", "QUOTE_FIELDS", ",", "[", "[", "getattr", "(", "quote", ",", "field", ")", "for", "field", ...
write quotes
[ "write", "quotes" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L80-L84
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.readTicks
def readTicks(self, start, end): ''' read ticks ''' ticks = self.__readData(self.targetPath(ExcelDAM.TICK), start, end) return [Tick(*tick) for tick in ticks]
python
def readTicks(self, start, end): ''' read ticks ''' ticks = self.__readData(self.targetPath(ExcelDAM.TICK), start, end) return [Tick(*tick) for tick in ticks]
[ "def", "readTicks", "(", "self", ",", "start", ",", "end", ")", ":", "ticks", "=", "self", ".", "__readData", "(", "self", ".", "targetPath", "(", "ExcelDAM", ".", "TICK", ")", ",", "start", ",", "end", ")", "return", "[", "Tick", "(", "*", "tick",...
read ticks
[ "read", "ticks" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L86-L89
llazzaro/analyzerdam
analyzerdam/excelDAM.py
ExcelDAM.writeTicks
def writeTicks(self, ticks): ''' read quotes ''' self.__writeData(self.targetPath(ExcelDAM.TICK), TICK_FIELDS, [[getattr(tick, field) for field in TICK_FIELDS] for tick in ticks])
python
def writeTicks(self, ticks): ''' read quotes ''' self.__writeData(self.targetPath(ExcelDAM.TICK), TICK_FIELDS, [[getattr(tick, field) for field in TICK_FIELDS] for tick in ticks])
[ "def", "writeTicks", "(", "self", ",", "ticks", ")", ":", "self", ".", "__writeData", "(", "self", ".", "targetPath", "(", "ExcelDAM", ".", "TICK", ")", ",", "TICK_FIELDS", ",", "[", "[", "getattr", "(", "tick", ",", "field", ")", "for", "field", "in...
read quotes
[ "read", "quotes" ]
train
https://github.com/llazzaro/analyzerdam/blob/c5bc7483dae23bd2e14bbf36147b7a43a0067bc0/analyzerdam/excelDAM.py#L91-L95
p3trus/slave
slave/srs/sr850.py
SR850.snap
def snap(self, *args): """Records multiple values at once. It takes two to six arguments specifying which values should be recorded together. Valid arguments are 'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3', 'aux4', 'frequency', 'trace1', 'trace2', 'trace3' and 'trace4'. snap is faster since it avoids communication overhead. 'x' and 'y' are recorded together, as well as 'r' and 'theta'. Between these pairs, there is a delay of approximately 10 us. 'aux1', 'aux2', 'aux3' and 'aux4' have am uncertainty of up to 32 us. It takes at least 40 ms or a period to calculate the frequency. E.g.:: lockin.snap('x', 'theta', 'trace3') """ length = len(args) if not 2 <= length <= 6: msg = 'snap takes 2 to 6 arguments, {0} given.'.format(length) raise TypeError(msg) # The program data type. param = Enum( 'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3', 'aux4', 'frequency', 'trace1', 'trace2', 'trace3', 'trace4' ) # construct command, cmd = 'SNAP?', (Float,) * length, (param, ) * length return self._ask(cmd, *args)
python
def snap(self, *args): """Records multiple values at once. It takes two to six arguments specifying which values should be recorded together. Valid arguments are 'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3', 'aux4', 'frequency', 'trace1', 'trace2', 'trace3' and 'trace4'. snap is faster since it avoids communication overhead. 'x' and 'y' are recorded together, as well as 'r' and 'theta'. Between these pairs, there is a delay of approximately 10 us. 'aux1', 'aux2', 'aux3' and 'aux4' have am uncertainty of up to 32 us. It takes at least 40 ms or a period to calculate the frequency. E.g.:: lockin.snap('x', 'theta', 'trace3') """ length = len(args) if not 2 <= length <= 6: msg = 'snap takes 2 to 6 arguments, {0} given.'.format(length) raise TypeError(msg) # The program data type. param = Enum( 'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3', 'aux4', 'frequency', 'trace1', 'trace2', 'trace3', 'trace4' ) # construct command, cmd = 'SNAP?', (Float,) * length, (param, ) * length return self._ask(cmd, *args)
[ "def", "snap", "(", "self", ",", "*", "args", ")", ":", "length", "=", "len", "(", "args", ")", "if", "not", "2", "<=", "length", "<=", "6", ":", "msg", "=", "'snap takes 2 to 6 arguments, {0} given.'", ".", "format", "(", "length", ")", "raise", "Type...
Records multiple values at once. It takes two to six arguments specifying which values should be recorded together. Valid arguments are 'x', 'y', 'r', 'theta', 'aux1', 'aux2', 'aux3', 'aux4', 'frequency', 'trace1', 'trace2', 'trace3' and 'trace4'. snap is faster since it avoids communication overhead. 'x' and 'y' are recorded together, as well as 'r' and 'theta'. Between these pairs, there is a delay of approximately 10 us. 'aux1', 'aux2', 'aux3' and 'aux4' have am uncertainty of up to 32 us. It takes at least 40 ms or a period to calculate the frequency. E.g.:: lockin.snap('x', 'theta', 'trace3')
[ "Records", "multiple", "values", "at", "once", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L698-L728
p3trus/slave
slave/srs/sr850.py
SR850.save
def save(self, mode='all'): """Saves to the file specified by :attr:`~SR850.filename`. :param mode: Defines what to save. ======= ================================================ Value Description ======= ================================================ 'all' Saves the active display's data trace, the trace definition and the instrument state. 'data' Saves the active display's data trace. 'state' Saves the instrument state. ======= ================================================ """ if mode == 'all': self._write('SDAT') elif mode == 'data': self._write('SASC') elif mode=='state': self._write('SSET') else: raise ValueError('Invalid save mode.')
python
def save(self, mode='all'): """Saves to the file specified by :attr:`~SR850.filename`. :param mode: Defines what to save. ======= ================================================ Value Description ======= ================================================ 'all' Saves the active display's data trace, the trace definition and the instrument state. 'data' Saves the active display's data trace. 'state' Saves the instrument state. ======= ================================================ """ if mode == 'all': self._write('SDAT') elif mode == 'data': self._write('SASC') elif mode=='state': self._write('SSET') else: raise ValueError('Invalid save mode.')
[ "def", "save", "(", "self", ",", "mode", "=", "'all'", ")", ":", "if", "mode", "==", "'all'", ":", "self", ".", "_write", "(", "'SDAT'", ")", "elif", "mode", "==", "'data'", ":", "self", ".", "_write", "(", "'SASC'", ")", "elif", "mode", "==", "'...
Saves to the file specified by :attr:`~SR850.filename`. :param mode: Defines what to save. ======= ================================================ Value Description ======= ================================================ 'all' Saves the active display's data trace, the trace definition and the instrument state. 'data' Saves the active display's data trace. 'state' Saves the instrument state. ======= ================================================
[ "Saves", "to", "the", "file", "specified", "by", ":", "attr", ":", "~SR850", ".", "filename", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L730-L752
p3trus/slave
slave/srs/sr850.py
SR850.recall
def recall(self, mode='all'): """Recalls from the file specified by :attr:`~SR850.filename`. :param mode: Specifies the recall mode. ======= ================================================== Value Description ======= ================================================== 'all' Recalls the active display's data trace, the trace definition and the instrument state. 'state' Recalls the instrument state. ======= ================================================== """ if mode == 'all': self._write('RDAT') elif mode == 'state': self._write('RSET') else: raise ValueError('Invalid recall mode.')
python
def recall(self, mode='all'): """Recalls from the file specified by :attr:`~SR850.filename`. :param mode: Specifies the recall mode. ======= ================================================== Value Description ======= ================================================== 'all' Recalls the active display's data trace, the trace definition and the instrument state. 'state' Recalls the instrument state. ======= ================================================== """ if mode == 'all': self._write('RDAT') elif mode == 'state': self._write('RSET') else: raise ValueError('Invalid recall mode.')
[ "def", "recall", "(", "self", ",", "mode", "=", "'all'", ")", ":", "if", "mode", "==", "'all'", ":", "self", ".", "_write", "(", "'RDAT'", ")", "elif", "mode", "==", "'state'", ":", "self", ".", "_write", "(", "'RSET'", ")", "else", ":", "raise", ...
Recalls from the file specified by :attr:`~SR850.filename`. :param mode: Specifies the recall mode. ======= ================================================== Value Description ======= ================================================== 'all' Recalls the active display's data trace, the trace definition and the instrument state. 'state' Recalls the instrument state. ======= ==================================================
[ "Recalls", "from", "the", "file", "specified", "by", ":", "attr", ":", "~SR850", ".", "filename", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L754-L773
p3trus/slave
slave/srs/sr850.py
SR850.fit
def fit(self, range, function=None): """Fits a function to the active display's data trace within a specified range of the time window. E.g.:: # Fit's a gaussian to the first 30% of the time window. lockin.fit(range=(0, 30), function='gauss') :param start: The left limit of the time window in percent. :param stop: The right limit of the time window in percent. :param function: The function used to fit the data, either 'line', 'exp', 'gauss' or None, the default. The configured fit function is left unchanged if function is None. .. note:: Fitting takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the fitting is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the fit command is executed. """ if function is not None: self.fit_function = function cmd = 'FITT', Integer(min=0, max=100), Integer(min=0, max=100) self._write(cmd, start, stop)
python
def fit(self, range, function=None): """Fits a function to the active display's data trace within a specified range of the time window. E.g.:: # Fit's a gaussian to the first 30% of the time window. lockin.fit(range=(0, 30), function='gauss') :param start: The left limit of the time window in percent. :param stop: The right limit of the time window in percent. :param function: The function used to fit the data, either 'line', 'exp', 'gauss' or None, the default. The configured fit function is left unchanged if function is None. .. note:: Fitting takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the fitting is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the fit command is executed. """ if function is not None: self.fit_function = function cmd = 'FITT', Integer(min=0, max=100), Integer(min=0, max=100) self._write(cmd, start, stop)
[ "def", "fit", "(", "self", ",", "range", ",", "function", "=", "None", ")", ":", "if", "function", "is", "not", "None", ":", "self", ".", "fit_function", "=", "function", "cmd", "=", "'FITT'", ",", "Integer", "(", "min", "=", "0", ",", "max", "=", ...
Fits a function to the active display's data trace within a specified range of the time window. E.g.:: # Fit's a gaussian to the first 30% of the time window. lockin.fit(range=(0, 30), function='gauss') :param start: The left limit of the time window in percent. :param stop: The right limit of the time window in percent. :param function: The function used to fit the data, either 'line', 'exp', 'gauss' or None, the default. The configured fit function is left unchanged if function is None. .. note:: Fitting takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the fitting is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the fit command is executed.
[ "Fits", "a", "function", "to", "the", "active", "display", "s", "data", "trace", "within", "a", "specified", "range", "of", "the", "time", "window", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L796-L826
p3trus/slave
slave/srs/sr850.py
SR850.calculate_statistics
def calculate_statistics(self, start, stop): """Starts the statistics calculation. :param start: The left limit of the time window in percent. :param stop: The right limit of the time window in percent. .. note:: The calculation takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the operation is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the command is executed. """ cmd = 'STAT', Integer, Integer self._write(cmd, start, stop)
python
def calculate_statistics(self, start, stop): """Starts the statistics calculation. :param start: The left limit of the time window in percent. :param stop: The right limit of the time window in percent. .. note:: The calculation takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the operation is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the command is executed. """ cmd = 'STAT', Integer, Integer self._write(cmd, start, stop)
[ "def", "calculate_statistics", "(", "self", ",", "start", ",", "stop", ")", ":", "cmd", "=", "'STAT'", ",", "Integer", ",", "Integer", "self", ".", "_write", "(", "cmd", ",", "start", ",", "stop", ")" ]
Starts the statistics calculation. :param start: The left limit of the time window in percent. :param stop: The right limit of the time window in percent. .. note:: The calculation takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the operation is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the command is executed.
[ "Starts", "the", "statistics", "calculation", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L828-L847
p3trus/slave
slave/srs/sr850.py
SR850.calculate
def calculate(self, operation=None, trace=None, constant=None, type=None): """Starts the calculation. The calculation operates on the trace graphed in the active display. The math operation is defined by the :attr:`~.SR850.math_operation`, the second argument by the :attr:`~.SR850.math_argument_type`. For convenience, the operation and the second argument, can be specified via the parameters :param operation: Set's the math operation if not `None`. See :attr:`~.SR850.math_operation` for details. :param trace: If the trace argument is used, it sets the :attr:`~.math_trace_argument` to it and sets the :attr:`~.math_argument_type` to 'trace' :param constant: If constant is not `None`, the :attr:`~.math_constant`is set with this value and the :attr:`~.math_argument_type` is set to 'constant' :param type: If type is not `None`, the :attr:`~.math_argument_type` is set to this value. E.g. instead of:: lockin.math_operation = '*' lockin.math_argument_type = 'constant' lockin.math_constant = 1.337 lockin.calculate() one can write:: lockin.calculate(operation='*', constant=1.337) .. note:: Do not use trace, constant and type together. .. note:: The calculation takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the operation is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the command is executed. """ if operation is not None: self.math_operation = operation if trace is not None: self.math_trace_argument = trace type = 'trace' elif constant is not None: self.math_constant = constant type = 'constant' if type is not None: self.math_argument_type = type self._write('CALC')
python
def calculate(self, operation=None, trace=None, constant=None, type=None): """Starts the calculation. The calculation operates on the trace graphed in the active display. The math operation is defined by the :attr:`~.SR850.math_operation`, the second argument by the :attr:`~.SR850.math_argument_type`. For convenience, the operation and the second argument, can be specified via the parameters :param operation: Set's the math operation if not `None`. See :attr:`~.SR850.math_operation` for details. :param trace: If the trace argument is used, it sets the :attr:`~.math_trace_argument` to it and sets the :attr:`~.math_argument_type` to 'trace' :param constant: If constant is not `None`, the :attr:`~.math_constant`is set with this value and the :attr:`~.math_argument_type` is set to 'constant' :param type: If type is not `None`, the :attr:`~.math_argument_type` is set to this value. E.g. instead of:: lockin.math_operation = '*' lockin.math_argument_type = 'constant' lockin.math_constant = 1.337 lockin.calculate() one can write:: lockin.calculate(operation='*', constant=1.337) .. note:: Do not use trace, constant and type together. .. note:: The calculation takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the operation is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the command is executed. """ if operation is not None: self.math_operation = operation if trace is not None: self.math_trace_argument = trace type = 'trace' elif constant is not None: self.math_constant = constant type = 'constant' if type is not None: self.math_argument_type = type self._write('CALC')
[ "def", "calculate", "(", "self", ",", "operation", "=", "None", ",", "trace", "=", "None", ",", "constant", "=", "None", ",", "type", "=", "None", ")", ":", "if", "operation", "is", "not", "None", ":", "self", ".", "math_operation", "=", "operation", ...
Starts the calculation. The calculation operates on the trace graphed in the active display. The math operation is defined by the :attr:`~.SR850.math_operation`, the second argument by the :attr:`~.SR850.math_argument_type`. For convenience, the operation and the second argument, can be specified via the parameters :param operation: Set's the math operation if not `None`. See :attr:`~.SR850.math_operation` for details. :param trace: If the trace argument is used, it sets the :attr:`~.math_trace_argument` to it and sets the :attr:`~.math_argument_type` to 'trace' :param constant: If constant is not `None`, the :attr:`~.math_constant`is set with this value and the :attr:`~.math_argument_type` is set to 'constant' :param type: If type is not `None`, the :attr:`~.math_argument_type` is set to this value. E.g. instead of:: lockin.math_operation = '*' lockin.math_argument_type = 'constant' lockin.math_constant = 1.337 lockin.calculate() one can write:: lockin.calculate(operation='*', constant=1.337) .. note:: Do not use trace, constant and type together. .. note:: The calculation takes some time. Check the status byte to see when the operation is done. A running scan will be paused until the operation is complete. .. warning:: The SR850 will generate an error if the active display trace is not stored when the command is executed.
[ "Starts", "the", "calculation", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L849-L905
p3trus/slave
slave/srs/sr850.py
Mark.bin
def bin(self): """The bin index of this mark. :returns: An integer bin index or None if the mark is inactive. """ bin = self._query(('MBIN?', Integer, Integer), self.idx) return None if bin == -1 else bin
python
def bin(self): """The bin index of this mark. :returns: An integer bin index or None if the mark is inactive. """ bin = self._query(('MBIN?', Integer, Integer), self.idx) return None if bin == -1 else bin
[ "def", "bin", "(", "self", ")", ":", "bin", "=", "self", ".", "_query", "(", "(", "'MBIN?'", ",", "Integer", ",", "Integer", ")", ",", "self", ".", "idx", ")", "return", "None", "if", "bin", "==", "-", "1", "else", "bin" ]
The bin index of this mark. :returns: An integer bin index or None if the mark is inactive.
[ "The", "bin", "index", "of", "this", "mark", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L1180-L1187
p3trus/slave
slave/srs/sr850.py
MarkList.active
def active(self): """The indices of the active marks.""" # TODO avoid direct usage of transport object. marks = tuple(int(x) for x in transport.ask('MACT').split(',')) return marks[1:]
python
def active(self): """The indices of the active marks.""" # TODO avoid direct usage of transport object. marks = tuple(int(x) for x in transport.ask('MACT').split(',')) return marks[1:]
[ "def", "active", "(", "self", ")", ":", "# TODO avoid direct usage of transport object.", "marks", "=", "tuple", "(", "int", "(", "x", ")", "for", "x", "in", "transport", ".", "ask", "(", "'MACT'", ")", ".", "split", "(", "','", ")", ")", "return", "mark...
The indices of the active marks.
[ "The", "indices", "of", "the", "active", "marks", "." ]
train
https://github.com/p3trus/slave/blob/bdc74e73bd0f47b74a090c43aa2283c469cde3be/slave/srs/sr850.py#L1220-L1224
datacratic/pymldb
pymldb/__init__.py
Connection.query
def query(self, sql, **kwargs): """ Shortcut for GET /v1/query, except with argument format='dataframe' (the default), in which case it will simply wrap the result of the GET query to /v1/query (with format='table') in a `pandas.DataFrame`. """ if 'format' not in kwargs or kwargs['format'] == 'dataframe': resp = self.get('/v1/query', data={'q': sql, 'format': 'table'}).json() if len(resp) == 0: return pd.DataFrame() else: return pd.DataFrame.from_records(resp[1:], columns=resp[0], index="_rowName") kwargs['q'] = sql return self.get('/v1/query', **kwargs).json()
python
def query(self, sql, **kwargs): """ Shortcut for GET /v1/query, except with argument format='dataframe' (the default), in which case it will simply wrap the result of the GET query to /v1/query (with format='table') in a `pandas.DataFrame`. """ if 'format' not in kwargs or kwargs['format'] == 'dataframe': resp = self.get('/v1/query', data={'q': sql, 'format': 'table'}).json() if len(resp) == 0: return pd.DataFrame() else: return pd.DataFrame.from_records(resp[1:], columns=resp[0], index="_rowName") kwargs['q'] = sql return self.get('/v1/query', **kwargs).json()
[ "def", "query", "(", "self", ",", "sql", ",", "*", "*", "kwargs", ")", ":", "if", "'format'", "not", "in", "kwargs", "or", "kwargs", "[", "'format'", "]", "==", "'dataframe'", ":", "resp", "=", "self", ".", "get", "(", "'/v1/query'", ",", "data", "...
Shortcut for GET /v1/query, except with argument format='dataframe' (the default), in which case it will simply wrap the result of the GET query to /v1/query (with format='table') in a `pandas.DataFrame`.
[ "Shortcut", "for", "GET", "/", "v1", "/", "query", "except", "with", "argument", "format", "=", "dataframe", "(", "the", "default", ")", "in", "which", "case", "it", "will", "simply", "wrap", "the", "result", "of", "the", "GET", "query", "to", "/", "v1...
train
https://github.com/datacratic/pymldb/blob/e41f3c37138e9fd4a82ef3db685899cdafa4125e/pymldb/__init__.py#L76-L90
datacratic/pymldb
pymldb/__init__.py
Connection.put_and_track
def put_and_track(self, url, payload, refresh_rate_sec=1): """ Put and track progress, displaying progress bars. May display the wrong progress if 2 things post/put on the same procedure name at the same time. """ if not url.startswith('/v1/procedures'): raise Exception("The only supported route is /v1/procedures") parts = url.split('/') len_parts = len(parts) if len_parts not in [4, 6]: raise Exception( "You must either PUT a procedure or a procedure run") proc_id = parts[3] run_id = None if len_parts == 4: if 'params' not in payload: payload['params'] = {} payload['params']['runOnCreation'] = True elif len_parts == 6: run_id = parts[-1] pm = ProgressMonitor(self, refresh_rate_sec, proc_id, run_id, self.notebook) t = threading.Thread(target=pm.monitor_progress) t.start() try: return self.put(url, payload) except Exception as e: print(e) finally: pass pm.event.set() t.join()
python
def put_and_track(self, url, payload, refresh_rate_sec=1): """ Put and track progress, displaying progress bars. May display the wrong progress if 2 things post/put on the same procedure name at the same time. """ if not url.startswith('/v1/procedures'): raise Exception("The only supported route is /v1/procedures") parts = url.split('/') len_parts = len(parts) if len_parts not in [4, 6]: raise Exception( "You must either PUT a procedure or a procedure run") proc_id = parts[3] run_id = None if len_parts == 4: if 'params' not in payload: payload['params'] = {} payload['params']['runOnCreation'] = True elif len_parts == 6: run_id = parts[-1] pm = ProgressMonitor(self, refresh_rate_sec, proc_id, run_id, self.notebook) t = threading.Thread(target=pm.monitor_progress) t.start() try: return self.put(url, payload) except Exception as e: print(e) finally: pass pm.event.set() t.join()
[ "def", "put_and_track", "(", "self", ",", "url", ",", "payload", ",", "refresh_rate_sec", "=", "1", ")", ":", "if", "not", "url", ".", "startswith", "(", "'/v1/procedures'", ")", ":", "raise", "Exception", "(", "\"The only supported route is /v1/procedures\"", "...
Put and track progress, displaying progress bars. May display the wrong progress if 2 things post/put on the same procedure name at the same time.
[ "Put", "and", "track", "progress", "displaying", "progress", "bars", "." ]
train
https://github.com/datacratic/pymldb/blob/e41f3c37138e9fd4a82ef3db685899cdafa4125e/pymldb/__init__.py#L92-L129
datacratic/pymldb
pymldb/__init__.py
Connection.post_and_track
def post_and_track(self, url, payload, refresh_rate_sec=1): """ Post and track progress, displaying progress bars. May display the wrong progress if 2 things post/put on the same procedure name at the same time. """ if not url.startswith('/v1/procedures'): raise Exception("The only supported route is /v1/procedures") if url.endswith('/runs'): raise Exception( "Posting and tracking run is unsupported at the moment") if len(url.split('/')) != 3: raise Exception("You must POST a procedure") if 'params' not in payload: payload['params'] = {} payload['params']['runOnCreation'] = False res = self.post('/v1/procedures', payload).json() proc_id = res['id'] pm = ProgressMonitor(self, refresh_rate_sec, proc_id, notebook=self.notebook) t = threading.Thread(target=pm.monitor_progress) t.start() try: return self.post('/v1/procedures/{}/runs'.format(proc_id), {}) except Exception as e: print(e) finally: pm.event.set() t.join()
python
def post_and_track(self, url, payload, refresh_rate_sec=1): """ Post and track progress, displaying progress bars. May display the wrong progress if 2 things post/put on the same procedure name at the same time. """ if not url.startswith('/v1/procedures'): raise Exception("The only supported route is /v1/procedures") if url.endswith('/runs'): raise Exception( "Posting and tracking run is unsupported at the moment") if len(url.split('/')) != 3: raise Exception("You must POST a procedure") if 'params' not in payload: payload['params'] = {} payload['params']['runOnCreation'] = False res = self.post('/v1/procedures', payload).json() proc_id = res['id'] pm = ProgressMonitor(self, refresh_rate_sec, proc_id, notebook=self.notebook) t = threading.Thread(target=pm.monitor_progress) t.start() try: return self.post('/v1/procedures/{}/runs'.format(proc_id), {}) except Exception as e: print(e) finally: pm.event.set() t.join()
[ "def", "post_and_track", "(", "self", ",", "url", ",", "payload", ",", "refresh_rate_sec", "=", "1", ")", ":", "if", "not", "url", ".", "startswith", "(", "'/v1/procedures'", ")", ":", "raise", "Exception", "(", "\"The only supported route is /v1/procedures\"", ...
Post and track progress, displaying progress bars. May display the wrong progress if 2 things post/put on the same procedure name at the same time.
[ "Post", "and", "track", "progress", "displaying", "progress", "bars", "." ]
train
https://github.com/datacratic/pymldb/blob/e41f3c37138e9fd4a82ef3db685899cdafa4125e/pymldb/__init__.py#L131-L165
twisted/mantissa
xmantissa/port.py
StringEndpointPort._makeService
def _makeService(self): """ Construct a service for the endpoint as described. """ if self._endpointService is None: _service = service else: _service = self._endpointService return _service( self.description.encode('ascii'), self.factory.getFactory())
python
def _makeService(self): """ Construct a service for the endpoint as described. """ if self._endpointService is None: _service = service else: _service = self._endpointService return _service( self.description.encode('ascii'), self.factory.getFactory())
[ "def", "_makeService", "(", "self", ")", ":", "if", "self", ".", "_endpointService", "is", "None", ":", "_service", "=", "service", "else", ":", "_service", "=", "self", ".", "_endpointService", "return", "_service", "(", "self", ".", "description", ".", "...
Construct a service for the endpoint as described.
[ "Construct", "a", "service", "for", "the", "endpoint", "as", "described", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/port.py#L313-L322
twisted/mantissa
xmantissa/port.py
ListOptions.postOptions
def postOptions(self): """ Display details about the ports which already exist. """ store = self.parent.parent.getStore() port = None factories = {} for portType in [TCPPort, SSLPort, StringEndpointPort]: for port in store.query(portType): key = port.factory.storeID if key not in factories: factories[key] = (port.factory, []) factories[key][1].append(port) for factory in store.powerupsFor(IProtocolFactoryFactory): key = factory.storeID if key not in factories: factories[key] = (factory, []) def key((factory, ports)): return factory.storeID for factory, ports in sorted(factories.values(), key=key): if ports: print '%d) %r listening on:' % (factory.storeID, factory) for port in ports: if getattr(port, 'interface', None): interface = "interface " + port.interface else: interface = "any interface" if isinstance(port, TCPPort): print ' %d) TCP, %s, port %d' % ( port.storeID, interface, port.portNumber) elif isinstance(port, SSLPort): if port.certificatePath is not None: pathPart = 'certificate %s' % ( port.certificatePath.path,) else: pathPart = 'NO CERTIFICATE' if port.portNumber is not None: portPart = 'port %d' % (port.portNumber,) else: portPart = 'NO PORT' print ' %d) SSL, %s, %s, %s' % ( port.storeID, interface, portPart, pathPart) elif isinstance(port, StringEndpointPort): print ' {:d}) Endpoint {!r}'.format( port.storeID, port.description) else: print '%d) %r is not listening.' % (factory.storeID, factory) if not factories: print "There are no ports configured." raise SystemExit(0)
python
def postOptions(self): """ Display details about the ports which already exist. """ store = self.parent.parent.getStore() port = None factories = {} for portType in [TCPPort, SSLPort, StringEndpointPort]: for port in store.query(portType): key = port.factory.storeID if key not in factories: factories[key] = (port.factory, []) factories[key][1].append(port) for factory in store.powerupsFor(IProtocolFactoryFactory): key = factory.storeID if key not in factories: factories[key] = (factory, []) def key((factory, ports)): return factory.storeID for factory, ports in sorted(factories.values(), key=key): if ports: print '%d) %r listening on:' % (factory.storeID, factory) for port in ports: if getattr(port, 'interface', None): interface = "interface " + port.interface else: interface = "any interface" if isinstance(port, TCPPort): print ' %d) TCP, %s, port %d' % ( port.storeID, interface, port.portNumber) elif isinstance(port, SSLPort): if port.certificatePath is not None: pathPart = 'certificate %s' % ( port.certificatePath.path,) else: pathPart = 'NO CERTIFICATE' if port.portNumber is not None: portPart = 'port %d' % (port.portNumber,) else: portPart = 'NO PORT' print ' %d) SSL, %s, %s, %s' % ( port.storeID, interface, portPart, pathPart) elif isinstance(port, StringEndpointPort): print ' {:d}) Endpoint {!r}'.format( port.storeID, port.description) else: print '%d) %r is not listening.' % (factory.storeID, factory) if not factories: print "There are no ports configured." raise SystemExit(0)
[ "def", "postOptions", "(", "self", ")", ":", "store", "=", "self", ".", "parent", ".", "parent", ".", "getStore", "(", ")", "port", "=", "None", "factories", "=", "{", "}", "for", "portType", "in", "[", "TCPPort", ",", "SSLPort", ",", "StringEndpointPo...
Display details about the ports which already exist.
[ "Display", "details", "about", "the", "ports", "which", "already", "exist", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/port.py#L349-L398
twisted/mantissa
xmantissa/port.py
DeleteOptions._delete
def _delete(self, store, portIDs): """ Try to delete the ports with the given store IDs. @param store: The Axiom store from which to delete items. @param portIDs: A list of Axiom store IDs for TCPPort or SSLPort items. @raise L{SystemExit}: If one of the store IDs does not identify a port item. """ for portID in portIDs: try: port = store.getItemByID(portID) except KeyError: print "%d does not identify an item." % (portID,) raise SystemExit(1) if isinstance(port, (TCPPort, SSLPort, StringEndpointPort)): port.deleteFromStore() else: print "%d does not identify a port." % (portID,) raise SystemExit(1)
python
def _delete(self, store, portIDs): """ Try to delete the ports with the given store IDs. @param store: The Axiom store from which to delete items. @param portIDs: A list of Axiom store IDs for TCPPort or SSLPort items. @raise L{SystemExit}: If one of the store IDs does not identify a port item. """ for portID in portIDs: try: port = store.getItemByID(portID) except KeyError: print "%d does not identify an item." % (portID,) raise SystemExit(1) if isinstance(port, (TCPPort, SSLPort, StringEndpointPort)): port.deleteFromStore() else: print "%d does not identify a port." % (portID,) raise SystemExit(1)
[ "def", "_delete", "(", "self", ",", "store", ",", "portIDs", ")", ":", "for", "portID", "in", "portIDs", ":", "try", ":", "port", "=", "store", ".", "getItemByID", "(", "portID", ")", "except", "KeyError", ":", "print", "\"%d does not identify an item.\"", ...
Try to delete the ports with the given store IDs. @param store: The Axiom store from which to delete items. @param portIDs: A list of Axiom store IDs for TCPPort or SSLPort items. @raise L{SystemExit}: If one of the store IDs does not identify a port item.
[ "Try", "to", "delete", "the", "ports", "with", "the", "given", "store", "IDs", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/port.py#L428-L449
twisted/mantissa
xmantissa/port.py
DeleteOptions.postOptions
def postOptions(self): """ Delete the ports specified with the port-identifier option. """ if self.portIdentifiers: store = self.parent.parent.getStore() store.transact(self._delete, store, self.portIdentifiers) print "Deleted." raise SystemExit(0) else: self.opt_help()
python
def postOptions(self): """ Delete the ports specified with the port-identifier option. """ if self.portIdentifiers: store = self.parent.parent.getStore() store.transact(self._delete, store, self.portIdentifiers) print "Deleted." raise SystemExit(0) else: self.opt_help()
[ "def", "postOptions", "(", "self", ")", ":", "if", "self", ".", "portIdentifiers", ":", "store", "=", "self", ".", "parent", ".", "parent", ".", "getStore", "(", ")", "store", ".", "transact", "(", "self", ".", "_delete", ",", "store", ",", "self", "...
Delete the ports specified with the port-identifier option.
[ "Delete", "the", "ports", "specified", "with", "the", "port", "-", "identifier", "option", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/port.py#L452-L462
richardliaw/track
track/convenience.py
absl_flags
def absl_flags(): """ Extracts absl-py flags that the user has specified and outputs their key-value mapping. By default, extracts only those flags in the current __package__ and mainfile. Useful to put into a trial's param_map. """ # TODO: need same thing for argparse flags_dict = flags.FLAGS.flags_by_module_dict() # only include parameters from modules the user probably cares about def _relevant_module(module_name): if __package__ and __package__ in module_name: return True if module_name == sys.argv[0]: return True return False return { flag.name: flag.value for module, flags in flags_dict.items() for flag in flags if _relevant_module(module)}
python
def absl_flags(): """ Extracts absl-py flags that the user has specified and outputs their key-value mapping. By default, extracts only those flags in the current __package__ and mainfile. Useful to put into a trial's param_map. """ # TODO: need same thing for argparse flags_dict = flags.FLAGS.flags_by_module_dict() # only include parameters from modules the user probably cares about def _relevant_module(module_name): if __package__ and __package__ in module_name: return True if module_name == sys.argv[0]: return True return False return { flag.name: flag.value for module, flags in flags_dict.items() for flag in flags if _relevant_module(module)}
[ "def", "absl_flags", "(", ")", ":", "# TODO: need same thing for argparse", "flags_dict", "=", "flags", ".", "FLAGS", ".", "flags_by_module_dict", "(", ")", "# only include parameters from modules the user probably cares about", "def", "_relevant_module", "(", "module_name", ...
Extracts absl-py flags that the user has specified and outputs their key-value mapping. By default, extracts only those flags in the current __package__ and mainfile. Useful to put into a trial's param_map.
[ "Extracts", "absl", "-", "py", "flags", "that", "the", "user", "has", "specified", "and", "outputs", "their", "key", "-", "value", "mapping", "." ]
train
https://github.com/richardliaw/track/blob/7ac42ea34e5c1d7bb92fd813e938835a06a63fc7/track/convenience.py#L10-L31
72squared/redpipe
redpipe/pipelines.py
pipeline
def pipeline(pipe=None, name=None, autoexec=False, exit_handler=None): """ This is the foundational function for all of redpipe. Everything goes through here. create pipelines, nest pipelines, get pipelines for a specific name. It all happens here. Here's a simple example: .. code:: python with pipeline() as pipe: pipe.set('foo', 'bar') foo = pipe.get('foo') pipe.execute() print(foo) > bar Now let's look at how we can nest a pipeline. .. code:: python def process(key, pipe=None): with pipeline(pipe, autoexec=True) as pipe: return pipe.incr(key) with pipeline() as pipe: key1 = process('key1', pipe) key2 = process('key2', pipe) pipe.execute() print([key1, key2]) > [1, 1] :param pipe: a Pipeline() or NestedPipeline() object, or None :param name: str, optional. the name of the connection to use. :param autoexec: bool, if true, implicitly execute the pipe :return: Pipeline or NestedPipeline """ if pipe is None: return Pipeline(name=name, autoexec=autoexec, exit_handler=exit_handler) try: if pipe.supports_redpipe_pipeline(): return NestedPipeline( parent=pipe, name=name, autoexec=autoexec, exit_handler=exit_handler ) except AttributeError: pass raise InvalidPipeline('check your configuration')
python
def pipeline(pipe=None, name=None, autoexec=False, exit_handler=None): """ This is the foundational function for all of redpipe. Everything goes through here. create pipelines, nest pipelines, get pipelines for a specific name. It all happens here. Here's a simple example: .. code:: python with pipeline() as pipe: pipe.set('foo', 'bar') foo = pipe.get('foo') pipe.execute() print(foo) > bar Now let's look at how we can nest a pipeline. .. code:: python def process(key, pipe=None): with pipeline(pipe, autoexec=True) as pipe: return pipe.incr(key) with pipeline() as pipe: key1 = process('key1', pipe) key2 = process('key2', pipe) pipe.execute() print([key1, key2]) > [1, 1] :param pipe: a Pipeline() or NestedPipeline() object, or None :param name: str, optional. the name of the connection to use. :param autoexec: bool, if true, implicitly execute the pipe :return: Pipeline or NestedPipeline """ if pipe is None: return Pipeline(name=name, autoexec=autoexec, exit_handler=exit_handler) try: if pipe.supports_redpipe_pipeline(): return NestedPipeline( parent=pipe, name=name, autoexec=autoexec, exit_handler=exit_handler ) except AttributeError: pass raise InvalidPipeline('check your configuration')
[ "def", "pipeline", "(", "pipe", "=", "None", ",", "name", "=", "None", ",", "autoexec", "=", "False", ",", "exit_handler", "=", "None", ")", ":", "if", "pipe", "is", "None", ":", "return", "Pipeline", "(", "name", "=", "name", ",", "autoexec", "=", ...
This is the foundational function for all of redpipe. Everything goes through here. create pipelines, nest pipelines, get pipelines for a specific name. It all happens here. Here's a simple example: .. code:: python with pipeline() as pipe: pipe.set('foo', 'bar') foo = pipe.get('foo') pipe.execute() print(foo) > bar Now let's look at how we can nest a pipeline. .. code:: python def process(key, pipe=None): with pipeline(pipe, autoexec=True) as pipe: return pipe.incr(key) with pipeline() as pipe: key1 = process('key1', pipe) key2 = process('key2', pipe) pipe.execute() print([key1, key2]) > [1, 1] :param pipe: a Pipeline() or NestedPipeline() object, or None :param name: str, optional. the name of the connection to use. :param autoexec: bool, if true, implicitly execute the pipe :return: Pipeline or NestedPipeline
[ "This", "is", "the", "foundational", "function", "for", "all", "of", "redpipe", ".", "Everything", "goes", "through", "here", ".", "create", "pipelines", "nest", "pipelines", "get", "pipelines", "for", "a", "specific", "name", ".", "It", "all", "happens", "h...
train
https://github.com/72squared/redpipe/blob/e6ee518bc9f3e2fee323c8c53d08997799bd9b1b/redpipe/pipelines.py#L392-L448