repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
shmir/PyTrafficGenerator | trafficgenerator/tgn_tcl.py | TgnTclWrapper.eval | def eval(self, command):
""" Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output.
"""
if self.logger.handlers:
self.logger.debug(command.decode('utf-8'))
if self.tcl_script:
self.tcl_script.info(command)
self.rc = self.tcl_interp.eval(command)
if self.logger.handlers:
self.logger.debug('\t' + self.rc.decode('utf-8'))
return self.rc | python | def eval(self, command):
""" Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output.
"""
if self.logger.handlers:
self.logger.debug(command.decode('utf-8'))
if self.tcl_script:
self.tcl_script.info(command)
self.rc = self.tcl_interp.eval(command)
if self.logger.handlers:
self.logger.debug('\t' + self.rc.decode('utf-8'))
return self.rc | [
"def",
"eval",
"(",
"self",
",",
"command",
")",
":",
"if",
"self",
".",
"logger",
".",
"handlers",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"command",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"if",
"self",
".",
"tcl_script",
":",
"self",
"."... | Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output. | [
"Execute",
"Tcl",
"command",
"."
] | 382e5d549c83404af2a6571fe19c9e71df8bac14 | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_tcl.py#L208-L226 | train | 52,100 |
Nic30/pyDigitalWaveTools | pyDigitalWaveTools/vcd/parser.py | VcdParser.value_change | def value_change(self, vcdId, value):
'''append change from VCD file signal data series'''
self.idcode2series[vcdId].append((self.now, value)) | python | def value_change(self, vcdId, value):
'''append change from VCD file signal data series'''
self.idcode2series[vcdId].append((self.now, value)) | [
"def",
"value_change",
"(",
"self",
",",
"vcdId",
",",
"value",
")",
":",
"self",
".",
"idcode2series",
"[",
"vcdId",
"]",
".",
"append",
"(",
"(",
"self",
".",
"now",
",",
"value",
")",
")"
] | append change from VCD file signal data series | [
"append",
"change",
"from",
"VCD",
"file",
"signal",
"data",
"series"
] | 95b96fa5f52ffd7ca916db51a4f22ee1bd9e46fc | https://github.com/Nic30/pyDigitalWaveTools/blob/95b96fa5f52ffd7ca916db51a4f22ee1bd9e46fc/pyDigitalWaveTools/vcd/parser.py#L82-L84 | train | 52,101 |
Nic30/pyDigitalWaveTools | pyDigitalWaveTools/vcd/parser.py | VcdParser.parse | def parse(self, file_handle):
'''
Tokenize and parse the VCD file
:ivar file_handle: opened file with vcd string
'''
# open the VCD file and create a token generator
lineIterator = iter(enumerate(file_handle))
tokeniser = ((lineNo, word) for lineNo, line in lineIterator
for word in line.split() if word)
while True:
token = next(tokeniser)
# parse VCD until the end of definitions
self.keyword_dispatch[token[1]](tokeniser, token[1])
if self.end_of_definitions:
break
while True:
try:
lineNo, token = next(lineIterator)
except StopIteration:
break
# parse changes
c = token[0]
if c == '$':
# skip $dump* tokens and $end tokens in sim section
continue
elif c == '#':
# [TODO] may be a float
self.now = int(token[1:])
else:
sp = token.split()
sp_len = len(sp)
if sp_len == 1:
# 1 bit value
value = c
vcdId = token[1:]
elif sp_len == 2:
# vectors and strings
value, vcdId = sp
else:
raise VcdSyntaxError(
"Line %d: Don't understand: %s " % (lineNo, token))
self.value_change(vcdId.strip(), value.strip()) | python | def parse(self, file_handle):
'''
Tokenize and parse the VCD file
:ivar file_handle: opened file with vcd string
'''
# open the VCD file and create a token generator
lineIterator = iter(enumerate(file_handle))
tokeniser = ((lineNo, word) for lineNo, line in lineIterator
for word in line.split() if word)
while True:
token = next(tokeniser)
# parse VCD until the end of definitions
self.keyword_dispatch[token[1]](tokeniser, token[1])
if self.end_of_definitions:
break
while True:
try:
lineNo, token = next(lineIterator)
except StopIteration:
break
# parse changes
c = token[0]
if c == '$':
# skip $dump* tokens and $end tokens in sim section
continue
elif c == '#':
# [TODO] may be a float
self.now = int(token[1:])
else:
sp = token.split()
sp_len = len(sp)
if sp_len == 1:
# 1 bit value
value = c
vcdId = token[1:]
elif sp_len == 2:
# vectors and strings
value, vcdId = sp
else:
raise VcdSyntaxError(
"Line %d: Don't understand: %s " % (lineNo, token))
self.value_change(vcdId.strip(), value.strip()) | [
"def",
"parse",
"(",
"self",
",",
"file_handle",
")",
":",
"# open the VCD file and create a token generator",
"lineIterator",
"=",
"iter",
"(",
"enumerate",
"(",
"file_handle",
")",
")",
"tokeniser",
"=",
"(",
"(",
"lineNo",
",",
"word",
")",
"for",
"lineNo",
... | Tokenize and parse the VCD file
:ivar file_handle: opened file with vcd string | [
"Tokenize",
"and",
"parse",
"the",
"VCD",
"file"
] | 95b96fa5f52ffd7ca916db51a4f22ee1bd9e46fc | https://github.com/Nic30/pyDigitalWaveTools/blob/95b96fa5f52ffd7ca916db51a4f22ee1bd9e46fc/pyDigitalWaveTools/vcd/parser.py#L86-L132 | train | 52,102 |
artefactual-labs/agentarchives | agentarchives/archivists_toolkit/client.py | ArchivistsToolkitClient.edit_record | def edit_record(self, new_record):
"""
Update a record in Archivist's Toolkit using the provided new_record.
The format of new_record is identical to the format returned by get_resource_component_and_children and related methods.
This means it's possible, for example, to request a record, modify the returned dict, and pass that dict to this method to update the server.
Currently supported fields are:
* title
* targetfield
:raises ValueError: if the 'id' field isn't specified, or no fields to edit were specified.
"""
try:
record_id = new_record["id"]
except KeyError:
raise ValueError("No record ID provided!")
record_type = self.resource_type(record_id)
if record_type is None:
raise ArchivistsToolkitError(
"Could not determine type for record with ID {}; not in database?".format(
record_id
)
)
clause = []
values = []
if "title" in new_record:
clause.append("title=%s")
values.append(new_record["title"])
if "levelOfDescription" in new_record:
clause.append("resourceLevel=%s")
values.append(new_record["levelOfDescription"])
# nothing to update
if not clause:
raise ValueError("No fields to update specified!")
clause = ", ".join(clause)
if record_type == ArchivistsToolkitClient.RESOURCE:
db_type = "Resources"
db_id_field = "resourceId"
else:
db_type = "ResourcesComponents"
db_id_field = "resourceComponentId"
sql = "UPDATE {} SET {} WHERE {}=%s".format(db_type, clause, db_id_field)
cursor = self.db.cursor()
cursor.execute(sql, tuple(values)) | python | def edit_record(self, new_record):
"""
Update a record in Archivist's Toolkit using the provided new_record.
The format of new_record is identical to the format returned by get_resource_component_and_children and related methods.
This means it's possible, for example, to request a record, modify the returned dict, and pass that dict to this method to update the server.
Currently supported fields are:
* title
* targetfield
:raises ValueError: if the 'id' field isn't specified, or no fields to edit were specified.
"""
try:
record_id = new_record["id"]
except KeyError:
raise ValueError("No record ID provided!")
record_type = self.resource_type(record_id)
if record_type is None:
raise ArchivistsToolkitError(
"Could not determine type for record with ID {}; not in database?".format(
record_id
)
)
clause = []
values = []
if "title" in new_record:
clause.append("title=%s")
values.append(new_record["title"])
if "levelOfDescription" in new_record:
clause.append("resourceLevel=%s")
values.append(new_record["levelOfDescription"])
# nothing to update
if not clause:
raise ValueError("No fields to update specified!")
clause = ", ".join(clause)
if record_type == ArchivistsToolkitClient.RESOURCE:
db_type = "Resources"
db_id_field = "resourceId"
else:
db_type = "ResourcesComponents"
db_id_field = "resourceComponentId"
sql = "UPDATE {} SET {} WHERE {}=%s".format(db_type, clause, db_id_field)
cursor = self.db.cursor()
cursor.execute(sql, tuple(values)) | [
"def",
"edit_record",
"(",
"self",
",",
"new_record",
")",
":",
"try",
":",
"record_id",
"=",
"new_record",
"[",
"\"id\"",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"\"No record ID provided!\"",
")",
"record_type",
"=",
"self",
".",
"resource... | Update a record in Archivist's Toolkit using the provided new_record.
The format of new_record is identical to the format returned by get_resource_component_and_children and related methods.
This means it's possible, for example, to request a record, modify the returned dict, and pass that dict to this method to update the server.
Currently supported fields are:
* title
* targetfield
:raises ValueError: if the 'id' field isn't specified, or no fields to edit were specified. | [
"Update",
"a",
"record",
"in",
"Archivist",
"s",
"Toolkit",
"using",
"the",
"provided",
"new_record",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivists_toolkit/client.py#L47-L95 | train | 52,103 |
artefactual-labs/agentarchives | agentarchives/archivists_toolkit/client.py | ArchivistsToolkitClient.get_levels_of_description | def get_levels_of_description(self):
"""
Returns an array of all levels of description defined in this Archivist's Toolkit instance.
"""
if not hasattr(self, "levels_of_description"):
cursor = self.db.cursor()
levels = set()
cursor.execute("SELECT distinct(resourceLevel) FROM Resources")
for row in cursor:
levels.add(row)
cursor.execute("SELECT distinct(resourceLevel) FROM ResourcesComponents")
for row in cursor:
levels.add(row)
self.levels_of_description = list(levels)
return self.levels_of_description | python | def get_levels_of_description(self):
"""
Returns an array of all levels of description defined in this Archivist's Toolkit instance.
"""
if not hasattr(self, "levels_of_description"):
cursor = self.db.cursor()
levels = set()
cursor.execute("SELECT distinct(resourceLevel) FROM Resources")
for row in cursor:
levels.add(row)
cursor.execute("SELECT distinct(resourceLevel) FROM ResourcesComponents")
for row in cursor:
levels.add(row)
self.levels_of_description = list(levels)
return self.levels_of_description | [
"def",
"get_levels_of_description",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"levels_of_description\"",
")",
":",
"cursor",
"=",
"self",
".",
"db",
".",
"cursor",
"(",
")",
"levels",
"=",
"set",
"(",
")",
"cursor",
".",
"execute... | Returns an array of all levels of description defined in this Archivist's Toolkit instance. | [
"Returns",
"an",
"array",
"of",
"all",
"levels",
"of",
"description",
"defined",
"in",
"this",
"Archivist",
"s",
"Toolkit",
"instance",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivists_toolkit/client.py#L97-L112 | train | 52,104 |
artefactual-labs/agentarchives | agentarchives/archivists_toolkit/client.py | ArchivistsToolkitClient.collection_list | def collection_list(self, resource_id, resource_type="collection"):
"""
Fetches a list of all resource and component IDs within the specified resource.
:param long resource_id: The ID of the resource to fetch children from.
:param string resource_type: Specifies whether the resource to fetch is a collection or a child element.
Defaults to 'collection'.
:return: A list of longs representing the database resource IDs for all children of the requested record.
:rtype list:
"""
ret = []
cursor = self.db.cursor()
if resource_type == "collection":
cursor.execute(
"SELECT resourceComponentId FROM ResourcesComponents WHERE parentResourceComponentId IS NULL AND resourceId=%s",
(resource_id),
)
else:
ret.append(resource_id)
cursor.execute(
"SELECT resourceComponentId FROM ResourcesComponents WHERE parentResourceComponentId=%s",
(resource_id),
)
rows = cursor.fetchall()
if len(rows):
for row in rows:
ret.extend(self.collection_list(row[0], "description"))
return ret | python | def collection_list(self, resource_id, resource_type="collection"):
"""
Fetches a list of all resource and component IDs within the specified resource.
:param long resource_id: The ID of the resource to fetch children from.
:param string resource_type: Specifies whether the resource to fetch is a collection or a child element.
Defaults to 'collection'.
:return: A list of longs representing the database resource IDs for all children of the requested record.
:rtype list:
"""
ret = []
cursor = self.db.cursor()
if resource_type == "collection":
cursor.execute(
"SELECT resourceComponentId FROM ResourcesComponents WHERE parentResourceComponentId IS NULL AND resourceId=%s",
(resource_id),
)
else:
ret.append(resource_id)
cursor.execute(
"SELECT resourceComponentId FROM ResourcesComponents WHERE parentResourceComponentId=%s",
(resource_id),
)
rows = cursor.fetchall()
if len(rows):
for row in rows:
ret.extend(self.collection_list(row[0], "description"))
return ret | [
"def",
"collection_list",
"(",
"self",
",",
"resource_id",
",",
"resource_type",
"=",
"\"collection\"",
")",
":",
"ret",
"=",
"[",
"]",
"cursor",
"=",
"self",
".",
"db",
".",
"cursor",
"(",
")",
"if",
"resource_type",
"==",
"\"collection\"",
":",
"cursor",... | Fetches a list of all resource and component IDs within the specified resource.
:param long resource_id: The ID of the resource to fetch children from.
:param string resource_type: Specifies whether the resource to fetch is a collection or a child element.
Defaults to 'collection'.
:return: A list of longs representing the database resource IDs for all children of the requested record.
:rtype list: | [
"Fetches",
"a",
"list",
"of",
"all",
"resource",
"and",
"component",
"IDs",
"within",
"the",
"specified",
"resource",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivists_toolkit/client.py#L114-L145 | train | 52,105 |
artefactual-labs/agentarchives | agentarchives/archivists_toolkit/client.py | ArchivistsToolkitClient.find_resource_id_for_component | def find_resource_id_for_component(self, component_id):
"""
Given the ID of a component, returns the parent resource ID.
If the immediate parent of the component is itself a component, this method will progress up the tree until a resource is found.
:param long component_id: The ID of the ResourceComponent.
:return: The ID of the component's parent resource.
:rtype: long
"""
cursor = self.db.cursor()
sql = "SELECT resourceId, parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s"
cursor.execute(sql, (component_id,))
resource_id, parent_id = cursor.fetchone()
if resource_id is None:
return self.find_resource_id_for_component(parent_id)
else:
return resource_id | python | def find_resource_id_for_component(self, component_id):
"""
Given the ID of a component, returns the parent resource ID.
If the immediate parent of the component is itself a component, this method will progress up the tree until a resource is found.
:param long component_id: The ID of the ResourceComponent.
:return: The ID of the component's parent resource.
:rtype: long
"""
cursor = self.db.cursor()
sql = "SELECT resourceId, parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s"
cursor.execute(sql, (component_id,))
resource_id, parent_id = cursor.fetchone()
if resource_id is None:
return self.find_resource_id_for_component(parent_id)
else:
return resource_id | [
"def",
"find_resource_id_for_component",
"(",
"self",
",",
"component_id",
")",
":",
"cursor",
"=",
"self",
".",
"db",
".",
"cursor",
"(",
")",
"sql",
"=",
"\"SELECT resourceId, parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s\"",
"cursor",
".... | Given the ID of a component, returns the parent resource ID.
If the immediate parent of the component is itself a component, this method will progress up the tree until a resource is found.
:param long component_id: The ID of the ResourceComponent.
:return: The ID of the component's parent resource.
:rtype: long | [
"Given",
"the",
"ID",
"of",
"a",
"component",
"returns",
"the",
"parent",
"resource",
"ID",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivists_toolkit/client.py#L334-L353 | train | 52,106 |
artefactual-labs/agentarchives | agentarchives/archivists_toolkit/client.py | ArchivistsToolkitClient.find_parent_id_for_component | def find_parent_id_for_component(self, component_id):
"""
Given the ID of a component, returns the parent component's ID.
:param string component_id: The ID of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The ID of the parent record.
:rtype tuple:
"""
cursor = self.db.cursor()
sql = "SELECT parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s"
count = cursor.execute(sql, (component_id,))
if count > 0:
return (ArchivistsToolkitClient.RESOURCE_COMPONENT, cursor.fetchone())
return (
ArchivistsToolkitClient.RESOURCE,
self.find_resource_id_for_component(component_id),
) | python | def find_parent_id_for_component(self, component_id):
"""
Given the ID of a component, returns the parent component's ID.
:param string component_id: The ID of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The ID of the parent record.
:rtype tuple:
"""
cursor = self.db.cursor()
sql = "SELECT parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s"
count = cursor.execute(sql, (component_id,))
if count > 0:
return (ArchivistsToolkitClient.RESOURCE_COMPONENT, cursor.fetchone())
return (
ArchivistsToolkitClient.RESOURCE,
self.find_resource_id_for_component(component_id),
) | [
"def",
"find_parent_id_for_component",
"(",
"self",
",",
"component_id",
")",
":",
"cursor",
"=",
"self",
".",
"db",
".",
"cursor",
"(",
")",
"sql",
"=",
"\"SELECT parentResourceComponentId FROM ResourcesComponents WHERE resourceComponentId=%s\"",
"count",
"=",
"cursor",
... | Given the ID of a component, returns the parent component's ID.
:param string component_id: The ID of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The ID of the parent record.
:rtype tuple: | [
"Given",
"the",
"ID",
"of",
"a",
"component",
"returns",
"the",
"parent",
"component",
"s",
"ID",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivists_toolkit/client.py#L355-L375 | train | 52,107 |
mirca/vaneska | vaneska/models.py | Gaussian.evaluate | def evaluate(self, flux, xo, yo, a, b, c):
"""
Evaluate the Gaussian model
Parameters
----------
flux : tf.Variable
xo, yo : tf.Variable, tf.Variable
Center coordiantes of the Gaussian.
a, b, c : tf.Variable, tf.Variable
Parameters that control the rotation angle
and the stretch along the major axis of the Gaussian,
such that the matrix M = [a b ; b c] is positive-definite.
References
----------
https://en.wikipedia.org/wiki/Gaussian_function#Two-dimensional_Gaussian_function
"""
dx = self.x - xo
dy = self.y - yo
psf = tf.exp(-(a * dx ** 2 + 2 * b * dx * dy + c * dy ** 2))
psf_sum = tf.reduce_sum(psf)
return flux * psf / psf_sum | python | def evaluate(self, flux, xo, yo, a, b, c):
"""
Evaluate the Gaussian model
Parameters
----------
flux : tf.Variable
xo, yo : tf.Variable, tf.Variable
Center coordiantes of the Gaussian.
a, b, c : tf.Variable, tf.Variable
Parameters that control the rotation angle
and the stretch along the major axis of the Gaussian,
such that the matrix M = [a b ; b c] is positive-definite.
References
----------
https://en.wikipedia.org/wiki/Gaussian_function#Two-dimensional_Gaussian_function
"""
dx = self.x - xo
dy = self.y - yo
psf = tf.exp(-(a * dx ** 2 + 2 * b * dx * dy + c * dy ** 2))
psf_sum = tf.reduce_sum(psf)
return flux * psf / psf_sum | [
"def",
"evaluate",
"(",
"self",
",",
"flux",
",",
"xo",
",",
"yo",
",",
"a",
",",
"b",
",",
"c",
")",
":",
"dx",
"=",
"self",
".",
"x",
"-",
"xo",
"dy",
"=",
"self",
".",
"y",
"-",
"yo",
"psf",
"=",
"tf",
".",
"exp",
"(",
"-",
"(",
"a",... | Evaluate the Gaussian model
Parameters
----------
flux : tf.Variable
xo, yo : tf.Variable, tf.Variable
Center coordiantes of the Gaussian.
a, b, c : tf.Variable, tf.Variable
Parameters that control the rotation angle
and the stretch along the major axis of the Gaussian,
such that the matrix M = [a b ; b c] is positive-definite.
References
----------
https://en.wikipedia.org/wiki/Gaussian_function#Two-dimensional_Gaussian_function | [
"Evaluate",
"the",
"Gaussian",
"model"
] | 9bbf0b16957ec765e5f30872c8d22470c66bfd83 | https://github.com/mirca/vaneska/blob/9bbf0b16957ec765e5f30872c8d22470c66bfd83/vaneska/models.py#L49-L71 | train | 52,108 |
guillermo-carrasco/bcbio-nextgen-monitor | bcbio_monitor/parser/__init__.py | parse_log_line | def parse_log_line(line):
"""Parses a log line and returns it with more information
:param line: str - A line from a bcbio-nextgen log
:returns dict: A dictionary containing the line, if its a new step if its a Traceback or if the
analysis is finished
"""
matches = re.search(r'^\[([^\]]+)\] ([^:]+: .*)', line)
error = re.search(r'Traceback', line)
if error:
return {'line': line, 'step': 'error'}
if not matches:
return {'line': line, 'step': None}
tstamp = matches.group(1)
msg = matches.group(2)
if not msg.find('Timing: ') >= 0:
return {'line': line, 'step': None}
when = datetime.strptime(tstamp, '%Y-%m-%dT%H:%MZ').replace(
tzinfo=pytz.timezone('UTC'))
step = msg.split(":")[-1].strip()
return {'line': line, 'step': step, 'when': when} | python | def parse_log_line(line):
"""Parses a log line and returns it with more information
:param line: str - A line from a bcbio-nextgen log
:returns dict: A dictionary containing the line, if its a new step if its a Traceback or if the
analysis is finished
"""
matches = re.search(r'^\[([^\]]+)\] ([^:]+: .*)', line)
error = re.search(r'Traceback', line)
if error:
return {'line': line, 'step': 'error'}
if not matches:
return {'line': line, 'step': None}
tstamp = matches.group(1)
msg = matches.group(2)
if not msg.find('Timing: ') >= 0:
return {'line': line, 'step': None}
when = datetime.strptime(tstamp, '%Y-%m-%dT%H:%MZ').replace(
tzinfo=pytz.timezone('UTC'))
step = msg.split(":")[-1].strip()
return {'line': line, 'step': step, 'when': when} | [
"def",
"parse_log_line",
"(",
"line",
")",
":",
"matches",
"=",
"re",
".",
"search",
"(",
"r'^\\[([^\\]]+)\\] ([^:]+: .*)'",
",",
"line",
")",
"error",
"=",
"re",
".",
"search",
"(",
"r'Traceback'",
",",
"line",
")",
"if",
"error",
":",
"return",
"{",
"'... | Parses a log line and returns it with more information
:param line: str - A line from a bcbio-nextgen log
:returns dict: A dictionary containing the line, if its a new step if its a Traceback or if the
analysis is finished | [
"Parses",
"a",
"log",
"line",
"and",
"returns",
"it",
"with",
"more",
"information"
] | 6d059154d774140e1fd03a0e3625f607cef06f5a | https://github.com/guillermo-carrasco/bcbio-nextgen-monitor/blob/6d059154d774140e1fd03a0e3625f607cef06f5a/bcbio_monitor/parser/__init__.py#L12-L35 | train | 52,109 |
Leeps-Lab/otree-redwood | otree_redwood/models.py | Event.message | def message(self):
"""Dictionary representation of the Event appropriate for JSON-encoding."""
return {
'timestamp': time.mktime(self.timestamp.timetuple())*1e3 + self.timestamp.microsecond/1e3,
'group': self.group_pk,
'participant': None if not self.participant else self.participant.code,
'channel': self.channel,
'value': self.value
} | python | def message(self):
"""Dictionary representation of the Event appropriate for JSON-encoding."""
return {
'timestamp': time.mktime(self.timestamp.timetuple())*1e3 + self.timestamp.microsecond/1e3,
'group': self.group_pk,
'participant': None if not self.participant else self.participant.code,
'channel': self.channel,
'value': self.value
} | [
"def",
"message",
"(",
"self",
")",
":",
"return",
"{",
"'timestamp'",
":",
"time",
".",
"mktime",
"(",
"self",
".",
"timestamp",
".",
"timetuple",
"(",
")",
")",
"*",
"1e3",
"+",
"self",
".",
"timestamp",
".",
"microsecond",
"/",
"1e3",
",",
"'group... | Dictionary representation of the Event appropriate for JSON-encoding. | [
"Dictionary",
"representation",
"of",
"the",
"Event",
"appropriate",
"for",
"JSON",
"-",
"encoding",
"."
] | 59212f61a256ef77e0a9ed392ff497ea83ee6245 | https://github.com/Leeps-Lab/otree-redwood/blob/59212f61a256ef77e0a9ed392ff497ea83ee6245/otree_redwood/models.py#L50-L58 | train | 52,110 |
Leeps-Lab/otree-redwood | otree_redwood/models.py | Event.save | def save(self, *args, **kwargs):
"""Saving an Event automatically sets the timestamp if not already set."""
if self.timestamp is None:
self.timestamp = timezone.now()
super().save(*args, **kwargs) | python | def save(self, *args, **kwargs):
"""Saving an Event automatically sets the timestamp if not already set."""
if self.timestamp is None:
self.timestamp = timezone.now()
super().save(*args, **kwargs) | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"timestamp",
"is",
"None",
":",
"self",
".",
"timestamp",
"=",
"timezone",
".",
"now",
"(",
")",
"super",
"(",
")",
".",
"save",
"(",
"*",
"args... | Saving an Event automatically sets the timestamp if not already set. | [
"Saving",
"an",
"Event",
"automatically",
"sets",
"the",
"timestamp",
"if",
"not",
"already",
"set",
"."
] | 59212f61a256ef77e0a9ed392ff497ea83ee6245 | https://github.com/Leeps-Lab/otree-redwood/blob/59212f61a256ef77e0a9ed392ff497ea83ee6245/otree_redwood/models.py#L60-L65 | train | 52,111 |
Leeps-Lab/otree-redwood | otree_redwood/models.py | Group.send | def send(self, channel, payload):
"""Send a message with the given payload on the given channel.
Messages are broadcast to all players in the group.
"""
with track('send_channel=' + channel):
with track('create event'):
Event.objects.create(
group=self,
channel=channel,
value=payload)
ChannelGroup(str(self.pk)).send(
{'text': json.dumps({
'channel': channel,
'payload': payload
})}) | python | def send(self, channel, payload):
"""Send a message with the given payload on the given channel.
Messages are broadcast to all players in the group.
"""
with track('send_channel=' + channel):
with track('create event'):
Event.objects.create(
group=self,
channel=channel,
value=payload)
ChannelGroup(str(self.pk)).send(
{'text': json.dumps({
'channel': channel,
'payload': payload
})}) | [
"def",
"send",
"(",
"self",
",",
"channel",
",",
"payload",
")",
":",
"with",
"track",
"(",
"'send_channel='",
"+",
"channel",
")",
":",
"with",
"track",
"(",
"'create event'",
")",
":",
"Event",
".",
"objects",
".",
"create",
"(",
"group",
"=",
"self"... | Send a message with the given payload on the given channel.
Messages are broadcast to all players in the group. | [
"Send",
"a",
"message",
"with",
"the",
"given",
"payload",
"on",
"the",
"given",
"channel",
".",
"Messages",
"are",
"broadcast",
"to",
"all",
"players",
"in",
"the",
"group",
"."
] | 59212f61a256ef77e0a9ed392ff497ea83ee6245 | https://github.com/Leeps-Lab/otree-redwood/blob/59212f61a256ef77e0a9ed392ff497ea83ee6245/otree_redwood/models.py#L144-L158 | train | 52,112 |
Leeps-Lab/otree-redwood | otree_redwood/models.py | DecisionGroup._subperiod_tick | def _subperiod_tick(self, current_interval, intervals):
"""Tick each sub-period, copying group_decisions to subperiod_group_decisions."""
self.refresh_from_db()
for key, value in self.group_decisions.items():
self.subperiod_group_decisions[key] = value
self.send('group_decisions', self.subperiod_group_decisions)
self.save(update_fields=['subperiod_group_decisions']) | python | def _subperiod_tick(self, current_interval, intervals):
"""Tick each sub-period, copying group_decisions to subperiod_group_decisions."""
self.refresh_from_db()
for key, value in self.group_decisions.items():
self.subperiod_group_decisions[key] = value
self.send('group_decisions', self.subperiod_group_decisions)
self.save(update_fields=['subperiod_group_decisions']) | [
"def",
"_subperiod_tick",
"(",
"self",
",",
"current_interval",
",",
"intervals",
")",
":",
"self",
".",
"refresh_from_db",
"(",
")",
"for",
"key",
",",
"value",
"in",
"self",
".",
"group_decisions",
".",
"items",
"(",
")",
":",
"self",
".",
"subperiod_gro... | Tick each sub-period, copying group_decisions to subperiod_group_decisions. | [
"Tick",
"each",
"sub",
"-",
"period",
"copying",
"group_decisions",
"to",
"subperiod_group_decisions",
"."
] | 59212f61a256ef77e0a9ed392ff497ea83ee6245 | https://github.com/Leeps-Lab/otree-redwood/blob/59212f61a256ef77e0a9ed392ff497ea83ee6245/otree_redwood/models.py#L247-L253 | train | 52,113 |
Leeps-Lab/otree-redwood | otree_redwood/models.py | DecisionGroup._on_decisions_event | def _on_decisions_event(self, event=None, **kwargs):
"""Called when an Event is received on the decisions channel. Saves
the value in group_decisions. If num_subperiods is None, immediately
broadcasts the event back out on the group_decisions channel.
"""
if not self.ran_ready_function:
logger.warning('ignoring decision from {} before when_all_players_ready: {}'.format(event.participant.code, event.value))
return
with track('_on_decisions_event'):
self.group_decisions[event.participant.code] = event.value
self._group_decisions_updated = True
self.save(update_fields=['group_decisions', '_group_decisions_updated'])
if not self.num_subperiods() and not self.rate_limit():
self.send('group_decisions', self.group_decisions) | python | def _on_decisions_event(self, event=None, **kwargs):
"""Called when an Event is received on the decisions channel. Saves
the value in group_decisions. If num_subperiods is None, immediately
broadcasts the event back out on the group_decisions channel.
"""
if not self.ran_ready_function:
logger.warning('ignoring decision from {} before when_all_players_ready: {}'.format(event.participant.code, event.value))
return
with track('_on_decisions_event'):
self.group_decisions[event.participant.code] = event.value
self._group_decisions_updated = True
self.save(update_fields=['group_decisions', '_group_decisions_updated'])
if not self.num_subperiods() and not self.rate_limit():
self.send('group_decisions', self.group_decisions) | [
"def",
"_on_decisions_event",
"(",
"self",
",",
"event",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"ran_ready_function",
":",
"logger",
".",
"warning",
"(",
"'ignoring decision from {} before when_all_players_ready: {}'",
".",
"form... | Called when an Event is received on the decisions channel. Saves
the value in group_decisions. If num_subperiods is None, immediately
broadcasts the event back out on the group_decisions channel. | [
"Called",
"when",
"an",
"Event",
"is",
"received",
"on",
"the",
"decisions",
"channel",
".",
"Saves",
"the",
"value",
"in",
"group_decisions",
".",
"If",
"num_subperiods",
"is",
"None",
"immediately",
"broadcasts",
"the",
"event",
"back",
"out",
"on",
"the",
... | 59212f61a256ef77e0a9ed392ff497ea83ee6245 | https://github.com/Leeps-Lab/otree-redwood/blob/59212f61a256ef77e0a9ed392ff497ea83ee6245/otree_redwood/models.py#L255-L268 | train | 52,114 |
Aluriak/tergraw | tergraw/view.py | clean | def clean(matrix):
"""Return a copy of given matrix where keys associated
to space values are discarded"""
return defaultdict(lambda: ' ', {
k: v for k, v in matrix.items() if v != ' '
}) | python | def clean(matrix):
"""Return a copy of given matrix where keys associated
to space values are discarded"""
return defaultdict(lambda: ' ', {
k: v for k, v in matrix.items() if v != ' '
}) | [
"def",
"clean",
"(",
"matrix",
")",
":",
"return",
"defaultdict",
"(",
"lambda",
":",
"' '",
",",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"matrix",
".",
"items",
"(",
")",
"if",
"v",
"!=",
"' '",
"}",
")"
] | Return a copy of given matrix where keys associated
to space values are discarded | [
"Return",
"a",
"copy",
"of",
"given",
"matrix",
"where",
"keys",
"associated",
"to",
"space",
"values",
"are",
"discarded"
] | 7f73cd286a77611e9c73f50b1e43be4f6643ac9f | https://github.com/Aluriak/tergraw/blob/7f73cd286a77611e9c73f50b1e43be4f6643ac9f/tergraw/view.py#L11-L16 | train | 52,115 |
Aluriak/tergraw | tergraw/view.py | build | def build(matrix):
"""Yield lines generated from given matrix"""
max_x = max(matrix, key=lambda t: t[0])[0]
min_x = min(matrix, key=lambda t: t[0])[0]
max_y = max(matrix, key=lambda t: t[1])[1]
min_y = min(matrix, key=lambda t: t[1])[1]
yield from (
# '{}:'.format(j).ljust(4) + ''.join(matrix[i, j] for i in range(min_x, max_x+1))
''.join(matrix[i, j] for i in range(min_x, max_x+1))
for j in range(min_y, max_y+1)
) | python | def build(matrix):
"""Yield lines generated from given matrix"""
max_x = max(matrix, key=lambda t: t[0])[0]
min_x = min(matrix, key=lambda t: t[0])[0]
max_y = max(matrix, key=lambda t: t[1])[1]
min_y = min(matrix, key=lambda t: t[1])[1]
yield from (
# '{}:'.format(j).ljust(4) + ''.join(matrix[i, j] for i in range(min_x, max_x+1))
''.join(matrix[i, j] for i in range(min_x, max_x+1))
for j in range(min_y, max_y+1)
) | [
"def",
"build",
"(",
"matrix",
")",
":",
"max_x",
"=",
"max",
"(",
"matrix",
",",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"0",
"]",
")",
"[",
"0",
"]",
"min_x",
"=",
"min",
"(",
"matrix",
",",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"0"... | Yield lines generated from given matrix | [
"Yield",
"lines",
"generated",
"from",
"given",
"matrix"
] | 7f73cd286a77611e9c73f50b1e43be4f6643ac9f | https://github.com/Aluriak/tergraw/blob/7f73cd286a77611e9c73f50b1e43be4f6643ac9f/tergraw/view.py#L19-L29 | train | 52,116 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient._build_base_url | def _build_base_url(self, host, port):
"""Return the API base URL string based on ``host`` and ``port``.
It returns a valid URL when ``host`` isn't. The endling slash is always
removed so it always need to be added by the consumer.
"""
parsed = urlparse(host)
if not parsed.scheme:
parsed = parsed._replace(scheme="http")
parsed = parsed._replace(path="")
netloc, parts = host, host.partition(":")
if parts[1] == "" and port is not None:
netloc = "{}:{}".format(parts[0], port)
parsed = parsed._replace(netloc=netloc)
parsed = parsed._replace(path=parsed.path.rstrip("/"))
return parsed.geturl() | python | def _build_base_url(self, host, port):
"""Return the API base URL string based on ``host`` and ``port``.
It returns a valid URL when ``host`` isn't. The endling slash is always
removed so it always need to be added by the consumer.
"""
parsed = urlparse(host)
if not parsed.scheme:
parsed = parsed._replace(scheme="http")
parsed = parsed._replace(path="")
netloc, parts = host, host.partition(":")
if parts[1] == "" and port is not None:
netloc = "{}:{}".format(parts[0], port)
parsed = parsed._replace(netloc=netloc)
parsed = parsed._replace(path=parsed.path.rstrip("/"))
return parsed.geturl() | [
"def",
"_build_base_url",
"(",
"self",
",",
"host",
",",
"port",
")",
":",
"parsed",
"=",
"urlparse",
"(",
"host",
")",
"if",
"not",
"parsed",
".",
"scheme",
":",
"parsed",
"=",
"parsed",
".",
"_replace",
"(",
"scheme",
"=",
"\"http\"",
")",
"parsed",
... | Return the API base URL string based on ``host`` and ``port``.
It returns a valid URL when ``host`` isn't. The endling slash is always
removed so it always need to be added by the consumer. | [
"Return",
"the",
"API",
"base",
"URL",
"string",
"based",
"on",
"host",
"and",
"port",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L86-L101 | train | 52,117 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient._process_notes | def _process_notes(record, new_record):
"""
Populate the notes property using the provided new_record.
If the new_record field was populated, assume that we want to replace
the notes. If there are valid changes to be made, they will be added to
the new_notes list. An empty list is counted as a request to delete all
notes.
Returns a boolean indicating whether changes were made.
"""
if "notes" not in new_record or not new_record["notes"]:
return False
# This assumes any notes passed into the edit record are intended to
# replace the existing set.
new_notes = []
for note in new_record["notes"]:
# Whitelist of supported types of notes to edit
# A note with an empty string as content is counted as a request to
# delete the note, and will not be added to the list.
if note["type"] in ("odd", "accessrestrict") and note.get("content"):
new_notes.append(
{
"jsonmodel_type": "note_multipart",
"publish": True,
"subnotes": [
{
"content": note["content"],
"jsonmodel_type": "note_text",
"publish": True,
}
],
"type": note["type"],
}
)
record["notes"] = new_notes
return True | python | def _process_notes(record, new_record):
"""
Populate the notes property using the provided new_record.
If the new_record field was populated, assume that we want to replace
the notes. If there are valid changes to be made, they will be added to
the new_notes list. An empty list is counted as a request to delete all
notes.
Returns a boolean indicating whether changes were made.
"""
if "notes" not in new_record or not new_record["notes"]:
return False
# This assumes any notes passed into the edit record are intended to
# replace the existing set.
new_notes = []
for note in new_record["notes"]:
# Whitelist of supported types of notes to edit
# A note with an empty string as content is counted as a request to
# delete the note, and will not be added to the list.
if note["type"] in ("odd", "accessrestrict") and note.get("content"):
new_notes.append(
{
"jsonmodel_type": "note_multipart",
"publish": True,
"subnotes": [
{
"content": note["content"],
"jsonmodel_type": "note_text",
"publish": True,
}
],
"type": note["type"],
}
)
record["notes"] = new_notes
return True | [
"def",
"_process_notes",
"(",
"record",
",",
"new_record",
")",
":",
"if",
"\"notes\"",
"not",
"in",
"new_record",
"or",
"not",
"new_record",
"[",
"\"notes\"",
"]",
":",
"return",
"False",
"# This assumes any notes passed into the edit record are intended to",
"# replac... | Populate the notes property using the provided new_record.
If the new_record field was populated, assume that we want to replace
the notes. If there are valid changes to be made, they will be added to
the new_notes list. An empty list is counted as a request to delete all
notes.
Returns a boolean indicating whether changes were made. | [
"Populate",
"the",
"notes",
"property",
"using",
"the",
"provided",
"new_record",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L210-L249 | train | 52,118 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient.resource_type | def resource_type(self, resource_id):
"""
Given an ID, determines whether a given resource is a resource or a resource_component.
:param resource_id string: The URI of the resource whose type to determine.
:raises ArchivesSpaceError: if the resource_id does not appear to be either type.
"""
match = re.search(
r"repositories/\d+/(resources|archival_objects)/\d+", resource_id
)
if match and match.groups():
type_ = match.groups()[0]
return "resource" if type_ == "resources" else "resource_component"
else:
raise ArchivesSpaceError(
"Unable to determine type of provided ID: {}".format(resource_id)
) | python | def resource_type(self, resource_id):
"""
Given an ID, determines whether a given resource is a resource or a resource_component.
:param resource_id string: The URI of the resource whose type to determine.
:raises ArchivesSpaceError: if the resource_id does not appear to be either type.
"""
match = re.search(
r"repositories/\d+/(resources|archival_objects)/\d+", resource_id
)
if match and match.groups():
type_ = match.groups()[0]
return "resource" if type_ == "resources" else "resource_component"
else:
raise ArchivesSpaceError(
"Unable to determine type of provided ID: {}".format(resource_id)
) | [
"def",
"resource_type",
"(",
"self",
",",
"resource_id",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"r\"repositories/\\d+/(resources|archival_objects)/\\d+\"",
",",
"resource_id",
")",
"if",
"match",
"and",
"match",
".",
"groups",
"(",
")",
":",
"type_",
... | Given an ID, determines whether a given resource is a resource or a resource_component.
:param resource_id string: The URI of the resource whose type to determine.
:raises ArchivesSpaceError: if the resource_id does not appear to be either type. | [
"Given",
"an",
"ID",
"determines",
"whether",
"a",
"given",
"resource",
"is",
"a",
"resource",
"or",
"a",
"resource_component",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L266-L282 | train | 52,119 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient.edit_record | def edit_record(self, new_record):
"""
Update a record in ArchivesSpace using the provided new_record.
The format of new_record is identical to the format returned by get_resource_component_and_children and related methods; consult the documentation for that method in ArchivistsToolkitClient to see the format.
This means it's possible, for example, to request a record, modify the returned dict, and pass that dict to this method to update the server.
Currently supported fields are:
* title
* targetfield
* notes
* start_date
* end_date
* date_expression
:raises ValueError: if the 'id' field isn't specified, or no fields to edit were specified.
"""
try:
record_id = new_record["id"]
except KeyError:
raise ValueError("No record ID provided!")
record = self.get_record(record_id)
# TODO: add more fields?
field_map = {"title": "title", "level": "levelOfDescription"}
fields_updated = False
for field, targetfield in field_map.items():
try:
record[targetfield] = new_record[field]
fields_updated = True
except KeyError:
continue
if self._process_notes(record, new_record):
fields_updated = True
# Create dates object if any of the date fields is populated
if (
"start_date" in new_record
or "end_date" in new_record
or "date_expression" in new_record
):
date = {
"jsonmodel_type": "date",
"date_type": "inclusive",
"label": "creation",
}
if "date_expression" in new_record:
date["expression"] = new_record["date_expression"]
if "start_date" in new_record:
date["begin"] = new_record["start_date"]
if "end_date" in new_record:
date["end"] = new_record["end_date"]
if len(record["dates"]) == 0:
record["dates"] = [date]
else:
record["dates"][0] = date
fields_updated = True
if not fields_updated:
raise ValueError("No fields to update specified!")
self._post(record_id, data=json.dumps(record)) | python | def edit_record(self, new_record):
"""
Update a record in ArchivesSpace using the provided new_record.
The format of new_record is identical to the format returned by get_resource_component_and_children and related methods; consult the documentation for that method in ArchivistsToolkitClient to see the format.
This means it's possible, for example, to request a record, modify the returned dict, and pass that dict to this method to update the server.
Currently supported fields are:
* title
* targetfield
* notes
* start_date
* end_date
* date_expression
:raises ValueError: if the 'id' field isn't specified, or no fields to edit were specified.
"""
try:
record_id = new_record["id"]
except KeyError:
raise ValueError("No record ID provided!")
record = self.get_record(record_id)
# TODO: add more fields?
field_map = {"title": "title", "level": "levelOfDescription"}
fields_updated = False
for field, targetfield in field_map.items():
try:
record[targetfield] = new_record[field]
fields_updated = True
except KeyError:
continue
if self._process_notes(record, new_record):
fields_updated = True
# Create dates object if any of the date fields is populated
if (
"start_date" in new_record
or "end_date" in new_record
or "date_expression" in new_record
):
date = {
"jsonmodel_type": "date",
"date_type": "inclusive",
"label": "creation",
}
if "date_expression" in new_record:
date["expression"] = new_record["date_expression"]
if "start_date" in new_record:
date["begin"] = new_record["start_date"]
if "end_date" in new_record:
date["end"] = new_record["end_date"]
if len(record["dates"]) == 0:
record["dates"] = [date]
else:
record["dates"][0] = date
fields_updated = True
if not fields_updated:
raise ValueError("No fields to update specified!")
self._post(record_id, data=json.dumps(record)) | [
"def",
"edit_record",
"(",
"self",
",",
"new_record",
")",
":",
"try",
":",
"record_id",
"=",
"new_record",
"[",
"\"id\"",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"\"No record ID provided!\"",
")",
"record",
"=",
"self",
".",
"get_record",
... | Update a record in ArchivesSpace using the provided new_record.
The format of new_record is identical to the format returned by get_resource_component_and_children and related methods; consult the documentation for that method in ArchivistsToolkitClient to see the format.
This means it's possible, for example, to request a record, modify the returned dict, and pass that dict to this method to update the server.
Currently supported fields are:
* title
* targetfield
* notes
* start_date
* end_date
* date_expression
:raises ValueError: if the 'id' field isn't specified, or no fields to edit were specified. | [
"Update",
"a",
"record",
"in",
"ArchivesSpace",
"using",
"the",
"provided",
"new_record",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L287-L352 | train | 52,120 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient.get_levels_of_description | def get_levels_of_description(self):
"""Returns an array of all levels of description defined in this
ArchivesSpace instance."""
if not hasattr(self, "levels_of_description"):
# TODO: * fetch human-formatted strings
# * is hardcoding this ID okay?
self.levels_of_description = self._get("/config/enumerations/32").json()[
"values"
]
return self.levels_of_description | python | def get_levels_of_description(self):
"""Returns an array of all levels of description defined in this
ArchivesSpace instance."""
if not hasattr(self, "levels_of_description"):
# TODO: * fetch human-formatted strings
# * is hardcoding this ID okay?
self.levels_of_description = self._get("/config/enumerations/32").json()[
"values"
]
return self.levels_of_description | [
"def",
"get_levels_of_description",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"levels_of_description\"",
")",
":",
"# TODO: * fetch human-formatted strings",
"# * is hardcoding this ID okay?",
"self",
".",
"levels_of_description",
"=",
"self",... | Returns an array of all levels of description defined in this
ArchivesSpace instance. | [
"Returns",
"an",
"array",
"of",
"all",
"levels",
"of",
"description",
"defined",
"in",
"this",
"ArchivesSpace",
"instance",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L354-L364 | train | 52,121 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient.get_resource_component_children | def get_resource_component_children(self, resource_component_id):
"""
Given a resource component, fetches detailed metadata for it and all of its children.
This is implemented using ArchivesSpaceClient.get_resource_component_children and uses its default options when fetching children.
:param string resource_component_id: The URL of the resource component from which to fetch metadata.
"""
resource_type = self.resource_type(resource_component_id)
return self.get_resource_component_and_children(
resource_component_id, resource_type
) | python | def get_resource_component_children(self, resource_component_id):
"""
Given a resource component, fetches detailed metadata for it and all of its children.
This is implemented using ArchivesSpaceClient.get_resource_component_children and uses its default options when fetching children.
:param string resource_component_id: The URL of the resource component from which to fetch metadata.
"""
resource_type = self.resource_type(resource_component_id)
return self.get_resource_component_and_children(
resource_component_id, resource_type
) | [
"def",
"get_resource_component_children",
"(",
"self",
",",
"resource_component_id",
")",
":",
"resource_type",
"=",
"self",
".",
"resource_type",
"(",
"resource_component_id",
")",
"return",
"self",
".",
"get_resource_component_and_children",
"(",
"resource_component_id",
... | Given a resource component, fetches detailed metadata for it and all of its children.
This is implemented using ArchivesSpaceClient.get_resource_component_children and uses its default options when fetching children.
:param string resource_component_id: The URL of the resource component from which to fetch metadata. | [
"Given",
"a",
"resource",
"component",
"fetches",
"detailed",
"metadata",
"for",
"it",
"and",
"all",
"of",
"its",
"children",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L392-L403 | train | 52,122 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient.find_parent_id_for_component | def find_parent_id_for_component(self, component_id):
"""
Given the URL to a component, returns the parent component's URL.
:param string component_id: The URL of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The URL of the parent record.
If the provided URL fragment references a resource, this method will simply return the same URL.
:rtype tuple:
"""
response = self.get_record(component_id)
if "parent" in response:
return (ArchivesSpaceClient.RESOURCE_COMPONENT, response["parent"]["ref"])
# if this is the top archival object, return the resource instead
elif "resource" in response:
return (ArchivesSpaceClient.RESOURCE, response["resource"]["ref"])
# resource was passed in, which has no higher-up record;
# return the same ID
else:
return (ArchivesSpaceClient.RESOURCE, component_id) | python | def find_parent_id_for_component(self, component_id):
"""
Given the URL to a component, returns the parent component's URL.
:param string component_id: The URL of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The URL of the parent record.
If the provided URL fragment references a resource, this method will simply return the same URL.
:rtype tuple:
"""
response = self.get_record(component_id)
if "parent" in response:
return (ArchivesSpaceClient.RESOURCE_COMPONENT, response["parent"]["ref"])
# if this is the top archival object, return the resource instead
elif "resource" in response:
return (ArchivesSpaceClient.RESOURCE, response["resource"]["ref"])
# resource was passed in, which has no higher-up record;
# return the same ID
else:
return (ArchivesSpaceClient.RESOURCE, component_id) | [
"def",
"find_parent_id_for_component",
"(",
"self",
",",
"component_id",
")",
":",
"response",
"=",
"self",
".",
"get_record",
"(",
"component_id",
")",
"if",
"\"parent\"",
"in",
"response",
":",
"return",
"(",
"ArchivesSpaceClient",
".",
"RESOURCE_COMPONENT",
","... | Given the URL to a component, returns the parent component's URL.
:param string component_id: The URL of the component.
:return: A tuple containing:
* The type of the parent record; valid values are ArchivesSpaceClient.RESOURCE and ArchivesSpaceClient.RESOURCE_COMPONENT.
* The URL of the parent record.
If the provided URL fragment references a resource, this method will simply return the same URL.
:rtype tuple: | [
"Given",
"the",
"URL",
"to",
"a",
"component",
"returns",
"the",
"parent",
"component",
"s",
"URL",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L575-L595 | train | 52,123 |
artefactual-labs/agentarchives | agentarchives/archivesspace/client.py | ArchivesSpaceClient.find_collection_ids | def find_collection_ids(self, search_pattern="", identifier="", fetched=0, page=1):
"""
Fetches a list of resource URLs for every resource in the database.
:param string search_pattern: A search pattern to use in looking up resources by title or resourceid.
The search will match any title containing this string;
for example, "text" will match "this title has this text in it".
If omitted, then all resources will be fetched.
:param string identifier: Only records containing this identifier will be returned.
Substring matching will not be performed; however, wildcards are supported.
For example, searching "F1" will only return records with the identifier "F1", while searching "F*" will return "F1", "F2", etc.
:return: A list containing every matched resource's URL.
:rtype list:
"""
params = {"page": page, "q": "primary_type:resource"}
if search_pattern != "":
search_pattern = self._escape_solr_query(search_pattern, field="title")
params["q"] = params["q"] + " AND title:{}".format(search_pattern)
if identifier != "":
identifier = self._escape_solr_query(identifier, field="identifier")
params["q"] = params["q"] + " AND identifier:{}".format(identifier)
response = self._get(self.repository + "/search", params=params)
hits = response.json()
results = [r["uri"] for r in hits["results"]]
results_so_far = fetched + hits["this_page"]
if hits["total_hits"] > results_so_far:
results.extend(
self.find_collection_ids(fetched=results_so_far, page=page + 1)
)
return results | python | def find_collection_ids(self, search_pattern="", identifier="", fetched=0, page=1):
"""
Fetches a list of resource URLs for every resource in the database.
:param string search_pattern: A search pattern to use in looking up resources by title or resourceid.
The search will match any title containing this string;
for example, "text" will match "this title has this text in it".
If omitted, then all resources will be fetched.
:param string identifier: Only records containing this identifier will be returned.
Substring matching will not be performed; however, wildcards are supported.
For example, searching "F1" will only return records with the identifier "F1", while searching "F*" will return "F1", "F2", etc.
:return: A list containing every matched resource's URL.
:rtype list:
"""
params = {"page": page, "q": "primary_type:resource"}
if search_pattern != "":
search_pattern = self._escape_solr_query(search_pattern, field="title")
params["q"] = params["q"] + " AND title:{}".format(search_pattern)
if identifier != "":
identifier = self._escape_solr_query(identifier, field="identifier")
params["q"] = params["q"] + " AND identifier:{}".format(identifier)
response = self._get(self.repository + "/search", params=params)
hits = response.json()
results = [r["uri"] for r in hits["results"]]
results_so_far = fetched + hits["this_page"]
if hits["total_hits"] > results_so_far:
results.extend(
self.find_collection_ids(fetched=results_so_far, page=page + 1)
)
return results | [
"def",
"find_collection_ids",
"(",
"self",
",",
"search_pattern",
"=",
"\"\"",
",",
"identifier",
"=",
"\"\"",
",",
"fetched",
"=",
"0",
",",
"page",
"=",
"1",
")",
":",
"params",
"=",
"{",
"\"page\"",
":",
"page",
",",
"\"q\"",
":",
"\"primary_type:reso... | Fetches a list of resource URLs for every resource in the database.
:param string search_pattern: A search pattern to use in looking up resources by title or resourceid.
The search will match any title containing this string;
for example, "text" will match "this title has this text in it".
If omitted, then all resources will be fetched.
:param string identifier: Only records containing this identifier will be returned.
Substring matching will not be performed; however, wildcards are supported.
For example, searching "F1" will only return records with the identifier "F1", while searching "F*" will return "F1", "F2", etc.
:return: A list containing every matched resource's URL.
:rtype list: | [
"Fetches",
"a",
"list",
"of",
"resource",
"URLs",
"for",
"every",
"resource",
"in",
"the",
"database",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/archivesspace/client.py#L597-L632 | train | 52,124 |
def find_by_id(self, object_type, field, value):
    """
    Find resource by a specific ID.

    Results are a dict in the format:
    {
        'id': <resource URI fragment>,
        'identifier': <resource identifier>,
        'title': <title of the resource>,
        'levelOfDescription': <level of description>,
    }

    :param str object_type: One of 'digital_object_components' or 'archival_objects'
    :param str field: Name of the field to search. One of 'component_id' or 'ref_id'.
    :param value: Value of the field to search for
    :return: List of dicts containing results.
    :raises ValueError: if object_type or field is not one of the allowed values.
    """
    if object_type not in ("digital_object_components", "archival_objects"):
        raise ValueError(
            "object_type must be 'digital_object_components' or 'archival_objects'"
        )
    if field not in ("ref_id", "component_id"):
        raise ValueError("field must be 'component_id' or 'ref_id'")

    def summarize(hit):
        # Work from the fully resolved record; prefer ref_id over
        # component_id as the display identifier.
        full = hit["_resolved"]
        if "ref_id" in full:
            ident = full["ref_id"]
        else:
            ident = full.get("component_id", "")
        return {
            "id": hit["ref"],
            "type": self.resource_type(hit["ref"]),
            "identifier": ident,
            "title": full.get("title", ""),
            "levelOfDescription": full.get("level", ""),
            "fullrecord": full,
        }

    params = {field + "[]": value, "resolve[]": object_type}
    url = self.repository + "/find_by_id/" + object_type
    hits = self._get(url, params=params).json()
    return [summarize(hit) for hit in hits[object_type]]
def attribute(func):
    """Wrap a function as an attribute."""
    # Mark as abstract, tag it for the interface checker, then expose it
    # through the (aliased) property descriptor.
    wrapped = abc.abstractmethod(func)
    wrapped.__iattribute__ = True
    return _property(wrapped)
def property(func):
    """Wrap a function as a property.

    This differs from attribute by identifying properties explicitly listed
    in the class definition rather than named attributes defined on instances
    of a class at init time.
    """
    # Abstract + tag, then wrap in the project's Property descriptor.
    wrapped = abc.abstractmethod(func)
    wrapped.__iproperty__ = True
    return Property(wrapped)
def classattribute(func):
    """Wrap a function as a class attribute.

    This differs from attribute by identifying attributes explicitly listed
    in a class definition rather than those only defined on instances of
    a class.
    """
    # Same shape as attribute(), but tagged with __iclassattribute__ so
    # the checker looks for a class-level definition.
    wrapped = abc.abstractmethod(func)
    wrapped.__iclassattribute__ = True
    return _property(wrapped)
def method(func):
    """Wrap a function as a method."""
    # abc.abstractmethod returns the same function object, flagged
    # abstract; we additionally tag it for the interface checker.
    wrapped = abc.abstractmethod(func)
    wrapped.__imethod__ = True
    return wrapped
def classmethod(func):
    """Wrap a function as a classmethod.

    This applies the classmethod decorator.
    """
    # Tag before wrapping: the attribute must land on the underlying
    # function, not on the classmethod descriptor.
    wrapped = abc.abstractmethod(func)
    wrapped.__iclassmethod__ = True
    return _classmethod(wrapped)
def parse_config(config_file):
    """Parse a YAML configuration file.

    :param str config_file: Path to the YAML configuration file.
    :return: The parsed configuration (typically a dict).
    :raises IOError: If the file does not exist or is not readable
        (re-raised after printing a friendly message).
    """
    try:
        with open(config_file, 'r') as f:
            # NOTE(review): bare yaml.load executes arbitrary YAML tags;
            # prefer yaml.safe_load unless custom tags are required.
            return yaml.load(f)
    except IOError:
        # Fixed: original used a Python 2 print *statement*, which is a
        # syntax error under Python 3. The call form works on both.
        print("Configuration file {} not found or not readable.".format(config_file))
        raise
def _ensure_ifaces_tuple(ifaces):
    """Convert to a tuple of interfaces and raise if not interfaces."""
    # A single interface (not iterable into a tuple) is wrapped in a
    # one-element tuple so callers can always pass one or many.
    try:
        candidates = tuple(ifaces)
    except TypeError:
        candidates = (ifaces,)
    for candidate in candidates:
        if not _issubclass(candidate, ibc.Iface):
            raise TypeError('Can only compare against interfaces.')
    return candidates
def _check_for_definition(iface, cls, tag, defines):
    """Check for a valid definition of a value.

    Args:
        iface (Iface): An Iface specification.
        cls (type): Some type to check for a definition.
        tag (str): The name of the tag attribute used to mark the abstract
            methods.
        defines (callable): A callable that accepts an attribute and returns
            True if the attribute is a valid definition.

    Returns:
        bool: Whether or not the definition is found.
    """
    # Abstract names on the interface that carry the requested tag.
    tagged = [
        name
        for name in iface.__abstractmethods__
        if hasattr(getattr(iface, name), tag)
    ]
    # Nothing tagged means there is nothing to look for: pass the check.
    # (The original expressed this via a NameError on the leaked loop
    # variable; this spells the same three outcomes out explicitly.)
    if not tagged:
        return True
    for name in tagged:
        for node in cls.__mro__:
            if hasattr(node, name) and defines(getattr(node, name)):
                return True
    return False
def issubclass(cls, ifaces):
    """Check if the given class is an implementation of the given iface.

    :param cls: The type to test.
    :param ifaces: A single interface or an iterable of interfaces.
    :return: True if *cls* satisfies every interface (vacuously True for
        an empty iterable), otherwise False.
    """
    ifaces = _ensure_ifaces_tuple(ifaces)
    # Each (tag, predicate) pair describes one category of interface
    # member and how to recognize a valid implementation of it.
    checks = (
        ('__iclassattribute__', _is_attribute),
        ('__iproperty__', _is_property),
        ('__imethod__', _is_method),
        ('__iclassmethod__', _is_classmethod),
    )
    # BUG FIX: the original returned from inside the loop and therefore
    # only ever inspected the first interface; now every interface in the
    # tuple must be satisfied.
    return all(
        _check_for_definition(iface, cls, tag, defines)
        for iface in ifaces
        for tag, defines in checks
    )
def isinstance(instance, ifaces):
    """Check if a given instance is an implementation of the interface."""
    ifaces = _ensure_ifaces_tuple(ifaces)
    # Instance attributes (values that only exist after __init__) must be
    # probed on the object itself; everything else is delegated to the
    # class-level issubclass check below.
    for iface in ifaces:
        required = [
            name
            for name in iface.__abstractmethods__
            if hasattr(getattr(iface, name), '__iattribute__')
        ]
        for name in required:
            if not hasattr(instance, name):
                return False
    return bool(issubclass(type(instance), ifaces))
def _get_call_class(method):
    """Find the call class for method if it exists else create one."""
    call_base, call_name = method.split('.', 1)
    # Import the module expected to hold the call class.
    module = __import__('ubersmith.calls.{0}'.format(call_base), fromlist=[''])
    # Scan the module's public members for a BaseCall subclass whose
    # declared method matches.
    for name in dir(module):
        if name.startswith('_'):
            continue
        candidate = getattr(module, name)
        if (type(candidate) is type and issubclass(candidate, BaseCall)
                and candidate.method == method):
            return candidate
    # No concrete class found; synthesize a generic one on the fly.
    class GenericCall(BaseCall):
        method = '.'.join((call_base, call_name))
    return GenericCall
def render(self):
    """Validate, process, clean and return the result of the call.

    :raises ValidationError: if self.validate() reports invalid input.
    :return: The cleaned response object stored on self.response.
    """
    if not self.validate():
        raise ValidationError
    # Execute the API call, then post-process the raw response in place.
    self.process_request()
    self.clean()
    return self.response
def process_request(self):
    """Processing the call and set response_data.

    Delegates to the configured request handler and stores the raw
    response on the instance for later cleaning by self.clean().
    """
    self.response = self.request_handler.process_request(
        self.method, self.request_data)
def clean(self):
    """Clean response."""
    if self.response.type != 'application/json':
        # Non-JSON payloads are treated as file downloads and passed
        # through unmodified.
        self.response = FileResponse(self.response.response)
        return
    # Deep-copy so the cleaner never mutates the raw response data.
    cleaned = copy.deepcopy(self.response.data)
    if self.cleaner is not None:
        cleaned = self.cleaner(cleaned)
    # Pick a typed wrapper by the exact type of the cleaned value.
    response_class = BaseResponse
    if type(cleaned) is dict:
        response_class = DictResponse
    elif type(cleaned) is int:
        response_class = IntResponse
    self.response = response_class.from_cleaned(self.response, cleaned)
def edit_record(self, new_record):
    """
    Update a record in AtoM using the provided new_record.

    The format of new_record is identical to the format returned by
    get_resource_component_and_children and related methods; consult the
    documentation for that method in ArchivistsToolkitClient to see the
    format. This means it's possible, for example, to request a record,
    modify the returned dict, and pass that dict to this method to update
    the server.

    Currently supported fields are:
        * title
        * notes
        * start_date
        * end_date
        * date_expression

    :param dict new_record: Fields to update, keyed as described above;
        must include a 'slug' identifying the target record.
    :raises ValueError: if the 'slug' field isn't specified, or no fields
        to edit were specified.
    """
    try:
        record_id = new_record["slug"]
    except KeyError:
        raise ValueError("No slug provided!")

    # Fetch the current state of the record and mutate it in place below.
    record = self.get_record(record_id)

    # Map agentarchives field names to AtoM field names for simple fields.
    field_map = {"title": "title", "level": "levelOfDescription"}
    fields_updated = False
    for field, targetfield in field_map.items():
        try:
            record[targetfield] = new_record[field]
            fields_updated = True
        except KeyError:
            continue

    # Optionally add notes
    if "notes" in new_record and new_record["notes"]:
        note = new_record["notes"][0]
        new_note = {"content": note["content"], "type": note["type"]}
        # This only supports editing a single note, and a single piece of content
        # within that note.
        # If the record already has at least one note, then replace the first note
        # within that record with this one.
        if "notes" not in record or record["notes"] == []:
            record["notes"] = [new_note]
        else:
            record["notes"][0] = new_note
        fields_updated = True
    else:
        # Remove existing notes if the record didn't have a valid note;
        # a note with an empty string as content should be counted as
        # a request to delete the note.
        record["notes"] = []

    # Update date
    updated_date = {}

    # Only single dates are currently supported
    if "dates" in new_record and type(new_record["dates"]) is list:
        new_record["dates"] = new_record["dates"][0]

    # Map agentarchives date specification to AtoM specification
    date_mapping = {
        "start_date": "start_date",
        # 'begin': 'start_date',
        "end_date": "end_date",
        # 'end': 'end_date',
        "date_expression": "date",
    }
    for date_field in date_mapping:
        if date_field in new_record:
            updated_date[date_mapping[date_field]] = new_record[date_field]

    # Add updated date specification to record update
    if updated_date != {}:
        record["dates"] = [updated_date]
        fields_updated = True

    if not fields_updated:
        raise ValueError("No fields to update specified!")

    # Push the modified record back to AtoM.
    self._put(
        urljoin(self.base_url, "informationobjects/{}".format(record_id)),
        data=json.dumps(record),
    )
def get_levels_of_description(self):
    """
    Returns an array of all levels of description defined in this AtoM instance.
    """
    # Lazily fetch and memoize on the instance. Taxonomy 34 appears to be
    # AtoM's "levels of description" taxonomy — confirm against the
    # AtoM API documentation.
    if not hasattr(self, "levels_of_description"):
        taxonomy = self._get(urljoin(self.base_url, "taxonomies/34")).json()
        self.levels_of_description = [entry["name"] for entry in taxonomy]
    return self.levels_of_description
def collection_list(self, resource_id, resource_type="collection"):
    """
    Fetches a list of slug representing descriptions within the specified parent description.

    :param resource_id str: The slug of the description to fetch children from.
    :param resource_type str: no-op; not required or used in this implementation.
    :return: A list of strings representing the slugs for all children of the requested description.
    :rtype list:
    """
    response = self._get(
        urljoin(self.base_url, "informationobjects/tree/{}".format(resource_id))
    )
    tree = response.json()
    # Depth-first preorder walk of the tree using an explicit stack
    # (equivalent to the recursive traversal, without recursion limits).
    slugs = []
    pending = list(tree["children"])
    while pending:
        node = pending.pop(0)
        slugs.append(node["slug"])
        if "children" in node:
            pending = list(node["children"]) + pending
    return slugs
artefactual-labs/agentarchives | agentarchives/atom/client.py | AtomClient.find_parent_id_for_component | def find_parent_id_for_component(self, slug):
"""
Given the slug of a description, returns the parent description's slug.
:param string slug: The slug of a description.
:return: The URL of the parent record.
:rtype: string
"""
response = self.get_record(slug)
if "parent" in response:
return response["parent"]
# resource was passed in, which has no higher-up record;
# return the same ID
else:
return slug | python | def find_parent_id_for_component(self, slug):
"""
Given the slug of a description, returns the parent description's slug.
:param string slug: The slug of a description.
:return: The URL of the parent record.
:rtype: string
"""
response = self.get_record(slug)
if "parent" in response:
return response["parent"]
# resource was passed in, which has no higher-up record;
# return the same ID
else:
return slug | [
"def",
"find_parent_id_for_component",
"(",
"self",
",",
"slug",
")",
":",
"response",
"=",
"self",
".",
"get_record",
"(",
"slug",
")",
"if",
"\"parent\"",
"in",
"response",
":",
"return",
"response",
"[",
"\"parent\"",
"]",
"# resource was passed in, which has n... | Given the slug of a description, returns the parent description's slug.
:param string slug: The slug of a description.
:return: The URL of the parent record.
:rtype: string | [
"Given",
"the",
"slug",
"of",
"a",
"description",
"returns",
"the",
"parent",
"description",
"s",
"slug",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/atom/client.py#L437-L452 | train | 52,143 |
artefactual-labs/agentarchives | agentarchives/atom/client.py | AtomClient.augment_resource_ids | def augment_resource_ids(self, resource_ids):
"""
Given a list of resource IDs, returns a list of dicts containing detailed information about the specified resources and their children.
This function recurses to a maximum of two levels when fetching children from the specified resources.
Consult the documentation of ArchivistsToolkitClient.get_resource_component_children for the format of the returned dicts.
:param list resource_ids: A list of one or more resource IDs.
:return: A list containing metadata dicts.
:rtype list:
"""
resources_augmented = []
for id in resource_ids:
# resource_data = self.get_resource_component_and_children(id, recurse_max_level=2)
# resources_augmented.append(resource_data)
resources_augmented.append(
self.get_resource_component_and_children(id, recurse_max_level=2)
)
return resources_augmented | python | def augment_resource_ids(self, resource_ids):
"""
Given a list of resource IDs, returns a list of dicts containing detailed information about the specified resources and their children.
This function recurses to a maximum of two levels when fetching children from the specified resources.
Consult the documentation of ArchivistsToolkitClient.get_resource_component_children for the format of the returned dicts.
:param list resource_ids: A list of one or more resource IDs.
:return: A list containing metadata dicts.
:rtype list:
"""
resources_augmented = []
for id in resource_ids:
# resource_data = self.get_resource_component_and_children(id, recurse_max_level=2)
# resources_augmented.append(resource_data)
resources_augmented.append(
self.get_resource_component_and_children(id, recurse_max_level=2)
)
return resources_augmented | [
"def",
"augment_resource_ids",
"(",
"self",
",",
"resource_ids",
")",
":",
"resources_augmented",
"=",
"[",
"]",
"for",
"id",
"in",
"resource_ids",
":",
"# resource_data = self.get_resource_component_and_children(id, recurse_max_level=2)",
"# resources_augmented.append(resource_d... | Given a list of resource IDs, returns a list of dicts containing detailed information about the specified resources and their children.
This function recurses to a maximum of two levels when fetching children from the specified resources.
Consult the documentation of ArchivistsToolkitClient.get_resource_component_children for the format of the returned dicts.
:param list resource_ids: A list of one or more resource IDs.
:return: A list containing metadata dicts.
:rtype list: | [
"Given",
"a",
"list",
"of",
"resource",
"IDs",
"returns",
"a",
"list",
"of",
"dicts",
"containing",
"detailed",
"information",
"about",
"the",
"specified",
"resources",
"and",
"their",
"children",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/atom/client.py#L594-L613 | train | 52,144 |
artefactual-labs/agentarchives | agentarchives/atom/client.py | AtomClient.delete_record | def delete_record(self, record_id):
"""
Delete a record with record_id.
"""
self._delete(
urljoin(self.base_url, "informationobjects/{}".format(record_id)),
expected_response=204,
)
return {"status": "Deleted"} | python | def delete_record(self, record_id):
"""
Delete a record with record_id.
"""
self._delete(
urljoin(self.base_url, "informationobjects/{}".format(record_id)),
expected_response=204,
)
return {"status": "Deleted"} | [
"def",
"delete_record",
"(",
"self",
",",
"record_id",
")",
":",
"self",
".",
"_delete",
"(",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"\"informationobjects/{}\"",
".",
"format",
"(",
"record_id",
")",
")",
",",
"expected_response",
"=",
"204",
",",
"... | Delete a record with record_id. | [
"Delete",
"a",
"record",
"with",
"record_id",
"."
] | af19ade56a90c64069cf46b50972fe72b6f10a45 | https://github.com/artefactual-labs/agentarchives/blob/af19ade56a90c64069cf46b50972fe72b6f10a45/agentarchives/atom/client.py#L749-L757 | train | 52,145 |
moonso/extract_vcf | extract_vcf/config_parser.py | ConfigParser.version_check | def version_check(self):
"""
Check if the version entry is in the proper format
"""
try:
version_info = self['Version']
except KeyError:
raise ValidateError('Config file has to have a Version section')
try:
float(version_info['version'])
except KeyError:
raise ValidateError('Config file has to have a version section')
except ValueError:
raise ValidateError('Version has to be a float.')
try:
version_info['name']
except KeyError:
raise ValidateError("Config file has to have a name")
return | python | def version_check(self):
"""
Check if the version entry is in the proper format
"""
try:
version_info = self['Version']
except KeyError:
raise ValidateError('Config file has to have a Version section')
try:
float(version_info['version'])
except KeyError:
raise ValidateError('Config file has to have a version section')
except ValueError:
raise ValidateError('Version has to be a float.')
try:
version_info['name']
except KeyError:
raise ValidateError("Config file has to have a name")
return | [
"def",
"version_check",
"(",
"self",
")",
":",
"try",
":",
"version_info",
"=",
"self",
"[",
"'Version'",
"]",
"except",
"KeyError",
":",
"raise",
"ValidateError",
"(",
"'Config file has to have a Version section'",
")",
"try",
":",
"float",
"(",
"version_info",
... | Check if the version entry is in the proper format | [
"Check",
"if",
"the",
"version",
"entry",
"is",
"in",
"the",
"proper",
"format"
] | c8381b362fa6734cd2ee65ef260738868d981aaf | https://github.com/moonso/extract_vcf/blob/c8381b362fa6734cd2ee65ef260738868d981aaf/extract_vcf/config_parser.py#L175-L194 | train | 52,146 |
moonso/extract_vcf | extract_vcf/config_parser.py | ConfigParser.check_plugin | def check_plugin(self, plugin):
"""
Check if the section is in the proper format vcf format.
Args:
vcf_section (dict): The information from a vcf section
Returns:
True is it is in the proper format
"""
vcf_section = self[plugin]
try:
vcf_field = vcf_section['field']
if not vcf_field in self.vcf_columns:
raise ValidateError(
"field has to be in {0}\n"
"Wrong field name in plugin: {1}".format(
self.vcf_columns, plugin
))
if vcf_field == 'INFO':
try:
info_key = vcf_section['info_key']
if info_key == 'CSQ':
try:
csq_key = vcf_section['csq_key']
except KeyError:
raise ValidateError(
"CSQ entrys has to refer to an csq field.\n"
"Refer with keyword 'csq_key'\n"
"csq_key is missing in section: {0}".format(
plugin
)
)
except KeyError:
raise ValidateError(
"INFO entrys has to refer to an INFO field.\n"
"Refer with keyword 'info_key'\n"
"info_key is missing in section: {0}".format(
plugin
)
)
except KeyError:
raise ValidateError(
"Vcf entrys have to refer to a field in the VCF with keyword"
" 'field'.\nMissing keyword 'field' in plugin: {0}".format(
plugin
))
try:
data_type = vcf_section['data_type']
if not data_type in self.data_types:
raise ValidateError(
"data_type has to be in {0}\n"
"Wrong data_type in plugin: {1}".format(
self.data_types, plugin)
)
except KeyError:
raise ValidateError(
"Vcf entrys have to refer to a data type in the VCF with "
"keyword 'data_type'.\n"
"Missing data_type in plugin: {0}".format(plugin)
)
separators = vcf_section.get('separators', None)
if separators:
if len(separators) == 1:
self[plugin]['separators'] = list(separators)
else:
if data_type != 'flag':
raise ValidateError(
"If data_type != flag the separators have to be defined"
"Missing separators in plugin: {0}".format(plugin)
)
record_rule = vcf_section.get('record_rule', None)
if record_rule:
if not record_rule in ['min', 'max']:
raise ValidateError(
"Record rules have to be in {0}\n"
"Wrong record_rule in plugin: {1}".format(
['min', 'max'], plugin)
)
else:
self.logger.info("Setting record rule to default: 'max'")
return True | python | def check_plugin(self, plugin):
"""
Check if the section is in the proper format vcf format.
Args:
vcf_section (dict): The information from a vcf section
Returns:
True is it is in the proper format
"""
vcf_section = self[plugin]
try:
vcf_field = vcf_section['field']
if not vcf_field in self.vcf_columns:
raise ValidateError(
"field has to be in {0}\n"
"Wrong field name in plugin: {1}".format(
self.vcf_columns, plugin
))
if vcf_field == 'INFO':
try:
info_key = vcf_section['info_key']
if info_key == 'CSQ':
try:
csq_key = vcf_section['csq_key']
except KeyError:
raise ValidateError(
"CSQ entrys has to refer to an csq field.\n"
"Refer with keyword 'csq_key'\n"
"csq_key is missing in section: {0}".format(
plugin
)
)
except KeyError:
raise ValidateError(
"INFO entrys has to refer to an INFO field.\n"
"Refer with keyword 'info_key'\n"
"info_key is missing in section: {0}".format(
plugin
)
)
except KeyError:
raise ValidateError(
"Vcf entrys have to refer to a field in the VCF with keyword"
" 'field'.\nMissing keyword 'field' in plugin: {0}".format(
plugin
))
try:
data_type = vcf_section['data_type']
if not data_type in self.data_types:
raise ValidateError(
"data_type has to be in {0}\n"
"Wrong data_type in plugin: {1}".format(
self.data_types, plugin)
)
except KeyError:
raise ValidateError(
"Vcf entrys have to refer to a data type in the VCF with "
"keyword 'data_type'.\n"
"Missing data_type in plugin: {0}".format(plugin)
)
separators = vcf_section.get('separators', None)
if separators:
if len(separators) == 1:
self[plugin]['separators'] = list(separators)
else:
if data_type != 'flag':
raise ValidateError(
"If data_type != flag the separators have to be defined"
"Missing separators in plugin: {0}".format(plugin)
)
record_rule = vcf_section.get('record_rule', None)
if record_rule:
if not record_rule in ['min', 'max']:
raise ValidateError(
"Record rules have to be in {0}\n"
"Wrong record_rule in plugin: {1}".format(
['min', 'max'], plugin)
)
else:
self.logger.info("Setting record rule to default: 'max'")
return True | [
"def",
"check_plugin",
"(",
"self",
",",
"plugin",
")",
":",
"vcf_section",
"=",
"self",
"[",
"plugin",
"]",
"try",
":",
"vcf_field",
"=",
"vcf_section",
"[",
"'field'",
"]",
"if",
"not",
"vcf_field",
"in",
"self",
".",
"vcf_columns",
":",
"raise",
"Vali... | Check if the section is in the proper format vcf format.
Args:
vcf_section (dict): The information from a vcf section
Returns:
True is it is in the proper format | [
"Check",
"if",
"the",
"section",
"is",
"in",
"the",
"proper",
"format",
"vcf",
"format",
"."
] | c8381b362fa6734cd2ee65ef260738868d981aaf | https://github.com/moonso/extract_vcf/blob/c8381b362fa6734cd2ee65ef260738868d981aaf/extract_vcf/config_parser.py#L196-L290 | train | 52,147 |
davidcarboni/Flask-B3 | b3/__init__.py | span | def span(route):
"""Optional decorator for Flask routes.
If you don't want to trace all routes using `Flask.before_request()' and 'Flask.after_request()'
you can use this decorator as an alternative way to handle incoming B3 headers:
@app.route('/instrumented')
@span
def instrumented():
...
...
...
NB @span needs to come after (not before) @app.route.
"""
@wraps(route)
def route_decorator(*args, **kwargs):
start_span()
try:
return route(*args, **kwargs)
finally:
end_span()
return route_decorator | python | def span(route):
"""Optional decorator for Flask routes.
If you don't want to trace all routes using `Flask.before_request()' and 'Flask.after_request()'
you can use this decorator as an alternative way to handle incoming B3 headers:
@app.route('/instrumented')
@span
def instrumented():
...
...
...
NB @span needs to come after (not before) @app.route.
"""
@wraps(route)
def route_decorator(*args, **kwargs):
start_span()
try:
return route(*args, **kwargs)
finally:
end_span()
return route_decorator | [
"def",
"span",
"(",
"route",
")",
":",
"@",
"wraps",
"(",
"route",
")",
"def",
"route_decorator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"start_span",
"(",
")",
"try",
":",
"return",
"route",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs... | Optional decorator for Flask routes.
If you don't want to trace all routes using `Flask.before_request()' and 'Flask.after_request()'
you can use this decorator as an alternative way to handle incoming B3 headers:
@app.route('/instrumented')
@span
def instrumented():
...
...
...
NB @span needs to come after (not before) @app.route. | [
"Optional",
"decorator",
"for",
"Flask",
"routes",
"."
] | 55092cb1070568aeecfd2c07c5ad6122e15ca345 | https://github.com/davidcarboni/Flask-B3/blob/55092cb1070568aeecfd2c07c5ad6122e15ca345/b3/__init__.py#L99-L123 | train | 52,148 |
davidcarboni/Flask-B3 | b3/__init__.py | _start_subspan | def _start_subspan(headers=None):
""" Sets up a new span to contact a downstream service.
This is used when making a downstream service call. It returns a dict containing the required sub-span headers.
Each downstream call you make is handled as a new span, so call this every time you need to contact another service.
This temporarily updates what's returned by values() to match the sub-span, so it can can also be used when calling
e.g. a database that doesn't support B3. You'll still be able to record the client side of an interaction,
even if the downstream server doesn't use the propagated trace information.
You'll need to call end_subspan when you're done. You can do this using the `SubSpan` class:
with SubSpan([headers]) as headers_b3:
... log.debug("Client start: calling downstream service")
... requests.get(<downstream service>, headers=headers_b3)
... log.debug("Client receive: downstream service responded")
For the specification, see: https://github.com/openzipkin/b3-propagation
:param headers: The headers dict. Headers will be added to this as needed.
:return: A dict containing header values for a downstream request.
This can be passed directly to e.g. requests.get(...).
"""
b3 = values()
g.subspan = {
# Propagate the trace ID
b3_trace_id: b3[b3_trace_id],
# Start a new span for the outgoing request
b3_span_id: _generate_identifier(),
# Set the current span as the parent span
b3_parent_span_id: b3[b3_span_id],
b3_sampled: b3[b3_sampled],
b3_flags: b3[b3_flags],
}
# Set up headers
# NB dict() ensures we don't alter the value passed in. Maybe that's too conservative?
result = dict(headers or {})
result.update({
b3_trace_id: g.subspan[b3_trace_id],
b3_span_id: g.subspan[b3_span_id],
b3_parent_span_id: g.subspan[b3_parent_span_id],
})
# Propagate only if set:
if g.subspan[b3_sampled]:
result[b3_sampled] = g.subspan[b3_sampled]
if g.subspan[b3_flags]:
result[b3_flags] = g.subspan[b3_flags]
_info("Client start. Starting sub-span")
_log.debug("B3 values for sub-span: {b3_headers}".format(b3_headers=values()))
_log.debug("All headers for downstream request: {b3_headers}".format(b3_headers=result))
return result | python | def _start_subspan(headers=None):
""" Sets up a new span to contact a downstream service.
This is used when making a downstream service call. It returns a dict containing the required sub-span headers.
Each downstream call you make is handled as a new span, so call this every time you need to contact another service.
This temporarily updates what's returned by values() to match the sub-span, so it can can also be used when calling
e.g. a database that doesn't support B3. You'll still be able to record the client side of an interaction,
even if the downstream server doesn't use the propagated trace information.
You'll need to call end_subspan when you're done. You can do this using the `SubSpan` class:
with SubSpan([headers]) as headers_b3:
... log.debug("Client start: calling downstream service")
... requests.get(<downstream service>, headers=headers_b3)
... log.debug("Client receive: downstream service responded")
For the specification, see: https://github.com/openzipkin/b3-propagation
:param headers: The headers dict. Headers will be added to this as needed.
:return: A dict containing header values for a downstream request.
This can be passed directly to e.g. requests.get(...).
"""
b3 = values()
g.subspan = {
# Propagate the trace ID
b3_trace_id: b3[b3_trace_id],
# Start a new span for the outgoing request
b3_span_id: _generate_identifier(),
# Set the current span as the parent span
b3_parent_span_id: b3[b3_span_id],
b3_sampled: b3[b3_sampled],
b3_flags: b3[b3_flags],
}
# Set up headers
# NB dict() ensures we don't alter the value passed in. Maybe that's too conservative?
result = dict(headers or {})
result.update({
b3_trace_id: g.subspan[b3_trace_id],
b3_span_id: g.subspan[b3_span_id],
b3_parent_span_id: g.subspan[b3_parent_span_id],
})
# Propagate only if set:
if g.subspan[b3_sampled]:
result[b3_sampled] = g.subspan[b3_sampled]
if g.subspan[b3_flags]:
result[b3_flags] = g.subspan[b3_flags]
_info("Client start. Starting sub-span")
_log.debug("B3 values for sub-span: {b3_headers}".format(b3_headers=values()))
_log.debug("All headers for downstream request: {b3_headers}".format(b3_headers=result))
return result | [
"def",
"_start_subspan",
"(",
"headers",
"=",
"None",
")",
":",
"b3",
"=",
"values",
"(",
")",
"g",
".",
"subspan",
"=",
"{",
"# Propagate the trace ID",
"b3_trace_id",
":",
"b3",
"[",
"b3_trace_id",
"]",
",",
"# Start a new span for the outgoing request",
"b3_s... | Sets up a new span to contact a downstream service.
This is used when making a downstream service call. It returns a dict containing the required sub-span headers.
Each downstream call you make is handled as a new span, so call this every time you need to contact another service.
This temporarily updates what's returned by values() to match the sub-span, so it can can also be used when calling
e.g. a database that doesn't support B3. You'll still be able to record the client side of an interaction,
even if the downstream server doesn't use the propagated trace information.
You'll need to call end_subspan when you're done. You can do this using the `SubSpan` class:
with SubSpan([headers]) as headers_b3:
... log.debug("Client start: calling downstream service")
... requests.get(<downstream service>, headers=headers_b3)
... log.debug("Client receive: downstream service responded")
For the specification, see: https://github.com/openzipkin/b3-propagation
:param headers: The headers dict. Headers will be added to this as needed.
:return: A dict containing header values for a downstream request.
This can be passed directly to e.g. requests.get(...). | [
"Sets",
"up",
"a",
"new",
"span",
"to",
"contact",
"a",
"downstream",
"service",
".",
"This",
"is",
"used",
"when",
"making",
"a",
"downstream",
"service",
"call",
".",
"It",
"returns",
"a",
"dict",
"containing",
"the",
"required",
"sub",
"-",
"span",
"h... | 55092cb1070568aeecfd2c07c5ad6122e15ca345 | https://github.com/davidcarboni/Flask-B3/blob/55092cb1070568aeecfd2c07c5ad6122e15ca345/b3/__init__.py#L153-L209 | train | 52,149 |
davidcarboni/Flask-B3 | b3/__init__.py | _info | def _info(message):
"""Convenience function to log current span values.
"""
span = values()
_log.debug(message + ": {span} in trace {trace}. (Parent span: {parent}).".format(
span=span.get(b3_span_id),
trace=span.get(b3_trace_id),
parent=span.get(b3_parent_span_id),
)) | python | def _info(message):
"""Convenience function to log current span values.
"""
span = values()
_log.debug(message + ": {span} in trace {trace}. (Parent span: {parent}).".format(
span=span.get(b3_span_id),
trace=span.get(b3_trace_id),
parent=span.get(b3_parent_span_id),
)) | [
"def",
"_info",
"(",
"message",
")",
":",
"span",
"=",
"values",
"(",
")",
"_log",
".",
"debug",
"(",
"message",
"+",
"\": {span} in trace {trace}. (Parent span: {parent}).\"",
".",
"format",
"(",
"span",
"=",
"span",
".",
"get",
"(",
"b3_span_id",
")",
",",... | Convenience function to log current span values. | [
"Convenience",
"function",
"to",
"log",
"current",
"span",
"values",
"."
] | 55092cb1070568aeecfd2c07c5ad6122e15ca345 | https://github.com/davidcarboni/Flask-B3/blob/55092cb1070568aeecfd2c07c5ad6122e15ca345/b3/__init__.py#L238-L246 | train | 52,150 |
proycon/flat | flat/comm.py | checkversion | def checkversion(version):
"""Checks foliadocserve version, returns 1 if the document is newer than the library, -1 if it is older, 0 if it is equal"""
try:
for refversion, responseversion in zip([int(x) for x in REQUIREFOLIADOCSERVE.split('.')], [int(x) for x in version.split('.')]):
if responseversion > refversion:
return 1 #response is newer than library
elif responseversion < refversion:
return -1 #response is older than library
return 0 #versions are equal
except ValueError:
raise ValueError("Unable to parse version, invalid syntax") | python | def checkversion(version):
"""Checks foliadocserve version, returns 1 if the document is newer than the library, -1 if it is older, 0 if it is equal"""
try:
for refversion, responseversion in zip([int(x) for x in REQUIREFOLIADOCSERVE.split('.')], [int(x) for x in version.split('.')]):
if responseversion > refversion:
return 1 #response is newer than library
elif responseversion < refversion:
return -1 #response is older than library
return 0 #versions are equal
except ValueError:
raise ValueError("Unable to parse version, invalid syntax") | [
"def",
"checkversion",
"(",
"version",
")",
":",
"try",
":",
"for",
"refversion",
",",
"responseversion",
"in",
"zip",
"(",
"[",
"int",
"(",
"x",
")",
"for",
"x",
"in",
"REQUIREFOLIADOCSERVE",
".",
"split",
"(",
"'.'",
")",
"]",
",",
"[",
"int",
"(",... | Checks foliadocserve version, returns 1 if the document is newer than the library, -1 if it is older, 0 if it is equal | [
"Checks",
"foliadocserve",
"version",
"returns",
"1",
"if",
"the",
"document",
"is",
"newer",
"than",
"the",
"library",
"-",
"1",
"if",
"it",
"is",
"older",
"0",
"if",
"it",
"is",
"equal"
] | f14eea61edcae8656dadccd9a43481ff7e710ffb | https://github.com/proycon/flat/blob/f14eea61edcae8656dadccd9a43481ff7e710ffb/flat/comm.py#L12-L22 | train | 52,151 |
proycon/flat | flat/modes/viewer/views.py | pub_poll | def pub_poll(request, docid):
"""The initial viewer, does not provide the document content yet"""
try:
r = flat.comm.get(request, '/poll/pub/' + docid + '/', False)
except URLError:
return HttpResponseForbidden("Unable to connect to the document server [viewer/poll]")
return HttpResponse(r, content_type='application/json') | python | def pub_poll(request, docid):
"""The initial viewer, does not provide the document content yet"""
try:
r = flat.comm.get(request, '/poll/pub/' + docid + '/', False)
except URLError:
return HttpResponseForbidden("Unable to connect to the document server [viewer/poll]")
return HttpResponse(r, content_type='application/json') | [
"def",
"pub_poll",
"(",
"request",
",",
"docid",
")",
":",
"try",
":",
"r",
"=",
"flat",
".",
"comm",
".",
"get",
"(",
"request",
",",
"'/poll/pub/'",
"+",
"docid",
"+",
"'/'",
",",
"False",
")",
"except",
"URLError",
":",
"return",
"HttpResponseForbid... | The initial viewer, does not provide the document content yet | [
"The",
"initial",
"viewer",
"does",
"not",
"provide",
"the",
"document",
"content",
"yet"
] | f14eea61edcae8656dadccd9a43481ff7e710ffb | https://github.com/proycon/flat/blob/f14eea61edcae8656dadccd9a43481ff7e710ffb/flat/modes/viewer/views.py#L37-L43 | train | 52,152 |
bluedynamics/cone.ugm | src/cone/ugm/browser/autoincrement.py | AutoIncrementForm.prepare | def prepare(_next, self):
"""Hook after prepare and set 'id' disabled.
"""
_next(self)
if not self.autoincrement_support:
return
id_field = self.form['id']
del id_field.attrs['required']
id_field.attrs['disabled'] = 'disabled'
id_field.getter = _('auto_incremented', default='auto incremented') | python | def prepare(_next, self):
"""Hook after prepare and set 'id' disabled.
"""
_next(self)
if not self.autoincrement_support:
return
id_field = self.form['id']
del id_field.attrs['required']
id_field.attrs['disabled'] = 'disabled'
id_field.getter = _('auto_incremented', default='auto incremented') | [
"def",
"prepare",
"(",
"_next",
",",
"self",
")",
":",
"_next",
"(",
"self",
")",
"if",
"not",
"self",
".",
"autoincrement_support",
":",
"return",
"id_field",
"=",
"self",
".",
"form",
"[",
"'id'",
"]",
"del",
"id_field",
".",
"attrs",
"[",
"'required... | Hook after prepare and set 'id' disabled. | [
"Hook",
"after",
"prepare",
"and",
"set",
"id",
"disabled",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/autoincrement.py#L52-L61 | train | 52,153 |
bluedynamics/cone.ugm | src/cone/ugm/browser/actions.py | delete_user_action | def delete_user_action(model, request):
"""Delete user from database.
"""
try:
users = model.parent.backend
uid = model.model.name
del users[uid]
users()
model.parent.invalidate()
localizer = get_localizer(request)
message = localizer.translate(_(
'delete_user_from_database',
default="Deleted user '${uid}' from database.",
mapping={'uid': uid}
))
return {
'success': True,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
} | python | def delete_user_action(model, request):
"""Delete user from database.
"""
try:
users = model.parent.backend
uid = model.model.name
del users[uid]
users()
model.parent.invalidate()
localizer = get_localizer(request)
message = localizer.translate(_(
'delete_user_from_database',
default="Deleted user '${uid}' from database.",
mapping={'uid': uid}
))
return {
'success': True,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
} | [
"def",
"delete_user_action",
"(",
"model",
",",
"request",
")",
":",
"try",
":",
"users",
"=",
"model",
".",
"parent",
".",
"backend",
"uid",
"=",
"model",
".",
"model",
".",
"name",
"del",
"users",
"[",
"uid",
"]",
"users",
"(",
")",
"model",
".",
... | Delete user from database. | [
"Delete",
"user",
"from",
"database",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/actions.py#L67-L90 | train | 52,154 |
bluedynamics/cone.ugm | src/cone/ugm/browser/actions.py | delete_group_action | def delete_group_action(model, request):
"""Delete group from database.
"""
try:
groups = model.parent.backend
uid = model.model.name
del groups[uid]
groups()
model.parent.invalidate()
except Exception as e:
return {
'success': False,
'message': str(e)
}
localizer = get_localizer(request)
message = localizer.translate(_(
'deleted_group',
default='Deleted group from database'
))
return {
'success': True,
'message': message
} | python | def delete_group_action(model, request):
"""Delete group from database.
"""
try:
groups = model.parent.backend
uid = model.model.name
del groups[uid]
groups()
model.parent.invalidate()
except Exception as e:
return {
'success': False,
'message': str(e)
}
localizer = get_localizer(request)
message = localizer.translate(_(
'deleted_group',
default='Deleted group from database'
))
return {
'success': True,
'message': message
} | [
"def",
"delete_group_action",
"(",
"model",
",",
"request",
")",
":",
"try",
":",
"groups",
"=",
"model",
".",
"parent",
".",
"backend",
"uid",
"=",
"model",
".",
"model",
".",
"name",
"del",
"groups",
"[",
"uid",
"]",
"groups",
"(",
")",
"model",
".... | Delete group from database. | [
"Delete",
"group",
"from",
"database",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/actions.py#L237-L259 | train | 52,155 |
bluedynamics/cone.ugm | src/cone/ugm/browser/roles.py | PrincipalRolesForm.prepare | def prepare(_next, self):
"""Hook after prepare and set 'principal_roles' as selection to
``self.form``.
"""
_next(self)
if not self.roles_support:
return
if not self.request.has_permission('manage', self.model.parent):
# XXX: yafowil selection display renderer
return
value = []
if self.action_resource == 'edit':
value = self.model.model.roles
roles_widget = factory(
'field:label:select',
name='principal_roles',
value=value,
props={
'label': _('roles', default='Roles'),
'multivalued': True,
'vocabulary': self.roles_vocab,
'format': 'single',
'listing_tag': 'ul',
'listing_label_position': 'after',
})
save_widget = self.form['save']
self.form.insertbefore(roles_widget, save_widget) | python | def prepare(_next, self):
"""Hook after prepare and set 'principal_roles' as selection to
``self.form``.
"""
_next(self)
if not self.roles_support:
return
if not self.request.has_permission('manage', self.model.parent):
# XXX: yafowil selection display renderer
return
value = []
if self.action_resource == 'edit':
value = self.model.model.roles
roles_widget = factory(
'field:label:select',
name='principal_roles',
value=value,
props={
'label': _('roles', default='Roles'),
'multivalued': True,
'vocabulary': self.roles_vocab,
'format': 'single',
'listing_tag': 'ul',
'listing_label_position': 'after',
})
save_widget = self.form['save']
self.form.insertbefore(roles_widget, save_widget) | [
"def",
"prepare",
"(",
"_next",
",",
"self",
")",
":",
"_next",
"(",
"self",
")",
"if",
"not",
"self",
".",
"roles_support",
":",
"return",
"if",
"not",
"self",
".",
"request",
".",
"has_permission",
"(",
"'manage'",
",",
"self",
".",
"model",
".",
"... | Hook after prepare and set 'principal_roles' as selection to
``self.form``. | [
"Hook",
"after",
"prepare",
"and",
"set",
"principal_roles",
"as",
"selection",
"to",
"self",
".",
"form",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/roles.py#L26-L52 | train | 52,156 |
bluedynamics/cone.ugm | src/cone/ugm/__init__.py | initialize_ugm | def initialize_ugm(config, global_config, local_config):
"""Initialize UGM.
"""
# custom UGM styles
cfg.merged.css.protected.append((static_resources, 'styles.css'))
# custom UGM javascript
cfg.merged.js.protected.append((static_resources, 'ugm.js'))
# UGM settings
register_config('ugm_general', GeneralSettings)
register_config('ugm_server', ServerSettings)
register_config('ugm_users', UsersSettings)
register_config('ugm_groups', GroupsSettings)
register_config('ugm_roles', RolesSettings)
register_config('ugm_localmanager', LocalManagerSettings)
# Users container
register_entry('users', users_factory)
# Groups container
register_entry('groups', groups_factory)
# register default acl's
# XXX: define permissions referring users, user, groups respective group only
acl_registry.register(ugm_user_acl, User, 'user')
acl_registry.register(ugm_default_acl, Users, 'users')
acl_registry.register(ugm_default_acl, Group, 'group')
acl_registry.register(ugm_default_acl, Groups, 'groups')
# localmanager config file location
lm_config = local_config.get('ugm.localmanager_config', '')
os.environ['LOCAL_MANAGER_CFG_FILE'] = lm_config
# add translation
config.add_translation_dirs('cone.ugm:locale/')
# static resources
config.add_view(static_resources, name='cone.ugm.static')
# scan browser package
config.scan('cone.ugm.browser') | python | def initialize_ugm(config, global_config, local_config):
"""Initialize UGM.
"""
# custom UGM styles
cfg.merged.css.protected.append((static_resources, 'styles.css'))
# custom UGM javascript
cfg.merged.js.protected.append((static_resources, 'ugm.js'))
# UGM settings
register_config('ugm_general', GeneralSettings)
register_config('ugm_server', ServerSettings)
register_config('ugm_users', UsersSettings)
register_config('ugm_groups', GroupsSettings)
register_config('ugm_roles', RolesSettings)
register_config('ugm_localmanager', LocalManagerSettings)
# Users container
register_entry('users', users_factory)
# Groups container
register_entry('groups', groups_factory)
# register default acl's
# XXX: define permissions referring users, user, groups respective group only
acl_registry.register(ugm_user_acl, User, 'user')
acl_registry.register(ugm_default_acl, Users, 'users')
acl_registry.register(ugm_default_acl, Group, 'group')
acl_registry.register(ugm_default_acl, Groups, 'groups')
# localmanager config file location
lm_config = local_config.get('ugm.localmanager_config', '')
os.environ['LOCAL_MANAGER_CFG_FILE'] = lm_config
# add translation
config.add_translation_dirs('cone.ugm:locale/')
# static resources
config.add_view(static_resources, name='cone.ugm.static')
# scan browser package
config.scan('cone.ugm.browser') | [
"def",
"initialize_ugm",
"(",
"config",
",",
"global_config",
",",
"local_config",
")",
":",
"# custom UGM styles",
"cfg",
".",
"merged",
".",
"css",
".",
"protected",
".",
"append",
"(",
"(",
"static_resources",
",",
"'styles.css'",
")",
")",
"# custom UGM java... | Initialize UGM. | [
"Initialize",
"UGM",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/__init__.py#L62-L103 | train | 52,157 |
bluedynamics/cone.ugm | src/cone/ugm/browser/expires.py | expiration_extractor | def expiration_extractor(widget, data):
"""Extract expiration information.
- If active flag not set, Account is disabled (value 0).
- If active flag set and value is UNSET, account never expires.
- If active flag set and datetime choosen, account expires at given
datetime.
- Timestamp in seconds since epoch is returned.
"""
active = int(data.request.get('%s.active' % widget.name, '0'))
if not active:
return 0
expires = data.extracted
if expires:
return time.mktime(expires.utctimetuple())
return UNSET | python | def expiration_extractor(widget, data):
"""Extract expiration information.
- If active flag not set, Account is disabled (value 0).
- If active flag set and value is UNSET, account never expires.
- If active flag set and datetime choosen, account expires at given
datetime.
- Timestamp in seconds since epoch is returned.
"""
active = int(data.request.get('%s.active' % widget.name, '0'))
if not active:
return 0
expires = data.extracted
if expires:
return time.mktime(expires.utctimetuple())
return UNSET | [
"def",
"expiration_extractor",
"(",
"widget",
",",
"data",
")",
":",
"active",
"=",
"int",
"(",
"data",
".",
"request",
".",
"get",
"(",
"'%s.active'",
"%",
"widget",
".",
"name",
",",
"'0'",
")",
")",
"if",
"not",
"active",
":",
"return",
"0",
"expi... | Extract expiration information.
- If active flag not set, Account is disabled (value 0).
- If active flag set and value is UNSET, account never expires.
- If active flag set and datetime choosen, account expires at given
datetime.
- Timestamp in seconds since epoch is returned. | [
"Extract",
"expiration",
"information",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/expires.py#L23-L38 | train | 52,158 |
bluedynamics/cone.ugm | src/cone/ugm/browser/expires.py | ExpirationForm.prepare | def prepare(_next, self):
"""Hook after prepare and set expiration widget to
``self.form``.
"""
_next(self)
cfg = ugm_general(self.model)
if cfg.attrs['users_account_expiration'] != 'True':
return
mode = 'edit'
if not self.request.has_permission(
'manage_expiration', self.model.parent):
mode = 'display'
if self.action_resource == 'edit':
attr = cfg.attrs['users_expires_attr']
unit = int(cfg.attrs['users_expires_unit'])
value = int(self.model.attrs.get(attr, 0))
# if format days, convert to seconds
if unit == 0:
value *= 86400
else:
value = UNSET
expires_widget = factory(
'field:label:expiration',
name='active',
value=value,
props={
'label': _('active', default='Active')
},
mode=mode
)
save_widget = self.form['save']
self.form.insertbefore(expires_widget, save_widget) | python | def prepare(_next, self):
"""Hook after prepare and set expiration widget to
``self.form``.
"""
_next(self)
cfg = ugm_general(self.model)
if cfg.attrs['users_account_expiration'] != 'True':
return
mode = 'edit'
if not self.request.has_permission(
'manage_expiration', self.model.parent):
mode = 'display'
if self.action_resource == 'edit':
attr = cfg.attrs['users_expires_attr']
unit = int(cfg.attrs['users_expires_unit'])
value = int(self.model.attrs.get(attr, 0))
# if format days, convert to seconds
if unit == 0:
value *= 86400
else:
value = UNSET
expires_widget = factory(
'field:label:expiration',
name='active',
value=value,
props={
'label': _('active', default='Active')
},
mode=mode
)
save_widget = self.form['save']
self.form.insertbefore(expires_widget, save_widget) | [
"def",
"prepare",
"(",
"_next",
",",
"self",
")",
":",
"_next",
"(",
"self",
")",
"cfg",
"=",
"ugm_general",
"(",
"self",
".",
"model",
")",
"if",
"cfg",
".",
"attrs",
"[",
"'users_account_expiration'",
"]",
"!=",
"'True'",
":",
"return",
"mode",
"=",
... | Hook after prepare and set expiration widget to
``self.form``. | [
"Hook",
"after",
"prepare",
"and",
"set",
"expiration",
"widget",
"to",
"self",
".",
"form",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/expires.py#L127-L158 | train | 52,159 |
bluedynamics/cone.ugm | src/cone/ugm/browser/portrait.py | PortraitForm.prepare | def prepare(_next, self):
"""Hook after prepare and set 'portrait' as image widget to
``self.form``.
"""
_next(self)
if not self.portrait_support:
return
model = self.model
request = self.request
if request.has_permission('edit_user', model.parent):
mode = 'edit'
else:
mode = 'display'
cfg = ugm_general(model)
image_attr = cfg.attrs['users_portrait_attr']
image_accept = cfg.attrs['users_portrait_accept']
image_width = int(cfg.attrs['users_portrait_width'])
image_height = int(cfg.attrs['users_portrait_height'])
image_data = model.attrs.get(image_attr)
if image_data:
image_value = {
'file': BytesIO(image_data),
'mimetype': 'image/jpeg',
}
image_url = make_url(request, node=model,
resource='portrait_image')
else:
image_value = UNSET
resource = 'cone.ugm.static/images/default_portrait.jpg'
image_url = make_url(request, node=model.root, resource=resource)
portrait_widget = factory(
'field:label:error:image',
name='portrait',
value=image_value,
props={
'label': _('portrait', default='Portrait'),
'src': image_url,
'alt': _('portrait', default='Portrait'),
'accept': image_accept,
'minsize': (image_width, image_height),
'crop': {
'size': (image_width, image_height),
'fitting': True,
}
},
mode=mode)
save_widget = self.form['save']
self.form.insertbefore(portrait_widget, save_widget) | python | def prepare(_next, self):
"""Hook after prepare and set 'portrait' as image widget to
``self.form``.
"""
_next(self)
if not self.portrait_support:
return
model = self.model
request = self.request
if request.has_permission('edit_user', model.parent):
mode = 'edit'
else:
mode = 'display'
cfg = ugm_general(model)
image_attr = cfg.attrs['users_portrait_attr']
image_accept = cfg.attrs['users_portrait_accept']
image_width = int(cfg.attrs['users_portrait_width'])
image_height = int(cfg.attrs['users_portrait_height'])
image_data = model.attrs.get(image_attr)
if image_data:
image_value = {
'file': BytesIO(image_data),
'mimetype': 'image/jpeg',
}
image_url = make_url(request, node=model,
resource='portrait_image')
else:
image_value = UNSET
resource = 'cone.ugm.static/images/default_portrait.jpg'
image_url = make_url(request, node=model.root, resource=resource)
portrait_widget = factory(
'field:label:error:image',
name='portrait',
value=image_value,
props={
'label': _('portrait', default='Portrait'),
'src': image_url,
'alt': _('portrait', default='Portrait'),
'accept': image_accept,
'minsize': (image_width, image_height),
'crop': {
'size': (image_width, image_height),
'fitting': True,
}
},
mode=mode)
save_widget = self.form['save']
self.form.insertbefore(portrait_widget, save_widget) | [
"def",
"prepare",
"(",
"_next",
",",
"self",
")",
":",
"_next",
"(",
"self",
")",
"if",
"not",
"self",
".",
"portrait_support",
":",
"return",
"model",
"=",
"self",
".",
"model",
"request",
"=",
"self",
".",
"request",
"if",
"request",
".",
"has_permis... | Hook after prepare and set 'portrait' as image widget to
``self.form``. | [
"Hook",
"after",
"prepare",
"and",
"set",
"portrait",
"as",
"image",
"widget",
"to",
"self",
".",
"form",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/portrait.py#L45-L92 | train | 52,160 |
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_consider_for_user | def local_manager_consider_for_user(self):
"""Flag whether local manager ACL should be considered for current
authenticated user.
"""
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True | python | def local_manager_consider_for_user(self):
"""Flag whether local manager ACL should be considered for current
authenticated user.
"""
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True | [
"def",
"local_manager_consider_for_user",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"local_management_enabled",
":",
"return",
"False",
"request",
"=",
"get_current_request",
"(",
")",
"if",
"authenticated_userid",
"(",
"request",
")",
"==",
"security",
"."... | Flag whether local manager ACL should be considered for current
authenticated user. | [
"Flag",
"whether",
"local",
"manager",
"ACL",
"should",
"be",
"considered",
"for",
"current",
"authenticated",
"user",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L73-L85 | train | 52,161 |
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_gid | def local_manager_gid(self):
"""Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised.
"""
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0] | python | def local_manager_gid(self):
"""Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised.
"""
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0] | [
"def",
"local_manager_gid",
"(",
"self",
")",
":",
"config",
"=",
"self",
".",
"root",
"[",
"'settings'",
"]",
"[",
"'ugm_localmanager'",
"]",
".",
"attrs",
"user",
"=",
"security",
".",
"authenticated_user",
"(",
"get_current_request",
"(",
")",
")",
"if",
... | Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised. | [
"Group",
"id",
"of",
"local",
"manager",
"group",
"of",
"current",
"authenticated",
"member",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L89-L114 | train | 52,162 |
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_rule | def local_manager_rule(self):
"""Return rule for local manager.
"""
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid] | python | def local_manager_rule(self):
"""Return rule for local manager.
"""
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid] | [
"def",
"local_manager_rule",
"(",
"self",
")",
":",
"adm_gid",
"=",
"self",
".",
"local_manager_gid",
"if",
"not",
"adm_gid",
":",
"return",
"None",
"config",
"=",
"self",
".",
"root",
"[",
"'settings'",
"]",
"[",
"'ugm_localmanager'",
"]",
".",
"attrs",
"... | Return rule for local manager. | [
"Return",
"rule",
"for",
"local",
"manager",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L118-L125 | train | 52,163 |
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_target_uids | def local_manager_target_uids(self):
"""Target uid's for local manager.
"""
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids) | python | def local_manager_target_uids(self):
"""Target uid's for local manager.
"""
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids) | [
"def",
"local_manager_target_uids",
"(",
"self",
")",
":",
"groups",
"=",
"self",
".",
"root",
"[",
"'groups'",
"]",
".",
"backend",
"managed_uids",
"=",
"set",
"(",
")",
"for",
"gid",
"in",
"self",
".",
"local_manager_target_gids",
":",
"group",
"=",
"gro... | Target uid's for local manager. | [
"Target",
"uid",
"s",
"for",
"local",
"manager",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L149-L158 | train | 52,164 |
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_is_default | def local_manager_is_default(self, adm_gid, gid):
"""Check whether gid is default group for local manager group.
"""
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default'] | python | def local_manager_is_default(self, adm_gid, gid):
"""Check whether gid is default group for local manager group.
"""
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default'] | [
"def",
"local_manager_is_default",
"(",
"self",
",",
"adm_gid",
",",
"gid",
")",
":",
"config",
"=",
"self",
".",
"root",
"[",
"'settings'",
"]",
"[",
"'ugm_localmanager'",
"]",
".",
"attrs",
"rule",
"=",
"config",
"[",
"adm_gid",
"]",
"if",
"gid",
"not"... | Check whether gid is default group for local manager group. | [
"Check",
"whether",
"gid",
"is",
"default",
"group",
"for",
"local",
"manager",
"group",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L161-L168 | train | 52,165 |
bluedynamics/cone.ugm | src/cone/ugm/browser/user.py | UserForm.form_field_definitions | def form_field_definitions(self):
"""Hook optional_login extractor if necessary for form defaults.
"""
schema = copy.deepcopy(form_field_definitions.user)
uid, login = self._get_auth_attrs()
if uid != login:
field = schema.get(login, schema['default'])
if field['chain'].find('*optional_login') == -1:
field['chain'] = '%s:%s' % (
'*optional_login', field['chain'])
if not field.get('custom'):
field['custom'] = dict()
field['custom']['optional_login'] = \
(['context.optional_login'], [], [], [], [])
schema[login] = field
return schema | python | def form_field_definitions(self):
"""Hook optional_login extractor if necessary for form defaults.
"""
schema = copy.deepcopy(form_field_definitions.user)
uid, login = self._get_auth_attrs()
if uid != login:
field = schema.get(login, schema['default'])
if field['chain'].find('*optional_login') == -1:
field['chain'] = '%s:%s' % (
'*optional_login', field['chain'])
if not field.get('custom'):
field['custom'] = dict()
field['custom']['optional_login'] = \
(['context.optional_login'], [], [], [], [])
schema[login] = field
return schema | [
"def",
"form_field_definitions",
"(",
"self",
")",
":",
"schema",
"=",
"copy",
".",
"deepcopy",
"(",
"form_field_definitions",
".",
"user",
")",
"uid",
",",
"login",
"=",
"self",
".",
"_get_auth_attrs",
"(",
")",
"if",
"uid",
"!=",
"login",
":",
"field",
... | Hook optional_login extractor if necessary for form defaults. | [
"Hook",
"optional_login",
"extractor",
"if",
"necessary",
"for",
"form",
"defaults",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/user.py#L183-L198 | train | 52,166 |
bluedynamics/cone.ugm | src/cone/ugm/browser/remote.py | remote_add_user | def remote_add_user(model, request):
"""Add user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
New user id.
password
User password to be set initially (optional).
roles
Comma seperated role names the user initially has.
groups
Comma seperated groups names the user should initially be member of.
attr.*
User attributes to be set. I.e. ``attr.mail`` would set the mail
attribute for newly created user. All request parameters prefixed with
``attr`` get checked against user attribute attrmap from settings.
Restrictions - All values, whether single or multi valued, are passed
as string or list of strings to the create function.
"""
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid in users:
return {
'success': False,
'message': u"User with given ID already exists.",
}
password = params.get('password')
add_roles = params.get('roles', '')
add_roles = [val.strip() for val in add_roles.split(',') if val]
add_groups = params.get('groups', '')
add_groups = [val.strip() for val in add_groups.split(',') if val]
attrs = dict()
for key, val in params.items():
if not key.startswith('attr.'):
continue
key = key[key.find('.') + 1:]
attrs[key] = val
settings = ugm_users(model)
attrmap = settings.attrs.users_form_attrmap
exposed = settings.attrs.users_exposed_attributes
if not exposed:
exposed = list()
valid_attrs = attrmap.keys() + exposed
checked_attrs = dict()
for key in valid_attrs:
val = attrs.get(key)
if not val:
continue
checked_attrs[key] = val
try:
user = users.create(uid, **checked_attrs)
message = u""
from cone.app.security import DEFAULT_ROLES
available_roles = [role[0] for role in DEFAULT_ROLES]
for role in add_roles:
if role not in available_roles:
message += u"Role '%s' given but inexistent. " % role
continue
user.add_role(role)
groups = users.parent.groups
for group in add_groups:
if group not in groups:
message += u"Group '%s' given but inexistent. " % group
continue
groups[group].add(uid)
users.parent()
if password is not None:
users.passwd(uid, None, password)
message += u"Created user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate() | python | def remote_add_user(model, request):
"""Add user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
New user id.
password
User password to be set initially (optional).
roles
Comma seperated role names the user initially has.
groups
Comma seperated groups names the user should initially be member of.
attr.*
User attributes to be set. I.e. ``attr.mail`` would set the mail
attribute for newly created user. All request parameters prefixed with
``attr`` get checked against user attribute attrmap from settings.
Restrictions - All values, whether single or multi valued, are passed
as string or list of strings to the create function.
"""
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid in users:
return {
'success': False,
'message': u"User with given ID already exists.",
}
password = params.get('password')
add_roles = params.get('roles', '')
add_roles = [val.strip() for val in add_roles.split(',') if val]
add_groups = params.get('groups', '')
add_groups = [val.strip() for val in add_groups.split(',') if val]
attrs = dict()
for key, val in params.items():
if not key.startswith('attr.'):
continue
key = key[key.find('.') + 1:]
attrs[key] = val
settings = ugm_users(model)
attrmap = settings.attrs.users_form_attrmap
exposed = settings.attrs.users_exposed_attributes
if not exposed:
exposed = list()
valid_attrs = attrmap.keys() + exposed
checked_attrs = dict()
for key in valid_attrs:
val = attrs.get(key)
if not val:
continue
checked_attrs[key] = val
try:
user = users.create(uid, **checked_attrs)
message = u""
from cone.app.security import DEFAULT_ROLES
available_roles = [role[0] for role in DEFAULT_ROLES]
for role in add_roles:
if role not in available_roles:
message += u"Role '%s' given but inexistent. " % role
continue
user.add_role(role)
groups = users.parent.groups
for group in add_groups:
if group not in groups:
message += u"Group '%s' given but inexistent. " % group
continue
groups[group].add(uid)
users.parent()
if password is not None:
users.passwd(uid, None, password)
message += u"Created user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate() | [
"def",
"remote_add_user",
"(",
"model",
",",
"request",
")",
":",
"params",
"=",
"request",
".",
"params",
"uid",
"=",
"params",
".",
"get",
"(",
"'id'",
")",
"if",
"not",
"uid",
":",
"return",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
"... | Add user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
New user id.
password
User password to be set initially (optional).
roles
Comma seperated role names the user initially has.
groups
Comma seperated groups names the user should initially be member of.
attr.*
User attributes to be set. I.e. ``attr.mail`` would set the mail
attribute for newly created user. All request parameters prefixed with
``attr`` get checked against user attribute attrmap from settings.
Restrictions - All values, whether single or multi valued, are passed
as string or list of strings to the create function. | [
"Add",
"user",
"via",
"remote",
"service",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/remote.py#L12-L124 | train | 52,167 |
bluedynamics/cone.ugm | src/cone/ugm/browser/remote.py | remote_delete_user | def remote_delete_user(model, request):
"""Remove user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
Id of user to delete.
"""
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid not in users:
return {
'success': False,
'message': u"User with given ID not exists.",
}
try:
del users[uid]
users.parent()
message = u"Deleted user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate() | python | def remote_delete_user(model, request):
"""Remove user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
Id of user to delete.
"""
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid not in users:
return {
'success': False,
'message': u"User with given ID not exists.",
}
try:
del users[uid]
users.parent()
message = u"Deleted user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate() | [
"def",
"remote_delete_user",
"(",
"model",
",",
"request",
")",
":",
"params",
"=",
"request",
".",
"params",
"uid",
"=",
"params",
".",
"get",
"(",
"'id'",
")",
"if",
"not",
"uid",
":",
"return",
"{",
"'success'",
":",
"False",
",",
"'message'",
":",
... | Remove user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
Id of user to delete. | [
"Remove",
"user",
"via",
"remote",
"service",
"."
] | 3c197075f3f6e94781289311c5637bb9c8e5597c | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/remote.py#L133-L180 | train | 52,168 |
ninuxorg/nodeshot | nodeshot/interop/sync/admin.py | LayerExternalInline.get_formset | def get_formset(self, request, obj=None, **kwargs):
"""
Load Synchronizer schema to display specific fields in admin
"""
if obj is not None:
try:
# this is enough to load the new schema
obj.external
except LayerExternal.DoesNotExist:
pass
return super(LayerExternalInline, self).get_formset(request, obj=None, **kwargs) | python | def get_formset(self, request, obj=None, **kwargs):
"""
Load Synchronizer schema to display specific fields in admin
"""
if obj is not None:
try:
# this is enough to load the new schema
obj.external
except LayerExternal.DoesNotExist:
pass
return super(LayerExternalInline, self).get_formset(request, obj=None, **kwargs) | [
"def",
"get_formset",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"obj",
"is",
"not",
"None",
":",
"try",
":",
"# this is enough to load the new schema",
"obj",
".",
"external",
"except",
"LayerExternal",
... | Load Synchronizer schema to display specific fields in admin | [
"Load",
"Synchronizer",
"schema",
"to",
"display",
"specific",
"fields",
"in",
"admin"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/sync/admin.py#L19-L29 | train | 52,169 |
ninuxorg/nodeshot | nodeshot/core/websockets/server.py | public_broadcaster | def public_broadcaster():
"""
Thread which runs in parallel and constantly checks for new messages
in the public pipe and broadcasts them publicly to all connected clients.
"""
while __websocket_server_running__:
pipein = open(PUBLIC_PIPE, 'r')
line = pipein.readline().replace('\n', '').replace('\r', '')
if line != '':
WebSocketHandler.broadcast(line)
print line
remaining_lines = pipein.read()
pipein.close()
pipeout = open(PUBLIC_PIPE, 'w')
pipeout.write(remaining_lines)
pipeout.close()
else:
pipein.close()
time.sleep(0.05) | python | def public_broadcaster():
"""
Thread which runs in parallel and constantly checks for new messages
in the public pipe and broadcasts them publicly to all connected clients.
"""
while __websocket_server_running__:
pipein = open(PUBLIC_PIPE, 'r')
line = pipein.readline().replace('\n', '').replace('\r', '')
if line != '':
WebSocketHandler.broadcast(line)
print line
remaining_lines = pipein.read()
pipein.close()
pipeout = open(PUBLIC_PIPE, 'w')
pipeout.write(remaining_lines)
pipeout.close()
else:
pipein.close()
time.sleep(0.05) | [
"def",
"public_broadcaster",
"(",
")",
":",
"while",
"__websocket_server_running__",
":",
"pipein",
"=",
"open",
"(",
"PUBLIC_PIPE",
",",
"'r'",
")",
"line",
"=",
"pipein",
".",
"readline",
"(",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
".",
"re... | Thread which runs in parallel and constantly checks for new messages
in the public pipe and broadcasts them publicly to all connected clients. | [
"Thread",
"which",
"runs",
"in",
"parallel",
"and",
"constantly",
"checks",
"for",
"new",
"messages",
"in",
"the",
"public",
"pipe",
"and",
"broadcasts",
"them",
"publicly",
"to",
"all",
"connected",
"clients",
"."
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/websockets/server.py#L17-L37 | train | 52,170 |
ninuxorg/nodeshot | nodeshot/core/websockets/server.py | private_messenger | def private_messenger():
"""
Thread which runs in parallel and constantly checks for new messages
in the private pipe and sends them to the specific client.
If client is not connected the message is discarded.
"""
while __websocket_server_running__:
pipein = open(PRIVATE_PIPE, 'r')
line = pipein.readline().replace('\n', '').replace('\r', '')
if line != '':
message = json.loads(line)
WebSocketHandler.send_private_message(user_id=message['user_id'],
message=message)
print line
remaining_lines = pipein.read()
pipein.close()
pipeout = open(PRIVATE_PIPE, 'w')
pipeout.write(remaining_lines)
pipeout.close()
else:
pipein.close()
time.sleep(0.05) | python | def private_messenger():
"""
Thread which runs in parallel and constantly checks for new messages
in the private pipe and sends them to the specific client.
If client is not connected the message is discarded.
"""
while __websocket_server_running__:
pipein = open(PRIVATE_PIPE, 'r')
line = pipein.readline().replace('\n', '').replace('\r', '')
if line != '':
message = json.loads(line)
WebSocketHandler.send_private_message(user_id=message['user_id'],
message=message)
print line
remaining_lines = pipein.read()
pipein.close()
pipeout = open(PRIVATE_PIPE, 'w')
pipeout.write(remaining_lines)
pipeout.close()
else:
pipein.close()
time.sleep(0.05) | [
"def",
"private_messenger",
"(",
")",
":",
"while",
"__websocket_server_running__",
":",
"pipein",
"=",
"open",
"(",
"PRIVATE_PIPE",
",",
"'r'",
")",
"line",
"=",
"pipein",
".",
"readline",
"(",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
".",
"re... | Thread which runs in parallel and constantly checks for new messages
in the private pipe and sends them to the specific client.
If client is not connected the message is discarded. | [
"Thread",
"which",
"runs",
"in",
"parallel",
"and",
"constantly",
"checks",
"for",
"new",
"messages",
"in",
"the",
"private",
"pipe",
"and",
"sends",
"them",
"to",
"the",
"specific",
"client",
".",
"If",
"client",
"is",
"not",
"connected",
"the",
"message",
... | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/websockets/server.py#L43-L66 | train | 52,171 |
ninuxorg/nodeshot | nodeshot/core/metrics/models.py | Metric.write | def write(self, values, timestamp=None, database=None, async=True):
""" write metric point """
func = write_async if async else write
return func(name=self.name,
values=values,
tags=self.tags,
timestamp=timestamp,
database=database) | python | def write(self, values, timestamp=None, database=None, async=True):
""" write metric point """
func = write_async if async else write
return func(name=self.name,
values=values,
tags=self.tags,
timestamp=timestamp,
database=database) | [
"def",
"write",
"(",
"self",
",",
"values",
",",
"timestamp",
"=",
"None",
",",
"database",
"=",
"None",
",",
"async",
"=",
"True",
")",
":",
"func",
"=",
"write_async",
"if",
"async",
"else",
"write",
"return",
"func",
"(",
"name",
"=",
"self",
".",... | write metric point | [
"write",
"metric",
"point"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/metrics/models.py#L41-L48 | train | 52,172 |
ninuxorg/nodeshot | nodeshot/interop/open311/views.py | ServiceDefinitionList.get | def get(self, request, *args, **kwargs):
""" return list of open 311 services """
# init django rest framework specific stuff
serializer_class = self.get_serializer_class()
context = self.get_serializer_context()
# init empty list
services = []
# loop over each service
for service_type in SERVICES.keys():
# initialize serializers for layer
services.append(
serializer_class(
object(),
context=context,
service_type=service_type
).data
)
return Response(services) | python | def get(self, request, *args, **kwargs):
""" return list of open 311 services """
# init django rest framework specific stuff
serializer_class = self.get_serializer_class()
context = self.get_serializer_context()
# init empty list
services = []
# loop over each service
for service_type in SERVICES.keys():
# initialize serializers for layer
services.append(
serializer_class(
object(),
context=context,
service_type=service_type
).data
)
return Response(services) | [
"def",
"get",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# init django rest framework specific stuff",
"serializer_class",
"=",
"self",
".",
"get_serializer_class",
"(",
")",
"context",
"=",
"self",
".",
"get_serializer_co... | return list of open 311 services | [
"return",
"list",
"of",
"open",
"311",
"services"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/open311/views.py#L40-L60 | train | 52,173 |
ninuxorg/nodeshot | nodeshot/interop/open311/views.py | ServiceRequestList.get | def get(self, request, *args, **kwargs):
""" Retrieve list of service requests """
if 'service_code' not in request.GET.keys():
return Response({ 'detail': _('A service code must be inserted') }, status=404)
service_code = request.GET['service_code']
if service_code not in SERVICES.keys():
return Response({ 'detail': _('Service not found') }, status=404)
start_date = None
end_date = None
status = None
layer = None
STATUSES = {}
for status_type in ('open', 'closed'):
STATUSES[status_type] = [k for k, v in STATUS.items() if v == status_type]
if 'start_date' in request.GET.keys():
start_date = request.GET['start_date']
if iso8601_REGEXP.match(start_date) is None:
return Response({ 'detail': _('Invalid date inserted') }, status=404)
if 'end_date' in request.GET.keys():
end_date = request.GET['end_date']
if iso8601_REGEXP.match(end_date) is None:
return Response({ 'detail': _('Invalid date inserted') }, status=404)
if 'status' in request.GET.keys():
if request.GET['status'] not in ('open','closed'):
return Response({ 'detail': _('Invalid status inserted') }, status=404)
status = request.GET['status']
if 'layer' in request.GET.keys():
layer = request.GET['layer']
node_layer = get_object_or_404(Layer, slug=layer)
service_model = MODELS[service_code]
if service_code in ('vote', 'comment', 'rate'):
self.queryset = service_model.objects.none()
else:
self.queryset = service_model.objects.all()
# Filter by layer
if layer is not None:
self.queryset = self.queryset.filter(layer = node_layer)
# Check of date parameters
if start_date is not None and end_date is not None:
self.queryset = self.queryset.filter(added__gte = start_date).filter(added__lte = end_date)
if start_date is not None and end_date is None:
self.queryset = self.queryset.filter(added__gte = start_date)
if start_date is None and end_date is not None:
self.queryset = self.queryset.filter(added__lte = end_date)
# Check of status parameter
if status is not None:
q_list = [Q(status__slug__exact = s) for s in STATUSES[status]]
self.queryset = self.queryset.filter(reduce(operator.or_, q_list))
return self.list(request, *args, **kwargs) | python | def get(self, request, *args, **kwargs):
""" Retrieve list of service requests """
if 'service_code' not in request.GET.keys():
return Response({ 'detail': _('A service code must be inserted') }, status=404)
service_code = request.GET['service_code']
if service_code not in SERVICES.keys():
return Response({ 'detail': _('Service not found') }, status=404)
start_date = None
end_date = None
status = None
layer = None
STATUSES = {}
for status_type in ('open', 'closed'):
STATUSES[status_type] = [k for k, v in STATUS.items() if v == status_type]
if 'start_date' in request.GET.keys():
start_date = request.GET['start_date']
if iso8601_REGEXP.match(start_date) is None:
return Response({ 'detail': _('Invalid date inserted') }, status=404)
if 'end_date' in request.GET.keys():
end_date = request.GET['end_date']
if iso8601_REGEXP.match(end_date) is None:
return Response({ 'detail': _('Invalid date inserted') }, status=404)
if 'status' in request.GET.keys():
if request.GET['status'] not in ('open','closed'):
return Response({ 'detail': _('Invalid status inserted') }, status=404)
status = request.GET['status']
if 'layer' in request.GET.keys():
layer = request.GET['layer']
node_layer = get_object_or_404(Layer, slug=layer)
service_model = MODELS[service_code]
if service_code in ('vote', 'comment', 'rate'):
self.queryset = service_model.objects.none()
else:
self.queryset = service_model.objects.all()
# Filter by layer
if layer is not None:
self.queryset = self.queryset.filter(layer = node_layer)
# Check of date parameters
if start_date is not None and end_date is not None:
self.queryset = self.queryset.filter(added__gte = start_date).filter(added__lte = end_date)
if start_date is not None and end_date is None:
self.queryset = self.queryset.filter(added__gte = start_date)
if start_date is None and end_date is not None:
self.queryset = self.queryset.filter(added__lte = end_date)
# Check of status parameter
if status is not None:
q_list = [Q(status__slug__exact = s) for s in STATUSES[status]]
self.queryset = self.queryset.filter(reduce(operator.or_, q_list))
return self.list(request, *args, **kwargs) | [
"def",
"get",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'service_code'",
"not",
"in",
"request",
".",
"GET",
".",
"keys",
"(",
")",
":",
"return",
"Response",
"(",
"{",
"'detail'",
":",
"_",
"(",
"'A... | Retrieve list of service requests | [
"Retrieve",
"list",
"of",
"service",
"requests"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/open311/views.py#L169-L231 | train | 52,174 |
ninuxorg/nodeshot | nodeshot/community/notifications/registrars/nodes.py | node_created_handler | def node_created_handler(sender, **kwargs):
""" send notification when a new node is created according to users's settings """
if kwargs['created']:
obj = kwargs['instance']
queryset = exclude_owner_of_node(obj)
create_notifications.delay(**{
"users": queryset,
"notification_model": Notification,
"notification_type": "node_created",
"related_object": obj
}) | python | def node_created_handler(sender, **kwargs):
""" send notification when a new node is created according to users's settings """
if kwargs['created']:
obj = kwargs['instance']
queryset = exclude_owner_of_node(obj)
create_notifications.delay(**{
"users": queryset,
"notification_model": Notification,
"notification_type": "node_created",
"related_object": obj
}) | [
"def",
"node_created_handler",
"(",
"sender",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
"[",
"'created'",
"]",
":",
"obj",
"=",
"kwargs",
"[",
"'instance'",
"]",
"queryset",
"=",
"exclude_owner_of_node",
"(",
"obj",
")",
"create_notifications",
".",... | send notification when a new node is created according to users's settings | [
"send",
"notification",
"when",
"a",
"new",
"node",
"is",
"created",
"according",
"to",
"users",
"s",
"settings"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/registrars/nodes.py#L26-L36 | train | 52,175 |
ninuxorg/nodeshot | nodeshot/community/notifications/registrars/nodes.py | node_status_changed_handler | def node_status_changed_handler(**kwargs):
""" send notification when the status of a node changes according to users's settings """
obj = kwargs['instance']
obj.old_status = kwargs['old_status'].name
obj.new_status = kwargs['new_status'].name
queryset = exclude_owner_of_node(obj)
create_notifications.delay(**{
"users": queryset,
"notification_model": Notification,
"notification_type": "node_status_changed",
"related_object": obj
})
# if node has owner send a different notification to him
if obj.user is not None:
create_notifications.delay(**{
"users": [obj.user],
"notification_model": Notification,
"notification_type": "node_own_status_changed",
"related_object": obj
}) | python | def node_status_changed_handler(**kwargs):
""" send notification when the status of a node changes according to users's settings """
obj = kwargs['instance']
obj.old_status = kwargs['old_status'].name
obj.new_status = kwargs['new_status'].name
queryset = exclude_owner_of_node(obj)
create_notifications.delay(**{
"users": queryset,
"notification_model": Notification,
"notification_type": "node_status_changed",
"related_object": obj
})
# if node has owner send a different notification to him
if obj.user is not None:
create_notifications.delay(**{
"users": [obj.user],
"notification_model": Notification,
"notification_type": "node_own_status_changed",
"related_object": obj
}) | [
"def",
"node_status_changed_handler",
"(",
"*",
"*",
"kwargs",
")",
":",
"obj",
"=",
"kwargs",
"[",
"'instance'",
"]",
"obj",
".",
"old_status",
"=",
"kwargs",
"[",
"'old_status'",
"]",
".",
"name",
"obj",
".",
"new_status",
"=",
"kwargs",
"[",
"'new_statu... | send notification when the status of a node changes according to users's settings | [
"send",
"notification",
"when",
"the",
"status",
"of",
"a",
"node",
"changes",
"according",
"to",
"users",
"s",
"settings"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/registrars/nodes.py#L42-L62 | train | 52,176 |
ninuxorg/nodeshot | nodeshot/networking/links/models/topology.py | Topology.diff | def diff(self):
""" shortcut to netdiff.diff """
latest = self.latest
current = NetJsonParser(self.json())
return diff(current, latest) | python | def diff(self):
""" shortcut to netdiff.diff """
latest = self.latest
current = NetJsonParser(self.json())
return diff(current, latest) | [
"def",
"diff",
"(",
"self",
")",
":",
"latest",
"=",
"self",
".",
"latest",
"current",
"=",
"NetJsonParser",
"(",
"self",
".",
"json",
"(",
")",
")",
"return",
"diff",
"(",
"current",
",",
"latest",
")"
] | shortcut to netdiff.diff | [
"shortcut",
"to",
"netdiff",
".",
"diff"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/networking/links/models/topology.py#L51-L55 | train | 52,177 |
ninuxorg/nodeshot | nodeshot/networking/links/models/topology.py | Topology.json | def json(self):
""" returns a dict that represents a NetJSON NetworkGraph object """
nodes = []
links = []
for link in self.link_set.all():
if self.is_layer2:
source = link.interface_a.mac
destination = link.interface_b.mac
else:
source = str(link.interface_a.ip_set.first().address)
destination = str(link.interface_b.ip_set.first().address)
nodes.append({
'id': source
})
nodes.append({
'id': destination
})
links.append(OrderedDict((
('source', source),
('target', destination),
('cost', link.metric_value)
)))
return OrderedDict((
('type', 'NetworkGraph'),
('protocol', self.parser.protocol),
('version', self.parser.version),
('metric', self.parser.metric),
('nodes', nodes),
('links', links)
)) | python | def json(self):
""" returns a dict that represents a NetJSON NetworkGraph object """
nodes = []
links = []
for link in self.link_set.all():
if self.is_layer2:
source = link.interface_a.mac
destination = link.interface_b.mac
else:
source = str(link.interface_a.ip_set.first().address)
destination = str(link.interface_b.ip_set.first().address)
nodes.append({
'id': source
})
nodes.append({
'id': destination
})
links.append(OrderedDict((
('source', source),
('target', destination),
('cost', link.metric_value)
)))
return OrderedDict((
('type', 'NetworkGraph'),
('protocol', self.parser.protocol),
('version', self.parser.version),
('metric', self.parser.metric),
('nodes', nodes),
('links', links)
)) | [
"def",
"json",
"(",
"self",
")",
":",
"nodes",
"=",
"[",
"]",
"links",
"=",
"[",
"]",
"for",
"link",
"in",
"self",
".",
"link_set",
".",
"all",
"(",
")",
":",
"if",
"self",
".",
"is_layer2",
":",
"source",
"=",
"link",
".",
"interface_a",
".",
... | returns a dict that represents a NetJSON NetworkGraph object | [
"returns",
"a",
"dict",
"that",
"represents",
"a",
"NetJSON",
"NetworkGraph",
"object"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/networking/links/models/topology.py#L57-L88 | train | 52,178 |
ninuxorg/nodeshot | nodeshot/networking/links/models/topology.py | Topology.update | def update(self):
"""
Updates topology
Links are not deleted straightaway but set as "disconnected"
"""
from .link import Link # avoid circular dependency
diff = self.diff()
status = {
'added': 'active',
'removed': 'disconnected',
'changed': 'active'
}
for section in ['added', 'removed', 'changed']:
# section might be empty
if not diff[section]:
continue
for link_dict in diff[section]['links']:
try:
link = Link.get_or_create(source=link_dict['source'],
target=link_dict['target'],
cost=link_dict['cost'],
topology=self)
except (LinkDataNotFound, ValidationError) as e:
msg = 'Exception while updating {0}'.format(self.__repr__())
logger.exception(msg)
print('{0}\n{1}\n'.format(msg, e))
continue
link.ensure(status=status[section],
cost=link_dict['cost']) | python | def update(self):
"""
Updates topology
Links are not deleted straightaway but set as "disconnected"
"""
from .link import Link # avoid circular dependency
diff = self.diff()
status = {
'added': 'active',
'removed': 'disconnected',
'changed': 'active'
}
for section in ['added', 'removed', 'changed']:
# section might be empty
if not diff[section]:
continue
for link_dict in diff[section]['links']:
try:
link = Link.get_or_create(source=link_dict['source'],
target=link_dict['target'],
cost=link_dict['cost'],
topology=self)
except (LinkDataNotFound, ValidationError) as e:
msg = 'Exception while updating {0}'.format(self.__repr__())
logger.exception(msg)
print('{0}\n{1}\n'.format(msg, e))
continue
link.ensure(status=status[section],
cost=link_dict['cost']) | [
"def",
"update",
"(",
"self",
")",
":",
"from",
".",
"link",
"import",
"Link",
"# avoid circular dependency",
"diff",
"=",
"self",
".",
"diff",
"(",
")",
"status",
"=",
"{",
"'added'",
":",
"'active'",
",",
"'removed'",
":",
"'disconnected'",
",",
"'change... | Updates topology
Links are not deleted straightaway but set as "disconnected" | [
"Updates",
"topology",
"Links",
"are",
"not",
"deleted",
"straightaway",
"but",
"set",
"as",
"disconnected"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/networking/links/models/topology.py#L90-L120 | train | 52,179 |
ninuxorg/nodeshot | nodeshot/interop/sync/models/layer_external.py | LayerExternal.clean | def clean(self, *args, **kwargs):
"""
Call self.synchronizer.clean method
"""
if self.synchronizer_path != 'None' and self.config:
# call synchronizer custom clean
try:
self.synchronizer.load_config(self.config)
self.synchronizer.clean()
except ImproperlyConfigured as e:
raise ValidationError(e.message) | python | def clean(self, *args, **kwargs):
"""
Call self.synchronizer.clean method
"""
if self.synchronizer_path != 'None' and self.config:
# call synchronizer custom clean
try:
self.synchronizer.load_config(self.config)
self.synchronizer.clean()
except ImproperlyConfigured as e:
raise ValidationError(e.message) | [
"def",
"clean",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"synchronizer_path",
"!=",
"'None'",
"and",
"self",
".",
"config",
":",
"# call synchronizer custom clean",
"try",
":",
"self",
".",
"synchronizer",
".",
... | Call self.synchronizer.clean method | [
"Call",
"self",
".",
"synchronizer",
".",
"clean",
"method"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/sync/models/layer_external.py#L72-L82 | train | 52,180 |
ninuxorg/nodeshot | nodeshot/interop/sync/models/layer_external.py | LayerExternal.save | def save(self, *args, **kwargs):
"""
call synchronizer "after_external_layer_saved" method
for any additional operation that must be executed after save
"""
after_save = kwargs.pop('after_save', True)
super(LayerExternal, self).save(*args, **kwargs)
# call after_external_layer_saved method of synchronizer
if after_save:
try:
synchronizer = self.synchronizer
except ImproperlyConfigured:
pass
else:
if synchronizer:
synchronizer.after_external_layer_saved(self.config)
# reload schema
self._reload_schema() | python | def save(self, *args, **kwargs):
"""
call synchronizer "after_external_layer_saved" method
for any additional operation that must be executed after save
"""
after_save = kwargs.pop('after_save', True)
super(LayerExternal, self).save(*args, **kwargs)
# call after_external_layer_saved method of synchronizer
if after_save:
try:
synchronizer = self.synchronizer
except ImproperlyConfigured:
pass
else:
if synchronizer:
synchronizer.after_external_layer_saved(self.config)
# reload schema
self._reload_schema() | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"after_save",
"=",
"kwargs",
".",
"pop",
"(",
"'after_save'",
",",
"True",
")",
"super",
"(",
"LayerExternal",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
... | call synchronizer "after_external_layer_saved" method
for any additional operation that must be executed after save | [
"call",
"synchronizer",
"after_external_layer_saved",
"method",
"for",
"any",
"additional",
"operation",
"that",
"must",
"be",
"executed",
"after",
"save"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/sync/models/layer_external.py#L84-L101 | train | 52,181 |
ninuxorg/nodeshot | nodeshot/interop/sync/models/layer_external.py | LayerExternal.synchronizer_class | def synchronizer_class(self):
""" returns synchronizer class """
if not self.synchronizer_path or self.synchronizer_path == 'None' or not self.layer:
return False
# ensure data is up to date
if (self._synchronizer_class is not None and self._synchronizer_class.__name__ not in self.synchronizer_path):
self._synchronizer = None
self._synchronizer_class = None
# import synchronizer class only if not imported already
if not self._synchronizer_class:
self._synchronizer_class = import_by_path(self.synchronizer_path)
return self._synchronizer_class | python | def synchronizer_class(self):
""" returns synchronizer class """
if not self.synchronizer_path or self.synchronizer_path == 'None' or not self.layer:
return False
# ensure data is up to date
if (self._synchronizer_class is not None and self._synchronizer_class.__name__ not in self.synchronizer_path):
self._synchronizer = None
self._synchronizer_class = None
# import synchronizer class only if not imported already
if not self._synchronizer_class:
self._synchronizer_class = import_by_path(self.synchronizer_path)
return self._synchronizer_class | [
"def",
"synchronizer_class",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"synchronizer_path",
"or",
"self",
".",
"synchronizer_path",
"==",
"'None'",
"or",
"not",
"self",
".",
"layer",
":",
"return",
"False",
"# ensure data is up to date",
"if",
"(",
"sel... | returns synchronizer class | [
"returns",
"synchronizer",
"class"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/sync/models/layer_external.py#L118-L129 | train | 52,182 |
ninuxorg/nodeshot | nodeshot/core/nodes/serializers.py | NodeSerializer.get_can_edit | def get_can_edit(self, obj):
""" returns true if user has permission to edit, false otherwise """
view = self.context.get('view')
request = copy(self.context.get('request'))
request._method = 'PUT'
try:
view.check_object_permissions(request, obj)
except (PermissionDenied, NotAuthenticated):
return False
else:
return True | python | def get_can_edit(self, obj):
""" returns true if user has permission to edit, false otherwise """
view = self.context.get('view')
request = copy(self.context.get('request'))
request._method = 'PUT'
try:
view.check_object_permissions(request, obj)
except (PermissionDenied, NotAuthenticated):
return False
else:
return True | [
"def",
"get_can_edit",
"(",
"self",
",",
"obj",
")",
":",
"view",
"=",
"self",
".",
"context",
".",
"get",
"(",
"'view'",
")",
"request",
"=",
"copy",
"(",
"self",
".",
"context",
".",
"get",
"(",
"'request'",
")",
")",
"request",
".",
"_method",
"... | returns true if user has permission to edit, false otherwise | [
"returns",
"true",
"if",
"user",
"has",
"permission",
"to",
"edit",
"false",
"otherwise"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/nodes/serializers.py#L28-L38 | train | 52,183 |
ninuxorg/nodeshot | nodeshot/core/nodes/serializers.py | ImageSerializer.get_details | def get_details(self, obj):
""" returns uri of API image resource """
args = {
'slug': obj.node.slug,
'pk': obj.pk
}
return reverse('api_node_image_detail',
kwargs=args,
request=self.context.get('request', None)) | python | def get_details(self, obj):
""" returns uri of API image resource """
args = {
'slug': obj.node.slug,
'pk': obj.pk
}
return reverse('api_node_image_detail',
kwargs=args,
request=self.context.get('request', None)) | [
"def",
"get_details",
"(",
"self",
",",
"obj",
")",
":",
"args",
"=",
"{",
"'slug'",
":",
"obj",
".",
"node",
".",
"slug",
",",
"'pk'",
":",
"obj",
".",
"pk",
"}",
"return",
"reverse",
"(",
"'api_node_image_detail'",
",",
"kwargs",
"=",
"args",
",",
... | returns uri of API image resource | [
"returns",
"uri",
"of",
"API",
"image",
"resource"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/nodes/serializers.py#L91-L99 | train | 52,184 |
ninuxorg/nodeshot | nodeshot/community/profiles/permissions.py | IsProfileOwner.has_permission | def has_permission(self, request, view):
""" applies to social-link-list """
if request.method == 'POST':
user = Profile.objects.only('id', 'username').get(username=view.kwargs['username'])
return request.user.id == user.id
return True | python | def has_permission(self, request, view):
""" applies to social-link-list """
if request.method == 'POST':
user = Profile.objects.only('id', 'username').get(username=view.kwargs['username'])
return request.user.id == user.id
return True | [
"def",
"has_permission",
"(",
"self",
",",
"request",
",",
"view",
")",
":",
"if",
"request",
".",
"method",
"==",
"'POST'",
":",
"user",
"=",
"Profile",
".",
"objects",
".",
"only",
"(",
"'id'",
",",
"'username'",
")",
".",
"get",
"(",
"username",
"... | applies to social-link-list | [
"applies",
"to",
"social",
"-",
"link",
"-",
"list"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/profiles/permissions.py#L33-L39 | train | 52,185 |
ninuxorg/nodeshot | nodeshot/core/nodes/models/image.py | Image.delete | def delete(self, *args, **kwargs):
""" delete image when an image record is deleted """
try:
os.remove(self.file.file.name)
# image does not exist
except (OSError, IOError):
pass
super(Image, self).delete(*args, **kwargs) | python | def delete(self, *args, **kwargs):
""" delete image when an image record is deleted """
try:
os.remove(self.file.file.name)
# image does not exist
except (OSError, IOError):
pass
super(Image, self).delete(*args, **kwargs) | [
"def",
"delete",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"os",
".",
"remove",
"(",
"self",
".",
"file",
".",
"file",
".",
"name",
")",
"# image does not exist",
"except",
"(",
"OSError",
",",
"IOError",
")",
":... | delete image when an image record is deleted | [
"delete",
"image",
"when",
"an",
"image",
"record",
"is",
"deleted"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/nodes/models/image.py#L33-L40 | train | 52,186 |
ninuxorg/nodeshot | nodeshot/interop/oldimporter/models.py | OldLink.get_quality | def get_quality(self, type='etx'):
""" used to determine color of links"""
if type == 'etx':
if 0 < self.etx < 1.5:
quality = 1
elif self.etx < 3:
quality = 2
else:
quality = 3
elif type == 'dbm':
if -83 < self.dbm < 0:
quality = 1
elif self.dbm > -88:
quality = 2
else:
quality = 3
return quality | python | def get_quality(self, type='etx'):
""" used to determine color of links"""
if type == 'etx':
if 0 < self.etx < 1.5:
quality = 1
elif self.etx < 3:
quality = 2
else:
quality = 3
elif type == 'dbm':
if -83 < self.dbm < 0:
quality = 1
elif self.dbm > -88:
quality = 2
else:
quality = 3
return quality | [
"def",
"get_quality",
"(",
"self",
",",
"type",
"=",
"'etx'",
")",
":",
"if",
"type",
"==",
"'etx'",
":",
"if",
"0",
"<",
"self",
".",
"etx",
"<",
"1.5",
":",
"quality",
"=",
"1",
"elif",
"self",
".",
"etx",
"<",
"3",
":",
"quality",
"=",
"2",
... | used to determine color of links | [
"used",
"to",
"determine",
"color",
"of",
"links"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/interop/oldimporter/models.py#L157-L173 | train | 52,187 |
ninuxorg/nodeshot | nodeshot/core/layers/models/layer.py | new_nodes_allowed_for_layer | def new_nodes_allowed_for_layer(self):
"""
ensure new nodes are allowed for this layer
"""
if not self.pk and self.layer and not self.layer.new_nodes_allowed:
raise ValidationError(_('New nodes are not allowed for this layer')) | python | def new_nodes_allowed_for_layer(self):
"""
ensure new nodes are allowed for this layer
"""
if not self.pk and self.layer and not self.layer.new_nodes_allowed:
raise ValidationError(_('New nodes are not allowed for this layer')) | [
"def",
"new_nodes_allowed_for_layer",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"pk",
"and",
"self",
".",
"layer",
"and",
"not",
"self",
".",
"layer",
".",
"new_nodes_allowed",
":",
"raise",
"ValidationError",
"(",
"_",
"(",
"'New nodes are not allowed ... | ensure new nodes are allowed for this layer | [
"ensure",
"new",
"nodes",
"are",
"allowed",
"for",
"this",
"layer"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/layers/models/layer.py#L138-L143 | train | 52,188 |
ninuxorg/nodeshot | nodeshot/core/layers/models/layer.py | nodes_minimum_distance_validation | def nodes_minimum_distance_validation(self):
"""
if minimum distance is specified, ensure node is not too close to other nodes;
"""
if self.layer and self.layer.nodes_minimum_distance:
minimum_distance = self.layer.nodes_minimum_distance
# TODO - lower priority: do this check only when coordinates are changing
near_nodes = Node.objects.exclude(pk=self.id).filter(geometry__distance_lte=(self.geometry, D(m=minimum_distance))).count()
if near_nodes > 0:
raise ValidationError(_('Distance between nodes cannot be less than %s meters') % minimum_distance) | python | def nodes_minimum_distance_validation(self):
"""
if minimum distance is specified, ensure node is not too close to other nodes;
"""
if self.layer and self.layer.nodes_minimum_distance:
minimum_distance = self.layer.nodes_minimum_distance
# TODO - lower priority: do this check only when coordinates are changing
near_nodes = Node.objects.exclude(pk=self.id).filter(geometry__distance_lte=(self.geometry, D(m=minimum_distance))).count()
if near_nodes > 0:
raise ValidationError(_('Distance between nodes cannot be less than %s meters') % minimum_distance) | [
"def",
"nodes_minimum_distance_validation",
"(",
"self",
")",
":",
"if",
"self",
".",
"layer",
"and",
"self",
".",
"layer",
".",
"nodes_minimum_distance",
":",
"minimum_distance",
"=",
"self",
".",
"layer",
".",
"nodes_minimum_distance",
"# TODO - lower priority: do t... | if minimum distance is specified, ensure node is not too close to other nodes; | [
"if",
"minimum",
"distance",
"is",
"specified",
"ensure",
"node",
"is",
"not",
"too",
"close",
"to",
"other",
"nodes",
";"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/layers/models/layer.py#L146-L155 | train | 52,189 |
ninuxorg/nodeshot | nodeshot/core/layers/models/layer.py | node_contained_in_layer_area_validation | def node_contained_in_layer_area_validation(self):
"""
if layer defines an area, ensure node coordinates are contained in the area
"""
# if area is a polygon ensure it contains the node
if self.layer and isinstance(self.layer.area, Polygon) and not self.layer.area.contains(self.geometry):
raise ValidationError(_('Node must be inside layer area')) | python | def node_contained_in_layer_area_validation(self):
"""
if layer defines an area, ensure node coordinates are contained in the area
"""
# if area is a polygon ensure it contains the node
if self.layer and isinstance(self.layer.area, Polygon) and not self.layer.area.contains(self.geometry):
raise ValidationError(_('Node must be inside layer area')) | [
"def",
"node_contained_in_layer_area_validation",
"(",
"self",
")",
":",
"# if area is a polygon ensure it contains the node",
"if",
"self",
".",
"layer",
"and",
"isinstance",
"(",
"self",
".",
"layer",
".",
"area",
",",
"Polygon",
")",
"and",
"not",
"self",
".",
... | if layer defines an area, ensure node coordinates are contained in the area | [
"if",
"layer",
"defines",
"an",
"area",
"ensure",
"node",
"coordinates",
"are",
"contained",
"in",
"the",
"area"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/layers/models/layer.py#L158-L164 | train | 52,190 |
ninuxorg/nodeshot | nodeshot/core/layers/models/layer.py | Layer.save | def save(self, *args, **kwargs):
"""
intercepts changes to is_published and fires layer_is_published_changed signal
"""
super(Layer, self).save(*args, **kwargs)
# if is_published of an existing layer changes
if self.pk and self.is_published != self._current_is_published:
# send django signal
layer_is_published_changed.send(
sender=self.__class__,
instance=self,
old_is_published=self._current_is_published,
new_is_published=self.is_published
)
# unpublish nodes
self.update_nodes_published()
# update _current_is_published
self._current_is_published = self.is_published | python | def save(self, *args, **kwargs):
"""
intercepts changes to is_published and fires layer_is_published_changed signal
"""
super(Layer, self).save(*args, **kwargs)
# if is_published of an existing layer changes
if self.pk and self.is_published != self._current_is_published:
# send django signal
layer_is_published_changed.send(
sender=self.__class__,
instance=self,
old_is_published=self._current_is_published,
new_is_published=self.is_published
)
# unpublish nodes
self.update_nodes_published()
# update _current_is_published
self._current_is_published = self.is_published | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"Layer",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# if is_published of an existing layer changes",
"if",
"self",
".",
... | intercepts changes to is_published and fires layer_is_published_changed signal | [
"intercepts",
"changes",
"to",
"is_published",
"and",
"fires",
"layer_is_published_changed",
"signal"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/layers/models/layer.py#L72-L91 | train | 52,191 |
ninuxorg/nodeshot | nodeshot/core/layers/models/layer.py | Layer.update_nodes_published | def update_nodes_published(self):
""" publish or unpublish nodes of current layer """
if self.pk:
self.node_set.all().update(is_published=self.is_published) | python | def update_nodes_published(self):
""" publish or unpublish nodes of current layer """
if self.pk:
self.node_set.all().update(is_published=self.is_published) | [
"def",
"update_nodes_published",
"(",
"self",
")",
":",
"if",
"self",
".",
"pk",
":",
"self",
".",
"node_set",
".",
"all",
"(",
")",
".",
"update",
"(",
"is_published",
"=",
"self",
".",
"is_published",
")"
] | publish or unpublish nodes of current layer | [
"publish",
"or",
"unpublish",
"nodes",
"of",
"current",
"layer"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/core/layers/models/layer.py#L114-L117 | train | 52,192 |
ninuxorg/nodeshot | nodeshot/community/notifications/views.py | NotificationList.get | def get(self, request, format=None):
""" get HTTP method """
action = request.query_params.get('action', 'unread')
# action can be only "unread" (default), "count" and "all"
action = action if action == 'count' or action == 'all' else 'unread'
# mark as read parameter, defaults to true
mark_as_read = request.query_params.get('read', 'true') == 'true'
# queryset
notifications = self.get_queryset().filter(to_user=request.user)
# pass to specific action
return getattr(self, 'get_%s' % action)(request, notifications, mark_as_read) | python | def get(self, request, format=None):
""" get HTTP method """
action = request.query_params.get('action', 'unread')
# action can be only "unread" (default), "count" and "all"
action = action if action == 'count' or action == 'all' else 'unread'
# mark as read parameter, defaults to true
mark_as_read = request.query_params.get('read', 'true') == 'true'
# queryset
notifications = self.get_queryset().filter(to_user=request.user)
# pass to specific action
return getattr(self, 'get_%s' % action)(request, notifications, mark_as_read) | [
"def",
"get",
"(",
"self",
",",
"request",
",",
"format",
"=",
"None",
")",
":",
"action",
"=",
"request",
".",
"query_params",
".",
"get",
"(",
"'action'",
",",
"'unread'",
")",
"# action can be only \"unread\" (default), \"count\" and \"all\"",
"action",
"=",
... | get HTTP method | [
"get",
"HTTP",
"method"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/views.py#L36-L46 | train | 52,193 |
ninuxorg/nodeshot | nodeshot/community/notifications/views.py | NotificationList.get_count | def get_count(self, request, notifications, mark_as_read=False):
""" return count of unread notification """
return Response({'count': notifications.filter(is_read=False).count()}) | python | def get_count(self, request, notifications, mark_as_read=False):
""" return count of unread notification """
return Response({'count': notifications.filter(is_read=False).count()}) | [
"def",
"get_count",
"(",
"self",
",",
"request",
",",
"notifications",
",",
"mark_as_read",
"=",
"False",
")",
":",
"return",
"Response",
"(",
"{",
"'count'",
":",
"notifications",
".",
"filter",
"(",
"is_read",
"=",
"False",
")",
".",
"count",
"(",
")",... | return count of unread notification | [
"return",
"count",
"of",
"unread",
"notification"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/views.py#L62-L64 | train | 52,194 |
ninuxorg/nodeshot | nodeshot/community/notifications/views.py | NotificationList.get_all | def get_all(self, request, notifications, mark_as_read=False):
""" return all notifications with pagination """
return self.list(request, notifications) | python | def get_all(self, request, notifications, mark_as_read=False):
""" return all notifications with pagination """
return self.list(request, notifications) | [
"def",
"get_all",
"(",
"self",
",",
"request",
",",
"notifications",
",",
"mark_as_read",
"=",
"False",
")",
":",
"return",
"self",
".",
"list",
"(",
"request",
",",
"notifications",
")"
] | return all notifications with pagination | [
"return",
"all",
"notifications",
"with",
"pagination"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/views.py#L66-L68 | train | 52,195 |
ninuxorg/nodeshot | nodeshot/community/notifications/views.py | EmailNotificationSettings.get_object | def get_object(self, queryset=None):
""" get privacy settings of current user """
try:
obj = self.get_queryset()
except self.model.DoesNotExist:
raise Http404()
self.check_object_permissions(self.request, obj)
return obj | python | def get_object(self, queryset=None):
""" get privacy settings of current user """
try:
obj = self.get_queryset()
except self.model.DoesNotExist:
raise Http404()
self.check_object_permissions(self.request, obj)
return obj | [
"def",
"get_object",
"(",
"self",
",",
"queryset",
"=",
"None",
")",
":",
"try",
":",
"obj",
"=",
"self",
".",
"get_queryset",
"(",
")",
"except",
"self",
".",
"model",
".",
"DoesNotExist",
":",
"raise",
"Http404",
"(",
")",
"self",
".",
"check_object_... | get privacy settings of current user | [
"get",
"privacy",
"settings",
"of",
"current",
"user"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/notifications/views.py#L108-L115 | train | 52,196 |
ninuxorg/nodeshot | nodeshot/networking/links/models/link.py | Link.clean | def clean(self, *args, **kwargs):
"""
Custom validation
1. interface_a and interface_b mandatory except for planned links
2. planned links should have at least node_a and node_b filled in
3. dbm and noise fields can be filled only for radio links
4. interface_a and interface_b must differ
5. interface a and b type must match
"""
if self.status != LINK_STATUS.get('planned'):
if self.interface_a is None or self.interface_b is None:
raise ValidationError(_('fields "from interface" and "to interface" are mandatory in this case'))
if (self.interface_a_id == self.interface_b_id) or (self.interface_a == self.interface_b):
msg = _('link cannot have same "from interface" and "to interface: %s"') % self.interface_a
raise ValidationError(msg)
if self.status == LINK_STATUS.get('planned') and (self.node_a is None or self.node_b is None):
raise ValidationError(_('fields "from node" and "to node" are mandatory for planned links'))
if self.type != LINK_TYPES.get('radio') and (self.dbm is not None or self.noise is not None):
raise ValidationError(_('Only links of type "radio" can contain "dbm" and "noise" information')) | python | def clean(self, *args, **kwargs):
"""
Custom validation
1. interface_a and interface_b mandatory except for planned links
2. planned links should have at least node_a and node_b filled in
3. dbm and noise fields can be filled only for radio links
4. interface_a and interface_b must differ
5. interface a and b type must match
"""
if self.status != LINK_STATUS.get('planned'):
if self.interface_a is None or self.interface_b is None:
raise ValidationError(_('fields "from interface" and "to interface" are mandatory in this case'))
if (self.interface_a_id == self.interface_b_id) or (self.interface_a == self.interface_b):
msg = _('link cannot have same "from interface" and "to interface: %s"') % self.interface_a
raise ValidationError(msg)
if self.status == LINK_STATUS.get('planned') and (self.node_a is None or self.node_b is None):
raise ValidationError(_('fields "from node" and "to node" are mandatory for planned links'))
if self.type != LINK_TYPES.get('radio') and (self.dbm is not None or self.noise is not None):
raise ValidationError(_('Only links of type "radio" can contain "dbm" and "noise" information')) | [
"def",
"clean",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"status",
"!=",
"LINK_STATUS",
".",
"get",
"(",
"'planned'",
")",
":",
"if",
"self",
".",
"interface_a",
"is",
"None",
"or",
"self",
".",
"interfac... | Custom validation
1. interface_a and interface_b mandatory except for planned links
2. planned links should have at least node_a and node_b filled in
3. dbm and noise fields can be filled only for radio links
4. interface_a and interface_b must differ
5. interface a and b type must match | [
"Custom",
"validation",
"1",
".",
"interface_a",
"and",
"interface_b",
"mandatory",
"except",
"for",
"planned",
"links",
"2",
".",
"planned",
"links",
"should",
"have",
"at",
"least",
"node_a",
"and",
"node_b",
"filled",
"in",
"3",
".",
"dbm",
"and",
"noise"... | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/networking/links/models/link.py#L89-L110 | train | 52,197 |
ninuxorg/nodeshot | nodeshot/networking/links/models/link.py | Link.get_or_create | def get_or_create(cls, source, target, cost, topology=None):
"""
Tries to find a link with get_link, creates a new link if link not found.
"""
try:
return cls.get_link(source, target, topology)
except LinkNotFound as e:
pass
# create link
link = Link(interface_a=e.interface_a,
interface_b=e.interface_b,
status=LINK_STATUS['active'],
metric_value=cost,
topology=topology)
link.full_clean()
link.save()
return link | python | def get_or_create(cls, source, target, cost, topology=None):
"""
Tries to find a link with get_link, creates a new link if link not found.
"""
try:
return cls.get_link(source, target, topology)
except LinkNotFound as e:
pass
# create link
link = Link(interface_a=e.interface_a,
interface_b=e.interface_b,
status=LINK_STATUS['active'],
metric_value=cost,
topology=topology)
link.full_clean()
link.save()
return link | [
"def",
"get_or_create",
"(",
"cls",
",",
"source",
",",
"target",
",",
"cost",
",",
"topology",
"=",
"None",
")",
":",
"try",
":",
"return",
"cls",
".",
"get_link",
"(",
"source",
",",
"target",
",",
"topology",
")",
"except",
"LinkNotFound",
"as",
"e"... | Tries to find a link with get_link, creates a new link if link not found. | [
"Tries",
"to",
"find",
"a",
"link",
"with",
"get_link",
"creates",
"a",
"new",
"link",
"if",
"link",
"not",
"found",
"."
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/networking/links/models/link.py#L222-L238 | train | 52,198 |
ninuxorg/nodeshot | nodeshot/networking/links/models/link.py | Link.ensure | def ensure(self, status, cost):
"""
ensure link properties correspond to the specified ones
perform save operation only if necessary
"""
changed = False
status_id = LINK_STATUS[status]
if self.status != status_id:
self.status = status_id
changed = True
if self.metric_value != cost:
self.metric_value = cost
changed = True
if changed:
self.save() | python | def ensure(self, status, cost):
"""
ensure link properties correspond to the specified ones
perform save operation only if necessary
"""
changed = False
status_id = LINK_STATUS[status]
if self.status != status_id:
self.status = status_id
changed = True
if self.metric_value != cost:
self.metric_value = cost
changed = True
if changed:
self.save() | [
"def",
"ensure",
"(",
"self",
",",
"status",
",",
"cost",
")",
":",
"changed",
"=",
"False",
"status_id",
"=",
"LINK_STATUS",
"[",
"status",
"]",
"if",
"self",
".",
"status",
"!=",
"status_id",
":",
"self",
".",
"status",
"=",
"status_id",
"changed",
"... | ensure link properties correspond to the specified ones
perform save operation only if necessary | [
"ensure",
"link",
"properties",
"correspond",
"to",
"the",
"specified",
"ones",
"perform",
"save",
"operation",
"only",
"if",
"necessary"
] | 2466f0a55f522b2696026f196436ce7ba3f1e5c6 | https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/networking/links/models/link.py#L280-L294 | train | 52,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.