repository_name stringlengths 5 67 | func_path_in_repository stringlengths 4 234 | func_name stringlengths 0 314 | whole_func_string stringlengths 52 3.87M | language stringclasses 6
values | func_code_string stringlengths 52 3.87M | func_code_tokens listlengths 15 672k | func_documentation_string stringlengths 1 47.2k | func_documentation_tokens listlengths 1 3.92k | split_name stringclasses 1
value | func_code_url stringlengths 85 339 |
|---|---|---|---|---|---|---|---|---|---|---|
CitrineInformatics/pypif-sdk | pypif_sdk/readview/core.py | add_child_ambig | def add_child_ambig(child_ambig, child_unambig, ambig, unambig):
"""
Add information about decodings of a child object
:param child_ambig: ambiguous set from child
:param child_unambig: unambiguous set from child
:param ambig: set of keys storing ambig decodings
:param unambig: dictionary storing unambiguous decodings
:return:
"""
for k in child_ambig:
ambig.add(k)
if k in unambig:
del unambig[k]
for k, v in child_unambig.items():
new_keypair(k, v, ambig, unambig)
return | python | def add_child_ambig(child_ambig, child_unambig, ambig, unambig):
"""
Add information about decodings of a child object
:param child_ambig: ambiguous set from child
:param child_unambig: unambiguous set from child
:param ambig: set of keys storing ambig decodings
:param unambig: dictionary storing unambiguous decodings
:return:
"""
for k in child_ambig:
ambig.add(k)
if k in unambig:
del unambig[k]
for k, v in child_unambig.items():
new_keypair(k, v, ambig, unambig)
return | [
"def",
"add_child_ambig",
"(",
"child_ambig",
",",
"child_unambig",
",",
"ambig",
",",
"unambig",
")",
":",
"for",
"k",
"in",
"child_ambig",
":",
"ambig",
".",
"add",
"(",
"k",
")",
"if",
"k",
"in",
"unambig",
":",
"del",
"unambig",
"[",
"k",
"]",
"f... | Add information about decodings of a child object
:param child_ambig: ambiguous set from child
:param child_unambig: unambiguous set from child
:param ambig: set of keys storing ambig decodings
:param unambig: dictionary storing unambiguous decodings
:return: | [
"Add",
"information",
"about",
"decodings",
"of",
"a",
"child",
"object"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/readview/core.py#L39-L57 |
CitrineInformatics/pypif-sdk | pypif_sdk/util/citrination.py | get_client | def get_client(site=None):
"""Get a citrination client"""
if 'CITRINATION_API_KEY' not in environ:
raise ValueError("'CITRINATION_API_KEY' is not set as an environment variable")
if not site:
site = environ.get("CITRINATION_SITE", "https://citrination.com")
return CitrinationClient(environ['CITRINATION_API_KEY'], site) | python | def get_client(site=None):
"""Get a citrination client"""
if 'CITRINATION_API_KEY' not in environ:
raise ValueError("'CITRINATION_API_KEY' is not set as an environment variable")
if not site:
site = environ.get("CITRINATION_SITE", "https://citrination.com")
return CitrinationClient(environ['CITRINATION_API_KEY'], site) | [
"def",
"get_client",
"(",
"site",
"=",
"None",
")",
":",
"if",
"'CITRINATION_API_KEY'",
"not",
"in",
"environ",
":",
"raise",
"ValueError",
"(",
"\"'CITRINATION_API_KEY' is not set as an environment variable\"",
")",
"if",
"not",
"site",
":",
"site",
"=",
"environ",... | Get a citrination client | [
"Get",
"a",
"citrination",
"client"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/util/citrination.py#L6-L12 |
CitrineInformatics/pypif-sdk | pypif_sdk/util/citrination.py | set_uids | def set_uids(pifs, uids=None):
"""
Set the uids in a PIF, explicitly if the list of UIDs is passed in
:param pifs: to set UIDs in
:param uids: to set; defaults to a hash of the object
:return:
"""
if not uids:
uids = [str(hash(dumps(x))) for x in pifs]
for pif, uid in zip(pifs, uids):
pif.uid = uid
return pifs | python | def set_uids(pifs, uids=None):
"""
Set the uids in a PIF, explicitly if the list of UIDs is passed in
:param pifs: to set UIDs in
:param uids: to set; defaults to a hash of the object
:return:
"""
if not uids:
uids = [str(hash(dumps(x))) for x in pifs]
for pif, uid in zip(pifs, uids):
pif.uid = uid
return pifs | [
"def",
"set_uids",
"(",
"pifs",
",",
"uids",
"=",
"None",
")",
":",
"if",
"not",
"uids",
":",
"uids",
"=",
"[",
"str",
"(",
"hash",
"(",
"dumps",
"(",
"x",
")",
")",
")",
"for",
"x",
"in",
"pifs",
"]",
"for",
"pif",
",",
"uid",
"in",
"zip",
... | Set the uids in a PIF, explicitly if the list of UIDs is passed in
:param pifs: to set UIDs in
:param uids: to set; defaults to a hash of the object
:return: | [
"Set",
"the",
"uids",
"in",
"a",
"PIF",
"explicitly",
"if",
"the",
"list",
"of",
"UIDs",
"is",
"passed",
"in",
":",
"param",
"pifs",
":",
"to",
"set",
"UIDs",
"in",
":",
"param",
"uids",
":",
"to",
"set",
";",
"defaults",
"to",
"a",
"hash",
"of",
... | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/util/citrination.py#L15-L26 |
CitrineInformatics/pypif-sdk | pypif_sdk/util/citrination.py | get_url | def get_url(pif, dataset, version=1, site="https://citrination.com"):
"""
Construct the URL of a PIF on a site
:param pif: to construct URL for
:param dataset: the pif will belong to
:param version: of the PIF (default: 1)
:param site: for the dataset (default: https://citrination.com)
:return: the URL as a string
"""
return "{site}/datasets/{dataset}/version/{version}/pif/{uid}".format(
uid=pif.uid, version=version, dataset=dataset, site=site
) | python | def get_url(pif, dataset, version=1, site="https://citrination.com"):
"""
Construct the URL of a PIF on a site
:param pif: to construct URL for
:param dataset: the pif will belong to
:param version: of the PIF (default: 1)
:param site: for the dataset (default: https://citrination.com)
:return: the URL as a string
"""
return "{site}/datasets/{dataset}/version/{version}/pif/{uid}".format(
uid=pif.uid, version=version, dataset=dataset, site=site
) | [
"def",
"get_url",
"(",
"pif",
",",
"dataset",
",",
"version",
"=",
"1",
",",
"site",
"=",
"\"https://citrination.com\"",
")",
":",
"return",
"\"{site}/datasets/{dataset}/version/{version}/pif/{uid}\"",
".",
"format",
"(",
"uid",
"=",
"pif",
".",
"uid",
",",
"ver... | Construct the URL of a PIF on a site
:param pif: to construct URL for
:param dataset: the pif will belong to
:param version: of the PIF (default: 1)
:param site: for the dataset (default: https://citrination.com)
:return: the URL as a string | [
"Construct",
"the",
"URL",
"of",
"a",
"PIF",
"on",
"a",
"site",
":",
"param",
"pif",
":",
"to",
"construct",
"URL",
"for",
":",
"param",
"dataset",
":",
"the",
"pif",
"will",
"belong",
"to",
":",
"param",
"version",
":",
"of",
"the",
"PIF",
"(",
"d... | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/util/citrination.py#L29-L40 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | calculate_ideal_atomic_percent | def calculate_ideal_atomic_percent(pif):
"""
Calculates ideal atomic percents from a chemical formula string from a pif. Returns an appended pif with composition elements modified or added.
:param pif: a ChemicalSystem pif
:return: modified pif object
"""
if not isinstance(pif, ChemicalSystem):
return pif
if not pif.chemical_formula:
return pif
else:
expanded_formula_no_special_char = _expand_formula_(
pif.chemical_formula)
element_array = _create_emprical_compositional_array_(
expanded_formula_no_special_char)
appended_e_array = _add_atomic_percents_(element_array)
for e in appended_e_array:
# Checks if a Composition element decribing that element already
# exists.
if _get_element_in_pif_composition_(pif, e["symbol"]):
# If it exists, it removes the old Composition object, and
# inserts a new one with ideal atomic percent added.
in_pif = _get_element_in_pif_composition_(pif, e["symbol"])
comp = in_pif[0]
pif.composition.pop(in_pif[1])
comp.idealAtomicPercent = e["atomic_percent"]
pif.composition.append(comp)
else:
# If not, it creates a new Composition object with the element
# and ideal atomic percent.
comp = Composition()
comp.element = e["symbol"]
comp.idealAtomicPercent = e["atomic_percent"]
pif.composition.append(comp)
return pif | python | def calculate_ideal_atomic_percent(pif):
"""
Calculates ideal atomic percents from a chemical formula string from a pif. Returns an appended pif with composition elements modified or added.
:param pif: a ChemicalSystem pif
:return: modified pif object
"""
if not isinstance(pif, ChemicalSystem):
return pif
if not pif.chemical_formula:
return pif
else:
expanded_formula_no_special_char = _expand_formula_(
pif.chemical_formula)
element_array = _create_emprical_compositional_array_(
expanded_formula_no_special_char)
appended_e_array = _add_atomic_percents_(element_array)
for e in appended_e_array:
# Checks if a Composition element decribing that element already
# exists.
if _get_element_in_pif_composition_(pif, e["symbol"]):
# If it exists, it removes the old Composition object, and
# inserts a new one with ideal atomic percent added.
in_pif = _get_element_in_pif_composition_(pif, e["symbol"])
comp = in_pif[0]
pif.composition.pop(in_pif[1])
comp.idealAtomicPercent = e["atomic_percent"]
pif.composition.append(comp)
else:
# If not, it creates a new Composition object with the element
# and ideal atomic percent.
comp = Composition()
comp.element = e["symbol"]
comp.idealAtomicPercent = e["atomic_percent"]
pif.composition.append(comp)
return pif | [
"def",
"calculate_ideal_atomic_percent",
"(",
"pif",
")",
":",
"if",
"not",
"isinstance",
"(",
"pif",
",",
"ChemicalSystem",
")",
":",
"return",
"pif",
"if",
"not",
"pif",
".",
"chemical_formula",
":",
"return",
"pif",
"else",
":",
"expanded_formula_no_special_c... | Calculates ideal atomic percents from a chemical formula string from a pif. Returns an appended pif with composition elements modified or added.
:param pif: a ChemicalSystem pif
:return: modified pif object | [
"Calculates",
"ideal",
"atomic",
"percents",
"from",
"a",
"chemical",
"formula",
"string",
"from",
"a",
"pif",
".",
"Returns",
"an",
"appended",
"pif",
"with",
"composition",
"elements",
"modified",
"or",
"added",
"."
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L16-L51 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | calculate_ideal_weight_percent | def calculate_ideal_weight_percent(pif):
"""
Calculates ideal atomic weight percents from a chemical formula string from a pif. Returns an appended pif with composition elements modified or added.
:param pif: a ChemicalSystem pif
:return: modified pif object
"""
if not isinstance(pif, ChemicalSystem):
return pif
if not pif.chemical_formula:
return pif
else:
expanded_formula_no_special_char = _expand_formula_(
pif.chemical_formula)
element_array = _create_emprical_compositional_array_(
expanded_formula_no_special_char)
appended_e_array = _add_ideal_atomic_weights_(element_array)
a_array_with_pcts = _add_ideal_weight_percent_(appended_e_array)
for e in a_array_with_pcts:
# Checks if a Composition element decribing that element already
# exists.
if _get_element_in_pif_composition_(pif, e["symbol"]):
# If it exists, it removes the old Composition object, and
# inserts a new one with ideal atomic weight percent added
in_pif = _get_element_in_pif_composition_(pif, e["symbol"])
comp = in_pif[0]
pif.composition.pop(in_pif[1])
comp.idealWeightPercent = e["weight_percent"]
pif.composition.append(comp)
else:
# If not, it creates a new Composition object with the element
# and ideal atomic weight percent.
comp = Composition()
comp.element = e["symbol"]
comp.idealWeightPercent = e["weight_percent"]
pif.composition.append(comp)
return pif | python | def calculate_ideal_weight_percent(pif):
"""
Calculates ideal atomic weight percents from a chemical formula string from a pif. Returns an appended pif with composition elements modified or added.
:param pif: a ChemicalSystem pif
:return: modified pif object
"""
if not isinstance(pif, ChemicalSystem):
return pif
if not pif.chemical_formula:
return pif
else:
expanded_formula_no_special_char = _expand_formula_(
pif.chemical_formula)
element_array = _create_emprical_compositional_array_(
expanded_formula_no_special_char)
appended_e_array = _add_ideal_atomic_weights_(element_array)
a_array_with_pcts = _add_ideal_weight_percent_(appended_e_array)
for e in a_array_with_pcts:
# Checks if a Composition element decribing that element already
# exists.
if _get_element_in_pif_composition_(pif, e["symbol"]):
# If it exists, it removes the old Composition object, and
# inserts a new one with ideal atomic weight percent added
in_pif = _get_element_in_pif_composition_(pif, e["symbol"])
comp = in_pif[0]
pif.composition.pop(in_pif[1])
comp.idealWeightPercent = e["weight_percent"]
pif.composition.append(comp)
else:
# If not, it creates a new Composition object with the element
# and ideal atomic weight percent.
comp = Composition()
comp.element = e["symbol"]
comp.idealWeightPercent = e["weight_percent"]
pif.composition.append(comp)
return pif | [
"def",
"calculate_ideal_weight_percent",
"(",
"pif",
")",
":",
"if",
"not",
"isinstance",
"(",
"pif",
",",
"ChemicalSystem",
")",
":",
"return",
"pif",
"if",
"not",
"pif",
".",
"chemical_formula",
":",
"return",
"pif",
"else",
":",
"expanded_formula_no_special_c... | Calculates ideal atomic weight percents from a chemical formula string from a pif. Returns an appended pif with composition elements modified or added.
:param pif: a ChemicalSystem pif
:return: modified pif object | [
"Calculates",
"ideal",
"atomic",
"weight",
"percents",
"from",
"a",
"chemical",
"formula",
"string",
"from",
"a",
"pif",
".",
"Returns",
"an",
"appended",
"pif",
"with",
"composition",
"elements",
"modified",
"or",
"added",
"."
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L54-L90 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _expand_formula_ | def _expand_formula_(formula_string):
"""
Accounts for the many ways a user may write a formula string, and returns an expanded chemical formula string.
Assumptions:
-The Chemical Formula string it is supplied is well-written, and has no hanging parethneses
-The number of repeats occurs after the elemental symbol or ) ] character EXCEPT in the case of a hydrate where it is assumed to be in front of the first element
-All hydrates explicitly use the · symbol
-Only (, (,[, ], ., · are "important" symbols to intrepreting the string.
-IONS ARE NOT HANDLED
:param formula_string: a messy chemical formula string
:return: a non-emperical but expanded formula string
"""
formula_string = re.sub(r'[^A-Za-z0-9\(\)\[\]\·\.]+', '', formula_string)
hydrate_pos = formula_string.find('·')
if hydrate_pos >= 0:
formula_string = _expand_hydrate_(hydrate_pos, formula_string)
search_result = re.search(
r'(?:[\(\[]([A-Za-z0-9]+)[\)\]](\d*))',
formula_string)
if search_result is None:
return formula_string
this_start = search_result.start()
this_end = search_result.end()
this_string = search_result.group()
this_expansion_array = re.findall(
r'(?:[\(\[]([A-Za-z0-9]+)[\)\]](\d*))', this_string)
for a in this_expansion_array:
if a[1] == "":
a = (a[0], 1)
parenth_expanded = ""
multiplier = float(a[1])
element_array = re.findall('[A-Z][^A-Z]*', a[0])
for e in element_array:
occurance_array = re.findall('[0-9][^0-9]*', e)
if len(occurance_array) == 0:
occurance_array.append(1)
for o in occurance_array:
symbol = re.findall('[A-Z][a-z]*', e)
total_num = float(o) * multiplier
if total_num.is_integer():
total_num = int(total_num)
total_str = str(total_num)
if total_str == "1":
total_str = ""
new_string = symbol[0] + total_str
parenth_expanded += new_string
formula_string = formula_string[0:this_start] + \
parenth_expanded + formula_string[this_end:]
return _expand_formula_(formula_string) | python | def _expand_formula_(formula_string):
"""
Accounts for the many ways a user may write a formula string, and returns an expanded chemical formula string.
Assumptions:
-The Chemical Formula string it is supplied is well-written, and has no hanging parethneses
-The number of repeats occurs after the elemental symbol or ) ] character EXCEPT in the case of a hydrate where it is assumed to be in front of the first element
-All hydrates explicitly use the · symbol
-Only (, (,[, ], ., · are "important" symbols to intrepreting the string.
-IONS ARE NOT HANDLED
:param formula_string: a messy chemical formula string
:return: a non-emperical but expanded formula string
"""
formula_string = re.sub(r'[^A-Za-z0-9\(\)\[\]\·\.]+', '', formula_string)
hydrate_pos = formula_string.find('·')
if hydrate_pos >= 0:
formula_string = _expand_hydrate_(hydrate_pos, formula_string)
search_result = re.search(
r'(?:[\(\[]([A-Za-z0-9]+)[\)\]](\d*))',
formula_string)
if search_result is None:
return formula_string
this_start = search_result.start()
this_end = search_result.end()
this_string = search_result.group()
this_expansion_array = re.findall(
r'(?:[\(\[]([A-Za-z0-9]+)[\)\]](\d*))', this_string)
for a in this_expansion_array:
if a[1] == "":
a = (a[0], 1)
parenth_expanded = ""
multiplier = float(a[1])
element_array = re.findall('[A-Z][^A-Z]*', a[0])
for e in element_array:
occurance_array = re.findall('[0-9][^0-9]*', e)
if len(occurance_array) == 0:
occurance_array.append(1)
for o in occurance_array:
symbol = re.findall('[A-Z][a-z]*', e)
total_num = float(o) * multiplier
if total_num.is_integer():
total_num = int(total_num)
total_str = str(total_num)
if total_str == "1":
total_str = ""
new_string = symbol[0] + total_str
parenth_expanded += new_string
formula_string = formula_string[0:this_start] + \
parenth_expanded + formula_string[this_end:]
return _expand_formula_(formula_string) | [
"def",
"_expand_formula_",
"(",
"formula_string",
")",
":",
"formula_string",
"=",
"re",
".",
"sub",
"(",
"r'[^A-Za-z0-9\\(\\)\\[\\]\\·\\.]+',",
" ",
"',",
" ",
"ormula_string)",
"",
"hydrate_pos",
"=",
"formula_string",
".",
"find",
"(",
"'·')",
"",
"if",
"hydr... | Accounts for the many ways a user may write a formula string, and returns an expanded chemical formula string.
Assumptions:
-The Chemical Formula string it is supplied is well-written, and has no hanging parethneses
-The number of repeats occurs after the elemental symbol or ) ] character EXCEPT in the case of a hydrate where it is assumed to be in front of the first element
-All hydrates explicitly use the · symbol
-Only (, (,[, ], ., · are "important" symbols to intrepreting the string.
-IONS ARE NOT HANDLED
:param formula_string: a messy chemical formula string
:return: a non-emperical but expanded formula string | [
"Accounts",
"for",
"the",
"many",
"ways",
"a",
"user",
"may",
"write",
"a",
"formula",
"string",
"and",
"returns",
"an",
"expanded",
"chemical",
"formula",
"string",
".",
"Assumptions",
":",
"-",
"The",
"Chemical",
"Formula",
"string",
"it",
"is",
"supplied"... | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L93-L142 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _expand_hydrate_ | def _expand_hydrate_(hydrate_pos, formula_string):
"""
Handles the expansion of hydrate portions of a chemical formula, and expands out the coefficent to all elements
:param hydrate_pos: the index in the formula_string of the · symbol
:param formula_string: the unexpanded formula string
:return: a formula string without the · character with the hydrate portion expanded out
"""
hydrate = formula_string[hydrate_pos + 1:]
hydrate_string = ""
multiplier = float(re.search(r'^[\d\.]+', hydrate).group())
element_array = re.findall('[A-Z][^A-Z]*', hydrate)
for e in element_array:
occurance_array = re.findall('[0-9][^0-9]*', e)
if len(occurance_array) == 0:
occurance_array.append(1)
for o in occurance_array:
symbol = re.findall('[A-Z][a-z]*', e)
total_num = float(o) * multiplier
if total_num.is_integer():
total_num = int(total_num)
total_str = str(total_num)
if total_str == "1":
total_str = ""
new_string = symbol[0] + total_str
hydrate_string += new_string
return formula_string[:hydrate_pos] + hydrate_string | python | def _expand_hydrate_(hydrate_pos, formula_string):
"""
Handles the expansion of hydrate portions of a chemical formula, and expands out the coefficent to all elements
:param hydrate_pos: the index in the formula_string of the · symbol
:param formula_string: the unexpanded formula string
:return: a formula string without the · character with the hydrate portion expanded out
"""
hydrate = formula_string[hydrate_pos + 1:]
hydrate_string = ""
multiplier = float(re.search(r'^[\d\.]+', hydrate).group())
element_array = re.findall('[A-Z][^A-Z]*', hydrate)
for e in element_array:
occurance_array = re.findall('[0-9][^0-9]*', e)
if len(occurance_array) == 0:
occurance_array.append(1)
for o in occurance_array:
symbol = re.findall('[A-Z][a-z]*', e)
total_num = float(o) * multiplier
if total_num.is_integer():
total_num = int(total_num)
total_str = str(total_num)
if total_str == "1":
total_str = ""
new_string = symbol[0] + total_str
hydrate_string += new_string
return formula_string[:hydrate_pos] + hydrate_string | [
"def",
"_expand_hydrate_",
"(",
"hydrate_pos",
",",
"formula_string",
")",
":",
"hydrate",
"=",
"formula_string",
"[",
"hydrate_pos",
"+",
"1",
":",
"]",
"hydrate_string",
"=",
"\"\"",
"multiplier",
"=",
"float",
"(",
"re",
".",
"search",
"(",
"r'^[\\d\\.]+'",... | Handles the expansion of hydrate portions of a chemical formula, and expands out the coefficent to all elements
:param hydrate_pos: the index in the formula_string of the · symbol
:param formula_string: the unexpanded formula string
:return: a formula string without the · character with the hydrate portion expanded out | [
"Handles",
"the",
"expansion",
"of",
"hydrate",
"portions",
"of",
"a",
"chemical",
"formula",
"and",
"expands",
"out",
"the",
"coefficent",
"to",
"all",
"elements"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L145-L171 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _create_compositional_array_ | def _create_compositional_array_(expanded_chemical_formaula_string):
"""
Splits an expanded chemical formula string into an array of dictionaries containing information about each element
:param expanded_chemical_formaula_string: a clean (not necessarily emperical, but without any special characters) chemical formula string, as returned by _expand_formula_()
:return: an array of dictionaries
"""
element_array = re.findall(
'[A-Z][^A-Z]*',
expanded_chemical_formaula_string)
split_element_array = []
for s in element_array:
m = re.match(r"([a-zA-Z]+)([0-9\.]*)", s, re.I)
if m:
items = m.groups()
if items[1] == "":
items = (items[0], 1)
this_e = {"symbol": items[0], "occurances": float(items[1])}
split_element_array.append(this_e)
return split_element_array | python | def _create_compositional_array_(expanded_chemical_formaula_string):
"""
Splits an expanded chemical formula string into an array of dictionaries containing information about each element
:param expanded_chemical_formaula_string: a clean (not necessarily emperical, but without any special characters) chemical formula string, as returned by _expand_formula_()
:return: an array of dictionaries
"""
element_array = re.findall(
'[A-Z][^A-Z]*',
expanded_chemical_formaula_string)
split_element_array = []
for s in element_array:
m = re.match(r"([a-zA-Z]+)([0-9\.]*)", s, re.I)
if m:
items = m.groups()
if items[1] == "":
items = (items[0], 1)
this_e = {"symbol": items[0], "occurances": float(items[1])}
split_element_array.append(this_e)
return split_element_array | [
"def",
"_create_compositional_array_",
"(",
"expanded_chemical_formaula_string",
")",
":",
"element_array",
"=",
"re",
".",
"findall",
"(",
"'[A-Z][^A-Z]*'",
",",
"expanded_chemical_formaula_string",
")",
"split_element_array",
"=",
"[",
"]",
"for",
"s",
"in",
"element_... | Splits an expanded chemical formula string into an array of dictionaries containing information about each element
:param expanded_chemical_formaula_string: a clean (not necessarily emperical, but without any special characters) chemical formula string, as returned by _expand_formula_()
:return: an array of dictionaries | [
"Splits",
"an",
"expanded",
"chemical",
"formula",
"string",
"into",
"an",
"array",
"of",
"dictionaries",
"containing",
"information",
"about",
"each",
"element"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L174-L194 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _consolidate_elemental_array_ | def _consolidate_elemental_array_(elemental_array):
"""
Accounts for non-empirical chemical formulas by taking in the compositional array generated by _create_compositional_array_() and returning a consolidated array of dictionaries with no repeating elements
:param elemental_array: an elemental array generated from _create_compositional_array_()
:return: an array of element dictionaries
"""
condensed_array = []
for e in elemental_array:
exists = False
for k in condensed_array:
if k["symbol"] == e["symbol"]:
exists = True
k["occurances"] += e["occurances"]
break
if not exists:
condensed_array.append(e)
return condensed_array | python | def _consolidate_elemental_array_(elemental_array):
"""
Accounts for non-empirical chemical formulas by taking in the compositional array generated by _create_compositional_array_() and returning a consolidated array of dictionaries with no repeating elements
:param elemental_array: an elemental array generated from _create_compositional_array_()
:return: an array of element dictionaries
"""
condensed_array = []
for e in elemental_array:
exists = False
for k in condensed_array:
if k["symbol"] == e["symbol"]:
exists = True
k["occurances"] += e["occurances"]
break
if not exists:
condensed_array.append(e)
return condensed_array | [
"def",
"_consolidate_elemental_array_",
"(",
"elemental_array",
")",
":",
"condensed_array",
"=",
"[",
"]",
"for",
"e",
"in",
"elemental_array",
":",
"exists",
"=",
"False",
"for",
"k",
"in",
"condensed_array",
":",
"if",
"k",
"[",
"\"symbol\"",
"]",
"==",
"... | Accounts for non-empirical chemical formulas by taking in the compositional array generated by _create_compositional_array_() and returning a consolidated array of dictionaries with no repeating elements
:param elemental_array: an elemental array generated from _create_compositional_array_()
:return: an array of element dictionaries | [
"Accounts",
"for",
"non",
"-",
"empirical",
"chemical",
"formulas",
"by",
"taking",
"in",
"the",
"compositional",
"array",
"generated",
"by",
"_create_compositional_array_",
"()",
"and",
"returning",
"a",
"consolidated",
"array",
"of",
"dictionaries",
"with",
"no",
... | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L197-L214 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _add_ideal_atomic_weights_ | def _add_ideal_atomic_weights_(elemental_array):
"""
Uses elements.json to find the molar mass of the element in question, and then multiplies that by the occurances of the element.
Adds the "weight" property to each of the dictionaries in elemental_array
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the appended elemental_array
"""
for a in elemental_array:
this_atomic_weight = elements_data[a["symbol"]]["atomic_weight"]
a["weight"] = a["occurances"] * this_atomic_weight
return elemental_array | python | def _add_ideal_atomic_weights_(elemental_array):
"""
Uses elements.json to find the molar mass of the element in question, and then multiplies that by the occurances of the element.
Adds the "weight" property to each of the dictionaries in elemental_array
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the appended elemental_array
"""
for a in elemental_array:
this_atomic_weight = elements_data[a["symbol"]]["atomic_weight"]
a["weight"] = a["occurances"] * this_atomic_weight
return elemental_array | [
"def",
"_add_ideal_atomic_weights_",
"(",
"elemental_array",
")",
":",
"for",
"a",
"in",
"elemental_array",
":",
"this_atomic_weight",
"=",
"elements_data",
"[",
"a",
"[",
"\"symbol\"",
"]",
"]",
"[",
"\"atomic_weight\"",
"]",
"a",
"[",
"\"weight\"",
"]",
"=",
... | Uses elements.json to find the molar mass of the element in question, and then multiplies that by the occurances of the element.
Adds the "weight" property to each of the dictionaries in elemental_array
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the appended elemental_array | [
"Uses",
"elements",
".",
"json",
"to",
"find",
"the",
"molar",
"mass",
"of",
"the",
"element",
"in",
"question",
"and",
"then",
"multiplies",
"that",
"by",
"the",
"occurances",
"of",
"the",
"element",
".",
"Adds",
"the",
"weight",
"property",
"to",
"each",... | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L227-L238 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _add_ideal_weight_percent_ | def _add_ideal_weight_percent_(elemental_array):
"""
Adds the "weight_percent" property to each of the dictionaries in elemental_array
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the appended elemental_array
"""
t_mass = _calculate_total_mass_(elemental_array)
for a in elemental_array:
a["weight_percent"] = a["weight"] / t_mass * 100
return elemental_array | python | def _add_ideal_weight_percent_(elemental_array):
"""
Adds the "weight_percent" property to each of the dictionaries in elemental_array
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the appended elemental_array
"""
t_mass = _calculate_total_mass_(elemental_array)
for a in elemental_array:
a["weight_percent"] = a["weight"] / t_mass * 100
return elemental_array | [
"def",
"_add_ideal_weight_percent_",
"(",
"elemental_array",
")",
":",
"t_mass",
"=",
"_calculate_total_mass_",
"(",
"elemental_array",
")",
"for",
"a",
"in",
"elemental_array",
":",
"a",
"[",
"\"weight_percent\"",
"]",
"=",
"a",
"[",
"\"weight\"",
"]",
"/",
"t_... | Adds the "weight_percent" property to each of the dictionaries in elemental_array
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the appended elemental_array | [
"Adds",
"the",
"weight_percent",
"property",
"to",
"each",
"of",
"the",
"dictionaries",
"in",
"elemental_array"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L251-L261 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _add_atomic_percents_ | def _add_atomic_percents_(elemental_array):
"""
Adds ideal atomic percents to a emperical compositional element array generated using _create_emprical_compositional_array_()
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the elemental_array with the atomic percent of each element added
"""
n_atoms = _calculate_n_atoms_(elemental_array)
for e in elemental_array:
e["atomic_percent"] = e["occurances"] / n_atoms * 100
return elemental_array | python | def _add_atomic_percents_(elemental_array):
"""
Adds ideal atomic percents to a emperical compositional element array generated using _create_emprical_compositional_array_()
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the elemental_array with the atomic percent of each element added
"""
n_atoms = _calculate_n_atoms_(elemental_array)
for e in elemental_array:
e["atomic_percent"] = e["occurances"] / n_atoms * 100
return elemental_array | [
"def",
"_add_atomic_percents_",
"(",
"elemental_array",
")",
":",
"n_atoms",
"=",
"_calculate_n_atoms_",
"(",
"elemental_array",
")",
"for",
"e",
"in",
"elemental_array",
":",
"e",
"[",
"\"atomic_percent\"",
"]",
"=",
"e",
"[",
"\"occurances\"",
"]",
"/",
"n_ato... | Adds ideal atomic percents to a emperical compositional element array generated using _create_emprical_compositional_array_()
:param elemental_array: an array of dictionaries containing information about the elements in the system
:return: the elemental_array with the atomic percent of each element added | [
"Adds",
"ideal",
"atomic",
"percents",
"to",
"a",
"emperical",
"compositional",
"element",
"array",
"generated",
"using",
"_create_emprical_compositional_array_",
"()"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L277-L287 |
CitrineInformatics/pypif-sdk | pypif_sdk/func/calculate_funcs.py | _get_element_in_pif_composition_ | def _get_element_in_pif_composition_(pif, elemental_symbol):
"""
If the element in question if in the composition array in the pif, it returns that Composition object and the position in the composition array otherwise it returns False
:param pif: ChemicalSystem Pif in question
:param elemental_symbol: string of the atomic symbol of the element in question
:return: either False if not found in the composition or the Compositional object along with its index in the composition array in the pif
"""
if pif.composition is None:
pif.composition = []
for i, c in enumerate(pif.composition):
if c.element == elemental_symbol or c.element.lower(
) == elements_data[elemental_symbol]["name"].lower():
return [c, i]
i += 1
return False | python | def _get_element_in_pif_composition_(pif, elemental_symbol):
"""
If the element in question if in the composition array in the pif, it returns that Composition object and the position in the composition array otherwise it returns False
:param pif: ChemicalSystem Pif in question
:param elemental_symbol: string of the atomic symbol of the element in question
:return: either False if not found in the composition or the Compositional object along with its index in the composition array in the pif
"""
if pif.composition is None:
pif.composition = []
for i, c in enumerate(pif.composition):
if c.element == elemental_symbol or c.element.lower(
) == elements_data[elemental_symbol]["name"].lower():
return [c, i]
i += 1
return False | [
"def",
"_get_element_in_pif_composition_",
"(",
"pif",
",",
"elemental_symbol",
")",
":",
"if",
"pif",
".",
"composition",
"is",
"None",
":",
"pif",
".",
"composition",
"=",
"[",
"]",
"for",
"i",
",",
"c",
"in",
"enumerate",
"(",
"pif",
".",
"composition",... | If the element in question if in the composition array in the pif, it returns that Composition object and the position in the composition array otherwise it returns False
:param pif: ChemicalSystem Pif in question
:param elemental_symbol: string of the atomic symbol of the element in question
:return: either False if not found in the composition or the Compositional object along with its index in the composition array in the pif | [
"If",
"the",
"element",
"in",
"question",
"if",
"in",
"the",
"composition",
"array",
"in",
"the",
"pif",
"it",
"returns",
"that",
"Composition",
"object",
"and",
"the",
"position",
"in",
"the",
"composition",
"array",
"otherwise",
"it",
"returns",
"False"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/calculate_funcs.py#L290-L305 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/datacite.py | parse_name_string | def parse_name_string(full_name):
"""
Parse a full name into a Name object
:param full_name: e.g. "John Smith" or "Smith, John"
:return: Name object
"""
name = Name()
if "," in full_name:
toks = full_name.split(",")
name.family = toks[0]
name.given = ",".join(toks[1:]).strip()
else:
toks = full_name.split()
name.given = toks[0]
name.family = " ".join(toks[1:]).strip()
return name | python | def parse_name_string(full_name):
"""
Parse a full name into a Name object
:param full_name: e.g. "John Smith" or "Smith, John"
:return: Name object
"""
name = Name()
if "," in full_name:
toks = full_name.split(",")
name.family = toks[0]
name.given = ",".join(toks[1:]).strip()
else:
toks = full_name.split()
name.given = toks[0]
name.family = " ".join(toks[1:]).strip()
return name | [
"def",
"parse_name_string",
"(",
"full_name",
")",
":",
"name",
"=",
"Name",
"(",
")",
"if",
"\",\"",
"in",
"full_name",
":",
"toks",
"=",
"full_name",
".",
"split",
"(",
"\",\"",
")",
"name",
".",
"family",
"=",
"toks",
"[",
"0",
"]",
"name",
".",
... | Parse a full name into a Name object
:param full_name: e.g. "John Smith" or "Smith, John"
:return: Name object | [
"Parse",
"a",
"full",
"name",
"into",
"a",
"Name",
"object"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/datacite.py#L5-L21 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/datacite.py | creator_to_person | def creator_to_person(creator):
"""
Parse the creator block in datacite into a Person
:param creator: block in datacite format
:return: Person
"""
name = Name()
if "creatorName" in creator:
name = parse_name_string(creator["creatorName"])
if "familyName" in creator:
name.family = creator["familyName"]
if "givenName" in creator:
name.given = creator["givenName"]
person = Person(name=name, tags=creator.get("affiliations"))
return person | python | def creator_to_person(creator):
"""
Parse the creator block in datacite into a Person
:param creator: block in datacite format
:return: Person
"""
name = Name()
if "creatorName" in creator:
name = parse_name_string(creator["creatorName"])
if "familyName" in creator:
name.family = creator["familyName"]
if "givenName" in creator:
name.given = creator["givenName"]
person = Person(name=name, tags=creator.get("affiliations"))
return person | [
"def",
"creator_to_person",
"(",
"creator",
")",
":",
"name",
"=",
"Name",
"(",
")",
"if",
"\"creatorName\"",
"in",
"creator",
":",
"name",
"=",
"parse_name_string",
"(",
"creator",
"[",
"\"creatorName\"",
"]",
")",
"if",
"\"familyName\"",
"in",
"creator",
"... | Parse the creator block in datacite into a Person
:param creator: block in datacite format
:return: Person | [
"Parse",
"the",
"creator",
"block",
"in",
"datacite",
"into",
"a",
"Person",
":",
"param",
"creator",
":",
"block",
"in",
"datacite",
"format",
":",
"return",
":",
"Person"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/datacite.py#L24-L39 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/datacite.py | datacite_to_pif_reference | def datacite_to_pif_reference(dc):
"""
Parse a top-level datacite dictionary into a Reference
:param dc: dictionary containing datacite metadata
:return: Reference corresponding to that datacite entry
"""
ref = Reference()
if dc.get('identifier', {}).get('identifierType') == "DOI":
ref.doi = dc.get('identifier', {}).get('identifier')
ref.title = dc.get('title')
ref.publisher = dc.get('publisher')
ref.year = dc.get('publicationYear')
ref.authors = [creator_to_person(x).name for x in dc.get('creators', [])] or None
return ref | python | def datacite_to_pif_reference(dc):
"""
Parse a top-level datacite dictionary into a Reference
:param dc: dictionary containing datacite metadata
:return: Reference corresponding to that datacite entry
"""
ref = Reference()
if dc.get('identifier', {}).get('identifierType') == "DOI":
ref.doi = dc.get('identifier', {}).get('identifier')
ref.title = dc.get('title')
ref.publisher = dc.get('publisher')
ref.year = dc.get('publicationYear')
ref.authors = [creator_to_person(x).name for x in dc.get('creators', [])] or None
return ref | [
"def",
"datacite_to_pif_reference",
"(",
"dc",
")",
":",
"ref",
"=",
"Reference",
"(",
")",
"if",
"dc",
".",
"get",
"(",
"'identifier'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'identifierType'",
")",
"==",
"\"DOI\"",
":",
"ref",
".",
"doi",
"=",
"dc",... | Parse a top-level datacite dictionary into a Reference
:param dc: dictionary containing datacite metadata
:return: Reference corresponding to that datacite entry | [
"Parse",
"a",
"top",
"-",
"level",
"datacite",
"dictionary",
"into",
"a",
"Reference",
":",
"param",
"dc",
":",
"dictionary",
"containing",
"datacite",
"metadata",
":",
"return",
":",
"Reference",
"corresponding",
"to",
"that",
"datacite",
"entry"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/datacite.py#L42-L57 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/mdf.py | query_to_mdf_records | def query_to_mdf_records(query=None, dataset_id=None, mdf_acl=None):
"""Evaluate a query and return a list of MDF records
If a datasetID is specified by there is no query, a simple
whole dataset query is formed for the user
"""
if not query and not dataset_id:
raise ValueError("Either query or dataset_id must be specified")
if query and dataset_id:
raise ValueError("Both query and dataset_id were specified; pick one or the other.")
if not query:
query = PifSystemReturningQuery(
query=DataQuery(
dataset=DatasetQuery(
id=Filter(equal=dataset_id)
)
),
size = 10000 # Don't pull down all the results by default
)
client = get_client()
if not mdf_acl:
raise ValueError('Access controls (mdf_acl) must be specified. Use ["public"] for public access')
pif_result = client.pif_search(query)
if len(pif_result.hits) == 0:
return []
example_uid = pif_result.hits[0].system.uid
dataset_query = DatasetReturningQuery(
query=DataQuery(
system=PifSystemQuery(
uid=Filter(equal=example_uid)
)
),
size = 1 # we only expect one dataset to hit
)
dataset_result = client.dataset_search(dataset_query)
records = []
for hit in pif_result.hits:
records.append(pif_to_mdf_record(hit.system, dataset_result.hits[0], mdf_acl))
return records | python | def query_to_mdf_records(query=None, dataset_id=None, mdf_acl=None):
"""Evaluate a query and return a list of MDF records
If a datasetID is specified by there is no query, a simple
whole dataset query is formed for the user
"""
if not query and not dataset_id:
raise ValueError("Either query or dataset_id must be specified")
if query and dataset_id:
raise ValueError("Both query and dataset_id were specified; pick one or the other.")
if not query:
query = PifSystemReturningQuery(
query=DataQuery(
dataset=DatasetQuery(
id=Filter(equal=dataset_id)
)
),
size = 10000 # Don't pull down all the results by default
)
client = get_client()
if not mdf_acl:
raise ValueError('Access controls (mdf_acl) must be specified. Use ["public"] for public access')
pif_result = client.pif_search(query)
if len(pif_result.hits) == 0:
return []
example_uid = pif_result.hits[0].system.uid
dataset_query = DatasetReturningQuery(
query=DataQuery(
system=PifSystemQuery(
uid=Filter(equal=example_uid)
)
),
size = 1 # we only expect one dataset to hit
)
dataset_result = client.dataset_search(dataset_query)
records = []
for hit in pif_result.hits:
records.append(pif_to_mdf_record(hit.system, dataset_result.hits[0], mdf_acl))
return records | [
"def",
"query_to_mdf_records",
"(",
"query",
"=",
"None",
",",
"dataset_id",
"=",
"None",
",",
"mdf_acl",
"=",
"None",
")",
":",
"if",
"not",
"query",
"and",
"not",
"dataset_id",
":",
"raise",
"ValueError",
"(",
"\"Either query or dataset_id must be specified\"",
... | Evaluate a query and return a list of MDF records
If a datasetID is specified by there is no query, a simple
whole dataset query is formed for the user | [
"Evaluate",
"a",
"query",
"and",
"return",
"a",
"list",
"of",
"MDF",
"records"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/mdf.py#L11-L56 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/mdf.py | pif_to_mdf_record | def pif_to_mdf_record(pif_obj, dataset_hit, mdf_acl):
"""Convert a PIF into partial MDF record"""
res = {}
res["mdf"] = _to_meta_data(pif_obj, dataset_hit, mdf_acl)
res[res["mdf"]["source_name"]] = _to_user_defined(pif_obj)
return dumps(res) | python | def pif_to_mdf_record(pif_obj, dataset_hit, mdf_acl):
"""Convert a PIF into partial MDF record"""
res = {}
res["mdf"] = _to_meta_data(pif_obj, dataset_hit, mdf_acl)
res[res["mdf"]["source_name"]] = _to_user_defined(pif_obj)
return dumps(res) | [
"def",
"pif_to_mdf_record",
"(",
"pif_obj",
",",
"dataset_hit",
",",
"mdf_acl",
")",
":",
"res",
"=",
"{",
"}",
"res",
"[",
"\"mdf\"",
"]",
"=",
"_to_meta_data",
"(",
"pif_obj",
",",
"dataset_hit",
",",
"mdf_acl",
")",
"res",
"[",
"res",
"[",
"\"mdf\"",
... | Convert a PIF into partial MDF record | [
"Convert",
"a",
"PIF",
"into",
"partial",
"MDF",
"record"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/mdf.py#L59-L64 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/mdf.py | _to_meta_data | def _to_meta_data(pif_obj, dataset_hit, mdf_acl):
"""Convert the meta-data from the PIF into MDF"""
pif = pif_obj.as_dictionary()
dataset = dataset_hit.as_dictionary()
mdf = {}
try:
if pif.get("names"):
mdf["title"] = pif["names"][0]
else:
mdf["title"] = "Citrine PIF " + str(pif["uid"])
if pif.get("chemicalFormula"):
mdf["composition"] = pif["chemicalFormula"]
elif pif.get("composition"):
mdf["composition"] = ''.join([comp["element"] for comp in pif["composition"] if comp["element"]])
if not mdf["composition"]:
mdf.pop("composition")
mdf["acl"] = mdf_acl
mdf["source_name"] = _construct_new_key(dataset["name"])
if pif.get("contacts"):
mdf["data_contact"] = []
for contact in pif["contacts"]:
data_c = {
"given_name": contact["name"]["given"], #REQ
"family_name": contact["name"]["family"] #REQ
}
if contact.get("email"):
data_c["email"] = contact.get("email", "")
if contact.get("orcid"):
data_c["orcid"] = contact.get("orcid", "")
mdf["data_contact"].append(data_c)
if not mdf["data_contact"]:
mdf.pop("data_contact")
mdf["data_contributor"] = [{}]
if "owner" in dataset:
name = dataset["owner"].split()
contributor = {
"given_name": name[0],
"family_name": name[1],
"email": dataset["email"]
}
mdf["data_contributor"] = [contributor]
mdf["links"] = {
"landing_page": "https://citrination.com/datasets/{}".format(dataset["id"]),
"publication": []
}
if pif.get("references"):
mdf["author"] = []
mdf["citation"] = []
for ref in pif["references"]:
if ref.get("doi"):
mdf["citation"].append(ref["doi"]) #TODO: Make actual citation
mdf["links"]["publication"].append(ref["doi"])
if ref.get("authors"):
for author in ref["authors"]:
if author.get("given") and author.get("family"):
mdf["author"].append({
"given_name": author["given"],
"family_name": author["family"]
})
# Remove fields if blank
if not mdf["author"]:
mdf.pop("author")
if not mdf["citation"]:
mdf.pop("citation")
if not mdf["links"]["publication"]:
mdf["links"].pop("publication")
if pif.get("licenses", [{}])[0].get("url"):
mdf["license"] = pif["licenses"][0]["url"]
if pif.get("tags"):
mdf["tags"] = pif["tags"]
# If required MDF metadata is missing from PIF, abort
except KeyError as e:
print("Error: Required MDF metadata", str(e), "not found in PIF", pif["uid"])
return None
return mdf | python | def _to_meta_data(pif_obj, dataset_hit, mdf_acl):
"""Convert the meta-data from the PIF into MDF"""
pif = pif_obj.as_dictionary()
dataset = dataset_hit.as_dictionary()
mdf = {}
try:
if pif.get("names"):
mdf["title"] = pif["names"][0]
else:
mdf["title"] = "Citrine PIF " + str(pif["uid"])
if pif.get("chemicalFormula"):
mdf["composition"] = pif["chemicalFormula"]
elif pif.get("composition"):
mdf["composition"] = ''.join([comp["element"] for comp in pif["composition"] if comp["element"]])
if not mdf["composition"]:
mdf.pop("composition")
mdf["acl"] = mdf_acl
mdf["source_name"] = _construct_new_key(dataset["name"])
if pif.get("contacts"):
mdf["data_contact"] = []
for contact in pif["contacts"]:
data_c = {
"given_name": contact["name"]["given"], #REQ
"family_name": contact["name"]["family"] #REQ
}
if contact.get("email"):
data_c["email"] = contact.get("email", "")
if contact.get("orcid"):
data_c["orcid"] = contact.get("orcid", "")
mdf["data_contact"].append(data_c)
if not mdf["data_contact"]:
mdf.pop("data_contact")
mdf["data_contributor"] = [{}]
if "owner" in dataset:
name = dataset["owner"].split()
contributor = {
"given_name": name[0],
"family_name": name[1],
"email": dataset["email"]
}
mdf["data_contributor"] = [contributor]
mdf["links"] = {
"landing_page": "https://citrination.com/datasets/{}".format(dataset["id"]),
"publication": []
}
if pif.get("references"):
mdf["author"] = []
mdf["citation"] = []
for ref in pif["references"]:
if ref.get("doi"):
mdf["citation"].append(ref["doi"]) #TODO: Make actual citation
mdf["links"]["publication"].append(ref["doi"])
if ref.get("authors"):
for author in ref["authors"]:
if author.get("given") and author.get("family"):
mdf["author"].append({
"given_name": author["given"],
"family_name": author["family"]
})
# Remove fields if blank
if not mdf["author"]:
mdf.pop("author")
if not mdf["citation"]:
mdf.pop("citation")
if not mdf["links"]["publication"]:
mdf["links"].pop("publication")
if pif.get("licenses", [{}])[0].get("url"):
mdf["license"] = pif["licenses"][0]["url"]
if pif.get("tags"):
mdf["tags"] = pif["tags"]
# If required MDF metadata is missing from PIF, abort
except KeyError as e:
print("Error: Required MDF metadata", str(e), "not found in PIF", pif["uid"])
return None
return mdf | [
"def",
"_to_meta_data",
"(",
"pif_obj",
",",
"dataset_hit",
",",
"mdf_acl",
")",
":",
"pif",
"=",
"pif_obj",
".",
"as_dictionary",
"(",
")",
"dataset",
"=",
"dataset_hit",
".",
"as_dictionary",
"(",
")",
"mdf",
"=",
"{",
"}",
"try",
":",
"if",
"pif",
"... | Convert the meta-data from the PIF into MDF | [
"Convert",
"the",
"meta",
"-",
"data",
"from",
"the",
"PIF",
"into",
"MDF"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/mdf.py#L67-L149 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/mdf.py | _to_user_defined | def _to_user_defined(pif_obj):
"""Read the systems in the PIF to populate the user-defined portion"""
res = {}
# make a read view to flatten the hierarchy
rv = ReadView(pif_obj)
# Iterate over the keys in the read view
for k in rv.keys():
name, value = _extract_key_value(rv[k].raw)
# add any objects that can be extracted
if name and value is not None:
res[name] = value
# Grab interesting values not in the ReadView
pif = pif_obj.as_dictionary()
elements = {}
if pif.get("composition"):
for comp in pif["composition"]:
if comp.get("actualAtomicPercent"):
elements[comp["element"]] = float(comp["actualAtomicPercent"]["value"])
elif comp.get("actualWeightPercent"):
elements[comp["element"]] = float(comp["actualWeightPercent"]["value"])
if elements:
res["elemental_percent"] = elements
elif pif.get("chemicalFormula"):
symbol = ""
num = ""
# Chemical formulae are comprised of letters, numbers, and potentially characters we don't care about
for char in pif["chemicalFormula"]:
# Uppercase char indicates beginning of new symbol
if char.isupper():
# If there is already a symbol in holding, process it
if symbol:
try:
elements[symbol] = int(num)
# If num is a float, raises ValueError
except ValueError:
elements[symbol] = float(num) if num else 1
symbol = ""
num = ""
symbol += char
# Lowercase chars or digits are continuations of a symbol
elif char.islower():
symbol += char
elif char.isdigit():
num += char
elif char == ".":
num += char
# All other chars are not useful
if elements:
res["elemental_proportion"] = elements
return res | python | def _to_user_defined(pif_obj):
"""Read the systems in the PIF to populate the user-defined portion"""
res = {}
# make a read view to flatten the hierarchy
rv = ReadView(pif_obj)
# Iterate over the keys in the read view
for k in rv.keys():
name, value = _extract_key_value(rv[k].raw)
# add any objects that can be extracted
if name and value is not None:
res[name] = value
# Grab interesting values not in the ReadView
pif = pif_obj.as_dictionary()
elements = {}
if pif.get("composition"):
for comp in pif["composition"]:
if comp.get("actualAtomicPercent"):
elements[comp["element"]] = float(comp["actualAtomicPercent"]["value"])
elif comp.get("actualWeightPercent"):
elements[comp["element"]] = float(comp["actualWeightPercent"]["value"])
if elements:
res["elemental_percent"] = elements
elif pif.get("chemicalFormula"):
symbol = ""
num = ""
# Chemical formulae are comprised of letters, numbers, and potentially characters we don't care about
for char in pif["chemicalFormula"]:
# Uppercase char indicates beginning of new symbol
if char.isupper():
# If there is already a symbol in holding, process it
if symbol:
try:
elements[symbol] = int(num)
# If num is a float, raises ValueError
except ValueError:
elements[symbol] = float(num) if num else 1
symbol = ""
num = ""
symbol += char
# Lowercase chars or digits are continuations of a symbol
elif char.islower():
symbol += char
elif char.isdigit():
num += char
elif char == ".":
num += char
# All other chars are not useful
if elements:
res["elemental_proportion"] = elements
return res | [
"def",
"_to_user_defined",
"(",
"pif_obj",
")",
":",
"res",
"=",
"{",
"}",
"# make a read view to flatten the hierarchy",
"rv",
"=",
"ReadView",
"(",
"pif_obj",
")",
"# Iterate over the keys in the read view",
"for",
"k",
"in",
"rv",
".",
"keys",
"(",
")",
":",
... | Read the systems in the PIF to populate the user-defined portion | [
"Read",
"the",
"systems",
"in",
"the",
"PIF",
"to",
"populate",
"the",
"user",
"-",
"defined",
"portion"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/mdf.py#L152-L205 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/mdf.py | _construct_new_key | def _construct_new_key(name, units=None):
"""Construct an MDF safe key from the name and units"""
to_replace = ["/", "\\", "*", "^", "#", " ", "\n", "\t", ",", ".", ")", "(", "'", "`", "-"]
to_remove = ["$", "{", "}"]
cat = name
if units:
cat = "_".join([name, units])
for c in to_replace:
cat = cat.replace(c, "_")
for c in to_remove:
cat = cat.replace(c, "")
cat = re.sub('_+','_', cat)
return cat | python | def _construct_new_key(name, units=None):
"""Construct an MDF safe key from the name and units"""
to_replace = ["/", "\\", "*", "^", "#", " ", "\n", "\t", ",", ".", ")", "(", "'", "`", "-"]
to_remove = ["$", "{", "}"]
cat = name
if units:
cat = "_".join([name, units])
for c in to_replace:
cat = cat.replace(c, "_")
for c in to_remove:
cat = cat.replace(c, "")
cat = re.sub('_+','_', cat)
return cat | [
"def",
"_construct_new_key",
"(",
"name",
",",
"units",
"=",
"None",
")",
":",
"to_replace",
"=",
"[",
"\"/\"",
",",
"\"\\\\\"",
",",
"\"*\"",
",",
"\"^\"",
",",
"\"#\"",
",",
"\" \"",
",",
"\"\\n\"",
",",
"\"\\t\"",
",",
"\",\"",
",",
"\".\"",
",",
... | Construct an MDF safe key from the name and units | [
"Construct",
"an",
"MDF",
"safe",
"key",
"from",
"the",
"name",
"and",
"units"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/mdf.py#L208-L223 |
CitrineInformatics/pypif-sdk | pypif_sdk/interop/mdf.py | _extract_key_value | def _extract_key_value(obj):
"""Extract the value from the object and make a descriptive key"""
key = None; value = None
# Parse a Value object, which includes Properties
if isinstance(obj, Value):
key = _construct_new_key(obj.name, obj.units)
value = []
if obj.scalars:
value = [(val.value if isinstance(val, Scalar) else val)
for val in obj.scalars]
elif obj.vectors and len(obj.vectors) == 1:
value = [(val.value if isinstance(val, Scalar) else val)
for val in obj.vectors[0]]
if len(value) == 1:
value = value[0]
elif len(value) == 0:
value = None
# If there is a process step, pul out its name as the value
# TODO: resolve duplicates
if isinstance(obj, ProcessStep):
key = "Processing"
value = obj.name
return key, value | python | def _extract_key_value(obj):
"""Extract the value from the object and make a descriptive key"""
key = None; value = None
# Parse a Value object, which includes Properties
if isinstance(obj, Value):
key = _construct_new_key(obj.name, obj.units)
value = []
if obj.scalars:
value = [(val.value if isinstance(val, Scalar) else val)
for val in obj.scalars]
elif obj.vectors and len(obj.vectors) == 1:
value = [(val.value if isinstance(val, Scalar) else val)
for val in obj.vectors[0]]
if len(value) == 1:
value = value[0]
elif len(value) == 0:
value = None
# If there is a process step, pul out its name as the value
# TODO: resolve duplicates
if isinstance(obj, ProcessStep):
key = "Processing"
value = obj.name
return key, value | [
"def",
"_extract_key_value",
"(",
"obj",
")",
":",
"key",
"=",
"None",
"value",
"=",
"None",
"# Parse a Value object, which includes Properties",
"if",
"isinstance",
"(",
"obj",
",",
"Value",
")",
":",
"key",
"=",
"_construct_new_key",
"(",
"obj",
".",
"name",
... | Extract the value from the object and make a descriptive key | [
"Extract",
"the",
"value",
"from",
"the",
"object",
"and",
"make",
"a",
"descriptive",
"key"
] | train | https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/interop/mdf.py#L226-L251 |
innogames/polysh | polysh/stdin.py | process_input_buffer | def process_input_buffer():
"""Send the content of the input buffer to all remote processes, this must
be called in the main thread"""
from polysh.control_commands_helpers import handle_control_command
data = the_stdin_thread.input_buffer.get()
remote_dispatcher.log(b'> ' + data)
if data.startswith(b':'):
try:
handle_control_command(data[1:-1].decode())
except UnicodeDecodeError as e:
console_output(b'Could not decode command.')
return
if data.startswith(b'!'):
try:
retcode = subprocess.call(data[1:], shell=True)
except OSError as e:
if e.errno == errno.EINTR:
console_output(b'Child was interrupted\n')
retcode = 0
else:
raise
if retcode > 128 and retcode <= 192:
retcode = 128 - retcode
if retcode > 0:
console_output('Child returned {:d}\n'.format(retcode).encode())
elif retcode < 0:
console_output('Child was terminated by signal {:d}\n'.format(
-retcode).encode())
return
for r in dispatchers.all_instances():
try:
r.dispatch_command(data)
except asyncore.ExitNow as e:
raise e
except Exception as msg:
raise msg
console_output('{} for {}, disconnecting\n'.format(
str(msg), r.display_name).encode())
r.disconnect()
else:
if r.enabled and r.state is remote_dispatcher.STATE_IDLE:
r.change_state(remote_dispatcher.STATE_RUNNING) | python | def process_input_buffer():
"""Send the content of the input buffer to all remote processes, this must
be called in the main thread"""
from polysh.control_commands_helpers import handle_control_command
data = the_stdin_thread.input_buffer.get()
remote_dispatcher.log(b'> ' + data)
if data.startswith(b':'):
try:
handle_control_command(data[1:-1].decode())
except UnicodeDecodeError as e:
console_output(b'Could not decode command.')
return
if data.startswith(b'!'):
try:
retcode = subprocess.call(data[1:], shell=True)
except OSError as e:
if e.errno == errno.EINTR:
console_output(b'Child was interrupted\n')
retcode = 0
else:
raise
if retcode > 128 and retcode <= 192:
retcode = 128 - retcode
if retcode > 0:
console_output('Child returned {:d}\n'.format(retcode).encode())
elif retcode < 0:
console_output('Child was terminated by signal {:d}\n'.format(
-retcode).encode())
return
for r in dispatchers.all_instances():
try:
r.dispatch_command(data)
except asyncore.ExitNow as e:
raise e
except Exception as msg:
raise msg
console_output('{} for {}, disconnecting\n'.format(
str(msg), r.display_name).encode())
r.disconnect()
else:
if r.enabled and r.state is remote_dispatcher.STATE_IDLE:
r.change_state(remote_dispatcher.STATE_RUNNING) | [
"def",
"process_input_buffer",
"(",
")",
":",
"from",
"polysh",
".",
"control_commands_helpers",
"import",
"handle_control_command",
"data",
"=",
"the_stdin_thread",
".",
"input_buffer",
".",
"get",
"(",
")",
"remote_dispatcher",
".",
"log",
"(",
"b'> '",
"+",
"da... | Send the content of the input buffer to all remote processes, this must
be called in the main thread | [
"Send",
"the",
"content",
"of",
"the",
"input",
"buffer",
"to",
"all",
"remote",
"processes",
"this",
"must",
"be",
"called",
"in",
"the",
"main",
"thread"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L60-L104 |
innogames/polysh | polysh/stdin.py | write_main_socket | def write_main_socket(c):
"""Synchronous write to the main socket, wait for ACK"""
the_stdin_thread.socket_write.send(c)
while True:
try:
the_stdin_thread.socket_write.recv(1)
except socket.error as e:
if e.errno != errno.EINTR:
raise
else:
break | python | def write_main_socket(c):
"""Synchronous write to the main socket, wait for ACK"""
the_stdin_thread.socket_write.send(c)
while True:
try:
the_stdin_thread.socket_write.recv(1)
except socket.error as e:
if e.errno != errno.EINTR:
raise
else:
break | [
"def",
"write_main_socket",
"(",
"c",
")",
":",
"the_stdin_thread",
".",
"socket_write",
".",
"send",
"(",
"c",
")",
"while",
"True",
":",
"try",
":",
"the_stdin_thread",
".",
"socket_write",
".",
"recv",
"(",
"1",
")",
"except",
"socket",
".",
"error",
... | Synchronous write to the main socket, wait for ACK | [
"Synchronous",
"write",
"to",
"the",
"main",
"socket",
"wait",
"for",
"ACK"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L149-L159 |
innogames/polysh | polysh/stdin.py | get_stdin_pid | def get_stdin_pid(cached_result=None):
"""Try to get the PID of the stdin thread, otherwise get the whole process
ID"""
if cached_result is None:
try:
tasks = os.listdir('/proc/self/task')
except OSError as e:
if e.errno != errno.ENOENT:
raise
cached_result = os.getpid()
else:
tasks.remove(str(os.getpid()))
assert len(tasks) == 1
cached_result = int(tasks[0])
return cached_result | python | def get_stdin_pid(cached_result=None):
"""Try to get the PID of the stdin thread, otherwise get the whole process
ID"""
if cached_result is None:
try:
tasks = os.listdir('/proc/self/task')
except OSError as e:
if e.errno != errno.ENOENT:
raise
cached_result = os.getpid()
else:
tasks.remove(str(os.getpid()))
assert len(tasks) == 1
cached_result = int(tasks[0])
return cached_result | [
"def",
"get_stdin_pid",
"(",
"cached_result",
"=",
"None",
")",
":",
"if",
"cached_result",
"is",
"None",
":",
"try",
":",
"tasks",
"=",
"os",
".",
"listdir",
"(",
"'/proc/self/task'",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
... | Try to get the PID of the stdin thread, otherwise get the whole process
ID | [
"Try",
"to",
"get",
"the",
"PID",
"of",
"the",
"stdin",
"thread",
"otherwise",
"get",
"the",
"whole",
"process",
"ID"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L172-L186 |
innogames/polysh | polysh/stdin.py | interrupt_stdin_thread | def interrupt_stdin_thread():
"""The stdin thread may be in raw_input(), get out of it"""
dupped_stdin = os.dup(0) # Backup the stdin fd
assert not the_stdin_thread.interrupt_asked # Sanity check
the_stdin_thread.interrupt_asked = True # Not user triggered
os.lseek(tempfile_fd, 0, 0) # Rewind in the temp file
os.dup2(tempfile_fd, 0) # This will make raw_input() return
pid = get_stdin_pid()
os.kill(pid, signal.SIGWINCH) # Try harder to wake up raw_input()
the_stdin_thread.out_of_raw_input.wait() # Wait for this return
the_stdin_thread.interrupt_asked = False # Restore sanity
os.dup2(dupped_stdin, 0) # Restore stdin
os.close(dupped_stdin) | python | def interrupt_stdin_thread():
"""The stdin thread may be in raw_input(), get out of it"""
dupped_stdin = os.dup(0) # Backup the stdin fd
assert not the_stdin_thread.interrupt_asked # Sanity check
the_stdin_thread.interrupt_asked = True # Not user triggered
os.lseek(tempfile_fd, 0, 0) # Rewind in the temp file
os.dup2(tempfile_fd, 0) # This will make raw_input() return
pid = get_stdin_pid()
os.kill(pid, signal.SIGWINCH) # Try harder to wake up raw_input()
the_stdin_thread.out_of_raw_input.wait() # Wait for this return
the_stdin_thread.interrupt_asked = False # Restore sanity
os.dup2(dupped_stdin, 0) # Restore stdin
os.close(dupped_stdin) | [
"def",
"interrupt_stdin_thread",
"(",
")",
":",
"dupped_stdin",
"=",
"os",
".",
"dup",
"(",
"0",
")",
"# Backup the stdin fd",
"assert",
"not",
"the_stdin_thread",
".",
"interrupt_asked",
"# Sanity check",
"the_stdin_thread",
".",
"interrupt_asked",
"=",
"True",
"# ... | The stdin thread may be in raw_input(), get out of it | [
"The",
"stdin",
"thread",
"may",
"be",
"in",
"raw_input",
"()",
"get",
"out",
"of",
"it"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L189-L201 |
innogames/polysh | polysh/stdin.py | InputBuffer.add | def add(self, data):
"""Add data to the buffer"""
assert isinstance(data, bytes)
with self.lock:
self.buf += data | python | def add(self, data):
"""Add data to the buffer"""
assert isinstance(data, bytes)
with self.lock:
self.buf += data | [
"def",
"add",
"(",
"self",
",",
"data",
")",
":",
"assert",
"isinstance",
"(",
"data",
",",
"bytes",
")",
"with",
"self",
".",
"lock",
":",
"self",
".",
"buf",
"+=",
"data"
] | Add data to the buffer | [
"Add",
"data",
"to",
"the",
"buffer"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L45-L49 |
innogames/polysh | polysh/stdin.py | InputBuffer.get | def get(self):
"""Get the content of the buffer"""
data = b''
with self.lock:
data, self.buf = self.buf, b''
return data | python | def get(self):
"""Get the content of the buffer"""
data = b''
with self.lock:
data, self.buf = self.buf, b''
return data | [
"def",
"get",
"(",
"self",
")",
":",
"data",
"=",
"b''",
"with",
"self",
".",
"lock",
":",
"data",
",",
"self",
".",
"buf",
"=",
"self",
".",
"buf",
",",
"b''",
"return",
"data"
] | Get the content of the buffer | [
"Get",
"the",
"content",
"of",
"the",
"buffer"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L51-L57 |
innogames/polysh | polysh/stdin.py | SocketNotificationReader.handle_read | def handle_read(self):
"""Handle all the available character commands in the socket"""
while True:
try:
c = self.recv(1)
except socket.error as e:
if e.errno == errno.EWOULDBLOCK:
return
else:
raise
else:
self._do(c)
self.socket.setblocking(True)
self.send(b'A')
self.socket.setblocking(False) | python | def handle_read(self):
"""Handle all the available character commands in the socket"""
while True:
try:
c = self.recv(1)
except socket.error as e:
if e.errno == errno.EWOULDBLOCK:
return
else:
raise
else:
self._do(c)
self.socket.setblocking(True)
self.send(b'A')
self.socket.setblocking(False) | [
"def",
"handle_read",
"(",
"self",
")",
":",
"while",
"True",
":",
"try",
":",
"c",
"=",
"self",
".",
"recv",
"(",
"1",
")",
"except",
"socket",
".",
"error",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
"EWOULDBLOCK",
":",
"retur... | Handle all the available character commands in the socket | [
"Handle",
"all",
"the",
"available",
"character",
"commands",
"in",
"the",
"socket"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/stdin.py#L128-L142 |
nschloe/accupy | accupy/sums.py | knuth_sum | def knuth_sum(a, b):
"""Error-free transformation of the sum of two floating point numbers
according to
D.E. Knuth.
The Art of Computer Programming: Seminumerical Algorithms, volume 2.
Addison Wesley, Reading, Massachusetts, second edition, 1981.
The underlying problem is that the exact sum a+b of two floating point
number a and b is not necessarily a floating point number; for example if
you add a very large and a very small number. It is however known that the
difference between the best floating point approximation of a+b and the
exact a+b is again a floating point number. This routine returns the sum
and the error.
Algorithm 3.1 in <https://doi.org/10.1137/030601818>.
"""
x = a + b
z = x - a
y = (a - (x - z)) + (b - z)
return x, y | python | def knuth_sum(a, b):
"""Error-free transformation of the sum of two floating point numbers
according to
D.E. Knuth.
The Art of Computer Programming: Seminumerical Algorithms, volume 2.
Addison Wesley, Reading, Massachusetts, second edition, 1981.
The underlying problem is that the exact sum a+b of two floating point
number a and b is not necessarily a floating point number; for example if
you add a very large and a very small number. It is however known that the
difference between the best floating point approximation of a+b and the
exact a+b is again a floating point number. This routine returns the sum
and the error.
Algorithm 3.1 in <https://doi.org/10.1137/030601818>.
"""
x = a + b
z = x - a
y = (a - (x - z)) + (b - z)
return x, y | [
"def",
"knuth_sum",
"(",
"a",
",",
"b",
")",
":",
"x",
"=",
"a",
"+",
"b",
"z",
"=",
"x",
"-",
"a",
"y",
"=",
"(",
"a",
"-",
"(",
"x",
"-",
"z",
")",
")",
"+",
"(",
"b",
"-",
"z",
")",
"return",
"x",
",",
"y"
] | Error-free transformation of the sum of two floating point numbers
according to
D.E. Knuth.
The Art of Computer Programming: Seminumerical Algorithms, volume 2.
Addison Wesley, Reading, Massachusetts, second edition, 1981.
The underlying problem is that the exact sum a+b of two floating point
number a and b is not necessarily a floating point number; for example if
you add a very large and a very small number. It is however known that the
difference between the best floating point approximation of a+b and the
exact a+b is again a floating point number. This routine returns the sum
and the error.
Algorithm 3.1 in <https://doi.org/10.1137/030601818>. | [
"Error",
"-",
"free",
"transformation",
"of",
"the",
"sum",
"of",
"two",
"floating",
"point",
"numbers",
"according",
"to"
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/sums.py#L10-L30 |
nschloe/accupy | accupy/sums.py | decker_sum | def decker_sum(a, b):
"""Computationally equivalent to knuth_sum, but formally a bit cheaper.
Only works for floats though (and not arrays), and the branch make it in
fact less favorable in terms of actual speed.
"""
x = a + b
y = b - (x - a) if abs(a) > abs(b) else a - (x - b)
return x, y | python | def decker_sum(a, b):
"""Computationally equivalent to knuth_sum, but formally a bit cheaper.
Only works for floats though (and not arrays), and the branch make it in
fact less favorable in terms of actual speed.
"""
x = a + b
y = b - (x - a) if abs(a) > abs(b) else a - (x - b)
return x, y | [
"def",
"decker_sum",
"(",
"a",
",",
"b",
")",
":",
"x",
"=",
"a",
"+",
"b",
"y",
"=",
"b",
"-",
"(",
"x",
"-",
"a",
")",
"if",
"abs",
"(",
"a",
")",
">",
"abs",
"(",
"b",
")",
"else",
"a",
"-",
"(",
"x",
"-",
"b",
")",
"return",
"x",
... | Computationally equivalent to knuth_sum, but formally a bit cheaper.
Only works for floats though (and not arrays), and the branch make it in
fact less favorable in terms of actual speed. | [
"Computationally",
"equivalent",
"to",
"knuth_sum",
"but",
"formally",
"a",
"bit",
"cheaper",
".",
"Only",
"works",
"for",
"floats",
"though",
"(",
"and",
"not",
"arrays",
")",
"and",
"the",
"branch",
"make",
"it",
"in",
"fact",
"less",
"favorable",
"in",
... | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/sums.py#L33-L40 |
nschloe/accupy | accupy/sums.py | distill | def distill(p, K):
"""Algorithm 4.3. Error-free vector transformation for summation.
The vector p is transformed without changing the sum, and p_n is replaced
by float(sum(p)). Kahan [21] calls this a 'distillation algorithm.'
"""
q = p.reshape(p.shape[0], -1)
for _ in range(K):
_accupy.distill(q)
return q.reshape(p.shape) | python | def distill(p, K):
"""Algorithm 4.3. Error-free vector transformation for summation.
The vector p is transformed without changing the sum, and p_n is replaced
by float(sum(p)). Kahan [21] calls this a 'distillation algorithm.'
"""
q = p.reshape(p.shape[0], -1)
for _ in range(K):
_accupy.distill(q)
return q.reshape(p.shape) | [
"def",
"distill",
"(",
"p",
",",
"K",
")",
":",
"q",
"=",
"p",
".",
"reshape",
"(",
"p",
".",
"shape",
"[",
"0",
"]",
",",
"-",
"1",
")",
"for",
"_",
"in",
"range",
"(",
"K",
")",
":",
"_accupy",
".",
"distill",
"(",
"q",
")",
"return",
"... | Algorithm 4.3. Error-free vector transformation for summation.
The vector p is transformed without changing the sum, and p_n is replaced
by float(sum(p)). Kahan [21] calls this a 'distillation algorithm.' | [
"Algorithm",
"4",
".",
"3",
".",
"Error",
"-",
"free",
"vector",
"transformation",
"for",
"summation",
"."
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/sums.py#L43-L52 |
nschloe/accupy | accupy/sums.py | ksum | def ksum(p, K=2):
"""From
T. Ogita, S.M. Rump, and S. Oishi.
Accurate Sum and Dot Product,
SIAM J. Sci. Comput., 26(6), 1955–1988 (34 pages).
<https://doi.org/10.1137/030601818>.
Algorithm 4.8. Summation as in K-fold precision by (K−1)-fold error-free
vector transformation.
"""
# Don't override the input data.
q = p.copy()
distill(q, K - 1)
return numpy.sum(q[:-1], axis=0) + q[-1] | python | def ksum(p, K=2):
"""From
T. Ogita, S.M. Rump, and S. Oishi.
Accurate Sum and Dot Product,
SIAM J. Sci. Comput., 26(6), 1955–1988 (34 pages).
<https://doi.org/10.1137/030601818>.
Algorithm 4.8. Summation as in K-fold precision by (K−1)-fold error-free
vector transformation.
"""
# Don't override the input data.
q = p.copy()
distill(q, K - 1)
return numpy.sum(q[:-1], axis=0) + q[-1] | [
"def",
"ksum",
"(",
"p",
",",
"K",
"=",
"2",
")",
":",
"# Don't override the input data.",
"q",
"=",
"p",
".",
"copy",
"(",
")",
"distill",
"(",
"q",
",",
"K",
"-",
"1",
")",
"return",
"numpy",
".",
"sum",
"(",
"q",
"[",
":",
"-",
"1",
"]",
"... | From
T. Ogita, S.M. Rump, and S. Oishi.
Accurate Sum and Dot Product,
SIAM J. Sci. Comput., 26(6), 1955–1988 (34 pages).
<https://doi.org/10.1137/030601818>.
Algorithm 4.8. Summation as in K-fold precision by (K−1)-fold error-free
vector transformation. | [
"From"
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/sums.py#L55-L69 |
nschloe/accupy | accupy/sums.py | kahan_sum | def kahan_sum(p):
"""Kahan summation
<https://en.wikipedia.org/wiki/Kahan_summation_algorithm>.
"""
q = p.reshape(p.shape[0], -1)
s = _accupy.kahan(q)
return s.reshape(p.shape[1:]) | python | def kahan_sum(p):
"""Kahan summation
<https://en.wikipedia.org/wiki/Kahan_summation_algorithm>.
"""
q = p.reshape(p.shape[0], -1)
s = _accupy.kahan(q)
return s.reshape(p.shape[1:]) | [
"def",
"kahan_sum",
"(",
"p",
")",
":",
"q",
"=",
"p",
".",
"reshape",
"(",
"p",
".",
"shape",
"[",
"0",
"]",
",",
"-",
"1",
")",
"s",
"=",
"_accupy",
".",
"kahan",
"(",
"q",
")",
"return",
"s",
".",
"reshape",
"(",
"p",
".",
"shape",
"[",
... | Kahan summation
<https://en.wikipedia.org/wiki/Kahan_summation_algorithm>. | [
"Kahan",
"summation",
"<https",
":",
"//",
"en",
".",
"wikipedia",
".",
"org",
"/",
"wiki",
"/",
"Kahan_summation_algorithm",
">",
"."
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/sums.py#L79-L85 |
innogames/polysh | polysh/control_commands_helpers.py | toggle_shells | def toggle_shells(command, enable):
"""Enable or disable the specified shells. If the command would have
no effect, it changes all other shells to the inverse enable value."""
selection = list(selected_shells(command))
if command and command != '*' and selection:
for i in selection:
if i.state != remote_dispatcher.STATE_DEAD and i.enabled != enable:
break
else:
toggle_shells('*', not enable)
for i in selection:
if i.state != remote_dispatcher.STATE_DEAD:
i.set_enabled(enable) | python | def toggle_shells(command, enable):
"""Enable or disable the specified shells. If the command would have
no effect, it changes all other shells to the inverse enable value."""
selection = list(selected_shells(command))
if command and command != '*' and selection:
for i in selection:
if i.state != remote_dispatcher.STATE_DEAD and i.enabled != enable:
break
else:
toggle_shells('*', not enable)
for i in selection:
if i.state != remote_dispatcher.STATE_DEAD:
i.set_enabled(enable) | [
"def",
"toggle_shells",
"(",
"command",
",",
"enable",
")",
":",
"selection",
"=",
"list",
"(",
"selected_shells",
"(",
"command",
")",
")",
"if",
"command",
"and",
"command",
"!=",
"'*'",
"and",
"selection",
":",
"for",
"i",
"in",
"selection",
":",
"if"... | Enable or disable the specified shells. If the command would have
no effect, it changes all other shells to the inverse enable value. | [
"Enable",
"or",
"disable",
"the",
"specified",
"shells",
".",
"If",
"the",
"command",
"would",
"have",
"no",
"effect",
"it",
"changes",
"all",
"other",
"shells",
"to",
"the",
"inverse",
"enable",
"value",
"."
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/control_commands_helpers.py#L29-L42 |
innogames/polysh | polysh/control_commands_helpers.py | selected_shells | def selected_shells(command):
"""Iterator over the shells with names matching the patterns.
An empty patterns matches all the shells"""
if not command or command == '*':
for i in dispatchers.all_instances():
yield i
return
selected = set()
instance_found = False
for pattern in command.split():
found = False
for expanded_pattern in expand_syntax(pattern):
for i in dispatchers.all_instances():
instance_found = True
if fnmatch(i.display_name, expanded_pattern):
found = True
if i not in selected:
selected.add(i)
yield i
if instance_found and not found:
console_output('{} not found\n'.format(pattern).encode()) | python | def selected_shells(command):
"""Iterator over the shells with names matching the patterns.
An empty patterns matches all the shells"""
if not command or command == '*':
for i in dispatchers.all_instances():
yield i
return
selected = set()
instance_found = False
for pattern in command.split():
found = False
for expanded_pattern in expand_syntax(pattern):
for i in dispatchers.all_instances():
instance_found = True
if fnmatch(i.display_name, expanded_pattern):
found = True
if i not in selected:
selected.add(i)
yield i
if instance_found and not found:
console_output('{} not found\n'.format(pattern).encode()) | [
"def",
"selected_shells",
"(",
"command",
")",
":",
"if",
"not",
"command",
"or",
"command",
"==",
"'*'",
":",
"for",
"i",
"in",
"dispatchers",
".",
"all_instances",
"(",
")",
":",
"yield",
"i",
"return",
"selected",
"=",
"set",
"(",
")",
"instance_found... | Iterator over the shells with names matching the patterns.
An empty patterns matches all the shells | [
"Iterator",
"over",
"the",
"shells",
"with",
"names",
"matching",
"the",
"patterns",
".",
"An",
"empty",
"patterns",
"matches",
"all",
"the",
"shells"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/control_commands_helpers.py#L45-L65 |
innogames/polysh | polysh/control_commands_helpers.py | complete_shells | def complete_shells(line, text, predicate=lambda i: True):
"""Return the shell names to include in the completion"""
res = [i.display_name + ' ' for i in dispatchers.all_instances() if
i.display_name.startswith(text) and
predicate(i) and
' ' + i.display_name + ' ' not in line]
return res | python | def complete_shells(line, text, predicate=lambda i: True):
"""Return the shell names to include in the completion"""
res = [i.display_name + ' ' for i in dispatchers.all_instances() if
i.display_name.startswith(text) and
predicate(i) and
' ' + i.display_name + ' ' not in line]
return res | [
"def",
"complete_shells",
"(",
"line",
",",
"text",
",",
"predicate",
"=",
"lambda",
"i",
":",
"True",
")",
":",
"res",
"=",
"[",
"i",
".",
"display_name",
"+",
"' '",
"for",
"i",
"in",
"dispatchers",
".",
"all_instances",
"(",
")",
"if",
"i",
".",
... | Return the shell names to include in the completion | [
"Return",
"the",
"shell",
"names",
"to",
"include",
"in",
"the",
"completion"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/control_commands_helpers.py#L68-L74 |
nschloe/accupy | accupy/dot.py | kdot | def kdot(x, y, K=2):
"""Algorithm 5.10. Dot product algorithm in K-fold working precision,
K >= 3.
"""
xx = x.reshape(-1, x.shape[-1])
yy = y.reshape(y.shape[0], -1)
xx = numpy.ascontiguousarray(xx)
yy = numpy.ascontiguousarray(yy)
r = _accupy.kdot_helper(xx, yy).reshape((-1,) + x.shape[:-1] + y.shape[1:])
return ksum(r, K - 1) | python | def kdot(x, y, K=2):
"""Algorithm 5.10. Dot product algorithm in K-fold working precision,
K >= 3.
"""
xx = x.reshape(-1, x.shape[-1])
yy = y.reshape(y.shape[0], -1)
xx = numpy.ascontiguousarray(xx)
yy = numpy.ascontiguousarray(yy)
r = _accupy.kdot_helper(xx, yy).reshape((-1,) + x.shape[:-1] + y.shape[1:])
return ksum(r, K - 1) | [
"def",
"kdot",
"(",
"x",
",",
"y",
",",
"K",
"=",
"2",
")",
":",
"xx",
"=",
"x",
".",
"reshape",
"(",
"-",
"1",
",",
"x",
".",
"shape",
"[",
"-",
"1",
"]",
")",
"yy",
"=",
"y",
".",
"reshape",
"(",
"y",
".",
"shape",
"[",
"0",
"]",
",... | Algorithm 5.10. Dot product algorithm in K-fold working precision,
K >= 3. | [
"Algorithm",
"5",
".",
"10",
".",
"Dot",
"product",
"algorithm",
"in",
"K",
"-",
"fold",
"working",
"precision",
"K",
">",
"=",
"3",
"."
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/dot.py#L23-L34 |
nschloe/accupy | accupy/dot.py | fdot | def fdot(x, y):
"""Algorithm 5.10. Dot product algorithm in K-fold working precision,
K >= 3.
"""
xx = x.reshape(-1, x.shape[-1])
yy = y.reshape(y.shape[0], -1)
xx = numpy.ascontiguousarray(xx)
yy = numpy.ascontiguousarray(yy)
r = _accupy.kdot_helper(xx, yy).reshape((-1,) + x.shape[:-1] + y.shape[1:])
return fsum(r) | python | def fdot(x, y):
"""Algorithm 5.10. Dot product algorithm in K-fold working precision,
K >= 3.
"""
xx = x.reshape(-1, x.shape[-1])
yy = y.reshape(y.shape[0], -1)
xx = numpy.ascontiguousarray(xx)
yy = numpy.ascontiguousarray(yy)
r = _accupy.kdot_helper(xx, yy).reshape((-1,) + x.shape[:-1] + y.shape[1:])
return fsum(r) | [
"def",
"fdot",
"(",
"x",
",",
"y",
")",
":",
"xx",
"=",
"x",
".",
"reshape",
"(",
"-",
"1",
",",
"x",
".",
"shape",
"[",
"-",
"1",
"]",
")",
"yy",
"=",
"y",
".",
"reshape",
"(",
"y",
".",
"shape",
"[",
"0",
"]",
",",
"-",
"1",
")",
"x... | Algorithm 5.10. Dot product algorithm in K-fold working precision,
K >= 3. | [
"Algorithm",
"5",
".",
"10",
".",
"Dot",
"product",
"algorithm",
"in",
"K",
"-",
"fold",
"working",
"precision",
"K",
">",
"=",
"3",
"."
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/dot.py#L37-L48 |
innogames/polysh | polysh/main.py | kill_all | def kill_all():
"""When polysh quits, we kill all the remote shells we started"""
for i in dispatchers.all_instances():
try:
os.kill(-i.pid, signal.SIGKILL)
except OSError:
# The process was already dead, no problem
pass | python | def kill_all():
"""When polysh quits, we kill all the remote shells we started"""
for i in dispatchers.all_instances():
try:
os.kill(-i.pid, signal.SIGKILL)
except OSError:
# The process was already dead, no problem
pass | [
"def",
"kill_all",
"(",
")",
":",
"for",
"i",
"in",
"dispatchers",
".",
"all_instances",
"(",
")",
":",
"try",
":",
"os",
".",
"kill",
"(",
"-",
"i",
".",
"pid",
",",
"signal",
".",
"SIGKILL",
")",
"except",
"OSError",
":",
"# The process was already d... | When polysh quits, we kill all the remote shells we started | [
"When",
"polysh",
"quits",
"we",
"kill",
"all",
"the",
"remote",
"shells",
"we",
"started"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/main.py#L39-L46 |
innogames/polysh | polysh/main.py | run | def run():
"""Launch polysh"""
locale.setlocale(locale.LC_ALL, '')
atexit.register(kill_all)
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
args = parse_cmdline()
args.command = find_non_interactive_command(args.command)
args.exit_code = 0
args.interactive = (
not args.command
and sys.stdin.isatty()
and sys.stdout.isatty())
if args.interactive:
restore_tty_on_exit()
remote_dispatcher.options = args
hosts = []
for host in args.host_names:
hosts.extend(expand_syntax(host))
dispatchers.create_remote_dispatchers(hosts)
signal.signal(signal.SIGWINCH, lambda signum, frame:
dispatchers.update_terminal_size())
stdin.the_stdin_thread = stdin.StdinThread(args.interactive)
if args.profile:
def safe_loop():
try:
loop(args.interactive)
except BaseException:
pass
_profile(safe_loop)
else:
loop(args.interactive) | python | def run():
"""Launch polysh"""
locale.setlocale(locale.LC_ALL, '')
atexit.register(kill_all)
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
args = parse_cmdline()
args.command = find_non_interactive_command(args.command)
args.exit_code = 0
args.interactive = (
not args.command
and sys.stdin.isatty()
and sys.stdout.isatty())
if args.interactive:
restore_tty_on_exit()
remote_dispatcher.options = args
hosts = []
for host in args.host_names:
hosts.extend(expand_syntax(host))
dispatchers.create_remote_dispatchers(hosts)
signal.signal(signal.SIGWINCH, lambda signum, frame:
dispatchers.update_terminal_size())
stdin.the_stdin_thread = stdin.StdinThread(args.interactive)
if args.profile:
def safe_loop():
try:
loop(args.interactive)
except BaseException:
pass
_profile(safe_loop)
else:
loop(args.interactive) | [
"def",
"run",
"(",
")",
":",
"locale",
".",
"setlocale",
"(",
"locale",
".",
"LC_ALL",
",",
"''",
")",
"atexit",
".",
"register",
"(",
"kill_all",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGPIPE",
",",
"signal",
".",
"SIG_DFL",
")",
"args"... | Launch polysh | [
"Launch",
"polysh"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/main.py#L220-L258 |
innogames/polysh | polysh/main.py | main | def main():
"""Wrapper around run() to setup sentry"""
sentry_dsn = os.environ.get('POLYSH_SENTRY_DSN')
if sentry_dsn:
from raven import Client
client = Client(
dsn=sentry_dsn,
release='.'.join(map(str, VERSION)),
ignore_exceptions=[
KeyboardInterrupt
]
)
try:
run()
except Exception:
client.captureException()
else:
run() | python | def main():
"""Wrapper around run() to setup sentry"""
sentry_dsn = os.environ.get('POLYSH_SENTRY_DSN')
if sentry_dsn:
from raven import Client
client = Client(
dsn=sentry_dsn,
release='.'.join(map(str, VERSION)),
ignore_exceptions=[
KeyboardInterrupt
]
)
try:
run()
except Exception:
client.captureException()
else:
run() | [
"def",
"main",
"(",
")",
":",
"sentry_dsn",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'POLYSH_SENTRY_DSN'",
")",
"if",
"sentry_dsn",
":",
"from",
"raven",
"import",
"Client",
"client",
"=",
"Client",
"(",
"dsn",
"=",
"sentry_dsn",
",",
"release",
"=",... | Wrapper around run() to setup sentry | [
"Wrapper",
"around",
"run",
"()",
"to",
"setup",
"sentry"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/main.py#L261-L282 |
innogames/polysh | polysh/buffered_dispatcher.py | BufferedDispatcher._handle_read_chunk | def _handle_read_chunk(self):
"""Some data can be read"""
new_data = b''
buffer_length = len(self.read_buffer)
try:
while buffer_length < self.MAX_BUFFER_SIZE:
try:
piece = self.recv(4096)
except OSError as e:
if e.errno == errno.EAGAIN:
# End of the available data
break
elif e.errno == errno.EIO and new_data:
# Hopefully we could read an error message before the
# actual termination
break
else:
raise
if not piece:
# A closed connection is indicated by signaling a read
# condition, and having recv() return 0.
break
new_data += piece
buffer_length += len(piece)
finally:
new_data = new_data.replace(b'\r', b'\n')
self.read_buffer += new_data
return new_data | python | def _handle_read_chunk(self):
"""Some data can be read"""
new_data = b''
buffer_length = len(self.read_buffer)
try:
while buffer_length < self.MAX_BUFFER_SIZE:
try:
piece = self.recv(4096)
except OSError as e:
if e.errno == errno.EAGAIN:
# End of the available data
break
elif e.errno == errno.EIO and new_data:
# Hopefully we could read an error message before the
# actual termination
break
else:
raise
if not piece:
# A closed connection is indicated by signaling a read
# condition, and having recv() return 0.
break
new_data += piece
buffer_length += len(piece)
finally:
new_data = new_data.replace(b'\r', b'\n')
self.read_buffer += new_data
return new_data | [
"def",
"_handle_read_chunk",
"(",
"self",
")",
":",
"new_data",
"=",
"b''",
"buffer_length",
"=",
"len",
"(",
"self",
".",
"read_buffer",
")",
"try",
":",
"while",
"buffer_length",
"<",
"self",
".",
"MAX_BUFFER_SIZE",
":",
"try",
":",
"piece",
"=",
"self",... | Some data can be read | [
"Some",
"data",
"can",
"be",
"read"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/buffered_dispatcher.py#L41-L71 |
innogames/polysh | polysh/buffered_dispatcher.py | BufferedDispatcher.dispatch_write | def dispatch_write(self, buf):
"""Augment the buffer with stuff to write when possible"""
self.write_buffer += buf
if len(self.write_buffer) > self.MAX_BUFFER_SIZE:
console_output('Buffer too big ({:d}) for {}\n'.format(
len(self.write_buffer), str(self)).encode())
raise asyncore.ExitNow(1)
return True | python | def dispatch_write(self, buf):
"""Augment the buffer with stuff to write when possible"""
self.write_buffer += buf
if len(self.write_buffer) > self.MAX_BUFFER_SIZE:
console_output('Buffer too big ({:d}) for {}\n'.format(
len(self.write_buffer), str(self)).encode())
raise asyncore.ExitNow(1)
return True | [
"def",
"dispatch_write",
"(",
"self",
",",
"buf",
")",
":",
"self",
".",
"write_buffer",
"+=",
"buf",
"if",
"len",
"(",
"self",
".",
"write_buffer",
")",
">",
"self",
".",
"MAX_BUFFER_SIZE",
":",
"console_output",
"(",
"'Buffer too big ({:d}) for {}\\n'",
".",... | Augment the buffer with stuff to write when possible | [
"Augment",
"the",
"buffer",
"with",
"stuff",
"to",
"write",
"when",
"possible"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/buffered_dispatcher.py#L81-L88 |
innogames/polysh | polysh/console.py | safe_write | def safe_write(buf):
"""We can get a SIGWINCH when printing, which will cause write to raise
an EINTR. That's not a reason to stop printing."""
assert isinstance(buf, bytes)
while True:
try:
os.write(1, buf)
break
except IOError as e:
if e.errno != errno.EINTR:
raise | python | def safe_write(buf):
"""We can get a SIGWINCH when printing, which will cause write to raise
an EINTR. That's not a reason to stop printing."""
assert isinstance(buf, bytes)
while True:
try:
os.write(1, buf)
break
except IOError as e:
if e.errno != errno.EINTR:
raise | [
"def",
"safe_write",
"(",
"buf",
")",
":",
"assert",
"isinstance",
"(",
"buf",
",",
"bytes",
")",
"while",
"True",
":",
"try",
":",
"os",
".",
"write",
"(",
"1",
",",
"buf",
")",
"break",
"except",
"IOError",
"as",
"e",
":",
"if",
"e",
".",
"errn... | We can get a SIGWINCH when printing, which will cause write to raise
an EINTR. That's not a reason to stop printing. | [
"We",
"can",
"get",
"a",
"SIGWINCH",
"when",
"printing",
"which",
"will",
"cause",
"write",
"to",
"raise",
"an",
"EINTR",
".",
"That",
"s",
"not",
"a",
"reason",
"to",
"stop",
"printing",
"."
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/console.py#L27-L37 |
innogames/polysh | polysh/console.py | console_output | def console_output(msg, logging_msg=None):
"""Use instead of print, to clear the status information before printing"""
assert isinstance(msg, bytes)
assert isinstance(logging_msg, bytes) or logging_msg is None
from polysh import remote_dispatcher
remote_dispatcher.log(logging_msg or msg)
if remote_dispatcher.options.interactive:
from polysh.stdin import the_stdin_thread
the_stdin_thread.no_raw_input()
global last_status_length
if last_status_length:
safe_write('\r{}\r'.format(
last_status_length * ' ').encode())
last_status_length = 0
safe_write(msg) | python | def console_output(msg, logging_msg=None):
"""Use instead of print, to clear the status information before printing"""
assert isinstance(msg, bytes)
assert isinstance(logging_msg, bytes) or logging_msg is None
from polysh import remote_dispatcher
remote_dispatcher.log(logging_msg or msg)
if remote_dispatcher.options.interactive:
from polysh.stdin import the_stdin_thread
the_stdin_thread.no_raw_input()
global last_status_length
if last_status_length:
safe_write('\r{}\r'.format(
last_status_length * ' ').encode())
last_status_length = 0
safe_write(msg) | [
"def",
"console_output",
"(",
"msg",
",",
"logging_msg",
"=",
"None",
")",
":",
"assert",
"isinstance",
"(",
"msg",
",",
"bytes",
")",
"assert",
"isinstance",
"(",
"logging_msg",
",",
"bytes",
")",
"or",
"logging_msg",
"is",
"None",
"from",
"polysh",
"impo... | Use instead of print, to clear the status information before printing | [
"Use",
"instead",
"of",
"print",
"to",
"clear",
"the",
"status",
"information",
"before",
"printing"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/console.py#L40-L56 |
innogames/polysh | polysh/host_syntax.py | expand_syntax | def expand_syntax(string):
"""Iterator over all the strings in the expansion of the argument"""
match = syntax_pattern.search(string)
if match:
prefix = string[:match.start()]
suffix = string[match.end():]
intervals = match.group(1).split(',')
for interval in intervals:
interval_match = interval_pattern.match(interval)
if interval_match:
start = interval_match.group(1)
end = (interval_match.group(2) or start).strip('-')
for i in _iter_numbers(start, end):
for expanded in expand_syntax(prefix + i + suffix):
yield expanded
else:
yield string | python | def expand_syntax(string):
"""Iterator over all the strings in the expansion of the argument"""
match = syntax_pattern.search(string)
if match:
prefix = string[:match.start()]
suffix = string[match.end():]
intervals = match.group(1).split(',')
for interval in intervals:
interval_match = interval_pattern.match(interval)
if interval_match:
start = interval_match.group(1)
end = (interval_match.group(2) or start).strip('-')
for i in _iter_numbers(start, end):
for expanded in expand_syntax(prefix + i + suffix):
yield expanded
else:
yield string | [
"def",
"expand_syntax",
"(",
"string",
")",
":",
"match",
"=",
"syntax_pattern",
".",
"search",
"(",
"string",
")",
"if",
"match",
":",
"prefix",
"=",
"string",
"[",
":",
"match",
".",
"start",
"(",
")",
"]",
"suffix",
"=",
"string",
"[",
"match",
".... | Iterator over all the strings in the expansion of the argument | [
"Iterator",
"over",
"all",
"the",
"strings",
"in",
"the",
"expansion",
"of",
"the",
"argument"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/host_syntax.py#L57-L73 |
innogames/polysh | polysh/dispatchers.py | all_instances | def all_instances():
"""Iterator over all the remote_dispatcher instances"""
return sorted([i for i in asyncore.socket_map.values() if
isinstance(i, remote_dispatcher.RemoteDispatcher)],
key=lambda i: i.display_name or '') | python | def all_instances():
"""Iterator over all the remote_dispatcher instances"""
return sorted([i for i in asyncore.socket_map.values() if
isinstance(i, remote_dispatcher.RemoteDispatcher)],
key=lambda i: i.display_name or '') | [
"def",
"all_instances",
"(",
")",
":",
"return",
"sorted",
"(",
"[",
"i",
"for",
"i",
"in",
"asyncore",
".",
"socket_map",
".",
"values",
"(",
")",
"if",
"isinstance",
"(",
"i",
",",
"remote_dispatcher",
".",
"RemoteDispatcher",
")",
"]",
",",
"key",
"... | Iterator over all the remote_dispatcher instances | [
"Iterator",
"over",
"all",
"the",
"remote_dispatcher",
"instances"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/dispatchers.py#L40-L44 |
innogames/polysh | polysh/dispatchers.py | count_awaited_processes | def count_awaited_processes():
"""Return a tuple with the number of awaited processes and the total
number"""
awaited = 0
total = 0
for i in all_instances():
if i.enabled:
total += 1
if i.state is not remote_dispatcher.STATE_IDLE:
awaited += 1
return awaited, total | python | def count_awaited_processes():
"""Return a tuple with the number of awaited processes and the total
number"""
awaited = 0
total = 0
for i in all_instances():
if i.enabled:
total += 1
if i.state is not remote_dispatcher.STATE_IDLE:
awaited += 1
return awaited, total | [
"def",
"count_awaited_processes",
"(",
")",
":",
"awaited",
"=",
"0",
"total",
"=",
"0",
"for",
"i",
"in",
"all_instances",
"(",
")",
":",
"if",
"i",
".",
"enabled",
":",
"total",
"+=",
"1",
"if",
"i",
".",
"state",
"is",
"not",
"remote_dispatcher",
... | Return a tuple with the number of awaited processes and the total
number | [
"Return",
"a",
"tuple",
"with",
"the",
"number",
"of",
"awaited",
"processes",
"and",
"the",
"total",
"number"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/dispatchers.py#L47-L57 |
innogames/polysh | polysh/dispatchers.py | all_terminated | def all_terminated():
"""For each remote shell determine if its terminated"""
instances_found = False
for i in all_instances():
instances_found = True
if i.state not in (remote_dispatcher.STATE_TERMINATED,
remote_dispatcher.STATE_DEAD):
return False
return instances_found | python | def all_terminated():
"""For each remote shell determine if its terminated"""
instances_found = False
for i in all_instances():
instances_found = True
if i.state not in (remote_dispatcher.STATE_TERMINATED,
remote_dispatcher.STATE_DEAD):
return False
return instances_found | [
"def",
"all_terminated",
"(",
")",
":",
"instances_found",
"=",
"False",
"for",
"i",
"in",
"all_instances",
"(",
")",
":",
"instances_found",
"=",
"True",
"if",
"i",
".",
"state",
"not",
"in",
"(",
"remote_dispatcher",
".",
"STATE_TERMINATED",
",",
"remote_d... | For each remote shell determine if its terminated | [
"For",
"each",
"remote",
"shell",
"determine",
"if",
"its",
"terminated"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/dispatchers.py#L60-L68 |
innogames/polysh | polysh/dispatchers.py | update_terminal_size | def update_terminal_size():
"""Propagate the terminal size to the remote shells accounting for the
place taken by the longest name"""
w, h = terminal_size()
w = max(w - display_names.max_display_name_length - 2, min(w, 10))
# python bug http://python.org/sf/1112949 on amd64
# from ajaxterm.py
bug = struct.unpack('i', struct.pack('I', termios.TIOCSWINSZ))[0]
packed_size = struct.pack('HHHH', h, w, 0, 0)
term_size = w, h
for i in all_instances():
if i.enabled and i.term_size != term_size:
i.term_size = term_size
fcntl.ioctl(i.fd, bug, packed_size) | python | def update_terminal_size():
"""Propagate the terminal size to the remote shells accounting for the
place taken by the longest name"""
w, h = terminal_size()
w = max(w - display_names.max_display_name_length - 2, min(w, 10))
# python bug http://python.org/sf/1112949 on amd64
# from ajaxterm.py
bug = struct.unpack('i', struct.pack('I', termios.TIOCSWINSZ))[0]
packed_size = struct.pack('HHHH', h, w, 0, 0)
term_size = w, h
for i in all_instances():
if i.enabled and i.term_size != term_size:
i.term_size = term_size
fcntl.ioctl(i.fd, bug, packed_size) | [
"def",
"update_terminal_size",
"(",
")",
":",
"w",
",",
"h",
"=",
"terminal_size",
"(",
")",
"w",
"=",
"max",
"(",
"w",
"-",
"display_names",
".",
"max_display_name_length",
"-",
"2",
",",
"min",
"(",
"w",
",",
"10",
")",
")",
"# python bug http://python... | Propagate the terminal size to the remote shells accounting for the
place taken by the longest name | [
"Propagate",
"the",
"terminal",
"size",
"to",
"the",
"remote",
"shells",
"accounting",
"for",
"the",
"place",
"taken",
"by",
"the",
"longest",
"name"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/dispatchers.py#L71-L84 |
innogames/polysh | polysh/dispatchers.py | format_info | def format_info(info_list):
"""Turn a 2-dimension list of bytes into a 1-dimension list of bytes with
correct spacing"""
max_lengths = []
if info_list:
nr_columns = len(info_list[0])
else:
nr_columns = 0
for i in range(nr_columns):
max_lengths.append(max([len(info[i]) for info in info_list]))
flattened_info_list = []
for info_id in range(len(info_list)):
info = info_list[info_id]
for str_id in range(len(info) - 1):
# Don't justify the last column (i.e. the last printed line)
# as it can get much longer in some shells than in others
orig_str = info[str_id]
indent = max_lengths[str_id] - len(orig_str)
info[str_id] = orig_str + indent * b' '
flattened_info_list.append(b' '.join(info) + b'\n')
return flattened_info_list | python | def format_info(info_list):
"""Turn a 2-dimension list of bytes into a 1-dimension list of bytes with
correct spacing"""
max_lengths = []
if info_list:
nr_columns = len(info_list[0])
else:
nr_columns = 0
for i in range(nr_columns):
max_lengths.append(max([len(info[i]) for info in info_list]))
flattened_info_list = []
for info_id in range(len(info_list)):
info = info_list[info_id]
for str_id in range(len(info) - 1):
# Don't justify the last column (i.e. the last printed line)
# as it can get much longer in some shells than in others
orig_str = info[str_id]
indent = max_lengths[str_id] - len(orig_str)
info[str_id] = orig_str + indent * b' '
flattened_info_list.append(b' '.join(info) + b'\n')
return flattened_info_list | [
"def",
"format_info",
"(",
"info_list",
")",
":",
"max_lengths",
"=",
"[",
"]",
"if",
"info_list",
":",
"nr_columns",
"=",
"len",
"(",
"info_list",
"[",
"0",
"]",
")",
"else",
":",
"nr_columns",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"nr_columns",
... | Turn a 2-dimension list of bytes into a 1-dimension list of bytes with
correct spacing | [
"Turn",
"a",
"2",
"-",
"dimension",
"list",
"of",
"bytes",
"into",
"a",
"1",
"-",
"dimension",
"list",
"of",
"bytes",
"with",
"correct",
"spacing"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/dispatchers.py#L87-L110 |
nschloe/accupy | accupy/ill_cond.py | generate_ill_conditioned_dot_product | def generate_ill_conditioned_dot_product(n, c, dps=100):
"""n ... length of vector
c ... target condition number
"""
# Algorithm 6.1 from
#
# ACCURATE SUM AND DOT PRODUCT,
# TAKESHI OGITA, SIEGFRIED M. RUMP, AND SHIN'ICHI OISHI.
assert n >= 6
n2 = round(n / 2)
x = numpy.zeros(n)
y = numpy.zeros(n)
b = math.log2(c)
# vector of exponents between 0 and b/2:
e = numpy.rint(numpy.random.rand(n2) * b / 2).astype(int)
# make sure exponents b/2 and 0 actually occur in e
# vectors x,y
e[0] = round(b / 2) + 1
e[-1] = 0
# generate first half of vectors x, y
rx, ry = numpy.random.rand(2, n2)
x[:n2] = (2 * rx - 1) * 2 ** e
y[:n2] = (2 * ry - 1) * 2 ** e
def dot_exact(x, y):
mp.dps = dps
# convert to list first, see
# <https://github.com/fredrik-johansson/mpmath/pull/385>
return mp.fdot(x.tolist(), y.tolist())
# for i=n2+1:n and v=1:i,
# generate x_i, y_i such that (*) x(v)’*y(v) ~ 2^e(i-n2)
# generate exponents for second half
e = numpy.rint(numpy.linspace(b / 2, 0, n - n2)).astype(int)
rx, ry = numpy.random.rand(2, n2)
for i in range(n2, n):
# x_i random with generated exponent
x[i] = (2 * rx[i - n2] - 1) * 2 ** e[i - n2]
# y_i according to (*)
y[i] = (
(2 * ry[i - n2] - 1) * 2 ** e[i - n2] - dot_exact(x[: i + 1], y[: i + 1])
) / x[i]
x, y = numpy.random.permutation((x, y))
# the true dot product rounded to nearest floating point
d = dot_exact(x, y)
# the actual condition number
C = 2 * dot_exact(abs(x), abs(y)) / abs(d)
return x, y, d, C | python | def generate_ill_conditioned_dot_product(n, c, dps=100):
"""n ... length of vector
c ... target condition number
"""
# Algorithm 6.1 from
#
# ACCURATE SUM AND DOT PRODUCT,
# TAKESHI OGITA, SIEGFRIED M. RUMP, AND SHIN'ICHI OISHI.
assert n >= 6
n2 = round(n / 2)
x = numpy.zeros(n)
y = numpy.zeros(n)
b = math.log2(c)
# vector of exponents between 0 and b/2:
e = numpy.rint(numpy.random.rand(n2) * b / 2).astype(int)
# make sure exponents b/2 and 0 actually occur in e
# vectors x,y
e[0] = round(b / 2) + 1
e[-1] = 0
# generate first half of vectors x, y
rx, ry = numpy.random.rand(2, n2)
x[:n2] = (2 * rx - 1) * 2 ** e
y[:n2] = (2 * ry - 1) * 2 ** e
def dot_exact(x, y):
mp.dps = dps
# convert to list first, see
# <https://github.com/fredrik-johansson/mpmath/pull/385>
return mp.fdot(x.tolist(), y.tolist())
# for i=n2+1:n and v=1:i,
# generate x_i, y_i such that (*) x(v)’*y(v) ~ 2^e(i-n2)
# generate exponents for second half
e = numpy.rint(numpy.linspace(b / 2, 0, n - n2)).astype(int)
rx, ry = numpy.random.rand(2, n2)
for i in range(n2, n):
# x_i random with generated exponent
x[i] = (2 * rx[i - n2] - 1) * 2 ** e[i - n2]
# y_i according to (*)
y[i] = (
(2 * ry[i - n2] - 1) * 2 ** e[i - n2] - dot_exact(x[: i + 1], y[: i + 1])
) / x[i]
x, y = numpy.random.permutation((x, y))
# the true dot product rounded to nearest floating point
d = dot_exact(x, y)
# the actual condition number
C = 2 * dot_exact(abs(x), abs(y)) / abs(d)
return x, y, d, C | [
"def",
"generate_ill_conditioned_dot_product",
"(",
"n",
",",
"c",
",",
"dps",
"=",
"100",
")",
":",
"# Algorithm 6.1 from",
"#",
"# ACCURATE SUM AND DOT PRODUCT,",
"# TAKESHI OGITA, SIEGFRIED M. RUMP, AND SHIN'ICHI OISHI.",
"assert",
"n",
">=",
"6",
"n2",
"=",
"round",
... | n ... length of vector
c ... target condition number | [
"n",
"...",
"length",
"of",
"vector",
"c",
"...",
"target",
"condition",
"number"
] | train | https://github.com/nschloe/accupy/blob/63a031cab7f4d3b9ba1073f9328c10c1862d1c4d/accupy/ill_cond.py#L34-L85 |
innogames/polysh | polysh/remote_dispatcher.py | main_loop_iteration | def main_loop_iteration(timeout=None):
"""Return the number of RemoteDispatcher.handle_read() calls made by this
iteration"""
prev_nr_read = nr_handle_read
asyncore.loop(count=1, timeout=timeout, use_poll=True)
return nr_handle_read - prev_nr_read | python | def main_loop_iteration(timeout=None):
"""Return the number of RemoteDispatcher.handle_read() calls made by this
iteration"""
prev_nr_read = nr_handle_read
asyncore.loop(count=1, timeout=timeout, use_poll=True)
return nr_handle_read - prev_nr_read | [
"def",
"main_loop_iteration",
"(",
"timeout",
"=",
"None",
")",
":",
"prev_nr_read",
"=",
"nr_handle_read",
"asyncore",
".",
"loop",
"(",
"count",
"=",
"1",
",",
"timeout",
"=",
"timeout",
",",
"use_poll",
"=",
"True",
")",
"return",
"nr_handle_read",
"-",
... | Return the number of RemoteDispatcher.handle_read() calls made by this
iteration | [
"Return",
"the",
"number",
"of",
"RemoteDispatcher",
".",
"handle_read",
"()",
"calls",
"made",
"by",
"this",
"iteration"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L51-L56 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.launch_ssh | def launch_ssh(self, name, port):
"""Launch the ssh command in the child process"""
if options.user:
name = '%s@%s' % (options.user, name)
evaluated = options.ssh % {'host': name, 'port': port}
if evaluated == options.ssh:
evaluated = '%s %s' % (evaluated, name)
os.execlp('/bin/sh', 'sh', '-c', evaluated) | python | def launch_ssh(self, name, port):
"""Launch the ssh command in the child process"""
if options.user:
name = '%s@%s' % (options.user, name)
evaluated = options.ssh % {'host': name, 'port': port}
if evaluated == options.ssh:
evaluated = '%s %s' % (evaluated, name)
os.execlp('/bin/sh', 'sh', '-c', evaluated) | [
"def",
"launch_ssh",
"(",
"self",
",",
"name",
",",
"port",
")",
":",
"if",
"options",
".",
"user",
":",
"name",
"=",
"'%s@%s'",
"%",
"(",
"options",
".",
"user",
",",
"name",
")",
"evaluated",
"=",
"options",
".",
"ssh",
"%",
"{",
"'host'",
":",
... | Launch the ssh command in the child process | [
"Launch",
"the",
"ssh",
"command",
"in",
"the",
"child",
"process"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L113-L120 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.change_state | def change_state(self, state):
"""Change the state of the remote process, logging the change"""
if state is not self.state:
if self.debug:
self.print_debug(b'state => ' + STATE_NAMES[state].encode())
if self.state is STATE_NOT_STARTED:
self.read_in_state_not_started = b''
self.state = state | python | def change_state(self, state):
"""Change the state of the remote process, logging the change"""
if state is not self.state:
if self.debug:
self.print_debug(b'state => ' + STATE_NAMES[state].encode())
if self.state is STATE_NOT_STARTED:
self.read_in_state_not_started = b''
self.state = state | [
"def",
"change_state",
"(",
"self",
",",
"state",
")",
":",
"if",
"state",
"is",
"not",
"self",
".",
"state",
":",
"if",
"self",
".",
"debug",
":",
"self",
".",
"print_debug",
"(",
"b'state => '",
"+",
"STATE_NAMES",
"[",
"state",
"]",
".",
"encode",
... | Change the state of the remote process, logging the change | [
"Change",
"the",
"state",
"of",
"the",
"remote",
"process",
"logging",
"the",
"change"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L130-L137 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.disconnect | def disconnect(self):
"""We are no more interested in this remote process"""
try:
os.kill(-self.pid, signal.SIGKILL)
except OSError:
# The process was already dead, no problem
pass
self.read_buffer = b''
self.write_buffer = b''
self.set_enabled(False)
if self.read_in_state_not_started:
self.print_lines(self.read_in_state_not_started)
self.read_in_state_not_started = b''
if options.abort_error and self.state is STATE_NOT_STARTED:
raise asyncore.ExitNow(1)
self.change_state(STATE_DEAD) | python | def disconnect(self):
"""We are no more interested in this remote process"""
try:
os.kill(-self.pid, signal.SIGKILL)
except OSError:
# The process was already dead, no problem
pass
self.read_buffer = b''
self.write_buffer = b''
self.set_enabled(False)
if self.read_in_state_not_started:
self.print_lines(self.read_in_state_not_started)
self.read_in_state_not_started = b''
if options.abort_error and self.state is STATE_NOT_STARTED:
raise asyncore.ExitNow(1)
self.change_state(STATE_DEAD) | [
"def",
"disconnect",
"(",
"self",
")",
":",
"try",
":",
"os",
".",
"kill",
"(",
"-",
"self",
".",
"pid",
",",
"signal",
".",
"SIGKILL",
")",
"except",
"OSError",
":",
"# The process was already dead, no problem",
"pass",
"self",
".",
"read_buffer",
"=",
"b... | We are no more interested in this remote process | [
"We",
"are",
"no",
"more",
"interested",
"in",
"this",
"remote",
"process"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L139-L154 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.configure_tty | def configure_tty(self):
"""We don't want \n to be replaced with \r\n, and we disable the echo"""
attr = termios.tcgetattr(self.fd)
attr[1] &= ~termios.ONLCR # oflag
attr[3] &= ~termios.ECHO # lflag
termios.tcsetattr(self.fd, termios.TCSANOW, attr)
# unsetopt zle prevents Zsh from resetting the tty
return b'unsetopt zle 2> /dev/null;stty -echo -onlcr -ctlecho;' | python | def configure_tty(self):
"""We don't want \n to be replaced with \r\n, and we disable the echo"""
attr = termios.tcgetattr(self.fd)
attr[1] &= ~termios.ONLCR # oflag
attr[3] &= ~termios.ECHO # lflag
termios.tcsetattr(self.fd, termios.TCSANOW, attr)
# unsetopt zle prevents Zsh from resetting the tty
return b'unsetopt zle 2> /dev/null;stty -echo -onlcr -ctlecho;' | [
"def",
"configure_tty",
"(",
"self",
")",
":",
"attr",
"=",
"termios",
".",
"tcgetattr",
"(",
"self",
".",
"fd",
")",
"attr",
"[",
"1",
"]",
"&=",
"~",
"termios",
".",
"ONLCR",
"# oflag",
"attr",
"[",
"3",
"]",
"&=",
"~",
"termios",
".",
"ECHO",
... | We don't want \n to be replaced with \r\n, and we disable the echo | [
"We",
"don",
"t",
"want",
"\\",
"n",
"to",
"be",
"replaced",
"with",
"\\",
"r",
"\\",
"n",
"and",
"we",
"disable",
"the",
"echo"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L156-L163 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.set_prompt | def set_prompt(self):
"""The prompt is important because we detect the readyness of a process
by waiting for its prompt."""
# No right prompt
command_line = b'PS2=;RPS1=;RPROMPT=;'
command_line += b'PROMPT_COMMAND=;'
command_line += b'TERM=ansi;'
command_line += b'unset HISTFILE;'
prompt1, prompt2 = callbacks.add(b'prompt', self.seen_prompt_cb, True)
command_line += b'PS1="' + prompt1 + b'""' + prompt2 + b'\n"\n'
return command_line | python | def set_prompt(self):
"""The prompt is important because we detect the readyness of a process
by waiting for its prompt."""
# No right prompt
command_line = b'PS2=;RPS1=;RPROMPT=;'
command_line += b'PROMPT_COMMAND=;'
command_line += b'TERM=ansi;'
command_line += b'unset HISTFILE;'
prompt1, prompt2 = callbacks.add(b'prompt', self.seen_prompt_cb, True)
command_line += b'PS1="' + prompt1 + b'""' + prompt2 + b'\n"\n'
return command_line | [
"def",
"set_prompt",
"(",
"self",
")",
":",
"# No right prompt",
"command_line",
"=",
"b'PS2=;RPS1=;RPROMPT=;'",
"command_line",
"+=",
"b'PROMPT_COMMAND=;'",
"command_line",
"+=",
"b'TERM=ansi;'",
"command_line",
"+=",
"b'unset HISTFILE;'",
"prompt1",
",",
"prompt2",
"=",... | The prompt is important because we detect the readyness of a process
by waiting for its prompt. | [
"The",
"prompt",
"is",
"important",
"because",
"we",
"detect",
"the",
"readyness",
"of",
"a",
"process",
"by",
"waiting",
"for",
"its",
"prompt",
"."
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L175-L185 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.handle_read_fast_case | def handle_read_fast_case(self, data):
"""If we are in a fast case we'll avoid the long processing of each
line"""
if self.state is not STATE_RUNNING or callbacks.any_in(data):
# Slow case :-(
return False
last_nl = data.rfind(b'\n')
if last_nl == -1:
# No '\n' in data => slow case
return False
self.read_buffer = data[last_nl + 1:]
self.print_lines(data[:last_nl])
return True | python | def handle_read_fast_case(self, data):
"""If we are in a fast case we'll avoid the long processing of each
line"""
if self.state is not STATE_RUNNING or callbacks.any_in(data):
# Slow case :-(
return False
last_nl = data.rfind(b'\n')
if last_nl == -1:
# No '\n' in data => slow case
return False
self.read_buffer = data[last_nl + 1:]
self.print_lines(data[:last_nl])
return True | [
"def",
"handle_read_fast_case",
"(",
"self",
",",
"data",
")",
":",
"if",
"self",
".",
"state",
"is",
"not",
"STATE_RUNNING",
"or",
"callbacks",
".",
"any_in",
"(",
"data",
")",
":",
"# Slow case :-(",
"return",
"False",
"last_nl",
"=",
"data",
".",
"rfind... | If we are in a fast case we'll avoid the long processing of each
line | [
"If",
"we",
"are",
"in",
"a",
"fast",
"case",
"we",
"ll",
"avoid",
"the",
"long",
"processing",
"of",
"each",
"line"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L243-L256 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.handle_read | def handle_read(self):
"""We got some output from a remote shell, this is one of the state
machine"""
if self.state == STATE_DEAD:
return
global nr_handle_read
nr_handle_read += 1
new_data = self._handle_read_chunk()
if self.debug:
self.print_debug(b'==> ' + new_data)
if self.handle_read_fast_case(self.read_buffer):
return
lf_pos = new_data.find(b'\n')
if lf_pos >= 0:
# Optimization: we knew there were no '\n' in the previous read
# buffer, so we searched only in the new_data and we offset the
# found index by the length of the previous buffer
lf_pos += len(self.read_buffer) - len(new_data)
elif self.state is STATE_NOT_STARTED and \
options.password is not None and \
b'password:' in self.read_buffer.lower():
self.dispatch_write('{}\n'.format(options.password).encode())
self.read_buffer = b''
return
while lf_pos >= 0:
# For each line in the buffer
line = self.read_buffer[:lf_pos + 1]
if callbacks.process(line):
pass
elif self.state in (STATE_IDLE, STATE_RUNNING):
self.print_lines(line)
elif self.state is STATE_NOT_STARTED:
self.read_in_state_not_started += line
if b'The authenticity of host' in line:
msg = line.strip(b'\n') + b' Closing connection.'
self.disconnect()
elif b'REMOTE HOST IDENTIFICATION HAS CHANGED' in line:
msg = b'Remote host identification has changed.'
else:
msg = None
if msg:
self.print_lines(msg + b' Consider manually connecting or '
b'using ssh-keyscan.')
# Go to the next line in the buffer
self.read_buffer = self.read_buffer[lf_pos + 1:]
if self.handle_read_fast_case(self.read_buffer):
return
lf_pos = self.read_buffer.find(b'\n')
if self.state is STATE_NOT_STARTED and not self.init_string_sent:
self.dispatch_write(self.init_string)
self.init_string_sent = True | python | def handle_read(self):
"""We got some output from a remote shell, this is one of the state
machine"""
if self.state == STATE_DEAD:
return
global nr_handle_read
nr_handle_read += 1
new_data = self._handle_read_chunk()
if self.debug:
self.print_debug(b'==> ' + new_data)
if self.handle_read_fast_case(self.read_buffer):
return
lf_pos = new_data.find(b'\n')
if lf_pos >= 0:
# Optimization: we knew there were no '\n' in the previous read
# buffer, so we searched only in the new_data and we offset the
# found index by the length of the previous buffer
lf_pos += len(self.read_buffer) - len(new_data)
elif self.state is STATE_NOT_STARTED and \
options.password is not None and \
b'password:' in self.read_buffer.lower():
self.dispatch_write('{}\n'.format(options.password).encode())
self.read_buffer = b''
return
while lf_pos >= 0:
# For each line in the buffer
line = self.read_buffer[:lf_pos + 1]
if callbacks.process(line):
pass
elif self.state in (STATE_IDLE, STATE_RUNNING):
self.print_lines(line)
elif self.state is STATE_NOT_STARTED:
self.read_in_state_not_started += line
if b'The authenticity of host' in line:
msg = line.strip(b'\n') + b' Closing connection.'
self.disconnect()
elif b'REMOTE HOST IDENTIFICATION HAS CHANGED' in line:
msg = b'Remote host identification has changed.'
else:
msg = None
if msg:
self.print_lines(msg + b' Consider manually connecting or '
b'using ssh-keyscan.')
# Go to the next line in the buffer
self.read_buffer = self.read_buffer[lf_pos + 1:]
if self.handle_read_fast_case(self.read_buffer):
return
lf_pos = self.read_buffer.find(b'\n')
if self.state is STATE_NOT_STARTED and not self.init_string_sent:
self.dispatch_write(self.init_string)
self.init_string_sent = True | [
"def",
"handle_read",
"(",
"self",
")",
":",
"if",
"self",
".",
"state",
"==",
"STATE_DEAD",
":",
"return",
"global",
"nr_handle_read",
"nr_handle_read",
"+=",
"1",
"new_data",
"=",
"self",
".",
"_handle_read_chunk",
"(",
")",
"if",
"self",
".",
"debug",
"... | We got some output from a remote shell, this is one of the state
machine | [
"We",
"got",
"some",
"output",
"from",
"a",
"remote",
"shell",
"this",
"is",
"one",
"of",
"the",
"state",
"machine"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L258-L310 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.print_unfinished_line | def print_unfinished_line(self):
"""The unfinished line stayed long enough in the buffer to be printed"""
if self.state is STATE_RUNNING:
if not callbacks.process(self.read_buffer):
self.print_lines(self.read_buffer)
self.read_buffer = b'' | python | def print_unfinished_line(self):
"""The unfinished line stayed long enough in the buffer to be printed"""
if self.state is STATE_RUNNING:
if not callbacks.process(self.read_buffer):
self.print_lines(self.read_buffer)
self.read_buffer = b'' | [
"def",
"print_unfinished_line",
"(",
"self",
")",
":",
"if",
"self",
".",
"state",
"is",
"STATE_RUNNING",
":",
"if",
"not",
"callbacks",
".",
"process",
"(",
"self",
".",
"read_buffer",
")",
":",
"self",
".",
"print_lines",
"(",
"self",
".",
"read_buffer",... | The unfinished line stayed long enough in the buffer to be printed | [
"The",
"unfinished",
"line",
"stayed",
"long",
"enough",
"in",
"the",
"buffer",
"to",
"be",
"printed"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L312-L317 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.handle_write | def handle_write(self):
"""Let's write as much as we can"""
num_sent = self.send(self.write_buffer)
if self.debug:
if self.state is not STATE_NOT_STARTED or options.password is None:
self.print_debug(b'<== ' + self.write_buffer[:num_sent])
self.write_buffer = self.write_buffer[num_sent:] | python | def handle_write(self):
"""Let's write as much as we can"""
num_sent = self.send(self.write_buffer)
if self.debug:
if self.state is not STATE_NOT_STARTED or options.password is None:
self.print_debug(b'<== ' + self.write_buffer[:num_sent])
self.write_buffer = self.write_buffer[num_sent:] | [
"def",
"handle_write",
"(",
"self",
")",
":",
"num_sent",
"=",
"self",
".",
"send",
"(",
"self",
".",
"write_buffer",
")",
"if",
"self",
".",
"debug",
":",
"if",
"self",
".",
"state",
"is",
"not",
"STATE_NOT_STARTED",
"or",
"options",
".",
"password",
... | Let's write as much as we can | [
"Let",
"s",
"write",
"as",
"much",
"as",
"we",
"can"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L324-L330 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.print_debug | def print_debug(self, msg):
"""Log some debugging information to the console"""
assert isinstance(msg, bytes)
state = STATE_NAMES[self.state].encode()
console_output(b'[dbg] ' + self.display_name.encode() + b'[' + state +
b']: ' + msg + b'\n') | python | def print_debug(self, msg):
"""Log some debugging information to the console"""
assert isinstance(msg, bytes)
state = STATE_NAMES[self.state].encode()
console_output(b'[dbg] ' + self.display_name.encode() + b'[' + state +
b']: ' + msg + b'\n') | [
"def",
"print_debug",
"(",
"self",
",",
"msg",
")",
":",
"assert",
"isinstance",
"(",
"msg",
",",
"bytes",
")",
"state",
"=",
"STATE_NAMES",
"[",
"self",
".",
"state",
"]",
".",
"encode",
"(",
")",
"console_output",
"(",
"b'[dbg] '",
"+",
"self",
".",
... | Log some debugging information to the console | [
"Log",
"some",
"debugging",
"information",
"to",
"the",
"console"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L332-L337 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.get_info | def get_info(self):
"""Return a list with all information available about this process"""
return [self.display_name.encode(),
self.enabled and b'enabled' or b'disabled',
STATE_NAMES[self.state].encode() + b':',
self.last_printed_line.strip()] | python | def get_info(self):
"""Return a list with all information available about this process"""
return [self.display_name.encode(),
self.enabled and b'enabled' or b'disabled',
STATE_NAMES[self.state].encode() + b':',
self.last_printed_line.strip()] | [
"def",
"get_info",
"(",
"self",
")",
":",
"return",
"[",
"self",
".",
"display_name",
".",
"encode",
"(",
")",
",",
"self",
".",
"enabled",
"and",
"b'enabled'",
"or",
"b'disabled'",
",",
"STATE_NAMES",
"[",
"self",
".",
"state",
"]",
".",
"encode",
"("... | Return a list with all information available about this process | [
"Return",
"a",
"list",
"with",
"all",
"information",
"available",
"about",
"this",
"process"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L339-L344 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.dispatch_write | def dispatch_write(self, buf):
"""There is new stuff to write when possible"""
if self.state != STATE_DEAD and self.enabled:
super().dispatch_write(buf)
return True
return False | python | def dispatch_write(self, buf):
"""There is new stuff to write when possible"""
if self.state != STATE_DEAD and self.enabled:
super().dispatch_write(buf)
return True
return False | [
"def",
"dispatch_write",
"(",
"self",
",",
"buf",
")",
":",
"if",
"self",
".",
"state",
"!=",
"STATE_DEAD",
"and",
"self",
".",
"enabled",
":",
"super",
"(",
")",
".",
"dispatch_write",
"(",
"buf",
")",
"return",
"True",
"return",
"False"
] | There is new stuff to write when possible | [
"There",
"is",
"new",
"stuff",
"to",
"write",
"when",
"possible"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L346-L351 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.change_name | def change_name(self, new_name):
"""Change the name of the shell, possibly updating the maximum name
length"""
if not new_name:
name = self.hostname
else:
name = new_name.decode()
self.display_name = display_names.change(
self.display_name, name) | python | def change_name(self, new_name):
"""Change the name of the shell, possibly updating the maximum name
length"""
if not new_name:
name = self.hostname
else:
name = new_name.decode()
self.display_name = display_names.change(
self.display_name, name) | [
"def",
"change_name",
"(",
"self",
",",
"new_name",
")",
":",
"if",
"not",
"new_name",
":",
"name",
"=",
"self",
".",
"hostname",
"else",
":",
"name",
"=",
"new_name",
".",
"decode",
"(",
")",
"self",
".",
"display_name",
"=",
"display_names",
".",
"ch... | Change the name of the shell, possibly updating the maximum name
length | [
"Change",
"the",
"name",
"of",
"the",
"shell",
"possibly",
"updating",
"the",
"maximum",
"name",
"length"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L357-L365 |
innogames/polysh | polysh/remote_dispatcher.py | RemoteDispatcher.rename | def rename(self, name):
"""Send to the remote shell, its new name to be shell expanded"""
if name:
# defug callback add?
rename1, rename2 = callbacks.add(
b'rename', self.change_name, False)
self.dispatch_command(b'/bin/echo "' + rename1 + b'""' + rename2 +
b'"' + name + b'\n')
else:
self.change_name(self.hostname.encode()) | python | def rename(self, name):
"""Send to the remote shell, its new name to be shell expanded"""
if name:
# defug callback add?
rename1, rename2 = callbacks.add(
b'rename', self.change_name, False)
self.dispatch_command(b'/bin/echo "' + rename1 + b'""' + rename2 +
b'"' + name + b'\n')
else:
self.change_name(self.hostname.encode()) | [
"def",
"rename",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
":",
"# defug callback add?",
"rename1",
",",
"rename2",
"=",
"callbacks",
".",
"add",
"(",
"b'rename'",
",",
"self",
".",
"change_name",
",",
"False",
")",
"self",
".",
"dispatch_command",... | Send to the remote shell, its new name to be shell expanded | [
"Send",
"to",
"the",
"remote",
"shell",
"its",
"new",
"name",
"to",
"be",
"shell",
"expanded"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/remote_dispatcher.py#L367-L376 |
innogames/polysh | polysh/terminal_size.py | terminal_size | def terminal_size(): # decide on *some* terminal size
"""Return (lines, columns)."""
cr = _ioctl_GWINSZ(0) or _ioctl_GWINSZ(
1) or _ioctl_GWINSZ(2) # try open fds
if not cr: # ...then ctty
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = _ioctl_GWINSZ(fd)
os.close(fd)
except BaseException:
pass
if not cr: # env vars or finally defaults
try:
cr = os.environ['LINES'], os.environ['COLUMNS']
except BaseException:
cr = 25, 80
return int(cr[1]), int(cr[0]) | python | def terminal_size(): # decide on *some* terminal size
"""Return (lines, columns)."""
cr = _ioctl_GWINSZ(0) or _ioctl_GWINSZ(
1) or _ioctl_GWINSZ(2) # try open fds
if not cr: # ...then ctty
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = _ioctl_GWINSZ(fd)
os.close(fd)
except BaseException:
pass
if not cr: # env vars or finally defaults
try:
cr = os.environ['LINES'], os.environ['COLUMNS']
except BaseException:
cr = 25, 80
return int(cr[1]), int(cr[0]) | [
"def",
"terminal_size",
"(",
")",
":",
"# decide on *some* terminal size",
"cr",
"=",
"_ioctl_GWINSZ",
"(",
"0",
")",
"or",
"_ioctl_GWINSZ",
"(",
"1",
")",
"or",
"_ioctl_GWINSZ",
"(",
"2",
")",
"# try open fds",
"if",
"not",
"cr",
":",
"# ...then ctty",
"try",... | Return (lines, columns). | [
"Return",
"(",
"lines",
"columns",
")",
"."
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/terminal_size.py#L48-L64 |
innogames/polysh | polysh/completion.py | complete | def complete(text, state):
"""On tab press, return the next possible completion"""
global completion_results
if state == 0:
line = readline.get_line_buffer()
if line.startswith(':'):
# Control command completion
completion_results = complete_control_command(line, text)
else:
if line.startswith('!') and text and line.startswith(text):
dropped_exclam = True
text = text[1:]
else:
dropped_exclam = False
completion_results = []
# Complete local paths
completion_results += complete_local_path(text)
# Complete from history
l = len(text)
completion_results += [w + ' ' for w in history_words if
len(w) > l and w.startswith(text)]
if readline.get_begidx() == 0:
# Completing first word from $PATH
completion_results += [w + ' ' for w in user_commands_in_path
if len(w) > l and w.startswith(text)]
completion_results = remove_dupes(completion_results)
if dropped_exclam:
completion_results = ['!' + r for r in completion_results]
if state < len(completion_results):
return completion_results[state]
completion_results = None
return None | python | def complete(text, state):
"""On tab press, return the next possible completion"""
global completion_results
if state == 0:
line = readline.get_line_buffer()
if line.startswith(':'):
# Control command completion
completion_results = complete_control_command(line, text)
else:
if line.startswith('!') and text and line.startswith(text):
dropped_exclam = True
text = text[1:]
else:
dropped_exclam = False
completion_results = []
# Complete local paths
completion_results += complete_local_path(text)
# Complete from history
l = len(text)
completion_results += [w + ' ' for w in history_words if
len(w) > l and w.startswith(text)]
if readline.get_begidx() == 0:
# Completing first word from $PATH
completion_results += [w + ' ' for w in user_commands_in_path
if len(w) > l and w.startswith(text)]
completion_results = remove_dupes(completion_results)
if dropped_exclam:
completion_results = ['!' + r for r in completion_results]
if state < len(completion_results):
return completion_results[state]
completion_results = None
return None | [
"def",
"complete",
"(",
"text",
",",
"state",
")",
":",
"global",
"completion_results",
"if",
"state",
"==",
"0",
":",
"line",
"=",
"readline",
".",
"get_line_buffer",
"(",
")",
"if",
"line",
".",
"startswith",
"(",
"':'",
")",
":",
"# Control command comp... | On tab press, return the next possible completion | [
"On",
"tab",
"press",
"return",
"the",
"next",
"possible",
"completion"
] | train | https://github.com/innogames/polysh/blob/fbea36f3bc9f47a62d72040c48dad1776124dae3/polysh/completion.py#L75-L107 |
aetros/aetros-cli | aetros/backend.py | Popen | def Popen(*args, **kwargs):
"""
Executes a command using subprocess.Popen and redirects output to AETROS and stdout.
Parses stdout as well for stdout API calls.
Use read_line argument to read stdout of command's stdout line by line.
Use returned process stdin to communicate with the command.
:return: subprocess.Popen
"""
read_line = None
if 'read_line' in kwargs:
read_line = kwargs['read_line']
del kwargs['read_line']
p = subprocess.Popen(*args, **kwargs)
wait_stdout = None
wait_stderr = None
if p.stdout:
wait_stdout = sys.stdout.attach(p.stdout, read_line=read_line)
if p.stderr:
wait_stderr = sys.stderr.attach(p.stderr)
original_wait = p.wait
def wait():
original_wait()
if wait_stdout:
wait_stdout()
if wait_stderr:
wait_stderr()
p.wait = wait
return p | python | def Popen(*args, **kwargs):
"""
Executes a command using subprocess.Popen and redirects output to AETROS and stdout.
Parses stdout as well for stdout API calls.
Use read_line argument to read stdout of command's stdout line by line.
Use returned process stdin to communicate with the command.
:return: subprocess.Popen
"""
read_line = None
if 'read_line' in kwargs:
read_line = kwargs['read_line']
del kwargs['read_line']
p = subprocess.Popen(*args, **kwargs)
wait_stdout = None
wait_stderr = None
if p.stdout:
wait_stdout = sys.stdout.attach(p.stdout, read_line=read_line)
if p.stderr:
wait_stderr = sys.stderr.attach(p.stderr)
original_wait = p.wait
def wait():
original_wait()
if wait_stdout:
wait_stdout()
if wait_stderr:
wait_stderr()
p.wait = wait
return p | [
"def",
"Popen",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"read_line",
"=",
"None",
"if",
"'read_line'",
"in",
"kwargs",
":",
"read_line",
"=",
"kwargs",
"[",
"'read_line'",
"]",
"del",
"kwargs",
"[",
"'read_line'",
"]",
"p",
"=",
"subproces... | Executes a command using subprocess.Popen and redirects output to AETROS and stdout.
Parses stdout as well for stdout API calls.
Use read_line argument to read stdout of command's stdout line by line.
Use returned process stdin to communicate with the command.
:return: subprocess.Popen | [
"Executes",
"a",
"command",
"using",
"subprocess",
".",
"Popen",
"and",
"redirects",
"output",
"to",
"AETROS",
"and",
"stdout",
".",
"Parses",
"stdout",
"as",
"well",
"for",
"stdout",
"API",
"calls",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L65-L101 |
aetros/aetros-cli | aetros/backend.py | context | def context():
"""
Returns a new JobBackend instance which connects to AETROS Trainer
based on "model" in aetros.yml or (internal: env:AETROS_MODEL_NAME environment variable).
internal: If env:AETROS_JOB_ID is not defined, it creates a new job.
Job is ended either by calling JobBackend.done(), JobBackend.fail() or JobBackend.abort().
If the script ends without calling one of the methods above, JobBackend.stop() is called and exit code defines
whether it is a fail() or done() result.
:return: JobBackend
"""
job = JobBackend()
offline = False
if '1' == os.getenv('AETROS_OFFLINE', ''):
offline = True
if os.getenv('AETROS_JOB_ID'):
job.load(os.getenv('AETROS_JOB_ID'))
if not offline:
job.connect()
else:
job.create()
if not offline:
job.connect()
job.start(offline=offline)
return job | python | def context():
"""
Returns a new JobBackend instance which connects to AETROS Trainer
based on "model" in aetros.yml or (internal: env:AETROS_MODEL_NAME environment variable).
internal: If env:AETROS_JOB_ID is not defined, it creates a new job.
Job is ended either by calling JobBackend.done(), JobBackend.fail() or JobBackend.abort().
If the script ends without calling one of the methods above, JobBackend.stop() is called and exit code defines
whether it is a fail() or done() result.
:return: JobBackend
"""
job = JobBackend()
offline = False
if '1' == os.getenv('AETROS_OFFLINE', ''):
offline = True
if os.getenv('AETROS_JOB_ID'):
job.load(os.getenv('AETROS_JOB_ID'))
if not offline:
job.connect()
else:
job.create()
if not offline:
job.connect()
job.start(offline=offline)
return job | [
"def",
"context",
"(",
")",
":",
"job",
"=",
"JobBackend",
"(",
")",
"offline",
"=",
"False",
"if",
"'1'",
"==",
"os",
".",
"getenv",
"(",
"'AETROS_OFFLINE'",
",",
"''",
")",
":",
"offline",
"=",
"True",
"if",
"os",
".",
"getenv",
"(",
"'AETROS_JOB_I... | Returns a new JobBackend instance which connects to AETROS Trainer
based on "model" in aetros.yml or (internal: env:AETROS_MODEL_NAME environment variable).
internal: If env:AETROS_JOB_ID is not defined, it creates a new job.
Job is ended either by calling JobBackend.done(), JobBackend.fail() or JobBackend.abort().
If the script ends without calling one of the methods above, JobBackend.stop() is called and exit code defines
whether it is a fail() or done() result.
:return: JobBackend | [
"Returns",
"a",
"new",
"JobBackend",
"instance",
"which",
"connects",
"to",
"AETROS",
"Trainer",
"based",
"on",
"model",
"in",
"aetros",
".",
"yml",
"or",
"(",
"internal",
":",
"env",
":",
"AETROS_MODEL_NAME",
"environment",
"variable",
")",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L127-L157 |
aetros/aetros-cli | aetros/backend.py | JobBackend.on_sigint | def on_sigint(self, sig, frame):
"""
We got SIGINT signal.
"""
if self.stop_requested or self.stop_requested_force:
# signal has already been sent or we force a shutdown.
# handles the keystroke 2x CTRL+C to force an exit.
self.stop_requested_force = True
self.logger.warning('Force stopped: ' + str(sig))
# just kill the process, we don't care about the results
self.on_force_exit()
os._exit(1)
# with force_exit we really close the process, killing it in unknown state
# self.fail('Force stopped', force_exit=True)
# return
if self.is_master_process():
self.logger.warning('Received signal '+str(sig)+'. Send again to force stop. Stopping ...')
else:
self.logger.debug("Got child signal " + str(sig))
self.stop_requested = True
# the default SIGINT handle in python is not always installed, so we can't rely on the
# KeyboardInterrupt exception to be thrown.
# thread.interrupt_main would call sigint again.
# the shutdown listener will do the rest like committing rest memory files into Git and closing connections.
sys.exit(0 if self.in_early_stop else 1) | python | def on_sigint(self, sig, frame):
"""
We got SIGINT signal.
"""
if self.stop_requested or self.stop_requested_force:
# signal has already been sent or we force a shutdown.
# handles the keystroke 2x CTRL+C to force an exit.
self.stop_requested_force = True
self.logger.warning('Force stopped: ' + str(sig))
# just kill the process, we don't care about the results
self.on_force_exit()
os._exit(1)
# with force_exit we really close the process, killing it in unknown state
# self.fail('Force stopped', force_exit=True)
# return
if self.is_master_process():
self.logger.warning('Received signal '+str(sig)+'. Send again to force stop. Stopping ...')
else:
self.logger.debug("Got child signal " + str(sig))
self.stop_requested = True
# the default SIGINT handle in python is not always installed, so we can't rely on the
# KeyboardInterrupt exception to be thrown.
# thread.interrupt_main would call sigint again.
# the shutdown listener will do the rest like committing rest memory files into Git and closing connections.
sys.exit(0 if self.in_early_stop else 1) | [
"def",
"on_sigint",
"(",
"self",
",",
"sig",
",",
"frame",
")",
":",
"if",
"self",
".",
"stop_requested",
"or",
"self",
".",
"stop_requested_force",
":",
"# signal has already been sent or we force a shutdown.",
"# handles the keystroke 2x CTRL+C to force an exit.",
"self",... | We got SIGINT signal. | [
"We",
"got",
"SIGINT",
"signal",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L474-L503 |
aetros/aetros-cli | aetros/backend.py | JobBackend.external_aborted | def external_aborted(self, params):
"""
Immediately abort the job by server.
This runs in the Client:read() thread.
"""
self.ended = True
self.running = False
# When the server sends an abort signal, we really have to close immediately,
# since for example the job has been already deleted.
# without touching the git and client any further
os._exit(1) | python | def external_aborted(self, params):
"""
Immediately abort the job by server.
This runs in the Client:read() thread.
"""
self.ended = True
self.running = False
# When the server sends an abort signal, we really have to close immediately,
# since for example the job has been already deleted.
# without touching the git and client any further
os._exit(1) | [
"def",
"external_aborted",
"(",
"self",
",",
"params",
")",
":",
"self",
".",
"ended",
"=",
"True",
"self",
".",
"running",
"=",
"False",
"# When the server sends an abort signal, we really have to close immediately,",
"# since for example the job has been already deleted.",
... | Immediately abort the job by server.
This runs in the Client:read() thread. | [
"Immediately",
"abort",
"the",
"job",
"by",
"server",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L505-L517 |
aetros/aetros-cli | aetros/backend.py | JobBackend.external_stop | def external_stop(self, force):
"""
Stop signal by server.
"""
# only the master processes handles the regular stop signal from the server, sending a SIGINT to
# all its child (means to us, non-master process)
if not self.is_master_process():
if force:
# make sure even the subprocess dies really on force
os._exit(1)
return
self.logger.warning("Received stop signal by server.")
if not self.stop_requested_force:
self.stop_requested_force = force
raise_sigint() | python | def external_stop(self, force):
"""
Stop signal by server.
"""
# only the master processes handles the regular stop signal from the server, sending a SIGINT to
# all its child (means to us, non-master process)
if not self.is_master_process():
if force:
# make sure even the subprocess dies really on force
os._exit(1)
return
self.logger.warning("Received stop signal by server.")
if not self.stop_requested_force:
self.stop_requested_force = force
raise_sigint() | [
"def",
"external_stop",
"(",
"self",
",",
"force",
")",
":",
"# only the master processes handles the regular stop signal from the server, sending a SIGINT to",
"# all its child (means to us, non-master process)",
"if",
"not",
"self",
".",
"is_master_process",
"(",
")",
":",
"if"... | Stop signal by server. | [
"Stop",
"signal",
"by",
"server",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L519-L537 |
aetros/aetros-cli | aetros/backend.py | JobBackend.step | def step(self, step, total, label='STEP', speed_label='STEPS/S', size=1):
"""
Increase the step indicator, which is a sub progress circle of the actual
main progress circle (epoch, progress() method).
"""
self.lock.acquire()
try:
time_diff = time.time() - self.last_step_time
if self.last_step > step:
# it restarted
self.last_step = 0
made_steps_since_last_call = step - self.last_step
self.last_step = step
self.made_steps_since_last_sync += made_steps_since_last_call
self.made_steps_size_since_last_sync += made_steps_since_last_call * size
if time_diff >= 1 or step == total: # only each second or last batch
self.set_system_info('step', step, True)
self.set_system_info('steps', total, True)
steps_per_second = self.made_steps_since_last_sync / time_diff
samples_per_second = self.made_steps_size_since_last_sync / time_diff
self.last_step_time = time.time()
if size:
self.report_speed(samples_per_second)
epochs_per_second = steps_per_second / total # all batches
self.set_system_info('epochsPerSecond', epochs_per_second, True)
current_epochs = self.current_epoch if self.current_epoch else 1
total_epochs = self.total_epochs if self.total_epochs else 1
self.made_steps_since_last_sync = 0
self.made_steps_size_since_last_sync = 0
eta = 0
if step < total:
# time to end this epoch
if steps_per_second != 0:
eta = (total - step) / steps_per_second
# time until all epochs are done
if total_epochs - current_epochs > 0:
if epochs_per_second != 0:
eta += (total_epochs - (current_epochs)) / epochs_per_second
self.git.store_file('aetros/job/times/eta.json', simplejson.dumps(eta))
if label and self.step_label != label:
self.set_system_info('stepLabel', label, True)
self.step_label = label
if speed_label and self.step_speed_label != speed_label:
self.set_system_info('stepSpeedLabel', speed_label, True)
self.step_speed_label = speed_label
finally:
self.lock.release() | python | def step(self, step, total, label='STEP', speed_label='STEPS/S', size=1):
"""
Increase the step indicator, which is a sub progress circle of the actual
main progress circle (epoch, progress() method).
"""
self.lock.acquire()
try:
time_diff = time.time() - self.last_step_time
if self.last_step > step:
# it restarted
self.last_step = 0
made_steps_since_last_call = step - self.last_step
self.last_step = step
self.made_steps_since_last_sync += made_steps_since_last_call
self.made_steps_size_since_last_sync += made_steps_since_last_call * size
if time_diff >= 1 or step == total: # only each second or last batch
self.set_system_info('step', step, True)
self.set_system_info('steps', total, True)
steps_per_second = self.made_steps_since_last_sync / time_diff
samples_per_second = self.made_steps_size_since_last_sync / time_diff
self.last_step_time = time.time()
if size:
self.report_speed(samples_per_second)
epochs_per_second = steps_per_second / total # all batches
self.set_system_info('epochsPerSecond', epochs_per_second, True)
current_epochs = self.current_epoch if self.current_epoch else 1
total_epochs = self.total_epochs if self.total_epochs else 1
self.made_steps_since_last_sync = 0
self.made_steps_size_since_last_sync = 0
eta = 0
if step < total:
# time to end this epoch
if steps_per_second != 0:
eta = (total - step) / steps_per_second
# time until all epochs are done
if total_epochs - current_epochs > 0:
if epochs_per_second != 0:
eta += (total_epochs - (current_epochs)) / epochs_per_second
self.git.store_file('aetros/job/times/eta.json', simplejson.dumps(eta))
if label and self.step_label != label:
self.set_system_info('stepLabel', label, True)
self.step_label = label
if speed_label and self.step_speed_label != speed_label:
self.set_system_info('stepSpeedLabel', speed_label, True)
self.step_speed_label = speed_label
finally:
self.lock.release() | [
"def",
"step",
"(",
"self",
",",
"step",
",",
"total",
",",
"label",
"=",
"'STEP'",
",",
"speed_label",
"=",
"'STEPS/S'",
",",
"size",
"=",
"1",
")",
":",
"self",
".",
"lock",
".",
"acquire",
"(",
")",
"try",
":",
"time_diff",
"=",
"time",
".",
"... | Increase the step indicator, which is a sub progress circle of the actual
main progress circle (epoch, progress() method). | [
"Increase",
"the",
"step",
"indicator",
"which",
"is",
"a",
"sub",
"progress",
"circle",
"of",
"the",
"actual",
"main",
"progress",
"circle",
"(",
"epoch",
"progress",
"()",
"method",
")",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L553-L614 |
aetros/aetros-cli | aetros/backend.py | JobBackend.create_loss_channel | def create_loss_channel(self, name='loss', xaxis=None, yaxis=None, layout=None):
"""
:param name: string
:return: JobLossGraph
"""
return JobLossChannel(self, name, xaxis, yaxis, layout) | python | def create_loss_channel(self, name='loss', xaxis=None, yaxis=None, layout=None):
"""
:param name: string
:return: JobLossGraph
"""
return JobLossChannel(self, name, xaxis, yaxis, layout) | [
"def",
"create_loss_channel",
"(",
"self",
",",
"name",
"=",
"'loss'",
",",
"xaxis",
"=",
"None",
",",
"yaxis",
"=",
"None",
",",
"layout",
"=",
"None",
")",
":",
"return",
"JobLossChannel",
"(",
"self",
",",
"name",
",",
"xaxis",
",",
"yaxis",
",",
... | :param name: string
:return: JobLossGraph | [
":",
"param",
"name",
":",
"string",
":",
"return",
":",
"JobLossGraph"
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L759-L765 |
aetros/aetros-cli | aetros/backend.py | JobBackend.create_channel | def create_channel(self, name, traces=None,
main=False, kpi=False, kpiTrace=0, max_optimization=True,
type=JobChannel.NUMBER,
xaxis=None, yaxis=None, layout=None):
"""
:param name: str
:param traces: None|list : per default create a trace based on "name".
:param main: bool : whether this channel is visible in the job list as column for better comparison.
:param kpi: bool : whether this channel is the KPI (key performance indicator).
Used for hyperparameter optimization. Only one channel can be a kpi. Only first trace used.
:param kpiTrace: bool : if you have multiple traces, define which is the KPI. 0 based index.
:param max_optimization: bool : whether the optimization maximizes or minmizes the kpi. Use max_optimization=False to
tell the optimization algorithm that his channel minimizes a kpi, for instance the loss of a model.
:param type: str : One of JobChannel.NUMBER, JobChannel.TEXT
:param xaxis: dict
:param yaxis: dict
:param layout: dict
"""
return JobChannel(self, name, traces, main, kpi, kpiTrace, max_optimization, type, xaxis, yaxis, layout) | python | def create_channel(self, name, traces=None,
main=False, kpi=False, kpiTrace=0, max_optimization=True,
type=JobChannel.NUMBER,
xaxis=None, yaxis=None, layout=None):
"""
:param name: str
:param traces: None|list : per default create a trace based on "name".
:param main: bool : whether this channel is visible in the job list as column for better comparison.
:param kpi: bool : whether this channel is the KPI (key performance indicator).
Used for hyperparameter optimization. Only one channel can be a kpi. Only first trace used.
:param kpiTrace: bool : if you have multiple traces, define which is the KPI. 0 based index.
:param max_optimization: bool : whether the optimization maximizes or minmizes the kpi. Use max_optimization=False to
tell the optimization algorithm that his channel minimizes a kpi, for instance the loss of a model.
:param type: str : One of JobChannel.NUMBER, JobChannel.TEXT
:param xaxis: dict
:param yaxis: dict
:param layout: dict
"""
return JobChannel(self, name, traces, main, kpi, kpiTrace, max_optimization, type, xaxis, yaxis, layout) | [
"def",
"create_channel",
"(",
"self",
",",
"name",
",",
"traces",
"=",
"None",
",",
"main",
"=",
"False",
",",
"kpi",
"=",
"False",
",",
"kpiTrace",
"=",
"0",
",",
"max_optimization",
"=",
"True",
",",
"type",
"=",
"JobChannel",
".",
"NUMBER",
",",
"... | :param name: str
:param traces: None|list : per default create a trace based on "name".
:param main: bool : whether this channel is visible in the job list as column for better comparison.
:param kpi: bool : whether this channel is the KPI (key performance indicator).
Used for hyperparameter optimization. Only one channel can be a kpi. Only first trace used.
:param kpiTrace: bool : if you have multiple traces, define which is the KPI. 0 based index.
:param max_optimization: bool : whether the optimization maximizes or minmizes the kpi. Use max_optimization=False to
tell the optimization algorithm that his channel minimizes a kpi, for instance the loss of a model.
:param type: str : One of JobChannel.NUMBER, JobChannel.TEXT
:param xaxis: dict
:param yaxis: dict
:param layout: dict | [
":",
"param",
"name",
":",
"str",
":",
"param",
"traces",
":",
"None|list",
":",
"per",
"default",
"create",
"a",
"trace",
"based",
"on",
"name",
".",
":",
"param",
"main",
":",
"bool",
":",
"whether",
"this",
"channel",
"is",
"visible",
"in",
"the",
... | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L767-L788 |
aetros/aetros-cli | aetros/backend.py | JobBackend.on_shutdown | def on_shutdown(self):
"""
Shutdown routine. Sets the last progress (done, aborted, failed) and tries to send last logs and git commits.
Also makes sure the ssh connection is closed (thus, the job marked as offline).
Is triggered by atexit.register().
"""
self.in_shutdown = True
self.logger.debug('on_shutdown, stopped=%s, ended=%s, early_stop=%s, stop_requested=%s'
% (str(self.stopped), str(self.ended), str(self.in_early_stop), str(self.stop_requested)))
if self.stopped or self.ended:
# make really sure, ssh connection closed
self.client.close()
return
if self.in_early_stop:
self.done()
return
if self.stop_requested:
# when SIGINT has been triggered
if self.stop_requested_force:
if not self.is_master_process():
# if not master process, we just stop everything. status/progress is set by master
self.stop(force_exit=True)
else:
# master process
self.fail('Force stopped.', force_exit=True)
else:
if not self.is_master_process():
# if not master process, we just stop everything. status/progress is set by master
self.stop()
else:
# master process
self.abort()
return
if hasattr(sys, 'last_value'):
# sys.last_value contains a exception, when there was an uncaught one
if isinstance(sys.last_value, KeyboardInterrupt):
# can only happen when KeyboardInterrupt has been raised manually
# since the one from the default sigint handler will never reach here
# since we catch the sigint signal and sys.exit() before the default sigint handler
# is able to raise KeyboardInterrupt
self.abort()
else:
self.fail(type(sys.last_value).__name__ + ': ' + str(sys.last_value))
elif self.running:
self.done() | python | def on_shutdown(self):
"""
Shutdown routine. Sets the last progress (done, aborted, failed) and tries to send last logs and git commits.
Also makes sure the ssh connection is closed (thus, the job marked as offline).
Is triggered by atexit.register().
"""
self.in_shutdown = True
self.logger.debug('on_shutdown, stopped=%s, ended=%s, early_stop=%s, stop_requested=%s'
% (str(self.stopped), str(self.ended), str(self.in_early_stop), str(self.stop_requested)))
if self.stopped or self.ended:
# make really sure, ssh connection closed
self.client.close()
return
if self.in_early_stop:
self.done()
return
if self.stop_requested:
# when SIGINT has been triggered
if self.stop_requested_force:
if not self.is_master_process():
# if not master process, we just stop everything. status/progress is set by master
self.stop(force_exit=True)
else:
# master process
self.fail('Force stopped.', force_exit=True)
else:
if not self.is_master_process():
# if not master process, we just stop everything. status/progress is set by master
self.stop()
else:
# master process
self.abort()
return
if hasattr(sys, 'last_value'):
# sys.last_value contains a exception, when there was an uncaught one
if isinstance(sys.last_value, KeyboardInterrupt):
# can only happen when KeyboardInterrupt has been raised manually
# since the one from the default sigint handler will never reach here
# since we catch the sigint signal and sys.exit() before the default sigint handler
# is able to raise KeyboardInterrupt
self.abort()
else:
self.fail(type(sys.last_value).__name__ + ': ' + str(sys.last_value))
elif self.running:
self.done() | [
"def",
"on_shutdown",
"(",
"self",
")",
":",
"self",
".",
"in_shutdown",
"=",
"True",
"self",
".",
"logger",
".",
"debug",
"(",
"'on_shutdown, stopped=%s, ended=%s, early_stop=%s, stop_requested=%s'",
"%",
"(",
"str",
"(",
"self",
".",
"stopped",
")",
",",
"str"... | Shutdown routine. Sets the last progress (done, aborted, failed) and tries to send last logs and git commits.
Also makes sure the ssh connection is closed (thus, the job marked as offline).
Is triggered by atexit.register(). | [
"Shutdown",
"routine",
".",
"Sets",
"the",
"last",
"progress",
"(",
"done",
"aborted",
"failed",
")",
"and",
"tries",
"to",
"send",
"last",
"logs",
"and",
"git",
"commits",
".",
"Also",
"makes",
"sure",
"the",
"ssh",
"connection",
"is",
"closed",
"(",
"t... | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L951-L1003 |
aetros/aetros-cli | aetros/backend.py | JobBackend.fail | def fail(self, message=None, force_exit=False):
"""
Marks the job as failed, saves the given error message and force exists the process when force_exit=True.
"""
global last_exit_code
if not last_exit_code:
last_exit_code = 1
with self.git.batch_commit('FAILED'):
self.set_status('FAILED', add_section=False)
self.git.commit_json_file('FAIL_MESSAGE', 'aetros/job/crash/error', str(message) if message else '')
if isinstance(sys.stderr, GeneralLogger):
self.git.commit_json_file('FAIL_MESSAGE_LAST_LOG', 'aetros/job/crash/last_message', sys.stderr.last_messages)
self.logger.debug('Crash report stored in commit ' + self.git.get_head_commit())
self.stop(JOB_STATUS.PROGRESS_STATUS_FAILED, force_exit=force_exit) | python | def fail(self, message=None, force_exit=False):
"""
Marks the job as failed, saves the given error message and force exists the process when force_exit=True.
"""
global last_exit_code
if not last_exit_code:
last_exit_code = 1
with self.git.batch_commit('FAILED'):
self.set_status('FAILED', add_section=False)
self.git.commit_json_file('FAIL_MESSAGE', 'aetros/job/crash/error', str(message) if message else '')
if isinstance(sys.stderr, GeneralLogger):
self.git.commit_json_file('FAIL_MESSAGE_LAST_LOG', 'aetros/job/crash/last_message', sys.stderr.last_messages)
self.logger.debug('Crash report stored in commit ' + self.git.get_head_commit())
self.stop(JOB_STATUS.PROGRESS_STATUS_FAILED, force_exit=force_exit) | [
"def",
"fail",
"(",
"self",
",",
"message",
"=",
"None",
",",
"force_exit",
"=",
"False",
")",
":",
"global",
"last_exit_code",
"if",
"not",
"last_exit_code",
":",
"last_exit_code",
"=",
"1",
"with",
"self",
".",
"git",
".",
"batch_commit",
"(",
"'FAILED'"... | Marks the job as failed, saves the given error message and force exists the process when force_exit=True. | [
"Marks",
"the",
"job",
"as",
"failed",
"saves",
"the",
"given",
"error",
"message",
"and",
"force",
"exists",
"the",
"process",
"when",
"force_exit",
"=",
"True",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1150-L1166 |
aetros/aetros-cli | aetros/backend.py | JobBackend.write_log | def write_log(self, message):
"""
Proxy method for GeneralLogger.
"""
if self.stream_log and not self.ended:
# points to the Git stream write
self.stream_log.write(message)
return True | python | def write_log(self, message):
"""
Proxy method for GeneralLogger.
"""
if self.stream_log and not self.ended:
# points to the Git stream write
self.stream_log.write(message)
return True | [
"def",
"write_log",
"(",
"self",
",",
"message",
")",
":",
"if",
"self",
".",
"stream_log",
"and",
"not",
"self",
".",
"ended",
":",
"# points to the Git stream write",
"self",
".",
"stream_log",
".",
"write",
"(",
"message",
")",
"return",
"True"
] | Proxy method for GeneralLogger. | [
"Proxy",
"method",
"for",
"GeneralLogger",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1168-L1175 |
aetros/aetros-cli | aetros/backend.py | JobBackend.set_status | def set_status(self, status, add_section=True):
"""
Set an arbitrary status, visible in the big wheel of the job view.
"""
status = str(status)
if add_section:
self.section(status)
self.job_add_status('status', status) | python | def set_status(self, status, add_section=True):
"""
Set an arbitrary status, visible in the big wheel of the job view.
"""
status = str(status)
if add_section:
self.section(status)
self.job_add_status('status', status) | [
"def",
"set_status",
"(",
"self",
",",
"status",
",",
"add_section",
"=",
"True",
")",
":",
"status",
"=",
"str",
"(",
"status",
")",
"if",
"add_section",
":",
"self",
".",
"section",
"(",
"status",
")",
"self",
".",
"job_add_status",
"(",
"'status'",
... | Set an arbitrary status, visible in the big wheel of the job view. | [
"Set",
"an",
"arbitrary",
"status",
"visible",
"in",
"the",
"big",
"wheel",
"of",
"the",
"job",
"view",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1177-L1186 |
aetros/aetros-cli | aetros/backend.py | JobBackend.create | def create(self, create_info=None, hyperparameter=None, server='local', insights=False):
"""
Creates a new job in git and pushes it.
:param create_info: from the api.create_job_info(id). Contains the config and job info (type, server)
:param hyperparameter: simple nested dict with key->value, which overwrites stuff from aetros.yml
:param server: if None, the the job will be assigned to a server.
:param insights: whether you want to activate insights (for simple models)
"""
if not create_info:
create_info = {
'server': server,
'config': {
'insights': insights,
'command': ' '.join(sys.argv)
}
}
config = find_config(self.config_path, logger=self.logger)
if not config['model']:
raise Exception('AETROS config file (aetros.yml) not found.')
# first transform simple format in the full definition with parameter types
# (string, number, group, choice_group, etc)
full_hyperparameters = lose_parameters_to_full(config['parameters'])
# now extract hyperparameters from full definition, and overwrite stuff using
# incoming_hyperparameter if available
hyperparameter = extract_parameters(full_hyperparameters, hyperparameter)
create_info['config']['parameters'] = hyperparameter
self.job = create_info
if 'server' not in self.job and server:
# setting this disables server assignment
self.job['server'] = server
self.job['optimization'] = None
self.job['type'] = 'custom'
if 'parameters' not in self.job['config']:
self.job['config']['parameters'] = {}
if 'insights' not in self.job['config']:
self.job['config']['insights'] = insights
self.job['created'] = time.time()
self.git.create_job_id(self.job)
self.logger.debug("Job created with Git ref " + self.git.ref_head)
return self.job_id | python | def create(self, create_info=None, hyperparameter=None, server='local', insights=False):
"""
Creates a new job in git and pushes it.
:param create_info: from the api.create_job_info(id). Contains the config and job info (type, server)
:param hyperparameter: simple nested dict with key->value, which overwrites stuff from aetros.yml
:param server: if None, the the job will be assigned to a server.
:param insights: whether you want to activate insights (for simple models)
"""
if not create_info:
create_info = {
'server': server,
'config': {
'insights': insights,
'command': ' '.join(sys.argv)
}
}
config = find_config(self.config_path, logger=self.logger)
if not config['model']:
raise Exception('AETROS config file (aetros.yml) not found.')
# first transform simple format in the full definition with parameter types
# (string, number, group, choice_group, etc)
full_hyperparameters = lose_parameters_to_full(config['parameters'])
# now extract hyperparameters from full definition, and overwrite stuff using
# incoming_hyperparameter if available
hyperparameter = extract_parameters(full_hyperparameters, hyperparameter)
create_info['config']['parameters'] = hyperparameter
self.job = create_info
if 'server' not in self.job and server:
# setting this disables server assignment
self.job['server'] = server
self.job['optimization'] = None
self.job['type'] = 'custom'
if 'parameters' not in self.job['config']:
self.job['config']['parameters'] = {}
if 'insights' not in self.job['config']:
self.job['config']['insights'] = insights
self.job['created'] = time.time()
self.git.create_job_id(self.job)
self.logger.debug("Job created with Git ref " + self.git.ref_head)
return self.job_id | [
"def",
"create",
"(",
"self",
",",
"create_info",
"=",
"None",
",",
"hyperparameter",
"=",
"None",
",",
"server",
"=",
"'local'",
",",
"insights",
"=",
"False",
")",
":",
"if",
"not",
"create_info",
":",
"create_info",
"=",
"{",
"'server'",
":",
"server"... | Creates a new job in git and pushes it.
:param create_info: from the api.create_job_info(id). Contains the config and job info (type, server)
:param hyperparameter: simple nested dict with key->value, which overwrites stuff from aetros.yml
:param server: if None, the the job will be assigned to a server.
:param insights: whether you want to activate insights (for simple models) | [
"Creates",
"a",
"new",
"job",
"in",
"git",
"and",
"pushes",
"it",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1196-L1247 |
aetros/aetros-cli | aetros/backend.py | JobBackend.get_parameter | def get_parameter(self, path, default=None, return_group=False):
"""
Reads hyperparameter from job configuration. If nothing found use given default.
:param path: str
:param default: *
:param return_group: If true and path is a choice_group, we return the dict instead of the group name.
:return: *
"""
value = read_parameter_by_path(self.job['config']['parameters'], path, return_group)
if value is None:
return default
return value | python | def get_parameter(self, path, default=None, return_group=False):
"""
Reads hyperparameter from job configuration. If nothing found use given default.
:param path: str
:param default: *
:param return_group: If true and path is a choice_group, we return the dict instead of the group name.
:return: *
"""
value = read_parameter_by_path(self.job['config']['parameters'], path, return_group)
if value is None:
return default
return value | [
"def",
"get_parameter",
"(",
"self",
",",
"path",
",",
"default",
"=",
"None",
",",
"return_group",
"=",
"False",
")",
":",
"value",
"=",
"read_parameter_by_path",
"(",
"self",
".",
"job",
"[",
"'config'",
"]",
"[",
"'parameters'",
"]",
",",
"path",
",",... | Reads hyperparameter from job configuration. If nothing found use given default.
:param path: str
:param default: *
:param return_group: If true and path is a choice_group, we return the dict instead of the group name.
:return: * | [
"Reads",
"hyperparameter",
"from",
"job",
"configuration",
".",
"If",
"nothing",
"found",
"use",
"given",
"default",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1280-L1294 |
aetros/aetros-cli | aetros/backend.py | JobBackend.load | def load(self, job_id):
"""
Loads job into index and work-tree, restart its ref and sets as current.
:param job_id: int
"""
self.git.read_job(job_id, checkout=self.is_master_process())
self.load_job_from_ref() | python | def load(self, job_id):
"""
Loads job into index and work-tree, restart its ref and sets as current.
:param job_id: int
"""
self.git.read_job(job_id, checkout=self.is_master_process())
self.load_job_from_ref() | [
"def",
"load",
"(",
"self",
",",
"job_id",
")",
":",
"self",
".",
"git",
".",
"read_job",
"(",
"job_id",
",",
"checkout",
"=",
"self",
".",
"is_master_process",
"(",
")",
")",
"self",
".",
"load_job_from_ref",
"(",
")"
] | Loads job into index and work-tree, restart its ref and sets as current.
:param job_id: int | [
"Loads",
"job",
"into",
"index",
"and",
"work",
"-",
"tree",
"restart",
"its",
"ref",
"and",
"sets",
"as",
"current",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1302-L1309 |
aetros/aetros-cli | aetros/backend.py | JobBackend.load_job_from_ref | def load_job_from_ref(self):
"""
Loads the job.json into self.job
"""
if not self.job_id:
raise Exception('Job not loaded yet. Use load(id) first.')
if not os.path.exists(self.git.work_tree + '/aetros/job.json'):
raise Exception('Could not load aetros/job.json from git repository. Make sure you have created the job correctly.')
with open(self.git.work_tree + '/aetros/job.json') as f:
self.job = simplejson.loads(f.read(), object_pairs_hook=collections.OrderedDict)
if not self.job:
raise Exception('Could not parse aetros/job.json from git repository. Make sure you have created the job correctly.')
self.logger.debug('job: ' + str(self.job)) | python | def load_job_from_ref(self):
"""
Loads the job.json into self.job
"""
if not self.job_id:
raise Exception('Job not loaded yet. Use load(id) first.')
if not os.path.exists(self.git.work_tree + '/aetros/job.json'):
raise Exception('Could not load aetros/job.json from git repository. Make sure you have created the job correctly.')
with open(self.git.work_tree + '/aetros/job.json') as f:
self.job = simplejson.loads(f.read(), object_pairs_hook=collections.OrderedDict)
if not self.job:
raise Exception('Could not parse aetros/job.json from git repository. Make sure you have created the job correctly.')
self.logger.debug('job: ' + str(self.job)) | [
"def",
"load_job_from_ref",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"job_id",
":",
"raise",
"Exception",
"(",
"'Job not loaded yet. Use load(id) first.'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"git",
".",
"work_tree",... | Loads the job.json into self.job | [
"Loads",
"the",
"job",
".",
"json",
"into",
"self",
".",
"job"
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1311-L1327 |
aetros/aetros-cli | aetros/backend.py | JobBackend.get_job_model | def get_job_model(self):
"""
Returns a new JobModel instance with current loaded job data attached.
:return: JobModel
"""
if not self.job:
raise Exception('Job not loaded yet. Use load(id) first.')
return JobModel(self.job_id, self.job, self.home_config['storage_dir']) | python | def get_job_model(self):
"""
Returns a new JobModel instance with current loaded job data attached.
:return: JobModel
"""
if not self.job:
raise Exception('Job not loaded yet. Use load(id) first.')
return JobModel(self.job_id, self.job, self.home_config['storage_dir']) | [
"def",
"get_job_model",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"job",
":",
"raise",
"Exception",
"(",
"'Job not loaded yet. Use load(id) first.'",
")",
"return",
"JobModel",
"(",
"self",
".",
"job_id",
",",
"self",
".",
"job",
",",
"self",
".",
"... | Returns a new JobModel instance with current loaded job data attached.
:return: JobModel | [
"Returns",
"a",
"new",
"JobModel",
"instance",
"with",
"current",
"loaded",
"job",
"data",
"attached",
".",
":",
"return",
":",
"JobModel"
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1345-L1353 |
aetros/aetros-cli | aetros/backend.py | JobBackend.file_list | def file_list(self):
"""
Lists all files in the working directory.
"""
blacklist = ['.git', 'aetros']
working_tree = self.git.work_tree
def recursive(path='.'):
if os.path.basename(path) in blacklist:
return 0, 0
if os.path.isdir(path):
files = []
for file in os.listdir(path):
if path and path != '.':
file = path + '/' + file
added_files = recursive(file)
files += added_files
return files
else:
if path.endswith('.pyc'):
return []
if is_ignored(path, self.config['ignore']):
return []
return [os.path.relpath(path, working_tree)]
return recursive(working_tree) | python | def file_list(self):
"""
Lists all files in the working directory.
"""
blacklist = ['.git', 'aetros']
working_tree = self.git.work_tree
def recursive(path='.'):
if os.path.basename(path) in blacklist:
return 0, 0
if os.path.isdir(path):
files = []
for file in os.listdir(path):
if path and path != '.':
file = path + '/' + file
added_files = recursive(file)
files += added_files
return files
else:
if path.endswith('.pyc'):
return []
if is_ignored(path, self.config['ignore']):
return []
return [os.path.relpath(path, working_tree)]
return recursive(working_tree) | [
"def",
"file_list",
"(",
"self",
")",
":",
"blacklist",
"=",
"[",
"'.git'",
",",
"'aetros'",
"]",
"working_tree",
"=",
"self",
".",
"git",
".",
"work_tree",
"def",
"recursive",
"(",
"path",
"=",
"'.'",
")",
":",
"if",
"os",
".",
"path",
".",
"basenam... | Lists all files in the working directory. | [
"Lists",
"all",
"files",
"in",
"the",
"working",
"directory",
"."
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1559-L1589 |
aetros/aetros-cli | aetros/backend.py | JobBackend.add_files | def add_files(self, working_tree, report=False):
"""
Commits all files from limited in aetros.yml. `files` is a whitelist, `exclude_files` is a blacklist.
If both are empty, we commit all files smaller than 10MB.
:return:
"""
blacklist = ['.git']
def add_resursiv(path = '.', report=report):
if os.path.basename(path) in blacklist:
return 0, 0
if working_tree + '/aetros' == path:
# ignore in work_tree the folder ./aetros/, as it could be
# that we checked out a job and start it again.
return 0, 0
if os.path.isdir(path):
files = 0
size = 0
for file in os.listdir(path):
if path and path != '.':
file = path + '/' + file
added_files, added_size = add_resursiv(file)
files += added_files
size += added_size
return files, size
else:
if path.endswith('.pyc'):
return 0, 0
relative_path = os.path.relpath(path, working_tree)
if is_ignored(relative_path, self.config['ignore']):
return 0, 0
self.logger.debug("added file to job " + relative_path)
if report:
print("Added job file: " + relative_path)
self.git.add_file_path_in_work_tree(path, working_tree, verbose=False)
return 1, os.path.getsize(path)
return add_resursiv(working_tree, report=report) | python | def add_files(self, working_tree, report=False):
"""
Commits all files from limited in aetros.yml. `files` is a whitelist, `exclude_files` is a blacklist.
If both are empty, we commit all files smaller than 10MB.
:return:
"""
blacklist = ['.git']
def add_resursiv(path = '.', report=report):
if os.path.basename(path) in blacklist:
return 0, 0
if working_tree + '/aetros' == path:
# ignore in work_tree the folder ./aetros/, as it could be
# that we checked out a job and start it again.
return 0, 0
if os.path.isdir(path):
files = 0
size = 0
for file in os.listdir(path):
if path and path != '.':
file = path + '/' + file
added_files, added_size = add_resursiv(file)
files += added_files
size += added_size
return files, size
else:
if path.endswith('.pyc'):
return 0, 0
relative_path = os.path.relpath(path, working_tree)
if is_ignored(relative_path, self.config['ignore']):
return 0, 0
self.logger.debug("added file to job " + relative_path)
if report:
print("Added job file: " + relative_path)
self.git.add_file_path_in_work_tree(path, working_tree, verbose=False)
return 1, os.path.getsize(path)
return add_resursiv(working_tree, report=report) | [
"def",
"add_files",
"(",
"self",
",",
"working_tree",
",",
"report",
"=",
"False",
")",
":",
"blacklist",
"=",
"[",
"'.git'",
"]",
"def",
"add_resursiv",
"(",
"path",
"=",
"'.'",
",",
"report",
"=",
"report",
")",
":",
"if",
"os",
".",
"path",
".",
... | Commits all files from limited in aetros.yml. `files` is a whitelist, `exclude_files` is a blacklist.
If both are empty, we commit all files smaller than 10MB.
:return: | [
"Commits",
"all",
"files",
"from",
"limited",
"in",
"aetros",
".",
"yml",
".",
"files",
"is",
"a",
"whitelist",
"exclude_files",
"is",
"a",
"blacklist",
".",
"If",
"both",
"are",
"empty",
"we",
"commit",
"all",
"files",
"smaller",
"than",
"10MB",
".",
":... | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1591-L1637 |
aetros/aetros-cli | aetros/backend.py | JobBackend.add_embedding_word2vec | def add_embedding_word2vec(self, x, path, dimensions=None, header_with_dimensions=True):
"""
Parse the word2vec file and extracts vectors as bytes and labels as TSV file.
The format is simple: It's a UTF-8 encoded file, each word + vectors separated by new line.
Vector is space separated.
At the very first line might be dimensions, given as space separated value.
Line 1: 2 4\n
Line 2: word 200.3 4004.4 34.2 22.3\n
Line 3: word2 20.0 4.4 4.2 0.022\n
and so on
For performance reasons, you should prefer add_embedding_path().
"""
if path.endswith('.txt'):
if not os.path.exists(path):
raise Exception("Given word2vec file does not exist: " + path)
f = open(path, 'r')
if not header_with_dimensions and not dimensions:
raise Exception('Either the word2vec file should contain the dimensions as header or it needs to be'
'specified manually using dimensions=[x,y] argument.')
if header_with_dimensions:
line = f.readline()
if ' ' not in line:
raise Exception('Given word2vec file should have in first line the dimensions, e.g.: 1000 200')
dimensions = np.fromstring(line, dtype=np.uint, sep=' ').tolist()
labels = ''
vectors = ''
line_pos = 1 if header_with_dimensions else 0
if len(dimensions) != 2:
raise Exception('dimensions invalid shape. e.g. [200, 32] => 200 rows, 32 cols.')
for line in iter(f.readline, ''):
line_pos += 1
space_pos = line.find(' ')
if -1 == space_pos:
message = 'Given word2vec does not have correct format in line ' + str(line_pos)
message += '\nGot: ' + str(line)
raise Exception(message)
labels += line[:space_pos] + '\n'
vectors += line[space_pos+1:] + ' '
vectors = np.fromstring(vectors, dtype=np.float32, sep=' ').tobytes()
else:
raise Exception("Given word2vec is not a .txt file. Other file formats are not supported.")
info = {
'dimensions': dimensions
}
name = os.path.basename(path)
self._ensure_insight(x)
remote_path = 'aetros/job/insight/'+str(x)+'/embedding/'
with self.git.batch_commit('INSIGHT_EMBEDDING ' + str(x)):
self.git.commit_file('WORD2VEC', remote_path + name + '/tensor.bytes', vectors)
self.git.commit_file('WORD2VEC', remote_path + name + '/metadata.tsv', labels)
self.git.commit_file('WORD2VEC', remote_path + name + '/info.json', simplejson.dumps(info)) | python | def add_embedding_word2vec(self, x, path, dimensions=None, header_with_dimensions=True):
"""
Parse the word2vec file and extracts vectors as bytes and labels as TSV file.
The format is simple: It's a UTF-8 encoded file, each word + vectors separated by new line.
Vector is space separated.
At the very first line might be dimensions, given as space separated value.
Line 1: 2 4\n
Line 2: word 200.3 4004.4 34.2 22.3\n
Line 3: word2 20.0 4.4 4.2 0.022\n
and so on
For performance reasons, you should prefer add_embedding_path().
"""
if path.endswith('.txt'):
if not os.path.exists(path):
raise Exception("Given word2vec file does not exist: " + path)
f = open(path, 'r')
if not header_with_dimensions and not dimensions:
raise Exception('Either the word2vec file should contain the dimensions as header or it needs to be'
'specified manually using dimensions=[x,y] argument.')
if header_with_dimensions:
line = f.readline()
if ' ' not in line:
raise Exception('Given word2vec file should have in first line the dimensions, e.g.: 1000 200')
dimensions = np.fromstring(line, dtype=np.uint, sep=' ').tolist()
labels = ''
vectors = ''
line_pos = 1 if header_with_dimensions else 0
if len(dimensions) != 2:
raise Exception('dimensions invalid shape. e.g. [200, 32] => 200 rows, 32 cols.')
for line in iter(f.readline, ''):
line_pos += 1
space_pos = line.find(' ')
if -1 == space_pos:
message = 'Given word2vec does not have correct format in line ' + str(line_pos)
message += '\nGot: ' + str(line)
raise Exception(message)
labels += line[:space_pos] + '\n'
vectors += line[space_pos+1:] + ' '
vectors = np.fromstring(vectors, dtype=np.float32, sep=' ').tobytes()
else:
raise Exception("Given word2vec is not a .txt file. Other file formats are not supported.")
info = {
'dimensions': dimensions
}
name = os.path.basename(path)
self._ensure_insight(x)
remote_path = 'aetros/job/insight/'+str(x)+'/embedding/'
with self.git.batch_commit('INSIGHT_EMBEDDING ' + str(x)):
self.git.commit_file('WORD2VEC', remote_path + name + '/tensor.bytes', vectors)
self.git.commit_file('WORD2VEC', remote_path + name + '/metadata.tsv', labels)
self.git.commit_file('WORD2VEC', remote_path + name + '/info.json', simplejson.dumps(info)) | [
"def",
"add_embedding_word2vec",
"(",
"self",
",",
"x",
",",
"path",
",",
"dimensions",
"=",
"None",
",",
"header_with_dimensions",
"=",
"True",
")",
":",
"if",
"path",
".",
"endswith",
"(",
"'.txt'",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exi... | Parse the word2vec file and extracts vectors as bytes and labels as TSV file.
The format is simple: It's a UTF-8 encoded file, each word + vectors separated by new line.
Vector is space separated.
At the very first line might be dimensions, given as space separated value.
Line 1: 2 4\n
Line 2: word 200.3 4004.4 34.2 22.3\n
Line 3: word2 20.0 4.4 4.2 0.022\n
and so on
For performance reasons, you should prefer add_embedding_path(). | [
"Parse",
"the",
"word2vec",
"file",
"and",
"extracts",
"vectors",
"as",
"bytes",
"and",
"labels",
"as",
"TSV",
"file",
".",
"The",
"format",
"is",
"simple",
":",
"It",
"s",
"a",
"UTF",
"-",
"8",
"encoded",
"file",
"each",
"word",
"+",
"vectors",
"separ... | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1639-L1703 |
aetros/aetros-cli | aetros/backend.py | JobBackend.add_embedding_path | def add_embedding_path(self, x, dimensions, vectors_path, metadata=None, image_shape=None, image=None):
"""
Adds a new embedding with optional metadata.
Example how to generate vectors based on 2D numpy array:
# 4 vectors, each size of 3
vectors = [
[2.3, 4.0, 33],
[2.4, 4.2, 44],
[2.5, 3.9, 34],
[5.5, 200.2, 66]
]
metadata = [
# header, only necessary when more then on column
# can be anything.
['label', 'count'],
# for each vector from above an entry.
['red', '123'],
['white', '143'],
['yellow', '344'],
['house', '24'],
]
numpy.array(vectors, dtype=numpy.float32).tofile('vectors.bytes')
numpy.savetxt('metadata.tsv', numpy.array(metadata), delimiter='\t', fmt='%s')
job.add_embedding_path([4, 3], 'vectors.bytes', 'metadata.tsv')
Metadata format example:
Label\tCount\n
red\t4\n
yellow\t6\n
:param x: The x axis of the insights.
:param dimensions: 2D List of dimension, e.g [200, 20], means 200 vectors and each vector has size of 20.
:param vectors_path: A path to a floats64 bytes file, no separators, sum(dimensions)*floats64 long.
Example: If dimensions [200, 20] then the tensor file has 200*20 float32 bytes in it
:param metadata: A TSV file. If only one column long (=no tab separator per line), then there's no need for a header.
If you have more than one column, use the first line as header.
:param image_shape: Size of the image of each vector.
:param image: Path to an image sprite.
:return:
"""
if not os.path.exists(vectors_path):
raise Exception("Given embedding vectors file does not exist: " + vectors_path)
if metadata and not os.path.exists(metadata):
raise Exception("Given embedding metadata file does not exist: " + metadata)
name = os.path.basename(vectors_path)
self._ensure_insight(x)
remote_path = 'aetros/job/insight/'+str(x)+'/embedding/'
info = {
'dimensions': dimensions,
'image_shape': image_shape,
'image': os.path.basename(image) if image else None,
}
with self.git.lock_write():
self.git.add_file_path(remote_path + name + '/tensor.bytes', vectors_path)
self.git.add_file_path(remote_path + name + '/metadata.tsv', metadata)
self.git.add_file(remote_path + name + '/info.json', simplejson.dumps(info))
if image:
self.git.add_file(remote_path + name + '/' + os.path.basename(image), image)
self.git.commit_index('INSIGHT_EMBEDDING ' + str(x)) | python | def add_embedding_path(self, x, dimensions, vectors_path, metadata=None, image_shape=None, image=None):
"""
Adds a new embedding with optional metadata.
Example how to generate vectors based on 2D numpy array:
# 4 vectors, each size of 3
vectors = [
[2.3, 4.0, 33],
[2.4, 4.2, 44],
[2.5, 3.9, 34],
[5.5, 200.2, 66]
]
metadata = [
# header, only necessary when more then on column
# can be anything.
['label', 'count'],
# for each vector from above an entry.
['red', '123'],
['white', '143'],
['yellow', '344'],
['house', '24'],
]
numpy.array(vectors, dtype=numpy.float32).tofile('vectors.bytes')
numpy.savetxt('metadata.tsv', numpy.array(metadata), delimiter='\t', fmt='%s')
job.add_embedding_path([4, 3], 'vectors.bytes', 'metadata.tsv')
Metadata format example:
Label\tCount\n
red\t4\n
yellow\t6\n
:param x: The x axis of the insights.
:param dimensions: 2D List of dimension, e.g [200, 20], means 200 vectors and each vector has size of 20.
:param vectors_path: A path to a floats64 bytes file, no separators, sum(dimensions)*floats64 long.
Example: If dimensions [200, 20] then the tensor file has 200*20 float32 bytes in it
:param metadata: A TSV file. If only one column long (=no tab separator per line), then there's no need for a header.
If you have more than one column, use the first line as header.
:param image_shape: Size of the image of each vector.
:param image: Path to an image sprite.
:return:
"""
if not os.path.exists(vectors_path):
raise Exception("Given embedding vectors file does not exist: " + vectors_path)
if metadata and not os.path.exists(metadata):
raise Exception("Given embedding metadata file does not exist: " + metadata)
name = os.path.basename(vectors_path)
self._ensure_insight(x)
remote_path = 'aetros/job/insight/'+str(x)+'/embedding/'
info = {
'dimensions': dimensions,
'image_shape': image_shape,
'image': os.path.basename(image) if image else None,
}
with self.git.lock_write():
self.git.add_file_path(remote_path + name + '/tensor.bytes', vectors_path)
self.git.add_file_path(remote_path + name + '/metadata.tsv', metadata)
self.git.add_file(remote_path + name + '/info.json', simplejson.dumps(info))
if image:
self.git.add_file(remote_path + name + '/' + os.path.basename(image), image)
self.git.commit_index('INSIGHT_EMBEDDING ' + str(x)) | [
"def",
"add_embedding_path",
"(",
"self",
",",
"x",
",",
"dimensions",
",",
"vectors_path",
",",
"metadata",
"=",
"None",
",",
"image_shape",
"=",
"None",
",",
"image",
"=",
"None",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"vectors... | Adds a new embedding with optional metadata.
Example how to generate vectors based on 2D numpy array:
# 4 vectors, each size of 3
vectors = [
[2.3, 4.0, 33],
[2.4, 4.2, 44],
[2.5, 3.9, 34],
[5.5, 200.2, 66]
]
metadata = [
# header, only necessary when more then on column
# can be anything.
['label', 'count'],
# for each vector from above an entry.
['red', '123'],
['white', '143'],
['yellow', '344'],
['house', '24'],
]
numpy.array(vectors, dtype=numpy.float32).tofile('vectors.bytes')
numpy.savetxt('metadata.tsv', numpy.array(metadata), delimiter='\t', fmt='%s')
job.add_embedding_path([4, 3], 'vectors.bytes', 'metadata.tsv')
Metadata format example:
Label\tCount\n
red\t4\n
yellow\t6\n
:param x: The x axis of the insights.
:param dimensions: 2D List of dimension, e.g [200, 20], means 200 vectors and each vector has size of 20.
:param vectors_path: A path to a floats64 bytes file, no separators, sum(dimensions)*floats64 long.
Example: If dimensions [200, 20] then the tensor file has 200*20 float32 bytes in it
:param metadata: A TSV file. If only one column long (=no tab separator per line), then there's no need for a header.
If you have more than one column, use the first line as header.
:param image_shape: Size of the image of each vector.
:param image: Path to an image sprite.
:return: | [
"Adds",
"a",
"new",
"embedding",
"with",
"optional",
"metadata",
".",
"Example",
"how",
"to",
"generate",
"vectors",
"based",
"on",
"2D",
"numpy",
"array",
":"
] | train | https://github.com/aetros/aetros-cli/blob/a2a1f38d6af1660e1e2680c7d413ec2aef45faab/aetros/backend.py#L1705-L1779 |
codebynumbers/ftpretty | ftpretty.py | split_file_info | def split_file_info(fileinfo):
""" Parse sane directory output usually ls -l
Adapted from https://gist.github.com/tobiasoberrauch/2942716

Returns a list of dicts, one per input line, with keys: directory,
perms, items, owner, group, size (int), date, time, year, name and a
parsed ``datetime`` object.
"""
current_year = datetime.datetime.now().strftime('%Y')
files = []
for line in fileinfo:
# re.split with capturing groups returns ['', group1..group9, ''],
# so the captured fields live at parts[1]..parts[9].
parts = re.split(
r'^([\-dbclps])' + # Directory flag [1]
r'([\-rwxs]{9})\s+' + # Permissions [2]
r'(\d+)\s+' + # Number of items [3]
r'([a-zA-Z0-9_-]+)\s+' + # File owner [4]
r'([a-zA-Z0-9_-]+)\s+' + # File group [5]
r'(\d+)\s+' + # File size in bytes [6]
r'(\w{3}\s+\d{1,2})\s+' + # 3-char month and 1/2-char day of the month [7]
r'(\d{1,2}:\d{1,2}|\d{4})\s+' + # Time (HH:MM, recent files) or 4-digit year [8]
r'(.+)$', # File/directory name [9]
line
)
date = parts[7]
# ls prints either a time (recent entries) or a year; fill in whichever
# half is missing with a sensible default.
time = parts[8] if ':' in parts[8] else '00:00'
year = parts[8] if ':' not in parts[8] else current_year
dt_obj = parser.parse("%s %s %s" % (date, year, time))
files.append({
'directory': parts[1],
'perms': parts[2],
'items': parts[3],
'owner': parts[4],
'group': parts[5],
'size': int(parts[6]),
'date': date,
'time': time,
'year': year,
'name': parts[9],
'datetime': dt_obj
})
return files | python | def split_file_info(fileinfo):
""" Parse sane directory output usually ls -l
Adapted from https://gist.github.com/tobiasoberrauch/2942716
"""
current_year = datetime.datetime.now().strftime('%Y')
files = []
for line in fileinfo:
parts = re.split(
r'^([\-dbclps])' + # Directory flag [1]
r'([\-rwxs]{9})\s+' + # Permissions [2]
r'(\d+)\s+' + # Number of items [3]
r'([a-zA-Z0-9_-]+)\s+' + # File owner [4]
r'([a-zA-Z0-9_-]+)\s+' + # File group [5]
r'(\d+)\s+' + # File size in bytes [6]
r'(\w{3}\s+\d{1,2})\s+' + # 3-char month and 1/2-char day of the month [7]
r'(\d{1,2}:\d{1,2}|\d{4})\s+' + # Time or year (need to check conditions) [+= 7]
r'(.+)$', # File/directory name [8]
line
)
date = parts[7]
time = parts[8] if ':' in parts[8] else '00:00'
year = parts[8] if ':' not in parts[8] else current_year
dt_obj = parser.parse("%s %s %s" % (date, year, time))
files.append({
'directory': parts[1],
'perms': parts[2],
'items': parts[3],
'owner': parts[4],
'group': parts[5],
'size': int(parts[6]),
'date': date,
'time': time,
'year': year,
'name': parts[9],
'datetime': dt_obj
})
return files | [
"def",
"split_file_info",
"(",
"fileinfo",
")",
":",
"current_year",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"'%Y'",
")",
"files",
"=",
"[",
"]",
"for",
"line",
"in",
"fileinfo",
":",
"parts",
"=",
"re",
".",
"sp... | Parse sane directory output usually ls -l
Adapted from https://gist.github.com/tobiasoberrauch/2942716 | [
"Parse",
"sane",
"directory",
"output",
"usually",
"ls",
"-",
"l",
"Adapted",
"from",
"https",
":",
"//",
"gist",
".",
"github",
".",
"com",
"/",
"tobiasoberrauch",
"/",
"2942716"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L225-L263 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.get | def get(self, remote, local=None):
""" Gets the file from FTP server
local can be:
a file: opened for writing, left open
a string: path to output file
None: contents are returned
"""
if isinstance(local, file_type): # open file, leave open
local_file = local
elif local is None: # return string
local_file = buffer_type()
else: # path to file, open, write/close return None
local_file = open(local, 'wb')
# Stream the remote file directly into whichever target was chosen above.
self.conn.retrbinary("RETR %s" % remote, local_file.write)
if isinstance(local, file_type):
pass # caller owns the handle, so it is deliberately left open
elif local is None:
contents = local_file.getvalue()
local_file.close()
return contents
else:
local_file.close()
return None | python | def get(self, remote, local=None):
""" Gets the file from FTP server
local can be:
a file: opened for writing, left open
a string: path to output file
None: contents are returned
"""
if isinstance(local, file_type): # open file, leave open
local_file = local
elif local is None: # return string
local_file = buffer_type()
else: # path to file, open, write/close return None
local_file = open(local, 'wb')
self.conn.retrbinary("RETR %s" % remote, local_file.write)
if isinstance(local, file_type):
pass
elif local is None:
contents = local_file.getvalue()
local_file.close()
return contents
else:
local_file.close()
return None | [
"def",
"get",
"(",
"self",
",",
"remote",
",",
"local",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"local",
",",
"file_type",
")",
":",
"# open file, leave open",
"local_file",
"=",
"local",
"elif",
"local",
"is",
"None",
":",
"# return string",
"loca... | Gets the file from FTP server
local can be:
a file: opened for writing, left open
a string: path to output file
None: contents are returned | [
"Gets",
"the",
"file",
"from",
"FTP",
"server"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L58-L84 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.put | def put(self, local, remote, contents=None, quiet=False):
""" Puts a local file (or contents) on to the FTP server
local can be:
a string: path to input file
a file: opened for reading
None: contents are pushed
"""
remote_dir = os.path.dirname(remote)
# A trailing slash on ``remote`` means "keep the local basename".
remote_file = os.path.basename(local)\
if remote.endswith('/') else os.path.basename(remote)
if contents:
# local is ignored if contents is set
local_file = buffer_type(contents)
elif isinstance(local, file_type):
local_file = local
else:
local_file = open(local, 'rb')
# Remember where we are so the working directory can be restored below.
current = self.conn.pwd()
self.descend(remote_dir, force=True)
size = 0
try:
self.conn.storbinary('STOR %s' % remote_file, local_file)
size = self.conn.size(remote_file)
except: # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit when quiet=True
if not quiet:
raise
finally:
# Always close the source and restore the original working directory.
local_file.close()
self.conn.cwd(current)
return size | python | def put(self, local, remote, contents=None, quiet=False):
""" Puts a local file (or contents) on to the FTP server
local can be:
a string: path to input file
a file: opened for reading
None: contents are pushed
"""
remote_dir = os.path.dirname(remote)
remote_file = os.path.basename(local)\
if remote.endswith('/') else os.path.basename(remote)
if contents:
# local is ignored if contents is set
local_file = buffer_type(contents)
elif isinstance(local, file_type):
local_file = local
else:
local_file = open(local, 'rb')
current = self.conn.pwd()
self.descend(remote_dir, force=True)
size = 0
try:
self.conn.storbinary('STOR %s' % remote_file, local_file)
size = self.conn.size(remote_file)
except:
if not quiet:
raise
finally:
local_file.close()
self.conn.cwd(current)
return size | [
"def",
"put",
"(",
"self",
",",
"local",
",",
"remote",
",",
"contents",
"=",
"None",
",",
"quiet",
"=",
"False",
")",
":",
"remote_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"remote",
")",
"remote_file",
"=",
"os",
".",
"path",
".",
"basen... | Puts a local file (or contents) on to the FTP server
local can be:
a string: path to inpit file
a file: opened for reading
None: contents are pushed | [
"Puts",
"a",
"local",
"file",
"(",
"or",
"contents",
")",
"on",
"to",
"the",
"FTP",
"server"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L86-L118 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.upload_tree | def upload_tree(self, src, dst, ignore=None):
"""Recursively upload a directory tree.
Although similar to shutil.copytree we don't follow symlinks.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
try:
self.conn.mkd(dst)
except error_perm:
# Most likely the directory already exists on the server.
pass
errors = []
for name in names:
if name in ignored_names:
continue
src_name = os.path.join(src, name)
dst_name = os.path.join(dst, name)
try:
if os.path.islink(src_name):
# Symlinks are deliberately skipped (not followed).
pass
elif os.path.isdir(src_name):
self.upload_tree(src_name, dst_name, ignore)
else:
# Will raise a SpecialFileError for unsupported file types
self.put(src_name, dst_name)
except Exception as why:
# NOTE(review): errors are collected but never raised or returned,
# unlike shutil.copytree which raises shutil.Error(errors).
errors.append((src_name, dst_name, str(why)))
return dst | python | def upload_tree(self, src, dst, ignore=None):
"""Recursively upload a directory tree.
Although similar to shutil.copytree we don't follow symlinks.
"""
names = os.listdir(src)
if ignore is not None:
ignored_names = ignore(src, names)
else:
ignored_names = set()
try:
self.conn.mkd(dst)
except error_perm:
pass
errors = []
for name in names:
if name in ignored_names:
continue
src_name = os.path.join(src, name)
dst_name = os.path.join(dst, name)
try:
if os.path.islink(src_name):
pass
elif os.path.isdir(src_name):
self.upload_tree(src_name, dst_name, ignore)
else:
# Will raise a SpecialFileError for unsupported file types
self.put(src_name, dst_name)
except Exception as why:
errors.append((src_name, dst_name, str(why)))
return dst | [
"def",
"upload_tree",
"(",
"self",
",",
"src",
",",
"dst",
",",
"ignore",
"=",
"None",
")",
":",
"names",
"=",
"os",
".",
"listdir",
"(",
"src",
")",
"if",
"ignore",
"is",
"not",
"None",
":",
"ignored_names",
"=",
"ignore",
"(",
"src",
",",
"names"... | Recursively upload a directory tree.
Although similar to shutil.copytree we don't follow symlinks. | [
"Recursively",
"upload",
"a",
"directory",
"tree",
"."
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L120-L153 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.list | def list(self, remote='.', extra=False, remove_relative_paths=False):
""" Return directory list

With extra=True, ``dir`` output is captured line-by-line via
_collector and parsed into dicts by split_file_info; otherwise a plain
name list from NLST is returned.
"""
if extra:
self.tmp_output = []
self.conn.dir(remote, self._collector)
directory_list = split_file_info(self.tmp_output)
else:
directory_list = self.conn.nlst(remote)
if remove_relative_paths:
# ``list`` here resolves to the builtin, not this method.
return list(filter(self.is_not_relative_path, directory_list))
return directory_list | python | def list(self, remote='.', extra=False, remove_relative_paths=False):
""" Return directory list """
if extra:
self.tmp_output = []
self.conn.dir(remote, self._collector)
directory_list = split_file_info(self.tmp_output)
else:
directory_list = self.conn.nlst(remote)
if remove_relative_paths:
return list(filter(self.is_not_relative_path, directory_list))
return directory_list | [
"def",
"list",
"(",
"self",
",",
"remote",
"=",
"'.'",
",",
"extra",
"=",
"False",
",",
"remove_relative_paths",
"=",
"False",
")",
":",
"if",
"extra",
":",
"self",
".",
"tmp_output",
"=",
"[",
"]",
"self",
".",
"conn",
".",
"dir",
"(",
"remote",
"... | Return directory list | [
"Return",
"directory",
"list"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L155-L167 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.descend | def descend(self, remote, force=False):
""" Descend, possibly creating directories as needed """
remote_dirs = remote.split('/')
for directory in remote_dirs:
try:
self.conn.cwd(directory)
except Exception:
# cwd failed; with force=True create the directory and enter it,
# otherwise the failing component is silently skipped.
if force:
self.conn.mkd(directory)
self.conn.cwd(directory)
return self.conn.pwd() | python | def descend(self, remote, force=False):
""" Descend, possibly creating directories as needed """
remote_dirs = remote.split('/')
for directory in remote_dirs:
try:
self.conn.cwd(directory)
except Exception:
if force:
self.conn.mkd(directory)
self.conn.cwd(directory)
return self.conn.pwd() | [
"def",
"descend",
"(",
"self",
",",
"remote",
",",
"force",
"=",
"False",
")",
":",
"remote_dirs",
"=",
"remote",
".",
"split",
"(",
"'/'",
")",
"for",
"directory",
"in",
"remote_dirs",
":",
"try",
":",
"self",
".",
"conn",
".",
"cwd",
"(",
"director... | Descend, possibly creating directories as needed | [
"Descend",
"possibly",
"creating",
"directories",
"as",
"needed"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L175-L185 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.delete | def delete(self, remote):
""" Delete a file from server """
try:
self.conn.delete(remote)
except Exception:
# Report failure via the return value instead of raising.
return False
else:
return True | python | def delete(self, remote):
""" Delete a file from server """
try:
self.conn.delete(remote)
except Exception:
return False
else:
return True | [
"def",
"delete",
"(",
"self",
",",
"remote",
")",
":",
"try",
":",
"self",
".",
"conn",
".",
"delete",
"(",
"remote",
")",
"except",
"Exception",
":",
"return",
"False",
"else",
":",
"return",
"True"
] | Delete a file from server | [
"Delete",
"a",
"file",
"from",
"server"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L187-L194 |
codebynumbers/ftpretty | ftpretty.py | ftpretty.cd | def cd(self, remote):
""" Change working directory on server """
try:
self.conn.cwd(remote)
except Exception:
# Report failure via the return value instead of raising.
return False
else:
return self.pwd() | python | def cd(self, remote):
""" Change working directory on server """
try:
self.conn.cwd(remote)
except Exception:
return False
else:
return self.pwd() | [
"def",
"cd",
"(",
"self",
",",
"remote",
")",
":",
"try",
":",
"self",
".",
"conn",
".",
"cwd",
"(",
"remote",
")",
"except",
"Exception",
":",
"return",
"False",
"else",
":",
"return",
"self",
".",
"pwd",
"(",
")"
] | Change working directory on server | [
"Change",
"working",
"directory",
"on",
"server"
] | train | https://github.com/codebynumbers/ftpretty/blob/5ee6e2cc679199ff52d1cd2ed1b0613f12aa6f67/ftpretty.py#L196-L203 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.