repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
jssimporter/python-jss | jss/jssobject.py | JSSObject._set_xml_from_keys | def _set_xml_from_keys(self, root, item, **kwargs):
"""Create SubElements of root with kwargs.
Args:
root: Element to add SubElements to.
item: Tuple key/value pair from self.data_keys to add.
kwargs:
For each item in self.data_keys, if it has a
corresponding kwarg, create a SubElement at root with
the kwarg's value.
Int and bool values will be cast to string. (Int 10,
bool False become string values "10" and "false").
Dicts will be recursively added to their key's Element.
"""
key, val = item
target_key = root.find(key)
if target_key is None:
target_key = ElementTree.SubElement(root, key)
if isinstance(val, dict):
for dict_item in val.items():
self._set_xml_from_keys(target_key, dict_item, **kwargs)
return
# Convert kwarg data to the appropriate string.
if key in kwargs:
kwarg = kwargs[key]
if isinstance(kwarg, bool):
kwargs[key] = str(kwargs[key]).lower()
elif kwarg is None:
kwargs[key] = ""
elif isinstance(kwarg, int):
kwargs[key] = str(kwargs[key])
elif isinstance(kwarg, JSSObject):
kwargs[key] = kwargs[key].name
target_key.text = kwargs.get(key, val) | python | def _set_xml_from_keys(self, root, item, **kwargs):
"""Create SubElements of root with kwargs.
Args:
root: Element to add SubElements to.
item: Tuple key/value pair from self.data_keys to add.
kwargs:
For each item in self.data_keys, if it has a
corresponding kwarg, create a SubElement at root with
the kwarg's value.
Int and bool values will be cast to string. (Int 10,
bool False become string values "10" and "false").
Dicts will be recursively added to their key's Element.
"""
key, val = item
target_key = root.find(key)
if target_key is None:
target_key = ElementTree.SubElement(root, key)
if isinstance(val, dict):
for dict_item in val.items():
self._set_xml_from_keys(target_key, dict_item, **kwargs)
return
# Convert kwarg data to the appropriate string.
if key in kwargs:
kwarg = kwargs[key]
if isinstance(kwarg, bool):
kwargs[key] = str(kwargs[key]).lower()
elif kwarg is None:
kwargs[key] = ""
elif isinstance(kwarg, int):
kwargs[key] = str(kwargs[key])
elif isinstance(kwarg, JSSObject):
kwargs[key] = kwargs[key].name
target_key.text = kwargs.get(key, val) | [
"def",
"_set_xml_from_keys",
"(",
"self",
",",
"root",
",",
"item",
",",
"*",
"*",
"kwargs",
")",
":",
"key",
",",
"val",
"=",
"item",
"target_key",
"=",
"root",
".",
"find",
"(",
"key",
")",
"if",
"target_key",
"is",
"None",
":",
"target_key",
"=",
... | Create SubElements of root with kwargs.
Args:
root: Element to add SubElements to.
item: Tuple key/value pair from self.data_keys to add.
kwargs:
For each item in self.data_keys, if it has a
corresponding kwarg, create a SubElement at root with
the kwarg's value.
Int and bool values will be cast to string. (Int 10,
bool False become string values "10" and "false").
Dicts will be recursively added to their key's Element. | [
"Create",
"SubElements",
"of",
"root",
"with",
"kwargs",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L190-L228 | train | 27,800 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.get_url | def get_url(cls, data):
"""Return the URL for a get request based on data type.
Args:
data: Accepts multiple types.
Int: Generate URL to object with data ID.
None: Get basic object GET URL (list).
String/Unicode: Search for <data> with default_search,
usually "name".
String/Unicode with "=": Other searches, for example
Computers can be search by uuid with:
"udid=E79E84CB-3227-5C69-A32C-6C45C2E77DF5"
See the class "search_types" attribute for options.
"""
try:
data = int(data)
except (ValueError, TypeError):
pass
if isinstance(data, int):
return "%s%s%s" % (cls._url, cls.id_url, data)
elif data is None:
return cls._url
elif isinstance(data, basestring):
if "=" in data:
key, value = data.split("=") # pylint: disable=no-member
if key in cls.search_types:
return "%s%s%s" % (cls._url, cls.search_types[key], value)
else:
raise JSSUnsupportedSearchMethodError(
"This object cannot be queried by %s." % key)
else:
return "%s%s%s" % (cls._url,
cls.search_types[cls.default_search], data)
else:
raise ValueError | python | def get_url(cls, data):
"""Return the URL for a get request based on data type.
Args:
data: Accepts multiple types.
Int: Generate URL to object with data ID.
None: Get basic object GET URL (list).
String/Unicode: Search for <data> with default_search,
usually "name".
String/Unicode with "=": Other searches, for example
Computers can be search by uuid with:
"udid=E79E84CB-3227-5C69-A32C-6C45C2E77DF5"
See the class "search_types" attribute for options.
"""
try:
data = int(data)
except (ValueError, TypeError):
pass
if isinstance(data, int):
return "%s%s%s" % (cls._url, cls.id_url, data)
elif data is None:
return cls._url
elif isinstance(data, basestring):
if "=" in data:
key, value = data.split("=") # pylint: disable=no-member
if key in cls.search_types:
return "%s%s%s" % (cls._url, cls.search_types[key], value)
else:
raise JSSUnsupportedSearchMethodError(
"This object cannot be queried by %s." % key)
else:
return "%s%s%s" % (cls._url,
cls.search_types[cls.default_search], data)
else:
raise ValueError | [
"def",
"get_url",
"(",
"cls",
",",
"data",
")",
":",
"try",
":",
"data",
"=",
"int",
"(",
"data",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"pass",
"if",
"isinstance",
"(",
"data",
",",
"int",
")",
":",
"return",
"\"%s%s%s\"",
"... | Return the URL for a get request based on data type.
Args:
data: Accepts multiple types.
Int: Generate URL to object with data ID.
None: Get basic object GET URL (list).
String/Unicode: Search for <data> with default_search,
usually "name".
String/Unicode with "=": Other searches, for example
Computers can be search by uuid with:
"udid=E79E84CB-3227-5C69-A32C-6C45C2E77DF5"
See the class "search_types" attribute for options. | [
"Return",
"the",
"URL",
"for",
"a",
"get",
"request",
"based",
"on",
"data",
"type",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L239-L273 | train | 27,801 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.url | def url(self):
"""Return the path subcomponent of the url to this object.
For example: "/computers/id/451"
"""
if self.id:
url = "%s%s%s" % (self._url, self.id_url, self.id)
else:
url = None
return url | python | def url(self):
"""Return the path subcomponent of the url to this object.
For example: "/computers/id/451"
"""
if self.id:
url = "%s%s%s" % (self._url, self.id_url, self.id)
else:
url = None
return url | [
"def",
"url",
"(",
"self",
")",
":",
"if",
"self",
".",
"id",
":",
"url",
"=",
"\"%s%s%s\"",
"%",
"(",
"self",
".",
"_url",
",",
"self",
".",
"id_url",
",",
"self",
".",
"id",
")",
"else",
":",
"url",
"=",
"None",
"return",
"url"
] | Return the path subcomponent of the url to this object.
For example: "/computers/id/451" | [
"Return",
"the",
"path",
"subcomponent",
"of",
"the",
"url",
"to",
"this",
"object",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L281-L290 | train | 27,802 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.delete | def delete(self, data=None):
"""Delete this object from the JSS."""
if not self.can_delete:
raise JSSMethodNotAllowedError(self.__class__.__name__)
if data:
self.jss.delete(self.url, data)
else:
self.jss.delete(self.url) | python | def delete(self, data=None):
"""Delete this object from the JSS."""
if not self.can_delete:
raise JSSMethodNotAllowedError(self.__class__.__name__)
if data:
self.jss.delete(self.url, data)
else:
self.jss.delete(self.url) | [
"def",
"delete",
"(",
"self",
",",
"data",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"can_delete",
":",
"raise",
"JSSMethodNotAllowedError",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
"if",
"data",
":",
"self",
".",
"jss",
".",
"delete... | Delete this object from the JSS. | [
"Delete",
"this",
"object",
"from",
"the",
"JSS",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L301-L308 | train | 27,803 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.save | def save(self):
"""Update or create a new object on the JSS.
If this object is not yet on the JSS, this method will create
a new object with POST, otherwise, it will try to update the
existing object with PUT.
Data validation is up to the client; The JSS in most cases will
at least give you some hints as to what is invalid.
"""
# Object probably exists if it has an ID (user can't assign
# one). The only objects that don't have an ID are those that
# cannot list.
if self.can_put and (not self.can_list or self.id):
# The JSS will reject PUT requests for objects that do not have
# a category. The JSS assigns a name of "No category assigned",
# which it will reject. Therefore, if that is the category
# name, changed it to "", which is accepted.
categories = [elem for elem in self.findall("category")]
categories.extend([elem for elem in self.findall("category/name")])
for cat_tag in categories:
if cat_tag.text == "No category assigned":
cat_tag.text = ""
try:
self.jss.put(self.url, self)
updated_data = self.jss.get(self.url)
except JSSPutError as put_error:
# Something when wrong.
raise JSSPutError(put_error)
elif self.can_post:
url = self.get_post_url()
try:
updated_data = self.jss.post(self.__class__, url, self)
except JSSPostError as err:
raise JSSPostError(err)
else:
raise JSSMethodNotAllowedError(self.__class__.__name__)
# Replace current instance's data with new, JSS-validated data.
self.clear()
for child in updated_data.getchildren():
self._children.append(child) | python | def save(self):
"""Update or create a new object on the JSS.
If this object is not yet on the JSS, this method will create
a new object with POST, otherwise, it will try to update the
existing object with PUT.
Data validation is up to the client; The JSS in most cases will
at least give you some hints as to what is invalid.
"""
# Object probably exists if it has an ID (user can't assign
# one). The only objects that don't have an ID are those that
# cannot list.
if self.can_put and (not self.can_list or self.id):
# The JSS will reject PUT requests for objects that do not have
# a category. The JSS assigns a name of "No category assigned",
# which it will reject. Therefore, if that is the category
# name, changed it to "", which is accepted.
categories = [elem for elem in self.findall("category")]
categories.extend([elem for elem in self.findall("category/name")])
for cat_tag in categories:
if cat_tag.text == "No category assigned":
cat_tag.text = ""
try:
self.jss.put(self.url, self)
updated_data = self.jss.get(self.url)
except JSSPutError as put_error:
# Something when wrong.
raise JSSPutError(put_error)
elif self.can_post:
url = self.get_post_url()
try:
updated_data = self.jss.post(self.__class__, url, self)
except JSSPostError as err:
raise JSSPostError(err)
else:
raise JSSMethodNotAllowedError(self.__class__.__name__)
# Replace current instance's data with new, JSS-validated data.
self.clear()
for child in updated_data.getchildren():
self._children.append(child) | [
"def",
"save",
"(",
"self",
")",
":",
"# Object probably exists if it has an ID (user can't assign",
"# one). The only objects that don't have an ID are those that",
"# cannot list.",
"if",
"self",
".",
"can_put",
"and",
"(",
"not",
"self",
".",
"can_list",
"or",
"self",
"... | Update or create a new object on the JSS.
If this object is not yet on the JSS, this method will create
a new object with POST, otherwise, it will try to update the
existing object with PUT.
Data validation is up to the client; The JSS in most cases will
at least give you some hints as to what is invalid. | [
"Update",
"or",
"create",
"a",
"new",
"object",
"on",
"the",
"JSS",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L310-L352 | train | 27,804 |
jssimporter/python-jss | jss/jssobject.py | JSSObject._handle_location | def _handle_location(self, location):
"""Return an element located at location with flexible args.
Args:
location: String xpath to use in an Element.find search OR
an Element (which is simply returned).
Returns:
The found Element.
Raises:
ValueError if the location is a string that results in a
find of None.
"""
if not isinstance(location, ElementTree.Element):
element = self.find(location)
if element is None:
raise ValueError("Invalid path!")
else:
element = location
return element | python | def _handle_location(self, location):
"""Return an element located at location with flexible args.
Args:
location: String xpath to use in an Element.find search OR
an Element (which is simply returned).
Returns:
The found Element.
Raises:
ValueError if the location is a string that results in a
find of None.
"""
if not isinstance(location, ElementTree.Element):
element = self.find(location)
if element is None:
raise ValueError("Invalid path!")
else:
element = location
return element | [
"def",
"_handle_location",
"(",
"self",
",",
"location",
")",
":",
"if",
"not",
"isinstance",
"(",
"location",
",",
"ElementTree",
".",
"Element",
")",
":",
"element",
"=",
"self",
".",
"find",
"(",
"location",
")",
"if",
"element",
"is",
"None",
":",
... | Return an element located at location with flexible args.
Args:
location: String xpath to use in an Element.find search OR
an Element (which is simply returned).
Returns:
The found Element.
Raises:
ValueError if the location is a string that results in a
find of None. | [
"Return",
"an",
"element",
"located",
"at",
"location",
"with",
"flexible",
"args",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L369-L389 | train | 27,805 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.set_bool | def set_bool(self, location, value):
"""Set a boolean value.
Casper booleans in XML are string literals of "true" or "false".
This method sets the text value of "location" to the correct
string representation of a boolean.
Args:
location: Element or a string path argument to find()
value: Boolean or string value to set. (Accepts
"true"/"True"/"TRUE"; all other strings are False).
"""
element = self._handle_location(location)
if isinstance(value, basestring):
value = True if value.upper() == "TRUE" else False
elif not isinstance(value, bool):
raise ValueError
if value is True:
element.text = "true"
else:
element.text = "false" | python | def set_bool(self, location, value):
"""Set a boolean value.
Casper booleans in XML are string literals of "true" or "false".
This method sets the text value of "location" to the correct
string representation of a boolean.
Args:
location: Element or a string path argument to find()
value: Boolean or string value to set. (Accepts
"true"/"True"/"TRUE"; all other strings are False).
"""
element = self._handle_location(location)
if isinstance(value, basestring):
value = True if value.upper() == "TRUE" else False
elif not isinstance(value, bool):
raise ValueError
if value is True:
element.text = "true"
else:
element.text = "false" | [
"def",
"set_bool",
"(",
"self",
",",
"location",
",",
"value",
")",
":",
"element",
"=",
"self",
".",
"_handle_location",
"(",
"location",
")",
"if",
"isinstance",
"(",
"value",
",",
"basestring",
")",
":",
"value",
"=",
"True",
"if",
"value",
".",
"up... | Set a boolean value.
Casper booleans in XML are string literals of "true" or "false".
This method sets the text value of "location" to the correct
string representation of a boolean.
Args:
location: Element or a string path argument to find()
value: Boolean or string value to set. (Accepts
"true"/"True"/"TRUE"; all other strings are False). | [
"Set",
"a",
"boolean",
"value",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L391-L411 | train | 27,806 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.add_object_to_path | def add_object_to_path(self, obj, location):
"""Add an object of type JSSContainerObject to location.
This method determines the correct list representation of an
object and adds it to "location". For example, add a Computer to
a ComputerGroup. The ComputerGroup will not have a child
Computers/Computer tag with subelements "name" and "id".
Args:
obj: A JSSContainerObject subclass.
location: Element or a string path argument to find()
Returns:
Element for the object just added.
"""
location = self._handle_location(location)
location.append(obj.as_list_data())
results = [item for item in location.getchildren() if
item.findtext("id") == obj.id][0]
return results | python | def add_object_to_path(self, obj, location):
"""Add an object of type JSSContainerObject to location.
This method determines the correct list representation of an
object and adds it to "location". For example, add a Computer to
a ComputerGroup. The ComputerGroup will not have a child
Computers/Computer tag with subelements "name" and "id".
Args:
obj: A JSSContainerObject subclass.
location: Element or a string path argument to find()
Returns:
Element for the object just added.
"""
location = self._handle_location(location)
location.append(obj.as_list_data())
results = [item for item in location.getchildren() if
item.findtext("id") == obj.id][0]
return results | [
"def",
"add_object_to_path",
"(",
"self",
",",
"obj",
",",
"location",
")",
":",
"location",
"=",
"self",
".",
"_handle_location",
"(",
"location",
")",
"location",
".",
"append",
"(",
"obj",
".",
"as_list_data",
"(",
")",
")",
"results",
"=",
"[",
"item... | Add an object of type JSSContainerObject to location.
This method determines the correct list representation of an
object and adds it to "location". For example, add a Computer to
a ComputerGroup. The ComputerGroup will not have a child
Computers/Computer tag with subelements "name" and "id".
Args:
obj: A JSSContainerObject subclass.
location: Element or a string path argument to find()
Returns:
Element for the object just added. | [
"Add",
"an",
"object",
"of",
"type",
"JSSContainerObject",
"to",
"location",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L413-L432 | train | 27,807 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.remove_object_from_list | def remove_object_from_list(self, obj, list_element):
"""Remove an object from a list element.
Args:
obj: Accepts JSSObjects, id's, and names
list_element: Accepts an Element or a string path to that
element
"""
list_element = self._handle_location(list_element)
if isinstance(obj, JSSObject):
results = [item for item in list_element.getchildren() if
item.findtext("id") == obj.id]
elif isinstance(obj, (int, basestring)):
results = [item for item in list_element.getchildren() if
item.findtext("id") == str(obj) or
item.findtext("name") == obj]
if len(results) == 1:
list_element.remove(results[0])
elif len(results) > 1:
raise ValueError("There is more than one matching object at that "
"path!") | python | def remove_object_from_list(self, obj, list_element):
"""Remove an object from a list element.
Args:
obj: Accepts JSSObjects, id's, and names
list_element: Accepts an Element or a string path to that
element
"""
list_element = self._handle_location(list_element)
if isinstance(obj, JSSObject):
results = [item for item in list_element.getchildren() if
item.findtext("id") == obj.id]
elif isinstance(obj, (int, basestring)):
results = [item for item in list_element.getchildren() if
item.findtext("id") == str(obj) or
item.findtext("name") == obj]
if len(results) == 1:
list_element.remove(results[0])
elif len(results) > 1:
raise ValueError("There is more than one matching object at that "
"path!") | [
"def",
"remove_object_from_list",
"(",
"self",
",",
"obj",
",",
"list_element",
")",
":",
"list_element",
"=",
"self",
".",
"_handle_location",
"(",
"list_element",
")",
"if",
"isinstance",
"(",
"obj",
",",
"JSSObject",
")",
":",
"results",
"=",
"[",
"item",... | Remove an object from a list element.
Args:
obj: Accepts JSSObjects, id's, and names
list_element: Accepts an Element or a string path to that
element | [
"Remove",
"an",
"object",
"from",
"a",
"list",
"element",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L434-L456 | train | 27,808 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.from_file | def from_file(cls, jss, filename):
"""Create a new JSSObject from an external XML file.
Args:
jss: A JSS object.
filename: String path to an XML file.
"""
tree = ElementTree.parse(filename)
root = tree.getroot()
return cls(jss, root) | python | def from_file(cls, jss, filename):
"""Create a new JSSObject from an external XML file.
Args:
jss: A JSS object.
filename: String path to an XML file.
"""
tree = ElementTree.parse(filename)
root = tree.getroot()
return cls(jss, root) | [
"def",
"from_file",
"(",
"cls",
",",
"jss",
",",
"filename",
")",
":",
"tree",
"=",
"ElementTree",
".",
"parse",
"(",
"filename",
")",
"root",
"=",
"tree",
".",
"getroot",
"(",
")",
"return",
"cls",
"(",
"jss",
",",
"root",
")"
] | Create a new JSSObject from an external XML file.
Args:
jss: A JSS object.
filename: String path to an XML file. | [
"Create",
"a",
"new",
"JSSObject",
"from",
"an",
"external",
"XML",
"file",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L472-L481 | train | 27,809 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.from_string | def from_string(cls, jss, xml_string):
"""Creates a new JSSObject from an UTF-8 XML string.
Args:
jss: A JSS object.
xml_string: String XML file data used to create object.
"""
root = ElementTree.fromstring(xml_string.encode('utf-8'))
return cls(jss, root) | python | def from_string(cls, jss, xml_string):
"""Creates a new JSSObject from an UTF-8 XML string.
Args:
jss: A JSS object.
xml_string: String XML file data used to create object.
"""
root = ElementTree.fromstring(xml_string.encode('utf-8'))
return cls(jss, root) | [
"def",
"from_string",
"(",
"cls",
",",
"jss",
",",
"xml_string",
")",
":",
"root",
"=",
"ElementTree",
".",
"fromstring",
"(",
"xml_string",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"cls",
"(",
"jss",
",",
"root",
")"
] | Creates a new JSSObject from an UTF-8 XML string.
Args:
jss: A JSS object.
xml_string: String XML file data used to create object. | [
"Creates",
"a",
"new",
"JSSObject",
"from",
"an",
"UTF",
"-",
"8",
"XML",
"string",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L484-L492 | train | 27,810 |
jssimporter/python-jss | jss/jssobject.py | JSSObject.to_file | def to_file(self, path):
"""Write object XML to path.
Args:
path: String file path to the file you wish to (over)write.
Path will have ~ expanded prior to opening.
"""
with open(os.path.expanduser(path), "w") as ofile:
ofile.write(self.__repr__()) | python | def to_file(self, path):
"""Write object XML to path.
Args:
path: String file path to the file you wish to (over)write.
Path will have ~ expanded prior to opening.
"""
with open(os.path.expanduser(path), "w") as ofile:
ofile.write(self.__repr__()) | [
"def",
"to_file",
"(",
"self",
",",
"path",
")",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"path",
")",
",",
"\"w\"",
")",
"as",
"ofile",
":",
"ofile",
".",
"write",
"(",
"self",
".",
"__repr__",
"(",
")",
")"
] | Write object XML to path.
Args:
path: String file path to the file you wish to (over)write.
Path will have ~ expanded prior to opening. | [
"Write",
"object",
"XML",
"to",
"path",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L494-L502 | train | 27,811 |
jssimporter/python-jss | jss/jssobject.py | JSSContainerObject.as_list_data | def as_list_data(self):
"""Return an Element to be used in a list.
Most lists want an element with tag of list_type, and
subelements of id and name.
Returns:
Element: list representation of object.
"""
element = ElementTree.Element(self.list_type)
id_ = ElementTree.SubElement(element, "id")
id_.text = self.id
name = ElementTree.SubElement(element, "name")
name.text = self.name
return element | python | def as_list_data(self):
"""Return an Element to be used in a list.
Most lists want an element with tag of list_type, and
subelements of id and name.
Returns:
Element: list representation of object.
"""
element = ElementTree.Element(self.list_type)
id_ = ElementTree.SubElement(element, "id")
id_.text = self.id
name = ElementTree.SubElement(element, "name")
name.text = self.name
return element | [
"def",
"as_list_data",
"(",
"self",
")",
":",
"element",
"=",
"ElementTree",
".",
"Element",
"(",
"self",
".",
"list_type",
")",
"id_",
"=",
"ElementTree",
".",
"SubElement",
"(",
"element",
",",
"\"id\"",
")",
"id_",
".",
"text",
"=",
"self",
".",
"id... | Return an Element to be used in a list.
Most lists want an element with tag of list_type, and
subelements of id and name.
Returns:
Element: list representation of object. | [
"Return",
"an",
"Element",
"to",
"be",
"used",
"in",
"a",
"list",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L547-L561 | train | 27,812 |
jssimporter/python-jss | jss/jssobject.py | JSSGroupObject.add_criterion | def add_criterion(self, name, priority, and_or, search_type, value): # pylint: disable=too-many-arguments
"""Add a search criteria object to a smart group.
Args:
name: String Criteria type name (e.g. "Application Title")
priority: Int or Str number priority of criterion.
and_or: Str, either "and" or "or".
search_type: String Criteria search type. (e.g. "is", "is
not", "member of", etc). Construct a SmartGroup with the
criteria of interest in the web interface to determine
what range of values are available.
value: String value to search for/against.
"""
criterion = SearchCriteria(name, priority, and_or, search_type, value)
self.criteria.append(criterion) | python | def add_criterion(self, name, priority, and_or, search_type, value): # pylint: disable=too-many-arguments
"""Add a search criteria object to a smart group.
Args:
name: String Criteria type name (e.g. "Application Title")
priority: Int or Str number priority of criterion.
and_or: Str, either "and" or "or".
search_type: String Criteria search type. (e.g. "is", "is
not", "member of", etc). Construct a SmartGroup with the
criteria of interest in the web interface to determine
what range of values are available.
value: String value to search for/against.
"""
criterion = SearchCriteria(name, priority, and_or, search_type, value)
self.criteria.append(criterion) | [
"def",
"add_criterion",
"(",
"self",
",",
"name",
",",
"priority",
",",
"and_or",
",",
"search_type",
",",
"value",
")",
":",
"# pylint: disable=too-many-arguments",
"criterion",
"=",
"SearchCriteria",
"(",
"name",
",",
"priority",
",",
"and_or",
",",
"search_ty... | Add a search criteria object to a smart group.
Args:
name: String Criteria type name (e.g. "Application Title")
priority: Int or Str number priority of criterion.
and_or: Str, either "and" or "or".
search_type: String Criteria search type. (e.g. "is", "is
not", "member of", etc). Construct a SmartGroup with the
criteria of interest in the web interface to determine
what range of values are available.
value: String value to search for/against. | [
"Add",
"a",
"search",
"criteria",
"object",
"to",
"a",
"smart",
"group",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L567-L581 | train | 27,813 |
jssimporter/python-jss | jss/jssobject.py | JSSGroupObject.is_smart | def is_smart(self, value):
"""Set group is_smart property to value.
Args:
value: Boolean.
"""
self.set_bool("is_smart", value)
if value is True:
if self.find("criteria") is None:
# pylint: disable=attribute-defined-outside-init
self.criteria = ElementTree.SubElement(self, "criteria") | python | def is_smart(self, value):
"""Set group is_smart property to value.
Args:
value: Boolean.
"""
self.set_bool("is_smart", value)
if value is True:
if self.find("criteria") is None:
# pylint: disable=attribute-defined-outside-init
self.criteria = ElementTree.SubElement(self, "criteria") | [
"def",
"is_smart",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"set_bool",
"(",
"\"is_smart\"",
",",
"value",
")",
"if",
"value",
"is",
"True",
":",
"if",
"self",
".",
"find",
"(",
"\"criteria\"",
")",
"is",
"None",
":",
"# pylint: disable=attribut... | Set group is_smart property to value.
Args:
value: Boolean. | [
"Set",
"group",
"is_smart",
"property",
"to",
"value",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L592-L602 | train | 27,814 |
jssimporter/python-jss | jss/jssobject.py | JSSGroupObject.add_device | def add_device(self, device, container):
"""Add a device to a group. Wraps JSSObject.add_object_to_path.
Args:
device: A JSSObject to add (as list data), to this object.
location: Element or a string path argument to find()
"""
# There is a size tag which the JSS manages for us, so we can
# ignore it.
if self.findtext("is_smart") == "false":
self.add_object_to_path(device, container)
else:
# Technically this isn't true. It will strangely accept
# them, and they even show up as members of the group!
raise ValueError("Devices may not be added to smart groups.") | python | def add_device(self, device, container):
"""Add a device to a group. Wraps JSSObject.add_object_to_path.
Args:
device: A JSSObject to add (as list data), to this object.
location: Element or a string path argument to find()
"""
# There is a size tag which the JSS manages for us, so we can
# ignore it.
if self.findtext("is_smart") == "false":
self.add_object_to_path(device, container)
else:
# Technically this isn't true. It will strangely accept
# them, and they even show up as members of the group!
raise ValueError("Devices may not be added to smart groups.") | [
"def",
"add_device",
"(",
"self",
",",
"device",
",",
"container",
")",
":",
"# There is a size tag which the JSS manages for us, so we can",
"# ignore it.",
"if",
"self",
".",
"findtext",
"(",
"\"is_smart\"",
")",
"==",
"\"false\"",
":",
"self",
".",
"add_object_to_p... | Add a device to a group. Wraps JSSObject.add_object_to_path.
Args:
device: A JSSObject to add (as list data), to this object.
location: Element or a string path argument to find() | [
"Add",
"a",
"device",
"to",
"a",
"group",
".",
"Wraps",
"JSSObject",
".",
"add_object_to_path",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobject.py#L605-L619 | train | 27,815 |
def has_member(self, device_object):
    """Return bool whether group has a device as a member.

    Args:
        device_object (Computer or MobileDevice): Device to look for.
            Membership is determined by ID, as names can be shared
            amongst devices.

    Returns:
        bool: True if the device's ID matches a member of this group.

    Raises:
        ValueError: If device_object is neither a computer nor a
            mobile device.
    """
    if device_object.tag == "computer":
        container_search = "computers/computer"
    elif device_object.tag == "mobile_device":
        container_search = "mobile_devices/mobile_device"
    else:
        raise ValueError
    # `any` replaces the original `len([...]) is not 0`, which used an
    # identity comparison against an int literal - it only worked due
    # to CPython small-int caching and is a SyntaxWarning on 3.8+.
    return any(device.findtext("id") == device_object.id
               for device in self.findall(container_search))
def copy(self, filename, id_=-1, pre_callback=None, post_callback=None):
    """Copy a package or script to all repos.

    Determines appropriate location (for file shares) and type based
    on file extension.

    Args:
        filename: String path to the local file to copy.
        id_: Package or Script object ID to target. For use with JDS
            and CDP DP's only. If uploading a package that does not
            have a corresponding object, use id_ of -1, which is the
            default.
        pre_callback: Func to call before each distribution point
            starts copying. Called with the repo's connection dict:
            `pre_callback(repo.connection)`
        post_callback: Func to call after each distribution point
            finishes copying. Called with the repo's connection dict:
            `post_callback(repo.connection)`
    """
    for repo in self._children:
        # Packages go to the package location; all other file types
        # are treated as scripts.
        copy_method = (
            repo.copy_pkg if is_package(filename) else repo.copy_script)

        if pre_callback:
            pre_callback(repo.connection)

        copy_method(filename, id_)

        if post_callback:
            post_callback(repo.connection)
def copy_pkg(self, filename, id_=-1):
    """Copy a pkg, dmg, or zip to all repositories.

    Args:
        filename: String path to the local file to copy.
        id_: Integer ID you wish to associate package with for a JDS
            or CDP only. Default is -1, which is used for creating
            a new package object in the database.
    """
    # Fan the copy out to every configured repository.
    for child_repo in self._children:
        child_repo.copy_pkg(filename, id_)
def copy_script(self, filename, id_=-1):
    """Copy a script to all repositories.

    Takes into account whether a JSS has been migrated. See the
    individual DistributionPoint types for more information.

    Args:
        filename: String path to the local file to copy.
        id_: Integer ID you wish to associate script with for a JDS
            or CDP only. Default is -1, which is used for creating
            a new script object in the database.
    """
    # Each repository decides where scripts go (filesystem vs. the
    # database, for migrated JSSes).
    for child_repo in self._children:
        child_repo.copy_script(filename, id_)
def delete(self, filename):
    """Delete a file from all repositories which support it.

    Individual repositories will determine correct location to
    delete from (Scripts vs. Packages).

    This will not remove the corresponding Package or Script object
    from the JSS's database!

    Args:
        filename: The filename you wish to delete (do not include a
            path).
    """
    # Only repositories that implement delete() participate; others
    # are silently skipped.
    deletable = (repo for repo in self._children if hasattr(repo, "delete"))
    for repo in deletable:
        repo.delete(filename)
def umount(self, forced=True):
    """Umount all mountable distribution points.

    Args:
        forced: Bool whether to force the unmount. Defaults to True.
    """
    # Only children that implement umount() (mountable file shares)
    # are asked to unmount.
    mountable = (child for child in self._children
                 if hasattr(child, "umount"))
    for child in mountable:
        child.umount(forced)
def exists(self, filename):
    """Report whether a file exists on all distribution points.

    Determines file type by extension.

    Args:
        filename: Filename you wish to check. (No path! e.g.:
            "AdobeFlashPlayer-14.0.0.176.pkg")

    Returns:
        Boolean
    """
    # Query every repository (deliberately no short-circuit, matching
    # the original's behavior of checking them all) and require all
    # of them to have the file.
    results = [repo.exists(filename) for repo in self._children]
    return all(results)
def _get_user_input(prompt, key_name, parent, input_func=raw_input):
    """Prompt the user for a value, and assign it to key_name.

    Appends a plist <key>/<value> pair to parent reflecting the
    user's answer.

    Args:
        prompt: String shown to the user when asking for input.
        key_name: Name of the plist <key> to create under parent.
        parent: Element the key/value pair is appended to.
        input_func: Callable used to collect the value (e.g.
            getpass.getpass). Defaults to raw_input.

    Returns:
        The value produced by input_func.
    """
    val = input_func(prompt)
    ElementTree.SubElement(parent, "key").text = key_name
    # Plists encode booleans as empty <true/> / <false/> elements;
    # everything else is stored as a <string>.
    if isinstance(val, bool):
        ElementTree.SubElement(parent, "true" if val else "false")
    else:
        ElementTree.SubElement(parent, "string").text = val
    return val
def _handle_dist_server(ds_type, repos_array):
    """Ask user for whether to use a type of dist server.

    If the user answers yes, a repo <dict> with a "type" entry is
    appended to repos_array.

    Args:
        ds_type: Either "JDS" or "CDP".
        repos_array: plist <array> Element to append to.

    Raises:
        ValueError: If ds_type is not "JDS" or "CDP".
    """
    if ds_type not in ("JDS", "CDP"):
        raise ValueError("Must be JDS or CDP")
    use_it = loop_until_valid_response(
        "Does your JSS use a %s? (Y|N): " % ds_type)
    if use_it:
        entry = ElementTree.SubElement(repos_array, "dict")
        ElementTree.SubElement(entry, "key").text = "type"
        ElementTree.SubElement(entry, "string").text = ds_type
def parse_plist(self, preferences_file):
    """Try to reset preferences from preference_file.

    Args:
        preferences_file: String path to a python-jss preferences
            plist. "~" is expanded.

    Raises:
        JSSPrefsMissingKeyError: If jss_user, jss_pass, or jss_url is
            missing from the plist.
    """
    preferences_file = os.path.expanduser(preferences_file)
    try:
        # Prefer FoundationPlist, which copes with binary plists.
        prefs = FoundationPlist.readPlist(preferences_file)
    except NameError:
        # FoundationPlist isn't importable here; plistlib can only
        # read XML plists.
        try:
            prefs = plistlib.readPlist(preferences_file)
        except ExpatError:
            # Probably a binary plist; on OSX, convert it in place
            # with plutil and retry.
            if is_osx():
                subprocess.call(
                    ["plutil", "-convert", "xml1", preferences_file])
                prefs = plistlib.readPlist(preferences_file)

    self.preferences_file = preferences_file
    self.user = prefs.get("jss_user")
    self.password = prefs.get("jss_pass")
    self.url = prefs.get("jss_url")
    if not all([self.user, self.password, self.url]):
        raise JSSPrefsMissingKeyError("Please provide all required "
                                      "preferences!")

    # Optional file repository array. Defaults to empty list.
    self.repos = [dict(repo) for repo in prefs.get("repos", [])]

    self.verify = prefs.get("verify", True)
    self.suppress_warnings = prefs.get("suppress_warnings", True)
def configure(self):
    """Prompt user for config and write to plist.

    Uses preferences_file argument from JSSPrefs.__init__ as path
    to write.
    """
    root = ElementTree.Element("dict")
    print ("It seems like you do not have a preferences file configured. "
           "Please answer the following questions to generate a plist at "
           "%s for use with python-jss." % self.preferences_file)

    self.url = _get_user_input(
        "The complete URL to your JSS, with port (e.g. "
        "'https://mycasperserver.org:8443')\nURL: ", "jss_url", root)

    self.user = _get_user_input("API Username: ", "jss_user", root)

    # getpass keeps the password from being echoed to the terminal.
    self.password = _get_user_input("API User's Password: ", "jss_pass",
                                    root, getpass.getpass)

    verify_prompt = ("Do you want to verify that traffic is encrypted by "
                     "a certificate that you trust?: (Y|N) ")
    self.verify = _get_user_input(verify_prompt, "verify", root,
                                  loop_until_valid_response)

    self._handle_repos(root)

    self._write_plist(root)

    # Parenthesized so this is valid as a statement on Python 2 and a
    # function call on Python 3; the original bare `print "..."` was
    # Python-2-only syntax.
    print("Preferences created.\n")
def _handle_repos(self, root):
    """Handle repo configuration.

    Queries the JSS for file share distribution points, prompts for
    each share's R/W password, and asks whether a JDS or CDP is in
    use. Results are appended to root as a plist "repos" array.

    Args:
        root: plist <dict> Element being built.
    """
    ElementTree.SubElement(root, "key").text = "repos"
    repos_array = ElementTree.SubElement(root, "array")

    # Make a temporary jss object to try to pull repo information.
    jss_server = JSS(url=self.url, user=self.user, password=self.password,
                     ssl_verify=self.verify, suppress_warnings=True)
    # Parenthesized print: valid on both Python 2 and 3 (the original
    # bare `print "..."` statement was Python-2-only syntax).
    print("Fetching distribution point info...")
    try:
        dpts = jss_server.DistributionPoint()
    except JSSGetError:
        print ("Fetching distribution point info failed. If you want to "
               "configure distribution points, ensure that your API user "
               "has read permissions for distribution points, and that "
               "the URL, username, and password are correct.")
        dpts = None

    if dpts:
        print ("There are file share distribution points configured on "
               "your JSS. Most of the configuration can be automated "
               "from the information on the JSS, with the exception of "
               "the password for the R/W user.\n")
        # Everything except the R/W password can be read back from the
        # JSS later, so only name and password are stored.
        for dpt in dpts:
            repo_dict = ElementTree.SubElement(repos_array, "dict")
            ElementTree.SubElement(repo_dict, "key").text = "name"
            ElementTree.SubElement(repo_dict, "string").text = (
                dpt.get("name"))
            ElementTree.SubElement(repo_dict, "key").text = "password"
            ElementTree.SubElement(repo_dict, "string").text = (
                getpass.getpass(
                    "Please enter the R/W user's password for distribution "
                    "point: %s: " % dpt.get("name", "<NO NAME CONFIGURED>")))

    _handle_dist_server("JDS", repos_array)
    _handle_dist_server("CDP", repos_array)
def _write_plist(self, root):
    """Write plist file based on our generated tree.

    Writes to self.preferences_file.

    Args:
        root: Root Element of the generated plist data.
    """
    # prettify the XML
    indent_xml(root)
    tree = ElementTree.ElementTree(root)
    with open(self.preferences_file, "w") as prefs_file:
        # ElementTree can't emit the plist doctype, so the header and
        # footer are written by hand around its output.
        header = (
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            "<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" "
            "\"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n"
            "<plist version=\"1.0\">\n")
        prefs_file.write(header)
        tree.write(prefs_file, xml_declaration=False, encoding="utf-8")
        prefs_file.write("</plist>")
def update(self):
    """Request an updated set of data from casper.jxml.

    POSTs the stored auth data to self.url and replaces this
    element's children with the children of the XML response.
    """
    response = self.jss.session.post(self.url, data=self.auth)
    response_xml = ElementTree.fromstring(response.text.encode("utf_8"))

    # Remove previous data, if any, and then add in response's XML.
    self.clear()
    # Iterate the Element directly: getchildren() was deprecated and
    # removed entirely in Python 3.9.
    for child in list(response_xml):
        self.append(child)
def mount_share_at_path(share_path, mount_path):
    """Mounts a share at the specified path

    Args:
        share_path: String URL with all auth info to connect to file share.
        mount_path: Path to mount share on.

    Returns:
        The mount point or raises an error
    """
    share_url = CFURLCreateWithString(None, share_path, None)
    mountpoint_url = CFURLCreateWithString(None, mount_path, None)
    # Set UI to reduced interaction.
    open_options = {NetFS.kNAUIOptionKey: NetFS.kNAUIOptionNoUI}
    # Allow mounting sub-directories of root shares, and mount the
    # share directly at (not under) mount_path.
    mount_options = {
        NetFS.kNetFSAllowSubMountsKey: True,
        NetFS.kNetFSMountAtMountDirKey: True,
    }
    # Mount!
    result, output = NetFS.NetFSMountURLSync(
        share_url, mountpoint_url, None, None, open_options,
        mount_options, None)
    # Check if it worked.
    if result != 0:
        raise Exception('Error mounting url "%s" at path "%s": %s' %
                        (share_path, mount_path, output))
    # Return the mountpath.
    return str(output[0])
def auto_mounter(original):
    """Decorator for automatically mounting, if needed.

    Args:
        original: Method (taking self as its first argument) that
            requires the repository to be mounted before running.

    Returns:
        Wrapped function that calls self.mount() when
        self.is_mounted() is False, then delegates to original.
    """
    # Local import keeps the fix self-contained; functools is stdlib.
    from functools import wraps

    # wraps preserves the decorated function's __name__/__doc__,
    # which the original decorator clobbered with "mounter".
    @wraps(original)
    def mounter(*args):
        """If not mounted, mount."""
        self = args[0]
        if not self.is_mounted():
            self.mount()
        return original(*args)

    return mounter
jssimporter/python-jss | jss/distribution_point.py | FileRepository.copy_pkg | def copy_pkg(self, filename, _):
"""Copy a package to the repo's Package subdirectory.
Args:
filename: Path for file to copy.
_: Ignored. Used for compatibility with JDS repos.
"""
basename = os.path.basename(filename)
self._copy(filename, os.path.join(self.connection["mount_point"],
"Packages", basename)) | python | def copy_pkg(self, filename, _):
"""Copy a package to the repo's Package subdirectory.
Args:
filename: Path for file to copy.
_: Ignored. Used for compatibility with JDS repos.
"""
basename = os.path.basename(filename)
self._copy(filename, os.path.join(self.connection["mount_point"],
"Packages", basename)) | [
"def",
"copy_pkg",
"(",
"self",
",",
"filename",
",",
"_",
")",
":",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"self",
".",
"_copy",
"(",
"filename",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"connectio... | Copy a package to the repo's Package subdirectory.
Args:
filename: Path for file to copy.
_: Ignored. Used for compatibility with JDS repos. | [
"Copy",
"a",
"package",
"to",
"the",
"repo",
"s",
"Package",
"subdirectory",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L107-L116 | train | 27,832 |
jssimporter/python-jss | jss/distribution_point.py | FileRepository.copy_script | def copy_script(self, filename, id_=-1):
"""Copy a script to the repo's Script subdirectory.
Scripts are copied as files to a path, or, on a "migrated" JSS,
are POSTed to the JSS (pass an id if you wish to associate
the script with an existing Script object).
Args:
filename: Path for file to copy.
id_: Int ID, used _only_ for migrated repos. Default is -1,
which creates a new Script.
"""
if ("jss" in self.connection.keys() and
self.connection["jss"].jss_migrated):
self._copy_script_migrated(filename, id_, SCRIPT_FILE_TYPE)
else:
basename = os.path.basename(filename)
self._copy(filename, os.path.join(self.connection["mount_point"],
"Scripts", basename)) | python | def copy_script(self, filename, id_=-1):
"""Copy a script to the repo's Script subdirectory.
Scripts are copied as files to a path, or, on a "migrated" JSS,
are POSTed to the JSS (pass an id if you wish to associate
the script with an existing Script object).
Args:
filename: Path for file to copy.
id_: Int ID, used _only_ for migrated repos. Default is -1,
which creates a new Script.
"""
if ("jss" in self.connection.keys() and
self.connection["jss"].jss_migrated):
self._copy_script_migrated(filename, id_, SCRIPT_FILE_TYPE)
else:
basename = os.path.basename(filename)
self._copy(filename, os.path.join(self.connection["mount_point"],
"Scripts", basename)) | [
"def",
"copy_script",
"(",
"self",
",",
"filename",
",",
"id_",
"=",
"-",
"1",
")",
":",
"if",
"(",
"\"jss\"",
"in",
"self",
".",
"connection",
".",
"keys",
"(",
")",
"and",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"jss_migrated",
")",
... | Copy a script to the repo's Script subdirectory.
Scripts are copied as files to a path, or, on a "migrated" JSS,
are POSTed to the JSS (pass an id if you wish to associate
the script with an existing Script object).
Args:
filename: Path for file to copy.
id_: Int ID, used _only_ for migrated repos. Default is -1,
which creates a new Script. | [
"Copy",
"a",
"script",
"to",
"the",
"repo",
"s",
"Script",
"subdirectory",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L118-L136 | train | 27,833 |
jssimporter/python-jss | jss/distribution_point.py | FileRepository._copy_script_migrated | def _copy_script_migrated(self, filename, id_=-1,
file_type=SCRIPT_FILE_TYPE):
"""Upload a script to a migrated JSS's database.
On a "migrated" JSS, scripts are POSTed to the JSS. Pass an id
if you wish to associate the script with an existing Script
object, otherwise, it will create a new Script object.
Args:
filename: Path to script file.
id_: Int ID of Script object to associate this file with.
Default is -1, which creates a new Script.
"""
basefname = os.path.basename(filename)
resource = open(filename, "rb")
headers = {"DESTINATION": "1", "OBJECT_ID": str(id_), "FILE_TYPE":
file_type, "FILE_NAME": basefname}
response = self.connection["jss"].session.post(
url="%s/%s" % (self.connection["jss"].base_url, "dbfileupload"),
data=resource, headers=headers)
return response | python | def _copy_script_migrated(self, filename, id_=-1,
file_type=SCRIPT_FILE_TYPE):
"""Upload a script to a migrated JSS's database.
On a "migrated" JSS, scripts are POSTed to the JSS. Pass an id
if you wish to associate the script with an existing Script
object, otherwise, it will create a new Script object.
Args:
filename: Path to script file.
id_: Int ID of Script object to associate this file with.
Default is -1, which creates a new Script.
"""
basefname = os.path.basename(filename)
resource = open(filename, "rb")
headers = {"DESTINATION": "1", "OBJECT_ID": str(id_), "FILE_TYPE":
file_type, "FILE_NAME": basefname}
response = self.connection["jss"].session.post(
url="%s/%s" % (self.connection["jss"].base_url, "dbfileupload"),
data=resource, headers=headers)
return response | [
"def",
"_copy_script_migrated",
"(",
"self",
",",
"filename",
",",
"id_",
"=",
"-",
"1",
",",
"file_type",
"=",
"SCRIPT_FILE_TYPE",
")",
":",
"basefname",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"resource",
"=",
"open",
"(",
"filen... | Upload a script to a migrated JSS's database.
On a "migrated" JSS, scripts are POSTed to the JSS. Pass an id
if you wish to associate the script with an existing Script
object, otherwise, it will create a new Script object.
Args:
filename: Path to script file.
id_: Int ID of Script object to associate this file with.
Default is -1, which creates a new Script. | [
"Upload",
"a",
"script",
"to",
"a",
"migrated",
"JSS",
"s",
"database",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L138-L159 | train | 27,834 |
jssimporter/python-jss | jss/distribution_point.py | FileRepository.delete | def delete(self, filename):
"""Delete a file from the repository.
This method will not delete a script from a migrated JSS.
Please remove migrated scripts with jss.Script.delete.
Args:
filename: String filename only (i.e. no path) of file to
delete. Will handle deleting scripts vs. packages
automatically.
"""
folder = "Packages" if is_package(filename) else "Scripts"
path = os.path.join(self.connection["mount_point"], folder, filename)
if os.path.isdir(path):
shutil.rmtree(path)
elif os.path.isfile(path):
os.remove(path) | python | def delete(self, filename):
"""Delete a file from the repository.
This method will not delete a script from a migrated JSS.
Please remove migrated scripts with jss.Script.delete.
Args:
filename: String filename only (i.e. no path) of file to
delete. Will handle deleting scripts vs. packages
automatically.
"""
folder = "Packages" if is_package(filename) else "Scripts"
path = os.path.join(self.connection["mount_point"], folder, filename)
if os.path.isdir(path):
shutil.rmtree(path)
elif os.path.isfile(path):
os.remove(path) | [
"def",
"delete",
"(",
"self",
",",
"filename",
")",
":",
"folder",
"=",
"\"Packages\"",
"if",
"is_package",
"(",
"filename",
")",
"else",
"\"Scripts\"",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"connection",
"[",
"\"mount_point\"",
... | Delete a file from the repository.
This method will not delete a script from a migrated JSS.
Please remove migrated scripts with jss.Script.delete.
Args:
filename: String filename only (i.e. no path) of file to
delete. Will handle deleting scripts vs. packages
automatically. | [
"Delete",
"a",
"file",
"from",
"the",
"repository",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L177-L193 | train | 27,835 |
jssimporter/python-jss | jss/distribution_point.py | FileRepository.exists | def exists(self, filename):
"""Report whether a file exists on the distribution point.
Determines file type by extension.
Args:
filename: Filename you wish to check. (No path! e.g.:
"AdobeFlashPlayer-14.0.0.176.pkg")
"""
if is_package(filename):
filepath = os.path.join(self.connection["mount_point"],
"Packages", filename)
else:
filepath = os.path.join(self.connection["mount_point"],
"Scripts", filename)
return os.path.exists(filepath) | python | def exists(self, filename):
"""Report whether a file exists on the distribution point.
Determines file type by extension.
Args:
filename: Filename you wish to check. (No path! e.g.:
"AdobeFlashPlayer-14.0.0.176.pkg")
"""
if is_package(filename):
filepath = os.path.join(self.connection["mount_point"],
"Packages", filename)
else:
filepath = os.path.join(self.connection["mount_point"],
"Scripts", filename)
return os.path.exists(filepath) | [
"def",
"exists",
"(",
"self",
",",
"filename",
")",
":",
"if",
"is_package",
"(",
"filename",
")",
":",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"connection",
"[",
"\"mount_point\"",
"]",
",",
"\"Packages\"",
",",
"filename",
... | Report whether a file exists on the distribution point.
Determines file type by extension.
Args:
filename: Filename you wish to check. (No path! e.g.:
"AdobeFlashPlayer-14.0.0.176.pkg") | [
"Report",
"whether",
"a",
"file",
"exists",
"on",
"the",
"distribution",
"point",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L195-L210 | train | 27,836 |
jssimporter/python-jss | jss/distribution_point.py | MountedRepository.mount | def mount(self):
"""Mount the repository."""
if not self.is_mounted():
# OS X mounting is handled automagically in /Volumes:
# DO NOT mkdir there!
# For Linux, ensure the mountpoint exists.
if not is_osx():
if not os.path.exists(self.connection["mount_point"]):
os.mkdir(self.connection["mount_point"])
self._mount() | python | def mount(self):
"""Mount the repository."""
if not self.is_mounted():
# OS X mounting is handled automagically in /Volumes:
# DO NOT mkdir there!
# For Linux, ensure the mountpoint exists.
if not is_osx():
if not os.path.exists(self.connection["mount_point"]):
os.mkdir(self.connection["mount_point"])
self._mount() | [
"def",
"mount",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_mounted",
"(",
")",
":",
"# OS X mounting is handled automagically in /Volumes:",
"# DO NOT mkdir there!",
"# For Linux, ensure the mountpoint exists.",
"if",
"not",
"is_osx",
"(",
")",
":",
"if",
"n... | Mount the repository. | [
"Mount",
"the",
"repository",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L250-L259 | train | 27,837 |
jssimporter/python-jss | jss/distribution_point.py | MountedRepository.umount | def umount(self, forced=True):
"""Try to unmount our mount point.
Defaults to using forced method. If OS is Linux, it will not
delete the mount point.
Args:
forced: Bool whether to force the unmount. Default is True.
"""
if self.is_mounted():
if is_osx():
cmd = ["/usr/sbin/diskutil", "unmount",
self.connection["mount_point"]]
if forced:
cmd.insert(2, "force")
subprocess.check_call(cmd)
else:
cmd = ["umount", self.connection["mount_point"]]
if forced:
cmd.insert(1, "-f")
subprocess.check_call(cmd) | python | def umount(self, forced=True):
"""Try to unmount our mount point.
Defaults to using forced method. If OS is Linux, it will not
delete the mount point.
Args:
forced: Bool whether to force the unmount. Default is True.
"""
if self.is_mounted():
if is_osx():
cmd = ["/usr/sbin/diskutil", "unmount",
self.connection["mount_point"]]
if forced:
cmd.insert(2, "force")
subprocess.check_call(cmd)
else:
cmd = ["umount", self.connection["mount_point"]]
if forced:
cmd.insert(1, "-f")
subprocess.check_call(cmd) | [
"def",
"umount",
"(",
"self",
",",
"forced",
"=",
"True",
")",
":",
"if",
"self",
".",
"is_mounted",
"(",
")",
":",
"if",
"is_osx",
"(",
")",
":",
"cmd",
"=",
"[",
"\"/usr/sbin/diskutil\"",
",",
"\"unmount\"",
",",
"self",
".",
"connection",
"[",
"\"... | Try to unmount our mount point.
Defaults to using forced method. If OS is Linux, it will not
delete the mount point.
Args:
forced: Bool whether to force the unmount. Default is True. | [
"Try",
"to",
"unmount",
"our",
"mount",
"point",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L265-L285 | train | 27,838 |
jssimporter/python-jss | jss/distribution_point.py | MountedRepository.is_mounted | def is_mounted(self):
"""Test for whether a mount point is mounted.
If it is currently mounted, determine the path where it's
mounted and update the connection's mount_point accordingly.
"""
mount_check = subprocess.check_output("mount").splitlines()
# The mount command returns lines like this on OS X...
# //username@pretendco.com/JSS%20REPO on /Volumes/JSS REPO
# (afpfs, nodev, nosuid, mounted by local_me)
# and like this on Linux...
# //pretendco.com/jamf on /mnt/jamf type cifs (rw,relatime,
# <options>...)
valid_mount_strings = self._get_valid_mount_strings()
was_mounted = False
if is_osx():
mount_string_regex = re.compile(r"\(([\w]*),*.*\)$")
mount_point_regex = re.compile(r"on ([\w/ -]*) \(.*$")
elif is_linux():
mount_string_regex = re.compile(r"type ([\w]*) \(.*\)$")
mount_point_regex = re.compile(r"on ([\w/ -]*) type .*$")
else:
raise JSSError("Unsupported OS.")
for mount in mount_check:
fs_match = re.search(mount_string_regex, mount)
fs_type = fs_match.group(1) if fs_match else None
# Automounts, non-network shares, and network shares
# all have a slightly different format, so it's easiest to
# just split.
mount_string = mount.split(" on ")[0]
# Does the mount_string match one of our valid_mount_strings?
if [mstring for mstring in valid_mount_strings if
mstring in mount_string] and self.fs_type == fs_type:
# Get the mount point string between from the end back to
# the last "on", but before the options (wrapped in
# parenthesis). Considers alphanumerics, / , _ , - and a
# blank space as valid, but no crazy chars.
match = re.search(mount_point_regex, mount)
mount_point = match.group(1) if match else None
was_mounted = True
# Reset the connection's mount point to the discovered
# value.
if mount_point:
self.connection["mount_point"] = mount_point
if self.connection["jss"].verbose:
print ("%s is already mounted at %s.\n" %
(self.connection["url"], mount_point))
# We found the share, no need to continue.
break
if not was_mounted:
# If the share is not mounted, check for another share
# mounted to the same path and if found, incremement the
# name to avoid conflicts.
count = 1
while os.path.ismount(self.connection["mount_point"]):
self.connection["mount_point"] = (
"%s-%s" % (self.connection["mount_point"], count))
count += 1
# Do an inexpensive double check...
return os.path.ismount(self.connection["mount_point"]) | python | def is_mounted(self):
"""Test for whether a mount point is mounted.
If it is currently mounted, determine the path where it's
mounted and update the connection's mount_point accordingly.
"""
mount_check = subprocess.check_output("mount").splitlines()
# The mount command returns lines like this on OS X...
# //username@pretendco.com/JSS%20REPO on /Volumes/JSS REPO
# (afpfs, nodev, nosuid, mounted by local_me)
# and like this on Linux...
# //pretendco.com/jamf on /mnt/jamf type cifs (rw,relatime,
# <options>...)
valid_mount_strings = self._get_valid_mount_strings()
was_mounted = False
if is_osx():
mount_string_regex = re.compile(r"\(([\w]*),*.*\)$")
mount_point_regex = re.compile(r"on ([\w/ -]*) \(.*$")
elif is_linux():
mount_string_regex = re.compile(r"type ([\w]*) \(.*\)$")
mount_point_regex = re.compile(r"on ([\w/ -]*) type .*$")
else:
raise JSSError("Unsupported OS.")
for mount in mount_check:
fs_match = re.search(mount_string_regex, mount)
fs_type = fs_match.group(1) if fs_match else None
# Automounts, non-network shares, and network shares
# all have a slightly different format, so it's easiest to
# just split.
mount_string = mount.split(" on ")[0]
# Does the mount_string match one of our valid_mount_strings?
if [mstring for mstring in valid_mount_strings if
mstring in mount_string] and self.fs_type == fs_type:
# Get the mount point string between from the end back to
# the last "on", but before the options (wrapped in
# parenthesis). Considers alphanumerics, / , _ , - and a
# blank space as valid, but no crazy chars.
match = re.search(mount_point_regex, mount)
mount_point = match.group(1) if match else None
was_mounted = True
# Reset the connection's mount point to the discovered
# value.
if mount_point:
self.connection["mount_point"] = mount_point
if self.connection["jss"].verbose:
print ("%s is already mounted at %s.\n" %
(self.connection["url"], mount_point))
# We found the share, no need to continue.
break
if not was_mounted:
# If the share is not mounted, check for another share
# mounted to the same path and if found, incremement the
# name to avoid conflicts.
count = 1
while os.path.ismount(self.connection["mount_point"]):
self.connection["mount_point"] = (
"%s-%s" % (self.connection["mount_point"], count))
count += 1
# Do an inexpensive double check...
return os.path.ismount(self.connection["mount_point"]) | [
"def",
"is_mounted",
"(",
"self",
")",
":",
"mount_check",
"=",
"subprocess",
".",
"check_output",
"(",
"\"mount\"",
")",
".",
"splitlines",
"(",
")",
"# The mount command returns lines like this on OS X...",
"# //username@pretendco.com/JSS%20REPO on /Volumes/JSS REPO",
"# (a... | Test for whether a mount point is mounted.
If it is currently mounted, determine the path where it's
mounted and update the connection's mount_point accordingly. | [
"Test",
"for",
"whether",
"a",
"mount",
"point",
"is",
"mounted",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L287-L351 | train | 27,839 |
jssimporter/python-jss | jss/distribution_point.py | MountedRepository._get_valid_mount_strings | def _get_valid_mount_strings(self):
"""Return a tuple of potential mount strings.
Casper Admin seems to mount in a number of ways:
- hostname/share
- fqdn/share
Plus, there's the possibility of:
- IPAddress/share
Then factor in the possibility that the port is included too!
This gives us a total of up to six valid addresses for mount
to report.
"""
results = set()
join = os.path.join
url = self.connection["url"]
share_name = urllib.quote(self.connection["share_name"],
safe="~()*!.'")
port = self.connection["port"]
# URL from python-jss form:
results.add(join(url, share_name))
results.add(join("%s:%s" % (url, port), share_name))
# IP Address form:
# socket.gethostbyname() will return an IP address whether
# an IP address, FQDN, or .local name is provided.
ip_address = socket.gethostbyname(url)
results.add(join(ip_address, share_name))
results.add(join("%s:%s" % (ip_address, port), share_name))
# Domain name only form:
domain_name = url.split(".")[0]
results.add(join(domain_name, share_name))
results.add(join("%s:%s" % (domain_name, port), share_name))
# FQDN form using getfqdn:
# socket.getfqdn() could just resolve back to the ip
# or be the same as the initial URL so only add it if it's
# different than both.
fqdn = socket.getfqdn(ip_address)
results.add(join(fqdn, share_name))
results.add(join("%s:%s" % (fqdn, port), share_name))
return tuple(results) | python | def _get_valid_mount_strings(self):
"""Return a tuple of potential mount strings.
Casper Admin seems to mount in a number of ways:
- hostname/share
- fqdn/share
Plus, there's the possibility of:
- IPAddress/share
Then factor in the possibility that the port is included too!
This gives us a total of up to six valid addresses for mount
to report.
"""
results = set()
join = os.path.join
url = self.connection["url"]
share_name = urllib.quote(self.connection["share_name"],
safe="~()*!.'")
port = self.connection["port"]
# URL from python-jss form:
results.add(join(url, share_name))
results.add(join("%s:%s" % (url, port), share_name))
# IP Address form:
# socket.gethostbyname() will return an IP address whether
# an IP address, FQDN, or .local name is provided.
ip_address = socket.gethostbyname(url)
results.add(join(ip_address, share_name))
results.add(join("%s:%s" % (ip_address, port), share_name))
# Domain name only form:
domain_name = url.split(".")[0]
results.add(join(domain_name, share_name))
results.add(join("%s:%s" % (domain_name, port), share_name))
# FQDN form using getfqdn:
# socket.getfqdn() could just resolve back to the ip
# or be the same as the initial URL so only add it if it's
# different than both.
fqdn = socket.getfqdn(ip_address)
results.add(join(fqdn, share_name))
results.add(join("%s:%s" % (fqdn, port), share_name))
return tuple(results) | [
"def",
"_get_valid_mount_strings",
"(",
"self",
")",
":",
"results",
"=",
"set",
"(",
")",
"join",
"=",
"os",
".",
"path",
".",
"join",
"url",
"=",
"self",
".",
"connection",
"[",
"\"url\"",
"]",
"share_name",
"=",
"urllib",
".",
"quote",
"(",
"self",
... | Return a tuple of potential mount strings.
Casper Admin seems to mount in a number of ways:
- hostname/share
- fqdn/share
Plus, there's the possibility of:
- IPAddress/share
Then factor in the possibility that the port is included too!
This gives us a total of up to six valid addresses for mount
to report. | [
"Return",
"a",
"tuple",
"of",
"potential",
"mount",
"strings",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L353-L396 | train | 27,840 |
jssimporter/python-jss | jss/distribution_point.py | AFPDistributionPoint._mount | def _mount(self):
"""Mount based on which OS is running."""
# mount_afp "afp://scraig:<password>@address/share" <mnt_point>
if is_osx():
if self.connection["jss"].verbose:
print self.connection["mount_url"]
if mount_share:
self.connection["mount_point"] = mount_share(
self.connection["mount_url"])
else:
# Non-Apple OS X python:
args = ["mount", "-t", self.protocol,
self.connection["mount_url"],
self.connection["mount_point"]]
if self.connection["jss"].verbose:
print " ".join(args)
subprocess.check_call(args)
elif is_linux():
args = ["mount_afp", "-t", self.protocol,
self.connection["mount_url"],
self.connection["mount_point"]]
if self.connection["jss"].verbose:
print " ".join(args)
subprocess.check_call(args)
else:
raise JSSError("Unsupported OS.") | python | def _mount(self):
"""Mount based on which OS is running."""
# mount_afp "afp://scraig:<password>@address/share" <mnt_point>
if is_osx():
if self.connection["jss"].verbose:
print self.connection["mount_url"]
if mount_share:
self.connection["mount_point"] = mount_share(
self.connection["mount_url"])
else:
# Non-Apple OS X python:
args = ["mount", "-t", self.protocol,
self.connection["mount_url"],
self.connection["mount_point"]]
if self.connection["jss"].verbose:
print " ".join(args)
subprocess.check_call(args)
elif is_linux():
args = ["mount_afp", "-t", self.protocol,
self.connection["mount_url"],
self.connection["mount_point"]]
if self.connection["jss"].verbose:
print " ".join(args)
subprocess.check_call(args)
else:
raise JSSError("Unsupported OS.") | [
"def",
"_mount",
"(",
"self",
")",
":",
"# mount_afp \"afp://scraig:<password>@address/share\" <mnt_point>",
"if",
"is_osx",
"(",
")",
":",
"if",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"verbose",
":",
"print",
"self",
".",
"connection",
"[",
"\"mou... | Mount based on which OS is running. | [
"Mount",
"based",
"on",
"which",
"OS",
"is",
"running",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L498-L523 | train | 27,841 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer._build_url | def _build_url(self):
"""Build the URL for POSTing files."""
self.connection["upload_url"] = (
"%s/%s" % (self.connection["jss"].base_url, "dbfileupload"))
self.connection["delete_url"] = (
"%s/%s" % (self.connection["jss"].base_url,
"casperAdminSave.jxml")) | python | def _build_url(self):
"""Build the URL for POSTing files."""
self.connection["upload_url"] = (
"%s/%s" % (self.connection["jss"].base_url, "dbfileupload"))
self.connection["delete_url"] = (
"%s/%s" % (self.connection["jss"].base_url,
"casperAdminSave.jxml")) | [
"def",
"_build_url",
"(",
"self",
")",
":",
"self",
".",
"connection",
"[",
"\"upload_url\"",
"]",
"=",
"(",
"\"%s/%s\"",
"%",
"(",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"base_url",
",",
"\"dbfileupload\"",
")",
")",
"self",
".",
"connecti... | Build the URL for POSTing files. | [
"Build",
"the",
"URL",
"for",
"POSTing",
"files",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L642-L648 | train | 27,842 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer.copy_pkg | def copy_pkg(self, filename, id_=-1):
"""Copy a package to the distribution server.
Bundle-style packages must be zipped prior to copying.
Args:
filename: Full path to file to upload.
id_: ID of Package object to associate with, or -1 for new
packages (default).
"""
self._copy(filename, id_=id_, file_type=PKG_FILE_TYPE) | python | def copy_pkg(self, filename, id_=-1):
"""Copy a package to the distribution server.
Bundle-style packages must be zipped prior to copying.
Args:
filename: Full path to file to upload.
id_: ID of Package object to associate with, or -1 for new
packages (default).
"""
self._copy(filename, id_=id_, file_type=PKG_FILE_TYPE) | [
"def",
"copy_pkg",
"(",
"self",
",",
"filename",
",",
"id_",
"=",
"-",
"1",
")",
":",
"self",
".",
"_copy",
"(",
"filename",
",",
"id_",
"=",
"id_",
",",
"file_type",
"=",
"PKG_FILE_TYPE",
")"
] | Copy a package to the distribution server.
Bundle-style packages must be zipped prior to copying.
Args:
filename: Full path to file to upload.
id_: ID of Package object to associate with, or -1 for new
packages (default). | [
"Copy",
"a",
"package",
"to",
"the",
"distribution",
"server",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L650-L660 | train | 27,843 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer.copy_script | def copy_script(self, filename, id_=-1):
"""Copy a script to the distribution server.
Args:
filename: Full path to file to upload.
id_: ID of Script object to associate with, or -1 for new
Script (default).
"""
self._copy(filename, id_=id_, file_type=SCRIPT_FILE_TYPE) | python | def copy_script(self, filename, id_=-1):
"""Copy a script to the distribution server.
Args:
filename: Full path to file to upload.
id_: ID of Script object to associate with, or -1 for new
Script (default).
"""
self._copy(filename, id_=id_, file_type=SCRIPT_FILE_TYPE) | [
"def",
"copy_script",
"(",
"self",
",",
"filename",
",",
"id_",
"=",
"-",
"1",
")",
":",
"self",
".",
"_copy",
"(",
"filename",
",",
"id_",
"=",
"id_",
",",
"file_type",
"=",
"SCRIPT_FILE_TYPE",
")"
] | Copy a script to the distribution server.
Args:
filename: Full path to file to upload.
id_: ID of Script object to associate with, or -1 for new
Script (default). | [
"Copy",
"a",
"script",
"to",
"the",
"distribution",
"server",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L662-L670 | train | 27,844 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer._copy | def _copy(self, filename, id_=-1, file_type=0):
"""Upload a file to the distribution server.
Directories/bundle-style packages must be zipped prior to
copying.
"""
if os.path.isdir(filename):
raise JSSUnsupportedFileType(
"Distribution Server type repos do not permit directory "
"uploads. You are probably trying to upload a non-flat "
"package. Please zip or create a flat package.")
basefname = os.path.basename(filename)
resource = open(filename, "rb")
headers = {"DESTINATION": self.destination, "OBJECT_ID": str(id_),
"FILE_TYPE": file_type, "FILE_NAME": basefname}
response = self.connection["jss"].session.post(
url=self.connection["upload_url"], data=resource, headers=headers)
if self.connection["jss"].verbose:
print response | python | def _copy(self, filename, id_=-1, file_type=0):
"""Upload a file to the distribution server.
Directories/bundle-style packages must be zipped prior to
copying.
"""
if os.path.isdir(filename):
raise JSSUnsupportedFileType(
"Distribution Server type repos do not permit directory "
"uploads. You are probably trying to upload a non-flat "
"package. Please zip or create a flat package.")
basefname = os.path.basename(filename)
resource = open(filename, "rb")
headers = {"DESTINATION": self.destination, "OBJECT_ID": str(id_),
"FILE_TYPE": file_type, "FILE_NAME": basefname}
response = self.connection["jss"].session.post(
url=self.connection["upload_url"], data=resource, headers=headers)
if self.connection["jss"].verbose:
print response | [
"def",
"_copy",
"(",
"self",
",",
"filename",
",",
"id_",
"=",
"-",
"1",
",",
"file_type",
"=",
"0",
")",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"filename",
")",
":",
"raise",
"JSSUnsupportedFileType",
"(",
"\"Distribution Server type repos do no... | Upload a file to the distribution server.
Directories/bundle-style packages must be zipped prior to
copying. | [
"Upload",
"a",
"file",
"to",
"the",
"distribution",
"server",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L672-L690 | train | 27,845 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer.delete_with_casper_admin_save | def delete_with_casper_admin_save(self, pkg):
"""Delete a pkg from the distribution server.
Args:
pkg: Can be a jss.Package object, an int ID of a package, or
a filename.
"""
# The POST needs the package ID.
if pkg.__class__.__name__ == "Package":
package_to_delete = pkg.id
elif isinstance(pkg, int):
package_to_delete = pkg
elif isinstance(pkg, str):
package_to_delete = self.connection["jss"].Package(pkg).id
else:
raise TypeError
data_dict = {"username": self.connection["jss"].user,
"password": self.connection["jss"].password,
"deletedPackageID": package_to_delete}
self.connection["jss"].session.post(url=self.connection["delete_url"],
data=data_dict) | python | def delete_with_casper_admin_save(self, pkg):
"""Delete a pkg from the distribution server.
Args:
pkg: Can be a jss.Package object, an int ID of a package, or
a filename.
"""
# The POST needs the package ID.
if pkg.__class__.__name__ == "Package":
package_to_delete = pkg.id
elif isinstance(pkg, int):
package_to_delete = pkg
elif isinstance(pkg, str):
package_to_delete = self.connection["jss"].Package(pkg).id
else:
raise TypeError
data_dict = {"username": self.connection["jss"].user,
"password": self.connection["jss"].password,
"deletedPackageID": package_to_delete}
self.connection["jss"].session.post(url=self.connection["delete_url"],
data=data_dict) | [
"def",
"delete_with_casper_admin_save",
"(",
"self",
",",
"pkg",
")",
":",
"# The POST needs the package ID.",
"if",
"pkg",
".",
"__class__",
".",
"__name__",
"==",
"\"Package\"",
":",
"package_to_delete",
"=",
"pkg",
".",
"id",
"elif",
"isinstance",
"(",
"pkg",
... | Delete a pkg from the distribution server.
Args:
pkg: Can be a jss.Package object, an int ID of a package, or
a filename. | [
"Delete",
"a",
"pkg",
"from",
"the",
"distribution",
"server",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L692-L713 | train | 27,846 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer.delete | def delete(self, filename):
"""Delete a package or script from the distribution server.
This method simply finds the Package or Script object from the
database with the API GET call and then deletes it. This will
remove the file from the database blob.
For setups which have file share distribution points, you will
need to delete the files on the shares also.
Args:
filename: Filename (no path) to delete.
"""
if is_package(filename):
self.connection["jss"].Package(filename).delete()
else:
self.connection["jss"].Script(filename).delete() | python | def delete(self, filename):
"""Delete a package or script from the distribution server.
This method simply finds the Package or Script object from the
database with the API GET call and then deletes it. This will
remove the file from the database blob.
For setups which have file share distribution points, you will
need to delete the files on the shares also.
Args:
filename: Filename (no path) to delete.
"""
if is_package(filename):
self.connection["jss"].Package(filename).delete()
else:
self.connection["jss"].Script(filename).delete() | [
"def",
"delete",
"(",
"self",
",",
"filename",
")",
":",
"if",
"is_package",
"(",
"filename",
")",
":",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
".",
"Package",
"(",
"filename",
")",
".",
"delete",
"(",
")",
"else",
":",
"self",
".",
"connecti... | Delete a package or script from the distribution server.
This method simply finds the Package or Script object from the
database with the API GET call and then deletes it. This will
remove the file from the database blob.
For setups which have file share distribution points, you will
need to delete the files on the shares also.
Args:
filename: Filename (no path) to delete. | [
"Delete",
"a",
"package",
"or",
"script",
"from",
"the",
"distribution",
"server",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L716-L732 | train | 27,847 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer.exists | def exists(self, filename):
"""Check for the existence of a package or script.
Unlike other DistributionPoint types, JDS and CDP types have no
documented interface for checking whether the server and its
children have a complete copy of a file. The best we can do is
check for an object using the API /packages URL--JSS.Package()
or /scripts and look for matches on the filename.
If this is not enough, please use the alternate
exists_with_casper method. For example, it's possible to create
a Package object but never upload a package file, and this
method will still return "True".
Also, this may be slow, as it needs to retrieve the complete
list of packages from the server.
"""
# Technically, the results of the casper.jxml page list the
# package files on the server. This is an undocumented
# interface, however.
result = False
if is_package(filename):
packages = self.connection["jss"].Package().retrieve_all()
for package in packages:
if package.findtext("filename") == filename:
result = True
break
else:
scripts = self.connection["jss"].Script().retrieve_all()
for script in scripts:
if script.findtext("filename") == filename:
result = True
break
return result | python | def exists(self, filename):
"""Check for the existence of a package or script.
Unlike other DistributionPoint types, JDS and CDP types have no
documented interface for checking whether the server and its
children have a complete copy of a file. The best we can do is
check for an object using the API /packages URL--JSS.Package()
or /scripts and look for matches on the filename.
If this is not enough, please use the alternate
exists_with_casper method. For example, it's possible to create
a Package object but never upload a package file, and this
method will still return "True".
Also, this may be slow, as it needs to retrieve the complete
list of packages from the server.
"""
# Technically, the results of the casper.jxml page list the
# package files on the server. This is an undocumented
# interface, however.
result = False
if is_package(filename):
packages = self.connection["jss"].Package().retrieve_all()
for package in packages:
if package.findtext("filename") == filename:
result = True
break
else:
scripts = self.connection["jss"].Script().retrieve_all()
for script in scripts:
if script.findtext("filename") == filename:
result = True
break
return result | [
"def",
"exists",
"(",
"self",
",",
"filename",
")",
":",
"# Technically, the results of the casper.jxml page list the",
"# package files on the server. This is an undocumented",
"# interface, however.",
"result",
"=",
"False",
"if",
"is_package",
"(",
"filename",
")",
":",
"p... | Check for the existence of a package or script.
Unlike other DistributionPoint types, JDS and CDP types have no
documented interface for checking whether the server and its
children have a complete copy of a file. The best we can do is
check for an object using the API /packages URL--JSS.Package()
or /scripts and look for matches on the filename.
If this is not enough, please use the alternate
exists_with_casper method. For example, it's possible to create
a Package object but never upload a package file, and this
method will still return "True".
Also, this may be slow, as it needs to retrieve the complete
list of packages from the server. | [
"Check",
"for",
"the",
"existence",
"of",
"a",
"package",
"or",
"script",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L734-L768 | train | 27,848 |
jssimporter/python-jss | jss/distribution_point.py | DistributionServer.exists_using_casper | def exists_using_casper(self, filename):
"""Check for the existence of a package file.
Unlike other DistributionPoint types, JDS and CDP types have no
documented interface for checking whether the server and its
children have a complete copy of a file. The best we can do is
check for an object using the API /packages URL--JSS.Package()
or /scripts and look for matches on the filename.
If this is not enough, this method uses the results of the
casper.jxml page to determine if a package exists. This is an
undocumented feature and as such should probably not be relied
upon. Please note, scripts are not listed per-distributionserver
like packages. For scripts, the best you can do is use the
regular exists method.
It will test for whether the file exists on ALL configured
distribution servers. This may register False if the JDS is busy
syncing them.
"""
casper_results = casper.Casper(self.connection["jss"])
distribution_servers = casper_results.find("distributionservers")
# Step one: Build a list of sets of all package names.
all_packages = []
for distribution_server in distribution_servers:
packages = set()
for package in distribution_server.findall("packages/package"):
packages.add(os.path.basename(package.find("fileURL").text))
all_packages.append(packages)
# Step two: Intersect the sets.
base_set = all_packages.pop()
for packages in all_packages:
base_set = base_set.intersection(packages)
# Step three: Check for membership.
return filename in base_set | python | def exists_using_casper(self, filename):
"""Check for the existence of a package file.
Unlike other DistributionPoint types, JDS and CDP types have no
documented interface for checking whether the server and its
children have a complete copy of a file. The best we can do is
check for an object using the API /packages URL--JSS.Package()
or /scripts and look for matches on the filename.
If this is not enough, this method uses the results of the
casper.jxml page to determine if a package exists. This is an
undocumented feature and as such should probably not be relied
upon. Please note, scripts are not listed per-distributionserver
like packages. For scripts, the best you can do is use the
regular exists method.
It will test for whether the file exists on ALL configured
distribution servers. This may register False if the JDS is busy
syncing them.
"""
casper_results = casper.Casper(self.connection["jss"])
distribution_servers = casper_results.find("distributionservers")
# Step one: Build a list of sets of all package names.
all_packages = []
for distribution_server in distribution_servers:
packages = set()
for package in distribution_server.findall("packages/package"):
packages.add(os.path.basename(package.find("fileURL").text))
all_packages.append(packages)
# Step two: Intersect the sets.
base_set = all_packages.pop()
for packages in all_packages:
base_set = base_set.intersection(packages)
# Step three: Check for membership.
return filename in base_set | [
"def",
"exists_using_casper",
"(",
"self",
",",
"filename",
")",
":",
"casper_results",
"=",
"casper",
".",
"Casper",
"(",
"self",
".",
"connection",
"[",
"\"jss\"",
"]",
")",
"distribution_servers",
"=",
"casper_results",
".",
"find",
"(",
"\"distributionserver... | Check for the existence of a package file.
Unlike other DistributionPoint types, JDS and CDP types have no
documented interface for checking whether the server and its
children have a complete copy of a file. The best we can do is
check for an object using the API /packages URL--JSS.Package()
or /scripts and look for matches on the filename.
If this is not enough, this method uses the results of the
casper.jxml page to determine if a package exists. This is an
undocumented feature and as such should probably not be relied
upon. Please note, scripts are not listed per-distributionserver
like packages. For scripts, the best you can do is use the
regular exists method.
It will test for whether the file exists on ALL configured
distribution servers. This may register False if the JDS is busy
syncing them. | [
"Check",
"for",
"the",
"existence",
"of",
"a",
"package",
"file",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/distribution_point.py#L770-L808 | train | 27,849 |
jssimporter/python-jss | jss/jssobjects.py | CommandFlush.command_flush_for | def command_flush_for(self, id_type, command_id, status):
"""Flush commands for an individual device.
Args:
id_type (str): One of 'computers', 'computergroups',
'mobiledevices', or 'mobiledevicegroups'.
id_value (str, int, list): ID value(s) for the devices to
flush. More than one device should be passed as IDs
in a list or tuple.
status (str): One of 'Pending', 'Failed', 'Pending+Failed'.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
id_types = ('computers', 'computergroups', 'mobiledevices',
'mobiledevicegroups')
status_types = ('Pending', 'Failed', 'Pending+Failed')
if id_type not in id_types or status not in status_types:
raise ValueError("Invalid arguments.")
if isinstance(command_id, list):
command_id = ",".join(str(item) for item in command_id)
flush_url = "{}/{}/id/{}/status/{}".format(
self.url, id_type, command_id, status)
self.jss.delete(flush_url) | python | def command_flush_for(self, id_type, command_id, status):
"""Flush commands for an individual device.
Args:
id_type (str): One of 'computers', 'computergroups',
'mobiledevices', or 'mobiledevicegroups'.
id_value (str, int, list): ID value(s) for the devices to
flush. More than one device should be passed as IDs
in a list or tuple.
status (str): One of 'Pending', 'Failed', 'Pending+Failed'.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
id_types = ('computers', 'computergroups', 'mobiledevices',
'mobiledevicegroups')
status_types = ('Pending', 'Failed', 'Pending+Failed')
if id_type not in id_types or status not in status_types:
raise ValueError("Invalid arguments.")
if isinstance(command_id, list):
command_id = ",".join(str(item) for item in command_id)
flush_url = "{}/{}/id/{}/status/{}".format(
self.url, id_type, command_id, status)
self.jss.delete(flush_url) | [
"def",
"command_flush_for",
"(",
"self",
",",
"id_type",
",",
"command_id",
",",
"status",
")",
":",
"id_types",
"=",
"(",
"'computers'",
",",
"'computergroups'",
",",
"'mobiledevices'",
",",
"'mobiledevicegroups'",
")",
"status_types",
"=",
"(",
"'Pending'",
",... | Flush commands for an individual device.
Args:
id_type (str): One of 'computers', 'computergroups',
'mobiledevices', or 'mobiledevicegroups'.
id_value (str, int, list): ID value(s) for the devices to
flush. More than one device should be passed as IDs
in a list or tuple.
status (str): One of 'Pending', 'Failed', 'Pending+Failed'.
Raises:
JSSDeleteError if provided url_path has a >= 400 response. | [
"Flush",
"commands",
"for",
"an",
"individual",
"device",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L153-L179 | train | 27,850 |
jssimporter/python-jss | jss/jssobjects.py | Computer.mac_addresses | def mac_addresses(self):
"""Return a list of mac addresses for this device.
Computers don't tell you which network device is which.
"""
mac_addresses = [self.findtext("general/mac_address")]
if self.findtext("general/alt_mac_address"):
mac_addresses.append(self.findtext("general/alt_mac_address"))
return mac_addresses | python | def mac_addresses(self):
"""Return a list of mac addresses for this device.
Computers don't tell you which network device is which.
"""
mac_addresses = [self.findtext("general/mac_address")]
if self.findtext("general/alt_mac_address"):
mac_addresses.append(self.findtext("general/alt_mac_address"))
return mac_addresses | [
"def",
"mac_addresses",
"(",
"self",
")",
":",
"mac_addresses",
"=",
"[",
"self",
".",
"findtext",
"(",
"\"general/mac_address\"",
")",
"]",
"if",
"self",
".",
"findtext",
"(",
"\"general/alt_mac_address\"",
")",
":",
"mac_addresses",
".",
"append",
"(",
"self... | Return a list of mac addresses for this device.
Computers don't tell you which network device is which. | [
"Return",
"a",
"list",
"of",
"mac",
"addresses",
"for",
"this",
"device",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L190-L198 | train | 27,851 |
jssimporter/python-jss | jss/jssobjects.py | FileUpload._set_upload_url | def _set_upload_url(self):
"""Generate the full URL for a POST."""
# pylint: disable=protected-access
self._upload_url = "/".join(
[self.jss._url, self._url, self.resource_type, self.id_type,
str(self._id)]) | python | def _set_upload_url(self):
"""Generate the full URL for a POST."""
# pylint: disable=protected-access
self._upload_url = "/".join(
[self.jss._url, self._url, self.resource_type, self.id_type,
str(self._id)]) | [
"def",
"_set_upload_url",
"(",
"self",
")",
":",
"# pylint: disable=protected-access",
"self",
".",
"_upload_url",
"=",
"\"/\"",
".",
"join",
"(",
"[",
"self",
".",
"jss",
".",
"_url",
",",
"self",
".",
"_url",
",",
"self",
".",
"resource_type",
",",
"self... | Generate the full URL for a POST. | [
"Generate",
"the",
"full",
"URL",
"for",
"a",
"POST",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L430-L435 | train | 27,852 |
jssimporter/python-jss | jss/jssobjects.py | FileUpload.save | def save(self):
"""POST the object to the JSS."""
try:
response = requests.post(self._upload_url,
auth=self.jss.session.auth,
verify=self.jss.session.verify,
files=self.resource)
except JSSPostError as error:
if error.status_code == 409:
raise JSSPostError(error)
else:
raise JSSMethodNotAllowedError(self.__class__.__name__)
if response.status_code == 201:
if self.jss.verbose:
print "POST: Success"
print response.text.encode("utf-8")
elif response.status_code >= 400:
error_handler(JSSPostError, response) | python | def save(self):
"""POST the object to the JSS."""
try:
response = requests.post(self._upload_url,
auth=self.jss.session.auth,
verify=self.jss.session.verify,
files=self.resource)
except JSSPostError as error:
if error.status_code == 409:
raise JSSPostError(error)
else:
raise JSSMethodNotAllowedError(self.__class__.__name__)
if response.status_code == 201:
if self.jss.verbose:
print "POST: Success"
print response.text.encode("utf-8")
elif response.status_code >= 400:
error_handler(JSSPostError, response) | [
"def",
"save",
"(",
"self",
")",
":",
"try",
":",
"response",
"=",
"requests",
".",
"post",
"(",
"self",
".",
"_upload_url",
",",
"auth",
"=",
"self",
".",
"jss",
".",
"session",
".",
"auth",
",",
"verify",
"=",
"self",
".",
"jss",
".",
"session",
... | POST the object to the JSS. | [
"POST",
"the",
"object",
"to",
"the",
"JSS",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L438-L456 | train | 27,853 |
jssimporter/python-jss | jss/jssobjects.py | LDAPServer.search_users | def search_users(self, user):
"""Search for LDAP users.
Args:
user: User to search for. It is not entirely clear how the
JSS determines the results- are regexes allowed, or
globbing?
Returns:
LDAPUsersResult object.
Raises:
Will raise a JSSGetError if no results are found.
"""
user_url = "%s/%s/%s" % (self.url, "user", user)
response = self.jss.get(user_url)
return LDAPUsersResults(self.jss, response) | python | def search_users(self, user):
"""Search for LDAP users.
Args:
user: User to search for. It is not entirely clear how the
JSS determines the results- are regexes allowed, or
globbing?
Returns:
LDAPUsersResult object.
Raises:
Will raise a JSSGetError if no results are found.
"""
user_url = "%s/%s/%s" % (self.url, "user", user)
response = self.jss.get(user_url)
return LDAPUsersResults(self.jss, response) | [
"def",
"search_users",
"(",
"self",
",",
"user",
")",
":",
"user_url",
"=",
"\"%s/%s/%s\"",
"%",
"(",
"self",
".",
"url",
",",
"\"user\"",
",",
"user",
")",
"response",
"=",
"self",
".",
"jss",
".",
"get",
"(",
"user_url",
")",
"return",
"LDAPUsersResu... | Search for LDAP users.
Args:
user: User to search for. It is not entirely clear how the
JSS determines the results- are regexes allowed, or
globbing?
Returns:
LDAPUsersResult object.
Raises:
Will raise a JSSGetError if no results are found. | [
"Search",
"for",
"LDAP",
"users",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L486-L502 | train | 27,854 |
jssimporter/python-jss | jss/jssobjects.py | LDAPServer.search_groups | def search_groups(self, group):
"""Search for LDAP groups.
Args:
group: Group to search for. It is not entirely clear how the
JSS determines the results- are regexes allowed, or
globbing?
Returns:
LDAPGroupsResult object.
Raises:
JSSGetError if no results are found.
"""
group_url = "%s/%s/%s" % (self.url, "group", group)
response = self.jss.get(group_url)
return LDAPGroupsResults(self.jss, response) | python | def search_groups(self, group):
"""Search for LDAP groups.
Args:
group: Group to search for. It is not entirely clear how the
JSS determines the results- are regexes allowed, or
globbing?
Returns:
LDAPGroupsResult object.
Raises:
JSSGetError if no results are found.
"""
group_url = "%s/%s/%s" % (self.url, "group", group)
response = self.jss.get(group_url)
return LDAPGroupsResults(self.jss, response) | [
"def",
"search_groups",
"(",
"self",
",",
"group",
")",
":",
"group_url",
"=",
"\"%s/%s/%s\"",
"%",
"(",
"self",
".",
"url",
",",
"\"group\"",
",",
"group",
")",
"response",
"=",
"self",
".",
"jss",
".",
"get",
"(",
"group_url",
")",
"return",
"LDAPGro... | Search for LDAP groups.
Args:
group: Group to search for. It is not entirely clear how the
JSS determines the results- are regexes allowed, or
globbing?
Returns:
LDAPGroupsResult object.
Raises:
JSSGetError if no results are found. | [
"Search",
"for",
"LDAP",
"groups",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L504-L520 | train | 27,855 |
jssimporter/python-jss | jss/jssobjects.py | LDAPServer.is_user_in_group | def is_user_in_group(self, user, group):
"""Test for whether a user is in a group.
There is also the ability in the API to test for whether
multiple users are members of an LDAP group, but you should just
call is_user_in_group over an enumerated list of users.
Args:
user: String username.
group: String group name.
Returns bool.
"""
search_url = "%s/%s/%s/%s/%s" % (self.url, "group", group,
"user", user)
response = self.jss.get(search_url)
# Sanity check
length = len(response)
result = False
if length == 1:
# User doesn't exist. Use default False value.
pass
elif length == 2:
if response.findtext("ldap_user/username") == user:
if response.findtext("ldap_user/is_member") == "Yes":
result = True
elif len(response) >= 2:
raise JSSGetError("Unexpected response.")
return result | python | def is_user_in_group(self, user, group):
"""Test for whether a user is in a group.
There is also the ability in the API to test for whether
multiple users are members of an LDAP group, but you should just
call is_user_in_group over an enumerated list of users.
Args:
user: String username.
group: String group name.
Returns bool.
"""
search_url = "%s/%s/%s/%s/%s" % (self.url, "group", group,
"user", user)
response = self.jss.get(search_url)
# Sanity check
length = len(response)
result = False
if length == 1:
# User doesn't exist. Use default False value.
pass
elif length == 2:
if response.findtext("ldap_user/username") == user:
if response.findtext("ldap_user/is_member") == "Yes":
result = True
elif len(response) >= 2:
raise JSSGetError("Unexpected response.")
return result | [
"def",
"is_user_in_group",
"(",
"self",
",",
"user",
",",
"group",
")",
":",
"search_url",
"=",
"\"%s/%s/%s/%s/%s\"",
"%",
"(",
"self",
".",
"url",
",",
"\"group\"",
",",
"group",
",",
"\"user\"",
",",
"user",
")",
"response",
"=",
"self",
".",
"jss",
... | Test for whether a user is in a group.
There is also the ability in the API to test for whether
multiple users are members of an LDAP group, but you should just
call is_user_in_group over an enumerated list of users.
Args:
user: String username.
group: String group name.
Returns bool. | [
"Test",
"for",
"whether",
"a",
"user",
"is",
"in",
"a",
"group",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L522-L550 | train | 27,856 |
jssimporter/python-jss | jss/jssobjects.py | LogFlush.log_flush_with_xml | def log_flush_with_xml(self, data):
"""Flush logs for devices with a supplied xml string.
From the Casper API docs:
log, log_id, interval, and devices specified in an XML file.
Sample file:
<logflush>
<log>policy</log>
<log_id>2</log_id>
<interval>THREE MONTHS</interval>
<computers>
<computer>
<id>1</id>
</computer>
<computer>
<id>2</id>
</computer>
</computers>
</logflush>
Args:
data (string): XML string following the above structure or
an ElementTree/Element.
Elements:
logflush (root)
log (Unknown; "policy" is the only one listed in
docs).
log_id: Log ID value.
interval: Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month",
"Year". e.g. ("Three+Months")
Please note: The documentation for this
specifies the singular form (e.g. "Month"),
and plural ("Months") at different times, and
further the construction is listed as
"THREE MONTHS" elsewhere. Limited testing
indicates that pluralization does not matter,
nor does capitalization. The "+" seems optional
as well.
Please test!
Device Arrays:
Again, acceptable values are not listed in the
docs, aside from the example ("computers").
Presumably "mobiledevices", and possibly
"computergroups" and "mobiledevicegroups" work.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
if not isinstance(data, basestring):
data = ElementTree.tostring(data)
response = self.delete(data) | python | def log_flush_with_xml(self, data):
"""Flush logs for devices with a supplied xml string.
From the Casper API docs:
log, log_id, interval, and devices specified in an XML file.
Sample file:
<logflush>
<log>policy</log>
<log_id>2</log_id>
<interval>THREE MONTHS</interval>
<computers>
<computer>
<id>1</id>
</computer>
<computer>
<id>2</id>
</computer>
</computers>
</logflush>
Args:
data (string): XML string following the above structure or
an ElementTree/Element.
Elements:
logflush (root)
log (Unknown; "policy" is the only one listed in
docs).
log_id: Log ID value.
interval: Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month",
"Year". e.g. ("Three+Months")
Please note: The documentation for this
specifies the singular form (e.g. "Month"),
and plural ("Months") at different times, and
further the construction is listed as
"THREE MONTHS" elsewhere. Limited testing
indicates that pluralization does not matter,
nor does capitalization. The "+" seems optional
as well.
Please test!
Device Arrays:
Again, acceptable values are not listed in the
docs, aside from the example ("computers").
Presumably "mobiledevices", and possibly
"computergroups" and "mobiledevicegroups" work.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
if not isinstance(data, basestring):
data = ElementTree.tostring(data)
response = self.delete(data) | [
"def",
"log_flush_with_xml",
"(",
"self",
",",
"data",
")",
":",
"if",
"not",
"isinstance",
"(",
"data",
",",
"basestring",
")",
":",
"data",
"=",
"ElementTree",
".",
"tostring",
"(",
"data",
")",
"response",
"=",
"self",
".",
"delete",
"(",
"data",
")... | Flush logs for devices with a supplied xml string.
From the Casper API docs:
log, log_id, interval, and devices specified in an XML file.
Sample file:
<logflush>
<log>policy</log>
<log_id>2</log_id>
<interval>THREE MONTHS</interval>
<computers>
<computer>
<id>1</id>
</computer>
<computer>
<id>2</id>
</computer>
</computers>
</logflush>
Args:
data (string): XML string following the above structure or
an ElementTree/Element.
Elements:
logflush (root)
log (Unknown; "policy" is the only one listed in
docs).
log_id: Log ID value.
interval: Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month",
"Year". e.g. ("Three+Months")
Please note: The documentation for this
specifies the singular form (e.g. "Month"),
and plural ("Months") at different times, and
further the construction is listed as
"THREE MONTHS" elsewhere. Limited testing
indicates that pluralization does not matter,
nor does capitalization. The "+" seems optional
as well.
Please test!
Device Arrays:
Again, acceptable values are not listed in the
docs, aside from the example ("computers").
Presumably "mobiledevices", and possibly
"computergroups" and "mobiledevicegroups" work.
Raises:
JSSDeleteError if provided url_path has a >= 400 response. | [
"Flush",
"logs",
"for",
"devices",
"with",
"a",
"supplied",
"xml",
"string",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L607-L658 | train | 27,857 |
jssimporter/python-jss | jss/jssobjects.py | LogFlush.log_flush_for_interval | def log_flush_for_interval(self, log_type, interval):
"""Flush logs for an interval of time.
Args:
log_type (str): Only documented type is "policies". This
will be applied by default if nothing is passed.
interval (str): Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month", "Year". e.g.
("Three+Months") Please note: The documentation for this
specifies the singular form (e.g. "Month"), and plural
("Months") at different times, and further the
construction is listed as "THREE MONTHS" elsewhere.
Limited testing indicates that pluralization does not
matter, nor does capitalization.
Please test!
No validation is performed on this prior to the request
being made.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
if not log_type:
log_type = "policies"
# The XML for the /logflush basic endpoint allows spaces
# instead of "+", so do a replace here just in case.
interval = interval.replace(" ", "+")
flush_url = "{}/{}/interval/{}".format(
self.url, log_type, interval)
self.jss.delete(flush_url) | python | def log_flush_for_interval(self, log_type, interval):
"""Flush logs for an interval of time.
Args:
log_type (str): Only documented type is "policies". This
will be applied by default if nothing is passed.
interval (str): Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month", "Year". e.g.
("Three+Months") Please note: The documentation for this
specifies the singular form (e.g. "Month"), and plural
("Months") at different times, and further the
construction is listed as "THREE MONTHS" elsewhere.
Limited testing indicates that pluralization does not
matter, nor does capitalization.
Please test!
No validation is performed on this prior to the request
being made.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
if not log_type:
log_type = "policies"
# The XML for the /logflush basic endpoint allows spaces
# instead of "+", so do a replace here just in case.
interval = interval.replace(" ", "+")
flush_url = "{}/{}/interval/{}".format(
self.url, log_type, interval)
self.jss.delete(flush_url) | [
"def",
"log_flush_for_interval",
"(",
"self",
",",
"log_type",
",",
"interval",
")",
":",
"if",
"not",
"log_type",
":",
"log_type",
"=",
"\"policies\"",
"# The XML for the /logflush basic endpoint allows spaces",
"# instead of \"+\", so do a replace here just in case.",
"interv... | Flush logs for an interval of time.
Args:
log_type (str): Only documented type is "policies". This
will be applied by default if nothing is passed.
interval (str): Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month", "Year". e.g.
("Three+Months") Please note: The documentation for this
specifies the singular form (e.g. "Month"), and plural
("Months") at different times, and further the
construction is listed as "THREE MONTHS" elsewhere.
Limited testing indicates that pluralization does not
matter, nor does capitalization.
Please test!
No validation is performed on this prior to the request
being made.
Raises:
JSSDeleteError if provided url_path has a >= 400 response. | [
"Flush",
"logs",
"for",
"an",
"interval",
"of",
"time",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L660-L692 | train | 27,858 |
jssimporter/python-jss | jss/jssobjects.py | LogFlush.log_flush_for_obj_for_interval | def log_flush_for_obj_for_interval(self, log_type, obj_id, interval):
"""Flush logs for an interval of time for a specific object.
Please note, log_type is a variable according to the API docs,
but acceptable values are not listed. Only "policies" is
demonstrated as an acceptable value.
Args:
log_type (str): Only documented type is "policies". This
will be applied by default if nothing is passed.
obj_id (str or int): ID of the object to have logs flushed.
interval (str): Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month", "Year". e.g.
("Three+Months") Please note: The documentation for this
specifies the singular form (e.g. "Month"), and plural
("Months") at different times, and further the
construction is listed as "THREE MONTHS" elsewhere.
Limited testing indicates that pluralization does not
matter, nor does capitalization.
Please test!
No validation is performed on this prior to the request
being made.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
if not log_type:
log_type = "policies"
# The XML for the /logflush basic endpoint allows spaces
# instead of "+", so do a replace here just in case.
interval = interval.replace(" ", "+")
flush_url = "{}/{}/id/{}/interval/{}".format(
self.url, log_type, obj_id, interval)
self.jss.delete(flush_url) | python | def log_flush_for_obj_for_interval(self, log_type, obj_id, interval):
"""Flush logs for an interval of time for a specific object.
Please note, log_type is a variable according to the API docs,
but acceptable values are not listed. Only "policies" is
demonstrated as an acceptable value.
Args:
log_type (str): Only documented type is "policies". This
will be applied by default if nothing is passed.
obj_id (str or int): ID of the object to have logs flushed.
interval (str): Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month", "Year". e.g.
("Three+Months") Please note: The documentation for this
specifies the singular form (e.g. "Month"), and plural
("Months") at different times, and further the
construction is listed as "THREE MONTHS" elsewhere.
Limited testing indicates that pluralization does not
matter, nor does capitalization.
Please test!
No validation is performed on this prior to the request
being made.
Raises:
JSSDeleteError if provided url_path has a >= 400 response.
"""
if not log_type:
log_type = "policies"
# The XML for the /logflush basic endpoint allows spaces
# instead of "+", so do a replace here just in case.
interval = interval.replace(" ", "+")
flush_url = "{}/{}/id/{}/interval/{}".format(
self.url, log_type, obj_id, interval)
self.jss.delete(flush_url) | [
"def",
"log_flush_for_obj_for_interval",
"(",
"self",
",",
"log_type",
",",
"obj_id",
",",
"interval",
")",
":",
"if",
"not",
"log_type",
":",
"log_type",
"=",
"\"policies\"",
"# The XML for the /logflush basic endpoint allows spaces",
"# instead of \"+\", so do a replace her... | Flush logs for an interval of time for a specific object.
Please note, log_type is a variable according to the API docs,
but acceptable values are not listed. Only "policies" is
demonstrated as an acceptable value.
Args:
log_type (str): Only documented type is "policies". This
will be applied by default if nothing is passed.
obj_id (str or int): ID of the object to have logs flushed.
interval (str): Combination of "Zero", "One", "Two",
"Three", "Six", and "Day", "Week", "Month", "Year". e.g.
("Three+Months") Please note: The documentation for this
specifies the singular form (e.g. "Month"), and plural
("Months") at different times, and further the
construction is listed as "THREE MONTHS" elsewhere.
Limited testing indicates that pluralization does not
matter, nor does capitalization.
Please test!
No validation is performed on this prior to the request
being made.
Raises:
JSSDeleteError if provided url_path has a >= 400 response. | [
"Flush",
"logs",
"for",
"an",
"interval",
"of",
"time",
"for",
"a",
"specific",
"object",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L694-L731 | train | 27,859 |
jssimporter/python-jss | jss/jssobjects.py | Package._new | def _new(self, name, **kwargs):
"""Create a new Package from scratch.
Args:
name: String filename of the package to use for the
Package object's Display Name (here, "name").
Will also be used as the "filename" value. Casper will
let you specify different values, but it is not
recommended.
kwargs:
Accepted keyword args include all top-level keys.
Values will be cast to string. (Int 10, bool False
become string values "10" and "false").
"""
# We want these to match, so circumvent the for loop.
# ElementTree.SubElement(self, "name").text = name
super(Package, self)._new(name, **kwargs)
ElementTree.SubElement(self, "filename").text = name | python | def _new(self, name, **kwargs):
"""Create a new Package from scratch.
Args:
name: String filename of the package to use for the
Package object's Display Name (here, "name").
Will also be used as the "filename" value. Casper will
let you specify different values, but it is not
recommended.
kwargs:
Accepted keyword args include all top-level keys.
Values will be cast to string. (Int 10, bool False
become string values "10" and "false").
"""
# We want these to match, so circumvent the for loop.
# ElementTree.SubElement(self, "name").text = name
super(Package, self)._new(name, **kwargs)
ElementTree.SubElement(self, "filename").text = name | [
"def",
"_new",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"# We want these to match, so circumvent the for loop.",
"# ElementTree.SubElement(self, \"name\").text = name",
"super",
"(",
"Package",
",",
"self",
")",
".",
"_new",
"(",
"name",
",",
"*"... | Create a new Package from scratch.
Args:
name: String filename of the package to use for the
Package object's Display Name (here, "name").
Will also be used as the "filename" value. Casper will
let you specify different values, but it is not
recommended.
kwargs:
Accepted keyword args include all top-level keys.
Values will be cast to string. (Int 10, bool False
become string values "10" and "false"). | [
"Create",
"a",
"new",
"Package",
"from",
"scratch",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L860-L877 | train | 27,860 |
jssimporter/python-jss | jss/jssobjects.py | Package.set_category | def set_category(self, category):
"""Set package category
Args:
category: String of an existing category's name, or a
Category object.
"""
# For some reason, packages only have the category name, not the
# ID.
if isinstance(category, Category):
name = category.name
else:
name = category
self.find("category").text = name | python | def set_category(self, category):
"""Set package category
Args:
category: String of an existing category's name, or a
Category object.
"""
# For some reason, packages only have the category name, not the
# ID.
if isinstance(category, Category):
name = category.name
else:
name = category
self.find("category").text = name | [
"def",
"set_category",
"(",
"self",
",",
"category",
")",
":",
"# For some reason, packages only have the category name, not the",
"# ID.",
"if",
"isinstance",
"(",
"category",
",",
"Category",
")",
":",
"name",
"=",
"category",
".",
"name",
"else",
":",
"name",
"... | Set package category
Args:
category: String of an existing category's name, or a
Category object. | [
"Set",
"package",
"category"
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L888-L901 | train | 27,861 |
jssimporter/python-jss | jss/jssobjects.py | Policy.add_object_to_scope | def add_object_to_scope(self, obj):
"""Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided.
"""
if isinstance(obj, Computer):
self.add_object_to_path(obj, "scope/computers")
elif isinstance(obj, ComputerGroup):
self.add_object_to_path(obj, "scope/computer_groups")
elif isinstance(obj, Building):
self.add_object_to_path(obj, "scope/buildings")
elif isinstance(obj, Department):
self.add_object_to_path(obj, "scope/departments")
else:
raise TypeError | python | def add_object_to_scope(self, obj):
"""Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided.
"""
if isinstance(obj, Computer):
self.add_object_to_path(obj, "scope/computers")
elif isinstance(obj, ComputerGroup):
self.add_object_to_path(obj, "scope/computer_groups")
elif isinstance(obj, Building):
self.add_object_to_path(obj, "scope/buildings")
elif isinstance(obj, Department):
self.add_object_to_path(obj, "scope/departments")
else:
raise TypeError | [
"def",
"add_object_to_scope",
"(",
"self",
",",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"Computer",
")",
":",
"self",
".",
"add_object_to_path",
"(",
"obj",
",",
"\"scope/computers\"",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"ComputerGroup"... | Add an object to the appropriate scope block.
Args:
obj: JSSObject to add to scope. Accepted subclasses are:
Computer
ComputerGroup
Building
Department
Raises:
TypeError if invalid obj type is provided. | [
"Add",
"an",
"object",
"to",
"the",
"appropriate",
"scope",
"block",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L1006-L1028 | train | 27,862 |
jssimporter/python-jss | jss/jssobjects.py | Policy.add_package | def add_package(self, pkg, action_type="Install"):
"""Add a Package object to the policy with action=install.
Args:
pkg: A Package object to add.
action_type (str, optional): One of "Install", "Cache", or
"Install Cached". Defaults to "Install".
"""
if isinstance(pkg, Package):
if action_type not in ("Install", "Cache", "Install Cached"):
raise ValueError
package = self.add_object_to_path(
pkg, "package_configuration/packages")
# If there's already an action specified, get it, then
# overwrite. Otherwise, make a new subelement.
action = package.find("action")
if not action:
action = ElementTree.SubElement(package, "action")
action.text = action_type
else:
raise ValueError("Please pass a Package object to parameter: "
"pkg.") | python | def add_package(self, pkg, action_type="Install"):
"""Add a Package object to the policy with action=install.
Args:
pkg: A Package object to add.
action_type (str, optional): One of "Install", "Cache", or
"Install Cached". Defaults to "Install".
"""
if isinstance(pkg, Package):
if action_type not in ("Install", "Cache", "Install Cached"):
raise ValueError
package = self.add_object_to_path(
pkg, "package_configuration/packages")
# If there's already an action specified, get it, then
# overwrite. Otherwise, make a new subelement.
action = package.find("action")
if not action:
action = ElementTree.SubElement(package, "action")
action.text = action_type
else:
raise ValueError("Please pass a Package object to parameter: "
"pkg.") | [
"def",
"add_package",
"(",
"self",
",",
"pkg",
",",
"action_type",
"=",
"\"Install\"",
")",
":",
"if",
"isinstance",
"(",
"pkg",
",",
"Package",
")",
":",
"if",
"action_type",
"not",
"in",
"(",
"\"Install\"",
",",
"\"Cache\"",
",",
"\"Install Cached\"",
")... | Add a Package object to the policy with action=install.
Args:
pkg: A Package object to add.
action_type (str, optional): One of "Install", "Cache", or
"Install Cached". Defaults to "Install". | [
"Add",
"a",
"Package",
"object",
"to",
"the",
"policy",
"with",
"action",
"=",
"install",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L1068-L1089 | train | 27,863 |
jssimporter/python-jss | jss/jssobjects.py | Policy.set_category | def set_category(self, category):
"""Set the policy's category.
Args:
category: A category object.
"""
pcategory = self.find("general/category")
pcategory.clear()
name = ElementTree.SubElement(pcategory, "name")
if isinstance(category, Category):
id_ = ElementTree.SubElement(pcategory, "id")
id_.text = category.id
name.text = category.name
elif isinstance(category, basestring):
name.text = category | python | def set_category(self, category):
"""Set the policy's category.
Args:
category: A category object.
"""
pcategory = self.find("general/category")
pcategory.clear()
name = ElementTree.SubElement(pcategory, "name")
if isinstance(category, Category):
id_ = ElementTree.SubElement(pcategory, "id")
id_.text = category.id
name.text = category.name
elif isinstance(category, basestring):
name.text = category | [
"def",
"set_category",
"(",
"self",
",",
"category",
")",
":",
"pcategory",
"=",
"self",
".",
"find",
"(",
"\"general/category\"",
")",
"pcategory",
".",
"clear",
"(",
")",
"name",
"=",
"ElementTree",
".",
"SubElement",
"(",
"pcategory",
",",
"\"name\"",
"... | Set the policy's category.
Args:
category: A category object. | [
"Set",
"the",
"policy",
"s",
"category",
"."
] | b95185d74e0c0531b0b563f280d4129e21d5fe5d | https://github.com/jssimporter/python-jss/blob/b95185d74e0c0531b0b563f280d4129e21d5fe5d/jss/jssobjects.py#L1099-L1113 | train | 27,864 |
brentp/cyvcf2 | cyvcf2/cli.py | cyvcf2 | def cyvcf2(context, vcf, include, exclude, chrom, start, end, loglevel, silent,
individual, no_inds):
"""fast vcf parsing with cython + htslib"""
coloredlogs.install(log_level=loglevel)
start_parsing = datetime.now()
log.info("Running cyvcf2 version %s", __version__)
if include and exclude:
log.warning("Can not use include and exclude at the same time")
context.abort()
region = ''
if (chrom or start or end):
if not (chrom and start and end):
log.warning("Please specify chromosome, start and end for region")
context.abort()
else:
region = "{0}:{1}-{2}".format(chrom, start, end)
vcf_obj = VCF(vcf)
for inclusion in include:
if vcf_obj.contains(inclusion):
log.info("Including %s in output", inclusion)
else:
log.warning("%s does not exist in header", inclusion)
context.abort()
for exclusion in exclude:
if vcf_obj.contains(exclusion):
log.info("Excluding %s in output", exclusion)
else:
log.warning("%s does not exist in header", exclusion)
context.abort()
if individual:
# Check if the choosen individuals exists in vcf
test = True
for ind_id in individual:
if ind_id not in vcf_obj.samples:
log.warning("Individual '%s' does not exist in vcf", ind_id)
test = False
if not test:
context.abort()
# Convert individuals to list for VCF.set_individuals
individual = list(individual)
else:
individual = None
# Set individual to be empty list to skip all genotypes
if no_inds:
individual = []
if not silent:
print_header(vcf_obj, include, exclude, individual)
nr_variants = None
try:
for nr_variants, variant in enumerate(vcf_obj(region)):
if not silent:
print_variant(variant, include, exclude)
except Exception as err:
log.warning(err)
context.abort()
if nr_variants is None:
log.info("No variants in vcf")
return
log.info("{0} variants parsed".format(nr_variants+1))
log.info("Time to parse variants: {0}".format(datetime.now() - start_parsing)) | python | def cyvcf2(context, vcf, include, exclude, chrom, start, end, loglevel, silent,
individual, no_inds):
"""fast vcf parsing with cython + htslib"""
coloredlogs.install(log_level=loglevel)
start_parsing = datetime.now()
log.info("Running cyvcf2 version %s", __version__)
if include and exclude:
log.warning("Can not use include and exclude at the same time")
context.abort()
region = ''
if (chrom or start or end):
if not (chrom and start and end):
log.warning("Please specify chromosome, start and end for region")
context.abort()
else:
region = "{0}:{1}-{2}".format(chrom, start, end)
vcf_obj = VCF(vcf)
for inclusion in include:
if vcf_obj.contains(inclusion):
log.info("Including %s in output", inclusion)
else:
log.warning("%s does not exist in header", inclusion)
context.abort()
for exclusion in exclude:
if vcf_obj.contains(exclusion):
log.info("Excluding %s in output", exclusion)
else:
log.warning("%s does not exist in header", exclusion)
context.abort()
if individual:
# Check if the choosen individuals exists in vcf
test = True
for ind_id in individual:
if ind_id not in vcf_obj.samples:
log.warning("Individual '%s' does not exist in vcf", ind_id)
test = False
if not test:
context.abort()
# Convert individuals to list for VCF.set_individuals
individual = list(individual)
else:
individual = None
# Set individual to be empty list to skip all genotypes
if no_inds:
individual = []
if not silent:
print_header(vcf_obj, include, exclude, individual)
nr_variants = None
try:
for nr_variants, variant in enumerate(vcf_obj(region)):
if not silent:
print_variant(variant, include, exclude)
except Exception as err:
log.warning(err)
context.abort()
if nr_variants is None:
log.info("No variants in vcf")
return
log.info("{0} variants parsed".format(nr_variants+1))
log.info("Time to parse variants: {0}".format(datetime.now() - start_parsing)) | [
"def",
"cyvcf2",
"(",
"context",
",",
"vcf",
",",
"include",
",",
"exclude",
",",
"chrom",
",",
"start",
",",
"end",
",",
"loglevel",
",",
"silent",
",",
"individual",
",",
"no_inds",
")",
":",
"coloredlogs",
".",
"install",
"(",
"log_level",
"=",
"log... | fast vcf parsing with cython + htslib | [
"fast",
"vcf",
"parsing",
"with",
"cython",
"+",
"htslib"
] | 57f2c0e58ae64d2ec5673d833233834a1157c665 | https://github.com/brentp/cyvcf2/blob/57f2c0e58ae64d2ec5673d833233834a1157c665/cyvcf2/cli.py#L112-L182 | train | 27,865 |
brentp/cyvcf2 | setup.py | get_version | def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("cyvcf2", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located") | python | def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("cyvcf2", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located") | [
"def",
"get_version",
"(",
")",
":",
"import",
"ast",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"\"cyvcf2\"",
",",
"\"__init__.py\"",
")",
",",
"\"r\"",
")",
"as",
"init_file",
":",
"module",
"=",
"ast",
".",
"parse",
"(",
"init_file",
... | Get the version info from the mpld3 package without importing it | [
"Get",
"the",
"version",
"info",
"from",
"the",
"mpld3",
"package",
"without",
"importing",
"it"
] | 57f2c0e58ae64d2ec5673d833233834a1157c665 | https://github.com/brentp/cyvcf2/blob/57f2c0e58ae64d2ec5673d833233834a1157c665/setup.py#L13-L26 | train | 27,866 |
nabla-c0d3/nassl | nassl/ocsp_response.py | OcspResponse.verify | def verify(self, verify_locations: str) -> None:
"""Verify that the OCSP response is trusted.
Args:
verify_locations: The file path to a trust store containing pem-formatted certificates, to be used for
validating the OCSP response.
Raises OcspResponseNotTrustedError if the validation failed ie. the OCSP response is not trusted.
"""
# Ensure the file exists
with open(verify_locations):
pass
try:
self._ocsp_response.basic_verify(verify_locations)
except _nassl.OpenSSLError as e:
if 'certificate verify error' in str(e):
raise OcspResponseNotTrustedError(verify_locations)
raise | python | def verify(self, verify_locations: str) -> None:
"""Verify that the OCSP response is trusted.
Args:
verify_locations: The file path to a trust store containing pem-formatted certificates, to be used for
validating the OCSP response.
Raises OcspResponseNotTrustedError if the validation failed ie. the OCSP response is not trusted.
"""
# Ensure the file exists
with open(verify_locations):
pass
try:
self._ocsp_response.basic_verify(verify_locations)
except _nassl.OpenSSLError as e:
if 'certificate verify error' in str(e):
raise OcspResponseNotTrustedError(verify_locations)
raise | [
"def",
"verify",
"(",
"self",
",",
"verify_locations",
":",
"str",
")",
"->",
"None",
":",
"# Ensure the file exists",
"with",
"open",
"(",
"verify_locations",
")",
":",
"pass",
"try",
":",
"self",
".",
"_ocsp_response",
".",
"basic_verify",
"(",
"verify_locat... | Verify that the OCSP response is trusted.
Args:
verify_locations: The file path to a trust store containing pem-formatted certificates, to be used for
validating the OCSP response.
Raises OcspResponseNotTrustedError if the validation failed ie. the OCSP response is not trusted. | [
"Verify",
"that",
"the",
"OCSP",
"response",
"is",
"trusted",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ocsp_response.py#L41-L59 | train | 27,867 |
nabla-c0d3/nassl | nassl/ocsp_response.py | OcspResponse._parse_ocsp_response_from_openssl_text | def _parse_ocsp_response_from_openssl_text(
cls,
response_text: str,
response_status: OcspResponseStatusEnum
) -> Dict[str, Any]:
"""Parse OpenSSL's text output and make a lot of assumptions.
"""
response_dict = {
'responseStatus': cls._get_value_from_text_output_no_p('OCSP Response Status:', response_text),
'version': cls._get_value_from_text_output_no_p('Version:', response_text),
'responseType': cls._get_value_from_text_output('Response Type:', response_text),
'responderID': cls._get_value_from_text_output('Responder Id:', response_text),
'producedAt': cls._get_value_from_text_output('Produced At:', response_text),
} # type: Dict[str, Any]
if response_status != OcspResponseStatusEnum.SUCCESSFUL:
return response_dict
# A successful OCSP response will contain more data - let's parse it
# TODO(ad): This will not work correctly if there are multiple responses as it assumes just one
response_dict['responses'] = [
{
'certID': {
'hashAlgorithm': cls._get_value_from_text_output('Hash Algorithm:', response_text),
'issuerNameHash': cls._get_value_from_text_output('Issuer Name Hash:', response_text),
'issuerKeyHash': cls._get_value_from_text_output('Issuer Key Hash:', response_text),
'serialNumber': cls._get_value_from_text_output('Serial Number:', response_text)
},
'certStatus': cls._get_value_from_text_output('Cert Status:', response_text),
'thisUpdate': cls._get_value_from_text_output('This Update:', response_text),
'nextUpdate': cls._get_value_from_text_output('Next Update:', response_text),
}
]
if cls._get_scts_from_text_output(response_text):
# SCT extension present
response_dict['responses'][0]['singleExtensions'] = {
'ctCertificateScts': cls._get_scts_from_text_output(response_text)
}
return response_dict | python | def _parse_ocsp_response_from_openssl_text(
cls,
response_text: str,
response_status: OcspResponseStatusEnum
) -> Dict[str, Any]:
"""Parse OpenSSL's text output and make a lot of assumptions.
"""
response_dict = {
'responseStatus': cls._get_value_from_text_output_no_p('OCSP Response Status:', response_text),
'version': cls._get_value_from_text_output_no_p('Version:', response_text),
'responseType': cls._get_value_from_text_output('Response Type:', response_text),
'responderID': cls._get_value_from_text_output('Responder Id:', response_text),
'producedAt': cls._get_value_from_text_output('Produced At:', response_text),
} # type: Dict[str, Any]
if response_status != OcspResponseStatusEnum.SUCCESSFUL:
return response_dict
# A successful OCSP response will contain more data - let's parse it
# TODO(ad): This will not work correctly if there are multiple responses as it assumes just one
response_dict['responses'] = [
{
'certID': {
'hashAlgorithm': cls._get_value_from_text_output('Hash Algorithm:', response_text),
'issuerNameHash': cls._get_value_from_text_output('Issuer Name Hash:', response_text),
'issuerKeyHash': cls._get_value_from_text_output('Issuer Key Hash:', response_text),
'serialNumber': cls._get_value_from_text_output('Serial Number:', response_text)
},
'certStatus': cls._get_value_from_text_output('Cert Status:', response_text),
'thisUpdate': cls._get_value_from_text_output('This Update:', response_text),
'nextUpdate': cls._get_value_from_text_output('Next Update:', response_text),
}
]
if cls._get_scts_from_text_output(response_text):
# SCT extension present
response_dict['responses'][0]['singleExtensions'] = {
'ctCertificateScts': cls._get_scts_from_text_output(response_text)
}
return response_dict | [
"def",
"_parse_ocsp_response_from_openssl_text",
"(",
"cls",
",",
"response_text",
":",
"str",
",",
"response_status",
":",
"OcspResponseStatusEnum",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"response_dict",
"=",
"{",
"'responseStatus'",
":",
"cls",
... | Parse OpenSSL's text output and make a lot of assumptions. | [
"Parse",
"OpenSSL",
"s",
"text",
"output",
"and",
"make",
"a",
"lot",
"of",
"assumptions",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ocsp_response.py#L65-L103 | train | 27,868 |
nabla-c0d3/nassl | nassl/ssl_client.py | SslClient._init_base_objects | def _init_base_objects(self, ssl_version: OpenSslVersionEnum, underlying_socket: Optional[socket.socket]) -> None:
"""Setup the socket and SSL_CTX objects.
"""
self._is_handshake_completed = False
self._ssl_version = ssl_version
self._ssl_ctx = self._NASSL_MODULE.SSL_CTX(ssl_version.value)
# A Python socket handles transmission of the data
self._sock = underlying_socket | python | def _init_base_objects(self, ssl_version: OpenSslVersionEnum, underlying_socket: Optional[socket.socket]) -> None:
"""Setup the socket and SSL_CTX objects.
"""
self._is_handshake_completed = False
self._ssl_version = ssl_version
self._ssl_ctx = self._NASSL_MODULE.SSL_CTX(ssl_version.value)
# A Python socket handles transmission of the data
self._sock = underlying_socket | [
"def",
"_init_base_objects",
"(",
"self",
",",
"ssl_version",
":",
"OpenSslVersionEnum",
",",
"underlying_socket",
":",
"Optional",
"[",
"socket",
".",
"socket",
"]",
")",
"->",
"None",
":",
"self",
".",
"_is_handshake_completed",
"=",
"False",
"self",
".",
"_... | Setup the socket and SSL_CTX objects. | [
"Setup",
"the",
"socket",
"and",
"SSL_CTX",
"objects",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ssl_client.py#L104-L112 | train | 27,869 |
nabla-c0d3/nassl | nassl/ssl_client.py | SslClient._init_server_authentication | def _init_server_authentication(self, ssl_verify: OpenSslVerifyEnum, ssl_verify_locations: Optional[str]) -> None:
"""Setup the certificate validation logic for authenticating the server.
"""
self._ssl_ctx.set_verify(ssl_verify.value)
if ssl_verify_locations:
# Ensure the file exists
with open(ssl_verify_locations):
pass
self._ssl_ctx.load_verify_locations(ssl_verify_locations) | python | def _init_server_authentication(self, ssl_verify: OpenSslVerifyEnum, ssl_verify_locations: Optional[str]) -> None:
"""Setup the certificate validation logic for authenticating the server.
"""
self._ssl_ctx.set_verify(ssl_verify.value)
if ssl_verify_locations:
# Ensure the file exists
with open(ssl_verify_locations):
pass
self._ssl_ctx.load_verify_locations(ssl_verify_locations) | [
"def",
"_init_server_authentication",
"(",
"self",
",",
"ssl_verify",
":",
"OpenSslVerifyEnum",
",",
"ssl_verify_locations",
":",
"Optional",
"[",
"str",
"]",
")",
"->",
"None",
":",
"self",
".",
"_ssl_ctx",
".",
"set_verify",
"(",
"ssl_verify",
".",
"value",
... | Setup the certificate validation logic for authenticating the server. | [
"Setup",
"the",
"certificate",
"validation",
"logic",
"for",
"authenticating",
"the",
"server",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ssl_client.py#L114-L122 | train | 27,870 |
nabla-c0d3/nassl | nassl/ssl_client.py | SslClient._init_client_authentication | def _init_client_authentication(
self,
client_certchain_file: Optional[str],
client_key_file: Optional[str],
client_key_type: OpenSslFileTypeEnum,
client_key_password: str,
ignore_client_authentication_requests: bool
) -> None:
"""Setup client authentication using the supplied certificate and key.
"""
if client_certchain_file is not None and client_key_file is not None:
self._use_private_key(client_certchain_file, client_key_file, client_key_type, client_key_password)
if ignore_client_authentication_requests:
if client_certchain_file:
raise ValueError('Cannot enable both client_certchain_file and ignore_client_authentication_requests')
self._ssl_ctx.set_client_cert_cb_NULL() | python | def _init_client_authentication(
self,
client_certchain_file: Optional[str],
client_key_file: Optional[str],
client_key_type: OpenSslFileTypeEnum,
client_key_password: str,
ignore_client_authentication_requests: bool
) -> None:
"""Setup client authentication using the supplied certificate and key.
"""
if client_certchain_file is not None and client_key_file is not None:
self._use_private_key(client_certchain_file, client_key_file, client_key_type, client_key_password)
if ignore_client_authentication_requests:
if client_certchain_file:
raise ValueError('Cannot enable both client_certchain_file and ignore_client_authentication_requests')
self._ssl_ctx.set_client_cert_cb_NULL() | [
"def",
"_init_client_authentication",
"(",
"self",
",",
"client_certchain_file",
":",
"Optional",
"[",
"str",
"]",
",",
"client_key_file",
":",
"Optional",
"[",
"str",
"]",
",",
"client_key_type",
":",
"OpenSslFileTypeEnum",
",",
"client_key_password",
":",
"str",
... | Setup client authentication using the supplied certificate and key. | [
"Setup",
"client",
"authentication",
"using",
"the",
"supplied",
"certificate",
"and",
"key",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ssl_client.py#L124-L141 | train | 27,871 |
nabla-c0d3/nassl | nassl/ssl_client.py | SslClient.shutdown | def shutdown(self) -> None:
"""Close the TLS connection and the underlying network socket.
"""
self._is_handshake_completed = False
try:
self._flush_ssl_engine()
except IOError:
# Ensure shutting down the connection never raises an exception
pass
try:
self._ssl.shutdown()
except OpenSSLError as e:
# Ignore "uninitialized" exception
if 'SSL_shutdown:uninitialized' not in str(e) and 'shutdown while in init' not in str(e):
raise
if self._sock:
self._sock.close() | python | def shutdown(self) -> None:
"""Close the TLS connection and the underlying network socket.
"""
self._is_handshake_completed = False
try:
self._flush_ssl_engine()
except IOError:
# Ensure shutting down the connection never raises an exception
pass
try:
self._ssl.shutdown()
except OpenSSLError as e:
# Ignore "uninitialized" exception
if 'SSL_shutdown:uninitialized' not in str(e) and 'shutdown while in init' not in str(e):
raise
if self._sock:
self._sock.close() | [
"def",
"shutdown",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_is_handshake_completed",
"=",
"False",
"try",
":",
"self",
".",
"_flush_ssl_engine",
"(",
")",
"except",
"IOError",
":",
"# Ensure shutting down the connection never raises an exception",
"pass",
... | Close the TLS connection and the underlying network socket. | [
"Close",
"the",
"TLS",
"connection",
"and",
"the",
"underlying",
"network",
"socket",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ssl_client.py#L282-L299 | train | 27,872 |
nabla-c0d3/nassl | nassl/ssl_client.py | SslClient._use_private_key | def _use_private_key(
self,
client_certchain_file: str,
client_key_file: str,
client_key_type: OpenSslFileTypeEnum,
client_key_password: str
) -> None:
"""The certificate chain file must be in PEM format. Private method because it should be set via the
constructor.
"""
# Ensure the files exist
with open(client_certchain_file):
pass
with open(client_key_file):
pass
self._ssl_ctx.use_certificate_chain_file(client_certchain_file)
self._ssl_ctx.set_private_key_password(client_key_password)
try:
self._ssl_ctx.use_PrivateKey_file(client_key_file, client_key_type.value)
except OpenSSLError as e:
if 'bad password read' in str(e) or 'bad decrypt' in str(e):
raise ValueError('Invalid Private Key')
else:
raise
self._ssl_ctx.check_private_key() | python | def _use_private_key(
self,
client_certchain_file: str,
client_key_file: str,
client_key_type: OpenSslFileTypeEnum,
client_key_password: str
) -> None:
"""The certificate chain file must be in PEM format. Private method because it should be set via the
constructor.
"""
# Ensure the files exist
with open(client_certchain_file):
pass
with open(client_key_file):
pass
self._ssl_ctx.use_certificate_chain_file(client_certchain_file)
self._ssl_ctx.set_private_key_password(client_key_password)
try:
self._ssl_ctx.use_PrivateKey_file(client_key_file, client_key_type.value)
except OpenSSLError as e:
if 'bad password read' in str(e) or 'bad decrypt' in str(e):
raise ValueError('Invalid Private Key')
else:
raise
self._ssl_ctx.check_private_key() | [
"def",
"_use_private_key",
"(",
"self",
",",
"client_certchain_file",
":",
"str",
",",
"client_key_file",
":",
"str",
",",
"client_key_type",
":",
"OpenSslFileTypeEnum",
",",
"client_key_password",
":",
"str",
")",
"->",
"None",
":",
"# Ensure the files exist",
"wit... | The certificate chain file must be in PEM format. Private method because it should be set via the
constructor. | [
"The",
"certificate",
"chain",
"file",
"must",
"be",
"in",
"PEM",
"format",
".",
"Private",
"method",
"because",
"it",
"should",
"be",
"set",
"via",
"the",
"constructor",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ssl_client.py#L327-L353 | train | 27,873 |
nabla-c0d3/nassl | nassl/ssl_client.py | SslClient.get_tlsext_status_ocsp_resp | def get_tlsext_status_ocsp_resp(self) -> Optional[OcspResponse]:
"""Retrieve the server's OCSP Stapling status.
"""
ocsp_response = self._ssl.get_tlsext_status_ocsp_resp()
if ocsp_response:
return OcspResponse(ocsp_response)
else:
return None | python | def get_tlsext_status_ocsp_resp(self) -> Optional[OcspResponse]:
"""Retrieve the server's OCSP Stapling status.
"""
ocsp_response = self._ssl.get_tlsext_status_ocsp_resp()
if ocsp_response:
return OcspResponse(ocsp_response)
else:
return None | [
"def",
"get_tlsext_status_ocsp_resp",
"(",
"self",
")",
"->",
"Optional",
"[",
"OcspResponse",
"]",
":",
"ocsp_response",
"=",
"self",
".",
"_ssl",
".",
"get_tlsext_status_ocsp_resp",
"(",
")",
"if",
"ocsp_response",
":",
"return",
"OcspResponse",
"(",
"ocsp_respo... | Retrieve the server's OCSP Stapling status. | [
"Retrieve",
"the",
"server",
"s",
"OCSP",
"Stapling",
"status",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/ssl_client.py#L367-L374 | train | 27,874 |
nabla-c0d3/nassl | build_tasks.py | BuildConfig.fetch_source | def fetch_source(self) -> None:
"""Download the tar archive that contains the source code for the library.
"""
import requests # Do not import at the top that this file can be imported by setup.py
with TemporaryFile() as temp_file:
# Download the source archive
request = requests.get(self.src_tar_gz_url)
temp_file.write(request.content)
# Rewind the file
temp_file.seek(0)
# Extract the content of the archive
tar_file = tarfile.open(fileobj=temp_file)
tar_file.extractall(path=_DEPS_PATH) | python | def fetch_source(self) -> None:
"""Download the tar archive that contains the source code for the library.
"""
import requests # Do not import at the top that this file can be imported by setup.py
with TemporaryFile() as temp_file:
# Download the source archive
request = requests.get(self.src_tar_gz_url)
temp_file.write(request.content)
# Rewind the file
temp_file.seek(0)
# Extract the content of the archive
tar_file = tarfile.open(fileobj=temp_file)
tar_file.extractall(path=_DEPS_PATH) | [
"def",
"fetch_source",
"(",
"self",
")",
"->",
"None",
":",
"import",
"requests",
"# Do not import at the top that this file can be imported by setup.py",
"with",
"TemporaryFile",
"(",
")",
"as",
"temp_file",
":",
"# Download the source archive",
"request",
"=",
"requests",... | Download the tar archive that contains the source code for the library. | [
"Download",
"the",
"tar",
"archive",
"that",
"contains",
"the",
"source",
"code",
"for",
"the",
"library",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/build_tasks.py#L79-L91 | train | 27,875 |
nabla-c0d3/nassl | nassl/legacy_ssl_client.py | LegacySslClient.do_renegotiate | def do_renegotiate(self) -> None:
"""Initiate an SSL renegotiation.
"""
if not self._is_handshake_completed:
raise IOError('SSL Handshake was not completed; cannot renegotiate.')
self._ssl.renegotiate()
self.do_handshake() | python | def do_renegotiate(self) -> None:
"""Initiate an SSL renegotiation.
"""
if not self._is_handshake_completed:
raise IOError('SSL Handshake was not completed; cannot renegotiate.')
self._ssl.renegotiate()
self.do_handshake() | [
"def",
"do_renegotiate",
"(",
"self",
")",
"->",
"None",
":",
"if",
"not",
"self",
".",
"_is_handshake_completed",
":",
"raise",
"IOError",
"(",
"'SSL Handshake was not completed; cannot renegotiate.'",
")",
"self",
".",
"_ssl",
".",
"renegotiate",
"(",
")",
"self... | Initiate an SSL renegotiation. | [
"Initiate",
"an",
"SSL",
"renegotiation",
"."
] | 7dce9a2235f4324191865d58dcbeec5c3a2097a3 | https://github.com/nabla-c0d3/nassl/blob/7dce9a2235f4324191865d58dcbeec5c3a2097a3/nassl/legacy_ssl_client.py#L71-L78 | train | 27,876 |
jobovy/galpy | doc/source/examples/dierickx_eccentricities.py | _download_file_vizier | def _download_file_vizier(cat,filePath,catalogname='catalog.dat'):
'''
Stolen from Jo Bovy's gaia_tools package!
'''
sys.stdout.write('\r'+"Downloading file %s ...\r" \
% (os.path.basename(filePath)))
sys.stdout.flush()
try:
# make all intermediate directories
os.makedirs(os.path.dirname(filePath))
except OSError: pass
# Safe way of downloading
downloading= True
interrupted= False
file, tmp_savefilename= tempfile.mkstemp()
os.close(file) #Easier this way
ntries= 1
while downloading:
try:
ftp= FTP('cdsarc.u-strasbg.fr')
ftp.login('anonymous', 'test')
ftp.cwd(os.path.join('pub','cats',cat))
with open(tmp_savefilename,'wb') as savefile:
ftp.retrbinary('RETR %s' % catalogname,savefile.write)
shutil.move(tmp_savefilename,filePath)
downloading= False
if interrupted:
raise KeyboardInterrupt
except:
raise
if not downloading: #Assume KeyboardInterrupt
raise
elif ntries > _MAX_NTRIES:
raise IOError('File %s does not appear to exist on the server ...' % (os.path.basename(filePath)))
finally:
if os.path.exists(tmp_savefilename):
os.remove(tmp_savefilename)
ntries+= 1
sys.stdout.write('\r'+_ERASESTR+'\r')
sys.stdout.flush()
return None | python | def _download_file_vizier(cat,filePath,catalogname='catalog.dat'):
'''
Stolen from Jo Bovy's gaia_tools package!
'''
sys.stdout.write('\r'+"Downloading file %s ...\r" \
% (os.path.basename(filePath)))
sys.stdout.flush()
try:
# make all intermediate directories
os.makedirs(os.path.dirname(filePath))
except OSError: pass
# Safe way of downloading
downloading= True
interrupted= False
file, tmp_savefilename= tempfile.mkstemp()
os.close(file) #Easier this way
ntries= 1
while downloading:
try:
ftp= FTP('cdsarc.u-strasbg.fr')
ftp.login('anonymous', 'test')
ftp.cwd(os.path.join('pub','cats',cat))
with open(tmp_savefilename,'wb') as savefile:
ftp.retrbinary('RETR %s' % catalogname,savefile.write)
shutil.move(tmp_savefilename,filePath)
downloading= False
if interrupted:
raise KeyboardInterrupt
except:
raise
if not downloading: #Assume KeyboardInterrupt
raise
elif ntries > _MAX_NTRIES:
raise IOError('File %s does not appear to exist on the server ...' % (os.path.basename(filePath)))
finally:
if os.path.exists(tmp_savefilename):
os.remove(tmp_savefilename)
ntries+= 1
sys.stdout.write('\r'+_ERASESTR+'\r')
sys.stdout.flush()
return None | [
"def",
"_download_file_vizier",
"(",
"cat",
",",
"filePath",
",",
"catalogname",
"=",
"'catalog.dat'",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\r'",
"+",
"\"Downloading file %s ...\\r\"",
"%",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"file... | Stolen from Jo Bovy's gaia_tools package! | [
"Stolen",
"from",
"Jo",
"Bovy",
"s",
"gaia_tools",
"package!"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/doc/source/examples/dierickx_eccentricities.py#L133-L173 | train | 27,877 |
jobovy/galpy | doc/source/examples/dierickx_eccentricities.py | ensure_dir | def ensure_dir(f):
""" Ensure a a file exists and if not make the relevant path """
d = os.path.dirname(f)
if not os.path.exists(d):
os.makedirs(d) | python | def ensure_dir(f):
""" Ensure a a file exists and if not make the relevant path """
d = os.path.dirname(f)
if not os.path.exists(d):
os.makedirs(d) | [
"def",
"ensure_dir",
"(",
"f",
")",
":",
"d",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"f",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"d",
")",
":",
"os",
".",
"makedirs",
"(",
"d",
")"
] | Ensure a a file exists and if not make the relevant path | [
"Ensure",
"a",
"a",
"file",
"exists",
"and",
"if",
"not",
"make",
"the",
"relevant",
"path"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/doc/source/examples/dierickx_eccentricities.py#L175-L179 | train | 27,878 |
jobovy/galpy | galpy/actionAngle/actionAngleIsochroneApprox.py | dePeriod | def dePeriod(arr):
"""make an array of periodic angles increase linearly"""
diff= arr-nu.roll(arr,1,axis=1)
w= diff < -6.
addto= nu.cumsum(w.astype(int),axis=1)
return arr+_TWOPI*addto | python | def dePeriod(arr):
"""make an array of periodic angles increase linearly"""
diff= arr-nu.roll(arr,1,axis=1)
w= diff < -6.
addto= nu.cumsum(w.astype(int),axis=1)
return arr+_TWOPI*addto | [
"def",
"dePeriod",
"(",
"arr",
")",
":",
"diff",
"=",
"arr",
"-",
"nu",
".",
"roll",
"(",
"arr",
",",
"1",
",",
"axis",
"=",
"1",
")",
"w",
"=",
"diff",
"<",
"-",
"6.",
"addto",
"=",
"nu",
".",
"cumsum",
"(",
"w",
".",
"astype",
"(",
"int",... | make an array of periodic angles increase linearly | [
"make",
"an",
"array",
"of",
"periodic",
"angles",
"increase",
"linearly"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/actionAngle/actionAngleIsochroneApprox.py#L760-L765 | train | 27,879 |
jobovy/galpy | galpy/util/leung_dop853.py | hinit | def hinit(func, x, t, pos_neg, f0, iord, hmax, rtol, atol, args):
"""
Estimate initial step size
"""
sk = atol + rtol * np.fabs(x)
dnf = np.sum(np.square(f0 / sk), axis=0)
dny = np.sum(np.square(x / sk), axis=0)
h = np.sqrt(dny / dnf) * 0.01
h = np.min([h, np.fabs(hmax)])
h = custom_sign(h, pos_neg)
# perform an explicit Euler step
xx1 = x + h * f0
f1 = np.array(func(xx1, t[0] + h, *args))
# estimate the second derivative of the solution
der2 = np.sum(np.square((f1 - f0) / sk), axis=0)
der2 = np.sqrt(der2) / h
# step size is computed such that h ** iord * max_d(norm(f0), norm(der2)) = 0.01
der12 = np.max([np.fabs(der2), np.sqrt(dnf)])
h1 = np.power(0.01 / der12, 1.0 / iord)
h = np.min([100.0 * np.fabs(h), np.min([np.fabs(h1), np.fabs(hmax)])])
return custom_sign(h, pos_neg), f0, f1, xx1 | python | def hinit(func, x, t, pos_neg, f0, iord, hmax, rtol, atol, args):
"""
Estimate initial step size
"""
sk = atol + rtol * np.fabs(x)
dnf = np.sum(np.square(f0 / sk), axis=0)
dny = np.sum(np.square(x / sk), axis=0)
h = np.sqrt(dny / dnf) * 0.01
h = np.min([h, np.fabs(hmax)])
h = custom_sign(h, pos_neg)
# perform an explicit Euler step
xx1 = x + h * f0
f1 = np.array(func(xx1, t[0] + h, *args))
# estimate the second derivative of the solution
der2 = np.sum(np.square((f1 - f0) / sk), axis=0)
der2 = np.sqrt(der2) / h
# step size is computed such that h ** iord * max_d(norm(f0), norm(der2)) = 0.01
der12 = np.max([np.fabs(der2), np.sqrt(dnf)])
h1 = np.power(0.01 / der12, 1.0 / iord)
h = np.min([100.0 * np.fabs(h), np.min([np.fabs(h1), np.fabs(hmax)])])
return custom_sign(h, pos_neg), f0, f1, xx1 | [
"def",
"hinit",
"(",
"func",
",",
"x",
",",
"t",
",",
"pos_neg",
",",
"f0",
",",
"iord",
",",
"hmax",
",",
"rtol",
",",
"atol",
",",
"args",
")",
":",
"sk",
"=",
"atol",
"+",
"rtol",
"*",
"np",
".",
"fabs",
"(",
"x",
")",
"dnf",
"=",
"np",
... | Estimate initial step size | [
"Estimate",
"initial",
"step",
"size"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/util/leung_dop853.py#L211-L238 | train | 27,880 |
jobovy/galpy | galpy/util/leung_dop853.py | dense_output | def dense_output(t_current, t_old, h_current, rcont):
"""
Dense output function, basically extrapolatin
"""
# initialization
s = (t_current - t_old) / h_current
s1 = 1.0 - s
return rcont[0] + s * (rcont[1] + s1 * (
rcont[2] + s * (rcont[3] + s1 * (rcont[4] + s * (rcont[5] + s1 * (rcont[6] + s * rcont[7])))))) | python | def dense_output(t_current, t_old, h_current, rcont):
"""
Dense output function, basically extrapolatin
"""
# initialization
s = (t_current - t_old) / h_current
s1 = 1.0 - s
return rcont[0] + s * (rcont[1] + s1 * (
rcont[2] + s * (rcont[3] + s1 * (rcont[4] + s * (rcont[5] + s1 * (rcont[6] + s * rcont[7])))))) | [
"def",
"dense_output",
"(",
"t_current",
",",
"t_old",
",",
"h_current",
",",
"rcont",
")",
":",
"# initialization",
"s",
"=",
"(",
"t_current",
"-",
"t_old",
")",
"/",
"h_current",
"s1",
"=",
"1.0",
"-",
"s",
"return",
"rcont",
"[",
"0",
"]",
"+",
"... | Dense output function, basically extrapolatin | [
"Dense",
"output",
"function",
"basically",
"extrapolatin"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/util/leung_dop853.py#L241-L250 | train | 27,881 |
jobovy/galpy | galpy/potential/DiskSCFPotential.py | phiME_dens | def phiME_dens(R,z,phi,dens,Sigma,dSigmadR,d2SigmadR2,hz,Hz,dHzdz,Sigma_amp):
"""The density corresponding to phi_ME"""
r= numpy.sqrt(R**2.+z**2.)
out= dens(R,z,phi)
for a,s,ds,d2s,h,H,dH \
in zip(Sigma_amp,Sigma,dSigmadR,d2SigmadR2,hz,Hz,dHzdz):
out-= a*(s(r)*h(z)+d2s(r)*H(z)+2./r*ds(r)*(H(z)+z*dH(z)))
return out | python | def phiME_dens(R,z,phi,dens,Sigma,dSigmadR,d2SigmadR2,hz,Hz,dHzdz,Sigma_amp):
"""The density corresponding to phi_ME"""
r= numpy.sqrt(R**2.+z**2.)
out= dens(R,z,phi)
for a,s,ds,d2s,h,H,dH \
in zip(Sigma_amp,Sigma,dSigmadR,d2SigmadR2,hz,Hz,dHzdz):
out-= a*(s(r)*h(z)+d2s(r)*H(z)+2./r*ds(r)*(H(z)+z*dH(z)))
return out | [
"def",
"phiME_dens",
"(",
"R",
",",
"z",
",",
"phi",
",",
"dens",
",",
"Sigma",
",",
"dSigmadR",
",",
"d2SigmadR2",
",",
"hz",
",",
"Hz",
",",
"dHzdz",
",",
"Sigma_amp",
")",
":",
"r",
"=",
"numpy",
".",
"sqrt",
"(",
"R",
"**",
"2.",
"+",
"z",
... | The density corresponding to phi_ME | [
"The",
"density",
"corresponding",
"to",
"phi_ME"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/potential/DiskSCFPotential.py#L479-L486 | train | 27,882 |
jobovy/galpy | galpy/orbit/integratePlanarOrbit.py | _parse_integrator | def _parse_integrator(int_method):
"""parse the integrator method to pass to C"""
#Pick integrator
if int_method.lower() == 'rk4_c':
int_method_c= 1
elif int_method.lower() == 'rk6_c':
int_method_c= 2
elif int_method.lower() == 'symplec4_c':
int_method_c= 3
elif int_method.lower() == 'symplec6_c':
int_method_c= 4
elif int_method.lower() == 'dopr54_c':
int_method_c= 5
elif int_method.lower() == 'dop853_c':
int_method_c= 6
else:
int_method_c= 0
return int_method_c | python | def _parse_integrator(int_method):
"""parse the integrator method to pass to C"""
#Pick integrator
if int_method.lower() == 'rk4_c':
int_method_c= 1
elif int_method.lower() == 'rk6_c':
int_method_c= 2
elif int_method.lower() == 'symplec4_c':
int_method_c= 3
elif int_method.lower() == 'symplec6_c':
int_method_c= 4
elif int_method.lower() == 'dopr54_c':
int_method_c= 5
elif int_method.lower() == 'dop853_c':
int_method_c= 6
else:
int_method_c= 0
return int_method_c | [
"def",
"_parse_integrator",
"(",
"int_method",
")",
":",
"#Pick integrator",
"if",
"int_method",
".",
"lower",
"(",
")",
"==",
"'rk4_c'",
":",
"int_method_c",
"=",
"1",
"elif",
"int_method",
".",
"lower",
"(",
")",
"==",
"'rk6_c'",
":",
"int_method_c",
"=",
... | parse the integrator method to pass to C | [
"parse",
"the",
"integrator",
"method",
"to",
"pass",
"to",
"C"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/orbit/integratePlanarOrbit.py#L310-L327 | train | 27,883 |
jobovy/galpy | galpy/orbit/integratePlanarOrbit.py | _parse_tol | def _parse_tol(rtol,atol):
"""Parse the tolerance keywords"""
#Process atol and rtol
if rtol is None:
rtol= -12.*nu.log(10.)
else: #pragma: no cover
rtol= nu.log(rtol)
if atol is None:
atol= -12.*nu.log(10.)
else: #pragma: no cover
atol= nu.log(atol)
return (rtol,atol) | python | def _parse_tol(rtol,atol):
"""Parse the tolerance keywords"""
#Process atol and rtol
if rtol is None:
rtol= -12.*nu.log(10.)
else: #pragma: no cover
rtol= nu.log(rtol)
if atol is None:
atol= -12.*nu.log(10.)
else: #pragma: no cover
atol= nu.log(atol)
return (rtol,atol) | [
"def",
"_parse_tol",
"(",
"rtol",
",",
"atol",
")",
":",
"#Process atol and rtol",
"if",
"rtol",
"is",
"None",
":",
"rtol",
"=",
"-",
"12.",
"*",
"nu",
".",
"log",
"(",
"10.",
")",
"else",
":",
"#pragma: no cover",
"rtol",
"=",
"nu",
".",
"log",
"(",... | Parse the tolerance keywords | [
"Parse",
"the",
"tolerance",
"keywords"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/orbit/integratePlanarOrbit.py#L329-L340 | train | 27,884 |
jobovy/galpy | galpy/orbit/Orbit.py | _check_integrate_dt | def _check_integrate_dt(t,dt):
"""Check that the stepszie in t is an integer x dt"""
if dt is None:
return True
mult= round((t[1]-t[0])/dt)
if nu.fabs(mult*dt-t[1]+t[0]) < 10.**-10.:
return True
else:
return False | python | def _check_integrate_dt(t,dt):
"""Check that the stepszie in t is an integer x dt"""
if dt is None:
return True
mult= round((t[1]-t[0])/dt)
if nu.fabs(mult*dt-t[1]+t[0]) < 10.**-10.:
return True
else:
return False | [
"def",
"_check_integrate_dt",
"(",
"t",
",",
"dt",
")",
":",
"if",
"dt",
"is",
"None",
":",
"return",
"True",
"mult",
"=",
"round",
"(",
"(",
"t",
"[",
"1",
"]",
"-",
"t",
"[",
"0",
"]",
")",
"/",
"dt",
")",
"if",
"nu",
".",
"fabs",
"(",
"m... | Check that the stepszie in t is an integer x dt | [
"Check",
"that",
"the",
"stepszie",
"in",
"t",
"is",
"an",
"integer",
"x",
"dt"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/orbit/Orbit.py#L3941-L3949 | train | 27,885 |
jobovy/galpy | galpy/potential/EllipsoidalPotential.py | _forceInt | def _forceInt(x,y,z,dens,b2,c2,i,glx=None,glw=None):
"""Integral that gives the force in x,y,z"""
def integrand(s):
t= 1/s**2.-1.
return dens(numpy.sqrt(x**2./(1.+t)+y**2./(b2+t)+z**2./(c2+t)))\
*(x/(1.+t)*(i==0)+y/(b2+t)*(i==1)+z/(c2+t)*(i==2))\
/numpy.sqrt((1.+(b2-1.)*s**2.)*(1.+(c2-1.)*s**2.))
if glx is None:
return integrate.quad(integrand,0.,1.)[0]
else:
return numpy.sum(glw*integrand(glx)) | python | def _forceInt(x,y,z,dens,b2,c2,i,glx=None,glw=None):
"""Integral that gives the force in x,y,z"""
def integrand(s):
t= 1/s**2.-1.
return dens(numpy.sqrt(x**2./(1.+t)+y**2./(b2+t)+z**2./(c2+t)))\
*(x/(1.+t)*(i==0)+y/(b2+t)*(i==1)+z/(c2+t)*(i==2))\
/numpy.sqrt((1.+(b2-1.)*s**2.)*(1.+(c2-1.)*s**2.))
if glx is None:
return integrate.quad(integrand,0.,1.)[0]
else:
return numpy.sum(glw*integrand(glx)) | [
"def",
"_forceInt",
"(",
"x",
",",
"y",
",",
"z",
",",
"dens",
",",
"b2",
",",
"c2",
",",
"i",
",",
"glx",
"=",
"None",
",",
"glw",
"=",
"None",
")",
":",
"def",
"integrand",
"(",
"s",
")",
":",
"t",
"=",
"1",
"/",
"s",
"**",
"2.",
"-",
... | Integral that gives the force in x,y,z | [
"Integral",
"that",
"gives",
"the",
"force",
"in",
"x",
"y",
"z"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/potential/EllipsoidalPotential.py#L485-L495 | train | 27,886 |
jobovy/galpy | galpy/potential/EllipsoidalPotential.py | _2ndDerivInt | def _2ndDerivInt(x,y,z,dens,densDeriv,b2,c2,i,j,glx=None,glw=None):
"""Integral that gives the 2nd derivative of the potential in x,y,z"""
def integrand(s):
t= 1/s**2.-1.
m= numpy.sqrt(x**2./(1.+t)+y**2./(b2+t)+z**2./(c2+t))
return (densDeriv(m)
*(x/(1.+t)*(i==0)+y/(b2+t)*(i==1)+z/(c2+t)*(i==2))
*(x/(1.+t)*(j==0)+y/(b2+t)*(j==1)+z/(c2+t)*(j==2))/m\
+dens(m)*(i==j)*((1./(1.+t)*(i==0)+1./(b2+t)*(i==1)+1./(c2+t)*(i==2))))\
/numpy.sqrt((1.+(b2-1.)*s**2.)*(1.+(c2-1.)*s**2.))
if glx is None:
return integrate.quad(integrand,0.,1.)[0]
else:
return numpy.sum(glw*integrand(glx)) | python | def _2ndDerivInt(x,y,z,dens,densDeriv,b2,c2,i,j,glx=None,glw=None):
"""Integral that gives the 2nd derivative of the potential in x,y,z"""
def integrand(s):
t= 1/s**2.-1.
m= numpy.sqrt(x**2./(1.+t)+y**2./(b2+t)+z**2./(c2+t))
return (densDeriv(m)
*(x/(1.+t)*(i==0)+y/(b2+t)*(i==1)+z/(c2+t)*(i==2))
*(x/(1.+t)*(j==0)+y/(b2+t)*(j==1)+z/(c2+t)*(j==2))/m\
+dens(m)*(i==j)*((1./(1.+t)*(i==0)+1./(b2+t)*(i==1)+1./(c2+t)*(i==2))))\
/numpy.sqrt((1.+(b2-1.)*s**2.)*(1.+(c2-1.)*s**2.))
if glx is None:
return integrate.quad(integrand,0.,1.)[0]
else:
return numpy.sum(glw*integrand(glx)) | [
"def",
"_2ndDerivInt",
"(",
"x",
",",
"y",
",",
"z",
",",
"dens",
",",
"densDeriv",
",",
"b2",
",",
"c2",
",",
"i",
",",
"j",
",",
"glx",
"=",
"None",
",",
"glw",
"=",
"None",
")",
":",
"def",
"integrand",
"(",
"s",
")",
":",
"t",
"=",
"1",... | Integral that gives the 2nd derivative of the potential in x,y,z | [
"Integral",
"that",
"gives",
"the",
"2nd",
"derivative",
"of",
"the",
"potential",
"in",
"x",
"y",
"z"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/potential/EllipsoidalPotential.py#L497-L510 | train | 27,887 |
jobovy/galpy | galpy/potential/TwoPowerTriaxialPotential.py | TwoPowerTriaxialPotential._mdens | def _mdens(self,m):
"""Density as a function of m"""
return (self.a/m)**self.alpha/(1.+m/self.a)**(self.betaminusalpha) | python | def _mdens(self,m):
"""Density as a function of m"""
return (self.a/m)**self.alpha/(1.+m/self.a)**(self.betaminusalpha) | [
"def",
"_mdens",
"(",
"self",
",",
"m",
")",
":",
"return",
"(",
"self",
".",
"a",
"/",
"m",
")",
"**",
"self",
".",
"alpha",
"/",
"(",
"1.",
"+",
"m",
"/",
"self",
".",
"a",
")",
"**",
"(",
"self",
".",
"betaminusalpha",
")"
] | Density as a function of m | [
"Density",
"as",
"a",
"function",
"of",
"m"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/potential/TwoPowerTriaxialPotential.py#L128-L130 | train | 27,888 |
jobovy/galpy | galpy/orbit/FullOrbit.py | _fit_orbit | def _fit_orbit(orb,vxvv,vxvv_err,pot,radec=False,lb=False,
customsky=False,lb_to_customsky=None,
pmllpmbb_to_customsky=None,
tintJ=100,ntintJ=1000,integrate_method='dopr54_c',
ro=None,vo=None,obs=None,disp=False):
"""Fit an orbit to data in a given potential"""
# Need to turn this off for speed
coords._APY_COORDS_ORIG= coords._APY_COORDS
coords._APY_COORDS= False
#Import here, because otherwise there is an infinite loop of imports
from galpy.actionAngle import actionAngleIsochroneApprox, actionAngle
#Mock this up, bc we want to use its orbit-integration routines
class mockActionAngleIsochroneApprox(actionAngleIsochroneApprox):
def __init__(self,tintJ,ntintJ,pot,integrate_method='dopr54_c'):
actionAngle.__init__(self)
self._tintJ= tintJ
self._ntintJ=ntintJ
self._tsJ= nu.linspace(0.,self._tintJ,self._ntintJ)
self._integrate_dt= None
self._pot= pot
self._integrate_method= integrate_method
return None
tmockAA= mockActionAngleIsochroneApprox(tintJ,ntintJ,pot,
integrate_method=integrate_method)
opt_vxvv= optimize.fmin_powell(_fit_orbit_mlogl,orb.vxvv,
args=(vxvv,vxvv_err,pot,radec,lb,
customsky,lb_to_customsky,
pmllpmbb_to_customsky,
tmockAA,
ro,vo,obs),
disp=disp)
maxLogL= -_fit_orbit_mlogl(opt_vxvv,vxvv,vxvv_err,pot,radec,lb,
customsky,lb_to_customsky,pmllpmbb_to_customsky,
tmockAA,
ro,vo,obs)
coords._APY_COORDS= coords._APY_COORDS_ORIG
return (opt_vxvv,maxLogL) | python | def _fit_orbit(orb,vxvv,vxvv_err,pot,radec=False,lb=False,
customsky=False,lb_to_customsky=None,
pmllpmbb_to_customsky=None,
tintJ=100,ntintJ=1000,integrate_method='dopr54_c',
ro=None,vo=None,obs=None,disp=False):
"""Fit an orbit to data in a given potential"""
# Need to turn this off for speed
coords._APY_COORDS_ORIG= coords._APY_COORDS
coords._APY_COORDS= False
#Import here, because otherwise there is an infinite loop of imports
from galpy.actionAngle import actionAngleIsochroneApprox, actionAngle
#Mock this up, bc we want to use its orbit-integration routines
class mockActionAngleIsochroneApprox(actionAngleIsochroneApprox):
def __init__(self,tintJ,ntintJ,pot,integrate_method='dopr54_c'):
actionAngle.__init__(self)
self._tintJ= tintJ
self._ntintJ=ntintJ
self._tsJ= nu.linspace(0.,self._tintJ,self._ntintJ)
self._integrate_dt= None
self._pot= pot
self._integrate_method= integrate_method
return None
tmockAA= mockActionAngleIsochroneApprox(tintJ,ntintJ,pot,
integrate_method=integrate_method)
opt_vxvv= optimize.fmin_powell(_fit_orbit_mlogl,orb.vxvv,
args=(vxvv,vxvv_err,pot,radec,lb,
customsky,lb_to_customsky,
pmllpmbb_to_customsky,
tmockAA,
ro,vo,obs),
disp=disp)
maxLogL= -_fit_orbit_mlogl(opt_vxvv,vxvv,vxvv_err,pot,radec,lb,
customsky,lb_to_customsky,pmllpmbb_to_customsky,
tmockAA,
ro,vo,obs)
coords._APY_COORDS= coords._APY_COORDS_ORIG
return (opt_vxvv,maxLogL) | [
"def",
"_fit_orbit",
"(",
"orb",
",",
"vxvv",
",",
"vxvv_err",
",",
"pot",
",",
"radec",
"=",
"False",
",",
"lb",
"=",
"False",
",",
"customsky",
"=",
"False",
",",
"lb_to_customsky",
"=",
"None",
",",
"pmllpmbb_to_customsky",
"=",
"None",
",",
"tintJ",
... | Fit an orbit to data in a given potential | [
"Fit",
"an",
"orbit",
"to",
"data",
"in",
"a",
"given",
"potential"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/orbit/FullOrbit.py#L721-L757 | train | 27,889 |
jobovy/galpy | galpy/util/bovy_conversion.py | actionAngle_physical_input | def actionAngle_physical_input(method):
"""Decorator to convert inputs to actionAngle functions from physical
to internal coordinates"""
@wraps(method)
def wrapper(*args,**kwargs):
if len(args) < 3: # orbit input
return method(*args,**kwargs)
ro= kwargs.get('ro',None)
if ro is None and hasattr(args[0],'_ro'):
ro= args[0]._ro
if _APY_LOADED and isinstance(ro,units.Quantity):
ro= ro.to(units.kpc).value
vo= kwargs.get('vo',None)
if vo is None and hasattr(args[0],'_vo'):
vo= args[0]._vo
if _APY_LOADED and isinstance(vo,units.Quantity):
vo= vo.to(units.km/units.s).value
# Loop through args
newargs= ()
for ii in range(len(args)):
if _APY_LOADED and isinstance(args[ii],units.Quantity):
try:
targ= args[ii].to(units.kpc).value/ro
except units.UnitConversionError:
try:
targ= args[ii].to(units.km/units.s).value/vo
except units.UnitConversionError:
try:
targ= args[ii].to(units.rad).value
except units.UnitConversionError:
raise units.UnitConversionError("Input units not understood")
newargs= newargs+(targ,)
else:
newargs= newargs+(args[ii],)
args= newargs
return method(*args,**kwargs)
return wrapper | python | def actionAngle_physical_input(method):
"""Decorator to convert inputs to actionAngle functions from physical
to internal coordinates"""
@wraps(method)
def wrapper(*args,**kwargs):
if len(args) < 3: # orbit input
return method(*args,**kwargs)
ro= kwargs.get('ro',None)
if ro is None and hasattr(args[0],'_ro'):
ro= args[0]._ro
if _APY_LOADED and isinstance(ro,units.Quantity):
ro= ro.to(units.kpc).value
vo= kwargs.get('vo',None)
if vo is None and hasattr(args[0],'_vo'):
vo= args[0]._vo
if _APY_LOADED and isinstance(vo,units.Quantity):
vo= vo.to(units.km/units.s).value
# Loop through args
newargs= ()
for ii in range(len(args)):
if _APY_LOADED and isinstance(args[ii],units.Quantity):
try:
targ= args[ii].to(units.kpc).value/ro
except units.UnitConversionError:
try:
targ= args[ii].to(units.km/units.s).value/vo
except units.UnitConversionError:
try:
targ= args[ii].to(units.rad).value
except units.UnitConversionError:
raise units.UnitConversionError("Input units not understood")
newargs= newargs+(targ,)
else:
newargs= newargs+(args[ii],)
args= newargs
return method(*args,**kwargs)
return wrapper | [
"def",
"actionAngle_physical_input",
"(",
"method",
")",
":",
"@",
"wraps",
"(",
"method",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"len",
"(",
"args",
")",
"<",
"3",
":",
"# orbit input",
"return",
"method",
... | Decorator to convert inputs to actionAngle functions from physical
to internal coordinates | [
"Decorator",
"to",
"convert",
"inputs",
"to",
"actionAngle",
"functions",
"from",
"physical",
"to",
"internal",
"coordinates"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/util/bovy_conversion.py#L788-L824 | train | 27,890 |
jobovy/galpy | galpy/snapshot/directnbody.py | _direct_nbody_force | def _direct_nbody_force(q,m,t,pot,softening,softening_args):
"""Calculate the force"""
#First do the particles
#Calculate all the distances
nq= len(q)
dim= len(q[0])
dist_vec= nu.zeros((nq,nq,dim))
dist= nu.zeros((nq,nq))
for ii in range(nq):
for jj in range(ii+1,nq):
dist_vec[ii,jj,:]= q[jj]-q[ii]
dist_vec[jj,ii,:]= -dist_vec[ii,jj,:]
dist[ii,jj]= linalg.norm(dist_vec[ii,jj,:])
dist[jj,ii]= dist[ii,jj]
#Calculate all the forces
force= []
for ii in range(nq):
thisforce= nu.zeros(dim)
for jj in range(nq):
if ii == jj: continue
thisforce+= m[jj]*softening(dist[ii,jj],*softening_args)\
/dist[ii,jj]*dist_vec[ii,jj,:]
force.append(thisforce)
#Then add the external force
if pot is None: return force
for ii in range(nq):
force[ii]+= _external_force(q[ii],t,pot)
return force | python | def _direct_nbody_force(q,m,t,pot,softening,softening_args):
"""Calculate the force"""
#First do the particles
#Calculate all the distances
nq= len(q)
dim= len(q[0])
dist_vec= nu.zeros((nq,nq,dim))
dist= nu.zeros((nq,nq))
for ii in range(nq):
for jj in range(ii+1,nq):
dist_vec[ii,jj,:]= q[jj]-q[ii]
dist_vec[jj,ii,:]= -dist_vec[ii,jj,:]
dist[ii,jj]= linalg.norm(dist_vec[ii,jj,:])
dist[jj,ii]= dist[ii,jj]
#Calculate all the forces
force= []
for ii in range(nq):
thisforce= nu.zeros(dim)
for jj in range(nq):
if ii == jj: continue
thisforce+= m[jj]*softening(dist[ii,jj],*softening_args)\
/dist[ii,jj]*dist_vec[ii,jj,:]
force.append(thisforce)
#Then add the external force
if pot is None: return force
for ii in range(nq):
force[ii]+= _external_force(q[ii],t,pot)
return force | [
"def",
"_direct_nbody_force",
"(",
"q",
",",
"m",
",",
"t",
",",
"pot",
",",
"softening",
",",
"softening_args",
")",
":",
"#First do the particles",
"#Calculate all the distances",
"nq",
"=",
"len",
"(",
"q",
")",
"dim",
"=",
"len",
"(",
"q",
"[",
"0",
... | Calculate the force | [
"Calculate",
"the",
"force"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/snapshot/directnbody.py#L73-L100 | train | 27,891 |
jobovy/galpy | galpy/df/evolveddiskdf.py | _vmomentsurfaceIntegrand | def _vmomentsurfaceIntegrand(vR,vT,R,az,df,n,m,sigmaR1,sigmaT1,t,initvmoment):
"""Internal function that is the integrand for the velocity moment times
surface mass integration"""
o= Orbit([R,vR*sigmaR1,vT*sigmaT1,az])
return vR**n*vT**m*df(o,t)/initvmoment | python | def _vmomentsurfaceIntegrand(vR,vT,R,az,df,n,m,sigmaR1,sigmaT1,t,initvmoment):
"""Internal function that is the integrand for the velocity moment times
surface mass integration"""
o= Orbit([R,vR*sigmaR1,vT*sigmaT1,az])
return vR**n*vT**m*df(o,t)/initvmoment | [
"def",
"_vmomentsurfaceIntegrand",
"(",
"vR",
",",
"vT",
",",
"R",
",",
"az",
",",
"df",
",",
"n",
",",
"m",
",",
"sigmaR1",
",",
"sigmaT1",
",",
"t",
",",
"initvmoment",
")",
":",
"o",
"=",
"Orbit",
"(",
"[",
"R",
",",
"vR",
"*",
"sigmaR1",
",... | Internal function that is the integrand for the velocity moment times
surface mass integration | [
"Internal",
"function",
"that",
"is",
"the",
"integrand",
"for",
"the",
"velocity",
"moment",
"times",
"surface",
"mass",
"integration"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/evolveddiskdf.py#L2282-L2286 | train | 27,892 |
jobovy/galpy | galpy/df/evolveddiskdf.py | evolveddiskdf._vmomentsurfacemassGrid | def _vmomentsurfacemassGrid(self,n,m,grid):
"""Internal function to evaluate vmomentsurfacemass using a grid
rather than direct integration"""
if len(grid.df.shape) == 3: tlist= True
else: tlist= False
if tlist:
nt= grid.df.shape[2]
out= []
for ii in range(nt):
out.append(nu.dot(grid.vRgrid**n,nu.dot(grid.df[:,:,ii],grid.vTgrid**m))*\
(grid.vRgrid[1]-grid.vRgrid[0])*(grid.vTgrid[1]-grid.vTgrid[0]))
return nu.array(out)
else:
return nu.dot(grid.vRgrid**n,nu.dot(grid.df,grid.vTgrid**m))*\
(grid.vRgrid[1]-grid.vRgrid[0])*(grid.vTgrid[1]-grid.vTgrid[0]) | python | def _vmomentsurfacemassGrid(self,n,m,grid):
"""Internal function to evaluate vmomentsurfacemass using a grid
rather than direct integration"""
if len(grid.df.shape) == 3: tlist= True
else: tlist= False
if tlist:
nt= grid.df.shape[2]
out= []
for ii in range(nt):
out.append(nu.dot(grid.vRgrid**n,nu.dot(grid.df[:,:,ii],grid.vTgrid**m))*\
(grid.vRgrid[1]-grid.vRgrid[0])*(grid.vTgrid[1]-grid.vTgrid[0]))
return nu.array(out)
else:
return nu.dot(grid.vRgrid**n,nu.dot(grid.df,grid.vTgrid**m))*\
(grid.vRgrid[1]-grid.vRgrid[0])*(grid.vTgrid[1]-grid.vTgrid[0]) | [
"def",
"_vmomentsurfacemassGrid",
"(",
"self",
",",
"n",
",",
"m",
",",
"grid",
")",
":",
"if",
"len",
"(",
"grid",
".",
"df",
".",
"shape",
")",
"==",
"3",
":",
"tlist",
"=",
"True",
"else",
":",
"tlist",
"=",
"False",
"if",
"tlist",
":",
"nt",
... | Internal function to evaluate vmomentsurfacemass using a grid
rather than direct integration | [
"Internal",
"function",
"to",
"evaluate",
"vmomentsurfacemass",
"using",
"a",
"grid",
"rather",
"than",
"direct",
"integration"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/evolveddiskdf.py#L1826-L1840 | train | 27,893 |
jobovy/galpy | galpy/df/evolveddiskdf.py | evolveddiskdf._buildvgrid | def _buildvgrid(self,R,phi,nsigma,t,sigmaR1,sigmaT1,meanvR,meanvT,
gridpoints,print_progress,integrate_method,deriv):
"""Internal function to grid the vDF at a given location"""
out= evolveddiskdfGrid()
out.sigmaR1= sigmaR1
out.sigmaT1= sigmaT1
out.meanvR= meanvR
out.meanvT= meanvT
out.vRgrid= nu.linspace(meanvR-nsigma*sigmaR1,meanvR+nsigma*sigmaR1,
gridpoints)
out.vTgrid= nu.linspace(meanvT-nsigma*sigmaT1,meanvT+nsigma*sigmaT1,
gridpoints)
if isinstance(t,(list,nu.ndarray)):
nt= len(t)
out.df= nu.zeros((gridpoints,gridpoints,nt))
for ii in range(gridpoints):
for jj in range(gridpoints-1,-1,-1):#Reverse, so we get the peak before we get to the extreme lags NOT NECESSARY
if print_progress: #pragma: no cover
sys.stdout.write('\r'+"Velocity gridpoint %i out of %i" % \
(jj+ii*gridpoints+1,gridpoints*gridpoints))
sys.stdout.flush()
thiso= Orbit([R,out.vRgrid[ii],out.vTgrid[jj],phi])
out.df[ii,jj,:]= self(thiso,nu.array(t).flatten(),
integrate_method=integrate_method,
deriv=deriv,use_physical=False)
out.df[ii,jj,nu.isnan(out.df[ii,jj,:])]= 0. #BOVY: for now
if print_progress: sys.stdout.write('\n') #pragma: no cover
else:
out.df= nu.zeros((gridpoints,gridpoints))
for ii in range(gridpoints):
for jj in range(gridpoints):
if print_progress: #pragma: no cover
sys.stdout.write('\r'+"Velocity gridpoint %i out of %i" % \
(jj+ii*gridpoints+1,gridpoints*gridpoints))
sys.stdout.flush()
thiso= Orbit([R,out.vRgrid[ii],out.vTgrid[jj],phi])
out.df[ii,jj]= self(thiso,t,
integrate_method=integrate_method,
deriv=deriv,use_physical=False)
if nu.isnan(out.df[ii,jj]): out.df[ii,jj]= 0. #BOVY: for now
if print_progress: sys.stdout.write('\n') #pragma: no cover
return out | python | def _buildvgrid(self,R,phi,nsigma,t,sigmaR1,sigmaT1,meanvR,meanvT,
gridpoints,print_progress,integrate_method,deriv):
"""Internal function to grid the vDF at a given location"""
out= evolveddiskdfGrid()
out.sigmaR1= sigmaR1
out.sigmaT1= sigmaT1
out.meanvR= meanvR
out.meanvT= meanvT
out.vRgrid= nu.linspace(meanvR-nsigma*sigmaR1,meanvR+nsigma*sigmaR1,
gridpoints)
out.vTgrid= nu.linspace(meanvT-nsigma*sigmaT1,meanvT+nsigma*sigmaT1,
gridpoints)
if isinstance(t,(list,nu.ndarray)):
nt= len(t)
out.df= nu.zeros((gridpoints,gridpoints,nt))
for ii in range(gridpoints):
for jj in range(gridpoints-1,-1,-1):#Reverse, so we get the peak before we get to the extreme lags NOT NECESSARY
if print_progress: #pragma: no cover
sys.stdout.write('\r'+"Velocity gridpoint %i out of %i" % \
(jj+ii*gridpoints+1,gridpoints*gridpoints))
sys.stdout.flush()
thiso= Orbit([R,out.vRgrid[ii],out.vTgrid[jj],phi])
out.df[ii,jj,:]= self(thiso,nu.array(t).flatten(),
integrate_method=integrate_method,
deriv=deriv,use_physical=False)
out.df[ii,jj,nu.isnan(out.df[ii,jj,:])]= 0. #BOVY: for now
if print_progress: sys.stdout.write('\n') #pragma: no cover
else:
out.df= nu.zeros((gridpoints,gridpoints))
for ii in range(gridpoints):
for jj in range(gridpoints):
if print_progress: #pragma: no cover
sys.stdout.write('\r'+"Velocity gridpoint %i out of %i" % \
(jj+ii*gridpoints+1,gridpoints*gridpoints))
sys.stdout.flush()
thiso= Orbit([R,out.vRgrid[ii],out.vTgrid[jj],phi])
out.df[ii,jj]= self(thiso,t,
integrate_method=integrate_method,
deriv=deriv,use_physical=False)
if nu.isnan(out.df[ii,jj]): out.df[ii,jj]= 0. #BOVY: for now
if print_progress: sys.stdout.write('\n') #pragma: no cover
return out | [
"def",
"_buildvgrid",
"(",
"self",
",",
"R",
",",
"phi",
",",
"nsigma",
",",
"t",
",",
"sigmaR1",
",",
"sigmaT1",
",",
"meanvR",
",",
"meanvT",
",",
"gridpoints",
",",
"print_progress",
",",
"integrate_method",
",",
"deriv",
")",
":",
"out",
"=",
"evol... | Internal function to grid the vDF at a given location | [
"Internal",
"function",
"to",
"grid",
"the",
"vDF",
"at",
"a",
"given",
"location"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/evolveddiskdf.py#L1842-L1883 | train | 27,894 |
jobovy/galpy | galpy/df/streamdf.py | _determine_stream_spread_single | def _determine_stream_spread_single(sigomatrixEig,
thetasTrack,
sigOmega,
sigAngle,
allinvjacsTrack):
"""sigAngle input may either be a function that returns the dispersion in
perpendicular angle as a function of parallel angle, or a value"""
#Estimate the spread in all frequencies and angles
sigObig2= sigOmega(thetasTrack)**2.
tsigOdiag= copy.copy(sigomatrixEig[0])
tsigOdiag[numpy.argmax(tsigOdiag)]= sigObig2
tsigO= numpy.dot(sigomatrixEig[1],
numpy.dot(numpy.diag(tsigOdiag),
numpy.linalg.inv(sigomatrixEig[1])))
#angles
if hasattr(sigAngle,'__call__'):
sigangle2= sigAngle(thetasTrack)**2.
else:
sigangle2= sigAngle**2.
tsigadiag= numpy.ones(3)*sigangle2
tsigadiag[numpy.argmax(tsigOdiag)]= 1.
tsiga= numpy.dot(sigomatrixEig[1],
numpy.dot(numpy.diag(tsigadiag),
numpy.linalg.inv(sigomatrixEig[1])))
#correlations, assume half correlated for now (can be calculated)
correlations= numpy.diag(0.5*numpy.ones(3))*numpy.sqrt(tsigOdiag*tsigadiag)
correlations[numpy.argmax(tsigOdiag),numpy.argmax(tsigOdiag)]= 0.
correlations= numpy.dot(sigomatrixEig[1],
numpy.dot(correlations,
numpy.linalg.inv(sigomatrixEig[1])))
#Now convert
fullMatrix= numpy.empty((6,6))
fullMatrix[:3,:3]= tsigO
fullMatrix[3:,3:]= tsiga
fullMatrix[3:,:3]= correlations
fullMatrix[:3,3:]= correlations.T
return numpy.dot(allinvjacsTrack,numpy.dot(fullMatrix,allinvjacsTrack.T)) | python | def _determine_stream_spread_single(sigomatrixEig,
thetasTrack,
sigOmega,
sigAngle,
allinvjacsTrack):
"""sigAngle input may either be a function that returns the dispersion in
perpendicular angle as a function of parallel angle, or a value"""
#Estimate the spread in all frequencies and angles
sigObig2= sigOmega(thetasTrack)**2.
tsigOdiag= copy.copy(sigomatrixEig[0])
tsigOdiag[numpy.argmax(tsigOdiag)]= sigObig2
tsigO= numpy.dot(sigomatrixEig[1],
numpy.dot(numpy.diag(tsigOdiag),
numpy.linalg.inv(sigomatrixEig[1])))
#angles
if hasattr(sigAngle,'__call__'):
sigangle2= sigAngle(thetasTrack)**2.
else:
sigangle2= sigAngle**2.
tsigadiag= numpy.ones(3)*sigangle2
tsigadiag[numpy.argmax(tsigOdiag)]= 1.
tsiga= numpy.dot(sigomatrixEig[1],
numpy.dot(numpy.diag(tsigadiag),
numpy.linalg.inv(sigomatrixEig[1])))
#correlations, assume half correlated for now (can be calculated)
correlations= numpy.diag(0.5*numpy.ones(3))*numpy.sqrt(tsigOdiag*tsigadiag)
correlations[numpy.argmax(tsigOdiag),numpy.argmax(tsigOdiag)]= 0.
correlations= numpy.dot(sigomatrixEig[1],
numpy.dot(correlations,
numpy.linalg.inv(sigomatrixEig[1])))
#Now convert
fullMatrix= numpy.empty((6,6))
fullMatrix[:3,:3]= tsigO
fullMatrix[3:,3:]= tsiga
fullMatrix[3:,:3]= correlations
fullMatrix[:3,3:]= correlations.T
return numpy.dot(allinvjacsTrack,numpy.dot(fullMatrix,allinvjacsTrack.T)) | [
"def",
"_determine_stream_spread_single",
"(",
"sigomatrixEig",
",",
"thetasTrack",
",",
"sigOmega",
",",
"sigAngle",
",",
"allinvjacsTrack",
")",
":",
"#Estimate the spread in all frequencies and angles",
"sigObig2",
"=",
"sigOmega",
"(",
"thetasTrack",
")",
"**",
"2.",
... | sigAngle input may either be a function that returns the dispersion in
perpendicular angle as a function of parallel angle, or a value | [
"sigAngle",
"input",
"may",
"either",
"be",
"a",
"function",
"that",
"returns",
"the",
"dispersion",
"in",
"perpendicular",
"angle",
"as",
"a",
"function",
"of",
"parallel",
"angle",
"or",
"a",
"value"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/streamdf.py#L3242-L3278 | train | 27,895 |
jobovy/galpy | galpy/df/streamdf.py | streamdf._progenitor_setup | def _progenitor_setup(self,progenitor,leading,useTMHessian):
"""The part of the setup relating to the progenitor's orbit"""
#Progenitor orbit: Calculate actions, frequencies, and angles for the progenitor
self._progenitor= progenitor() #call to get new Orbit
# Make sure we do not use physical coordinates
self._progenitor.turn_physical_off()
acfs= self._aA.actionsFreqsAngles(self._progenitor,
_firstFlip=(not leading),
use_physical=False)
self._progenitor_jr= acfs[0][0]
self._progenitor_lz= acfs[1][0]
self._progenitor_jz= acfs[2][0]
self._progenitor_Omegar= acfs[3]
self._progenitor_Omegaphi= acfs[4]
self._progenitor_Omegaz= acfs[5]
self._progenitor_Omega= numpy.array([acfs[3],acfs[4],acfs[5]]).reshape(3)
self._progenitor_angler= acfs[6]
self._progenitor_anglephi= acfs[7]
self._progenitor_anglez= acfs[8]
self._progenitor_angle= numpy.array([acfs[6],acfs[7],acfs[8]]).reshape(3)
#Calculate dO/dJ Jacobian at the progenitor
if useTMHessian:
h, fr,fp,fz,e= self._aAT.hessianFreqs(self._progenitor_jr,
self._progenitor_lz,
self._progenitor_jz)
self._dOdJp= h
# Replace frequencies with TM frequencies
self._progenitor_Omegar= fr
self._progenitor_Omegaphi= fp
self._progenitor_Omegaz= fz
self._progenitor_Omega= numpy.array([self._progenitor_Omegar,
self._progenitor_Omegaphi,
self._progenitor_Omegaz]).reshape(3)
else:
self._dOdJp= calcaAJac(self._progenitor._orb.vxvv,
self._aA,dxv=None,dOdJ=True,
_initacfs=acfs)
self._dOdJpInv= numpy.linalg.inv(self._dOdJp)
self._dOdJpEig= numpy.linalg.eig(self._dOdJp)
return None | python | def _progenitor_setup(self,progenitor,leading,useTMHessian):
"""The part of the setup relating to the progenitor's orbit"""
#Progenitor orbit: Calculate actions, frequencies, and angles for the progenitor
self._progenitor= progenitor() #call to get new Orbit
# Make sure we do not use physical coordinates
self._progenitor.turn_physical_off()
acfs= self._aA.actionsFreqsAngles(self._progenitor,
_firstFlip=(not leading),
use_physical=False)
self._progenitor_jr= acfs[0][0]
self._progenitor_lz= acfs[1][0]
self._progenitor_jz= acfs[2][0]
self._progenitor_Omegar= acfs[3]
self._progenitor_Omegaphi= acfs[4]
self._progenitor_Omegaz= acfs[5]
self._progenitor_Omega= numpy.array([acfs[3],acfs[4],acfs[5]]).reshape(3)
self._progenitor_angler= acfs[6]
self._progenitor_anglephi= acfs[7]
self._progenitor_anglez= acfs[8]
self._progenitor_angle= numpy.array([acfs[6],acfs[7],acfs[8]]).reshape(3)
#Calculate dO/dJ Jacobian at the progenitor
if useTMHessian:
h, fr,fp,fz,e= self._aAT.hessianFreqs(self._progenitor_jr,
self._progenitor_lz,
self._progenitor_jz)
self._dOdJp= h
# Replace frequencies with TM frequencies
self._progenitor_Omegar= fr
self._progenitor_Omegaphi= fp
self._progenitor_Omegaz= fz
self._progenitor_Omega= numpy.array([self._progenitor_Omegar,
self._progenitor_Omegaphi,
self._progenitor_Omegaz]).reshape(3)
else:
self._dOdJp= calcaAJac(self._progenitor._orb.vxvv,
self._aA,dxv=None,dOdJ=True,
_initacfs=acfs)
self._dOdJpInv= numpy.linalg.inv(self._dOdJp)
self._dOdJpEig= numpy.linalg.eig(self._dOdJp)
return None | [
"def",
"_progenitor_setup",
"(",
"self",
",",
"progenitor",
",",
"leading",
",",
"useTMHessian",
")",
":",
"#Progenitor orbit: Calculate actions, frequencies, and angles for the progenitor",
"self",
".",
"_progenitor",
"=",
"progenitor",
"(",
")",
"#call to get new Orbit",
... | The part of the setup relating to the progenitor's orbit | [
"The",
"part",
"of",
"the",
"setup",
"relating",
"to",
"the",
"progenitor",
"s",
"orbit"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/streamdf.py#L218-L257 | train | 27,896 |
jobovy/galpy | galpy/df/streamdf.py | streamdf._setup_progIsTrack | def _setup_progIsTrack(self):
"""If progIsTrack, the progenitor orbit that was passed to the
streamdf initialization is the track at zero angle separation;
this routine computes an actual progenitor position that gives
the desired track given the parameters of the streamdf"""
# We need to flip the sign of the offset, to go to the progenitor
self._sigMeanSign*= -1.
# Use _determine_stream_track_single to calculate the track-progenitor
# offset at zero angle separation
prog_stream_offset=\
_determine_stream_track_single(self._aA,
self._progenitor,
0., #time = 0
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
lambda x: self.meanOmega(x,use_physical=False),
0.) #angle = 0
# Setup the new progenitor orbit
progenitor= Orbit(prog_stream_offset[3])
# Flip the offset sign again
self._sigMeanSign*= -1.
# Now re-do the previous setup
self._progenitor_setup(progenitor,self._leading,False)
self._offset_setup(self._sigangle,self._leading,
self._deltaAngleTrack)
return None | python | def _setup_progIsTrack(self):
"""If progIsTrack, the progenitor orbit that was passed to the
streamdf initialization is the track at zero angle separation;
this routine computes an actual progenitor position that gives
the desired track given the parameters of the streamdf"""
# We need to flip the sign of the offset, to go to the progenitor
self._sigMeanSign*= -1.
# Use _determine_stream_track_single to calculate the track-progenitor
# offset at zero angle separation
prog_stream_offset=\
_determine_stream_track_single(self._aA,
self._progenitor,
0., #time = 0
self._progenitor_angle,
self._sigMeanSign,
self._dsigomeanProgDirection,
lambda x: self.meanOmega(x,use_physical=False),
0.) #angle = 0
# Setup the new progenitor orbit
progenitor= Orbit(prog_stream_offset[3])
# Flip the offset sign again
self._sigMeanSign*= -1.
# Now re-do the previous setup
self._progenitor_setup(progenitor,self._leading,False)
self._offset_setup(self._sigangle,self._leading,
self._deltaAngleTrack)
return None | [
"def",
"_setup_progIsTrack",
"(",
"self",
")",
":",
"# We need to flip the sign of the offset, to go to the progenitor",
"self",
".",
"_sigMeanSign",
"*=",
"-",
"1.",
"# Use _determine_stream_track_single to calculate the track-progenitor",
"# offset at zero angle separation",
"prog_st... | If progIsTrack, the progenitor orbit that was passed to the
streamdf initialization is the track at zero angle separation;
this routine computes an actual progenitor position that gives
the desired track given the parameters of the streamdf | [
"If",
"progIsTrack",
"the",
"progenitor",
"orbit",
"that",
"was",
"passed",
"to",
"the",
"streamdf",
"initialization",
"is",
"the",
"track",
"at",
"zero",
"angle",
"separation",
";",
"this",
"routine",
"computes",
"an",
"actual",
"progenitor",
"position",
"that"... | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/streamdf.py#L341-L367 | train | 27,897 |
jobovy/galpy | galpy/df/streamdf.py | streamdf._determine_nTrackIterations | def _determine_nTrackIterations(self,nTrackIterations):
"""Determine a good value for nTrackIterations based on the misalignment between stream and orbit; just based on some rough experience for now"""
if not nTrackIterations is None:
self.nTrackIterations= nTrackIterations
return None
if numpy.fabs(self.misalignment(quantity=False)) < 1./180.*numpy.pi:
self.nTrackIterations= 0
elif numpy.fabs(self.misalignment(quantity=False)) >= 1./180.*numpy.pi \
and numpy.fabs(self.misalignment(quantity=False)) < 3./180.*numpy.pi:
self.nTrackIterations= 1
elif numpy.fabs(self.misalignment(quantity=False)) >= 3./180.*numpy.pi:
self.nTrackIterations= 2
return None | python | def _determine_nTrackIterations(self,nTrackIterations):
"""Determine a good value for nTrackIterations based on the misalignment between stream and orbit; just based on some rough experience for now"""
if not nTrackIterations is None:
self.nTrackIterations= nTrackIterations
return None
if numpy.fabs(self.misalignment(quantity=False)) < 1./180.*numpy.pi:
self.nTrackIterations= 0
elif numpy.fabs(self.misalignment(quantity=False)) >= 1./180.*numpy.pi \
and numpy.fabs(self.misalignment(quantity=False)) < 3./180.*numpy.pi:
self.nTrackIterations= 1
elif numpy.fabs(self.misalignment(quantity=False)) >= 3./180.*numpy.pi:
self.nTrackIterations= 2
return None | [
"def",
"_determine_nTrackIterations",
"(",
"self",
",",
"nTrackIterations",
")",
":",
"if",
"not",
"nTrackIterations",
"is",
"None",
":",
"self",
".",
"nTrackIterations",
"=",
"nTrackIterations",
"return",
"None",
"if",
"numpy",
".",
"fabs",
"(",
"self",
".",
... | Determine a good value for nTrackIterations based on the misalignment between stream and orbit; just based on some rough experience for now | [
"Determine",
"a",
"good",
"value",
"for",
"nTrackIterations",
"based",
"on",
"the",
"misalignment",
"between",
"stream",
"and",
"orbit",
";",
"just",
"based",
"on",
"some",
"rough",
"experience",
"for",
"now"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/streamdf.py#L912-L924 | train | 27,898 |
jobovy/galpy | galpy/df/streamdf.py | streamdf._interpolate_stream_track_aA | def _interpolate_stream_track_aA(self):
"""Build interpolations of the stream track in action-angle coordinates"""
if hasattr(self,'_interpolatedObsTrackAA'):
return None #Already did this
#Calculate 1D meanOmega on a fine grid in angle and interpolate
if not hasattr(self,'_interpolatedThetasTrack'):
self._interpolate_stream_track()
dmOs= numpy.array([self.meanOmega(da,oned=True,use_physical=False)
for da in self._interpolatedThetasTrack])
self._interpTrackAAdmeanOmegaOneD=\
interpolate.InterpolatedUnivariateSpline(\
self._interpolatedThetasTrack,dmOs,k=3)
#Build the interpolated AA
self._interpolatedObsTrackAA=\
numpy.empty((len(self._interpolatedThetasTrack),6))
for ii in range(len(self._interpolatedThetasTrack)):
self._interpolatedObsTrackAA[ii,:3]=\
self._progenitor_Omega+dmOs[ii]*self._dsigomeanProgDirection\
*self._sigMeanSign
self._interpolatedObsTrackAA[ii,3:]=\
self._progenitor_angle+self._interpolatedThetasTrack[ii]\
*self._dsigomeanProgDirection*self._sigMeanSign
self._interpolatedObsTrackAA[ii,3:]=\
numpy.mod(self._interpolatedObsTrackAA[ii,3:],2.*numpy.pi)
return None | python | def _interpolate_stream_track_aA(self):
"""Build interpolations of the stream track in action-angle coordinates"""
if hasattr(self,'_interpolatedObsTrackAA'):
return None #Already did this
#Calculate 1D meanOmega on a fine grid in angle and interpolate
if not hasattr(self,'_interpolatedThetasTrack'):
self._interpolate_stream_track()
dmOs= numpy.array([self.meanOmega(da,oned=True,use_physical=False)
for da in self._interpolatedThetasTrack])
self._interpTrackAAdmeanOmegaOneD=\
interpolate.InterpolatedUnivariateSpline(\
self._interpolatedThetasTrack,dmOs,k=3)
#Build the interpolated AA
self._interpolatedObsTrackAA=\
numpy.empty((len(self._interpolatedThetasTrack),6))
for ii in range(len(self._interpolatedThetasTrack)):
self._interpolatedObsTrackAA[ii,:3]=\
self._progenitor_Omega+dmOs[ii]*self._dsigomeanProgDirection\
*self._sigMeanSign
self._interpolatedObsTrackAA[ii,3:]=\
self._progenitor_angle+self._interpolatedThetasTrack[ii]\
*self._dsigomeanProgDirection*self._sigMeanSign
self._interpolatedObsTrackAA[ii,3:]=\
numpy.mod(self._interpolatedObsTrackAA[ii,3:],2.*numpy.pi)
return None | [
"def",
"_interpolate_stream_track_aA",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'_interpolatedObsTrackAA'",
")",
":",
"return",
"None",
"#Already did this",
"#Calculate 1D meanOmega on a fine grid in angle and interpolate",
"if",
"not",
"hasattr",
"(",
"... | Build interpolations of the stream track in action-angle coordinates | [
"Build",
"interpolations",
"of",
"the",
"stream",
"track",
"in",
"action",
"-",
"angle",
"coordinates"
] | 9c5b9fe65d58835624dffe432be282060918ee08 | https://github.com/jobovy/galpy/blob/9c5b9fe65d58835624dffe432be282060918ee08/galpy/df/streamdf.py#L1428-L1452 | train | 27,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.