repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.or_where
|
python
|
def or_where(self, key, operator, value):
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
|
Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L190-L203
|
[
"def __store_query(self, query_items):\n \"\"\"Make where clause\n\n :@param query_items\n :@type query_items: dict\n \"\"\"\n temp_index = self._current_query_index\n if len(self._queries) - 1 < temp_index:\n self._queries.append([])\n\n self._queries[temp_index].append(query_items)\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.nth
|
python
|
def nth(self, index):
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
|
Getting the nth element of the collection
:@param index
:@type index: int
:@return object
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L321-L330
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n",
"def count(self):\n \"\"\"Getting the size of the collection\n\n :@return int\n \"\"\"\n self.__prepare()\n return len(self._json_data)\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.sum
|
python
|
def sum(self, property):
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
|
Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L332-L345
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.max
|
python
|
def max(self, property):
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
|
Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L347-L360
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.avg
|
python
|
def avg(self, property):
self.__prepare()
return self.sum(property) / self.count()
|
Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L377-L386
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n",
"def count(self):\n \"\"\"Getting the size of the collection\n\n :@return int\n \"\"\"\n self.__prepare()\n return len(self._json_data)\n",
"def sum(self, property):\n \"\"\"Getting the sum according to the given property\n\n :@param property\n :@type property: string\n\n :@return int/float\n \"\"\"\n self.__prepare()\n total = 0\n for i in self._json_data:\n total += i.get(property)\n\n return total\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.chunk
|
python
|
def chunk(self, size=0):
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
|
Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L388-L409
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.group_by
|
python
|
def group_by(self, property):
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
|
Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L411-L427
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.sort
|
python
|
def sort(self, order="asc"):
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
|
Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L429-L444
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort_by(self, property, order="asc"):
"""Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/query.py
|
JsonQ.sort_by
|
python
|
def sort_by(self, property, order="asc"):
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property)
)
else:
self._json_data = sorted(
self._json_data,
key=lambda x: x.get(property),
reverse=True
)
return self
|
Getting the sorted result by the given property
:@param property, order: "asc"
:@type property, order: string
:@return self
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/query.py#L446-L468
|
[
"def __prepare(self):\n \"\"\"Prepare query result\"\"\"\n\n if len(self._queries) > 0:\n self.__execute_queries()\n self.__reset_queries()\n"
] |
class JsonQ(object):
"""Query over Json file"""
def __init__(self, file_path="", data={}):
"""
:@param file_path: Set main json file path
:@type file_path: string
"""
if file_path != "":
self.from_file(file_path)
if data:
self.__parse_json_data(data)
self.__reset_queries()
self._matcher = Matcher()
def __reset_queries(self):
"""Reset previous query data"""
self._queries = []
self._current_query_index = 0
def __parse_json_data(self, data):
"""Process Json data
:@param data
:@type data: json/dict
:throws TypeError
"""
if isinstance(data, dict) or isinstance(data, list):
self._raw_data = data
self._json_data = copy.deepcopy(self._raw_data)
else:
raise TypeError("Provided Data is not json")
def __parse_json_file(self, file_path):
"""Process Json file data
:@param file_path
:@type file_path: string
:@throws IOError
"""
if file_path == '' or os.path.splitext(file_path)[1] != '.json':
raise IOError('Invalid Json file')
with open(file_path) as json_file:
self._raw_data = json.load(json_file)
self._json_data = copy.deepcopy(self._raw_data)
def __get_value_from_data(self, key, data):
"""Find value from json data
:@pram key
:@type: string
:@pram data
:@type data: dict
:@return object
:@throws KeyError
"""
if key.isdigit():
return data[int(key)]
if key not in data:
raise KeyError("Key not exists")
return data.get(key)
def get(self):
"""Getting prepared data
:@return object
"""
self.__prepare()
return self._json_data
def from_file(self, file_path):
"""Set main json file path
:@param file_path
:@type file_path: string
:@throws FileNotFoundError
"""
self.__parse_json_file(file_path)
return self
def at(self, root):
"""Set root where PyJsonq start to prepare
:@param root
:@type root: string
:@return self
:@throws KeyError
"""
leafs = root.strip(" ").split('.')
for leaf in leafs:
if leaf:
self._json_data = self.__get_value_from_data(leaf, self._json_data)
return self
def clone(self):
"""Clone the exact same copy of the current object instance."""
return copy.deepcopy(self._json_data)
def reset(self, data={}):
"""JsonQuery object cen be reset to new data
according to given data or previously given raw Json data
:@param data: {}
:@type data: json/dict
:@return self
"""
if data and (isinstance(data, dict) or isinstance(data, list)):
self._json_data = data
else:
self._json_data = copy.deepcopy(self._raw_data)
self.__reset_queries()
return self
def __store_query(self, query_items):
"""Make where clause
:@param query_items
:@type query_items: dict
"""
temp_index = self._current_query_index
if len(self._queries) - 1 < temp_index:
self._queries.append([])
self._queries[temp_index].append(query_items)
def __prepare(self):
"""Prepare query result"""
if len(self._queries) > 0:
self.__execute_queries()
self.__reset_queries()
def __execute_queries(self):
"""Execute all condition and filter result data"""
def func(item):
or_check = False
for queries in self._queries:
and_check = True
for query in queries:
and_check &= self._matcher._match(
item.get(query.get('key'), None),
query.get('operator'),
query.get('value')
)
or_check |= and_check
return or_check
self._json_data = list(filter(lambda item: func(item), self._json_data))
# ---------- Query Methods ------------- #
def where(self, key, operator, value):
"""Make where clause
:@param key
:@param operator
:@param value
:@type key,operator,value: string
:@return self
"""
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def or_where(self, key, operator, value):
"""Make or_where clause
:@param key
:@param operator
:@param value
:@type key, operator, value: string
:@return self
"""
if len(self._queries) > 0:
self._current_query_index += 1
self.__store_query({"key": key, "operator": operator, "value": value})
return self
def where_in(self, key, value):
"""Make where_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'in', value)
return self
def where_not_in(self, key, value):
"""Make where_not_in clause
:@param key
:@param value
:@type key, value: string
:@return self
"""
self.where(key, 'notin', value)
return self
def where_null(self, key):
"""Make where_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '=', 'None')
return self
def where_not_null(self, key):
"""Make where_not_null clause
:@param key
:@type key: string
:@return self
"""
self.where(key, '!=', 'None')
return self
def where_start_with(self, key, value):
"""Make where_start_with clause
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'startswith', value)
return self
def where_end_with(self, key, value):
"""Make where_ends_with clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'endswith', value)
return self
def where_contains(self, key, value):
"""Make where_contains clause.
:@param key
:@param value
:@type key,value: string
:@return self
"""
self.where(key, 'contains', value)
return self
# ---------- Aggregate Methods ------------- #
def count(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def size(self):
"""Getting the size of the collection
:@return int
"""
self.__prepare()
return len(self._json_data)
def first(self):
"""Getting the first element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[0] if self.count() > 0 else None
def last(self):
"""Getting the last element of the collection otherwise None
:@return object
"""
self.__prepare()
return self._json_data[-1] if self.count() > 0 else None
def nth(self, index):
"""Getting the nth element of the collection
:@param index
:@type index: int
:@return object
"""
self.__prepare()
return None if self.count() < math.fabs(index) else self._json_data[index]
def sum(self, property):
"""Getting the sum according to the given property
:@param property
:@type property: string
:@return int/float
"""
self.__prepare()
total = 0
for i in self._json_data:
total += i.get(property)
return total
def max(self, property):
"""Getting the maximum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return max(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def min(self, property):
"""Getting the minimum value from the prepared data
:@param property
:@type property: string
:@return object
:@throws KeyError
"""
self.__prepare()
try:
return min(self._json_data, key=lambda x: x[property]).get(property)
except KeyError:
raise KeyError("Key is not exists")
def avg(self, property):
"""Getting average according to given property
:@param property
:@type property: string
:@return average: int/float
"""
self.__prepare()
return self.sum(property) / self.count()
def chunk(self, size=0):
"""Group the resulted collection to multiple chunk
:@param size: 0
:@type size: integer
:@return Chunked List
"""
if size == 0:
raise ValueError('Invalid chunk size')
self.__prepare()
_new_content = []
while(len(self._json_data) > 0):
_new_content.append(self._json_data[0:size])
self._json_data = self._json_data[size:]
self._json_data = _new_content
return self._json_data
def group_by(self, property):
"""Getting the grouped result by the given property
:@param property
:@type property: string
:@return self
"""
self.__prepare()
group_data = {}
for data in self._json_data:
if data[property] not in group_data:
group_data[data[property]] = []
group_data[data[property]].append(data)
self._json_data = group_data
return self
def sort(self, order="asc"):
"""Getting the sorted result of the given list
:@param order: "asc"
:@type order: string
:@return self
"""
self.__prepare()
if isinstance(self._json_data, list):
if order == "asc":
self._json_data = sorted(self._json_data)
else:
self._json_data = sorted(self._json_data, reverse=True)
return self
|
s1s1ty/py-jsonq
|
pyjsonq/matcher.py
|
Matcher._match
|
python
|
def _match(self, x, op, y):
if (op not in self.condition_mapper):
raise ValueError('Invalid where condition given')
func = getattr(self, self.condition_mapper.get(op))
return func(x, y)
|
Compare the given `x` and `y` based on `op`
:@param x, y, op
:@type x, y: mixed
:@type op: string
:@return bool
:@throws ValueError
|
train
|
https://github.com/s1s1ty/py-jsonq/blob/9625597a2578bddcbed4e540174d5253b1fc3b75/pyjsonq/matcher.py#L162-L176
| null |
class Matcher(object):
"""docstring for Helper."""
def __init__(self):
self.condition_mapper = {
'=': '_is_equal',
'eq': '_is_equal',
'!=': '_is_not_equal',
'neq': '_is_not_equal',
'>': '_is_greater',
'gt': '_is_greater',
'<': '_is_smaller',
'lt': '_is_smaller',
'>=': '_is_greater_equal',
'gte': '_is_greater_equal',
'<=': '_is_smaller_equal',
'lte': '_is_smaller_equal',
'in': '_is_in',
'notin': '_is_not_in',
'null': '_is_null',
'notnull': '_is_not_null',
'startswith': '_is_starts_with',
'endswith': '_is_ends_with',
'contains': '_is_contain'
}
def _is_equal(self, x, y):
"""Checks the given values are equal
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x == y
def _is_not_equal(self, x, y):
"""Checks the given values are not equal
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x != y
def _is_greater(self, x, y):
"""Checks the given value `x` is greater than the given value `y`
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x > y
def _is_smaller(self, x, y):
"""Checks the given value `x` is less than the given value `y`
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x < y
def _is_greater_equal(self, x, y):
"""Checks the given value `x` is greater than or equal the given value `y`
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x >= y
def _is_smaller_equal(self, x, y):
"""Checks the given value `x` is less than or equal the given value `y`
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x <= y
def _is_in(self, key, arr):
"""Checks the given `key` is exists in the given `list`
:@param key, arr
:@type key: mixed
:type arr: list
:@return bool
"""
return isinstance(arr, list) and (key in arr)
def _is_not_in(self, key, arr):
"""Checks the given `key` is not exists in the given `arr`
:@param x, y
:@type x, y: mixed
:@return bool
"""
return isinstance(arr, list) and (key not in arr)
def _is_null(self, x, y=None):
"""Checks the given value `x` is None
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x is None
def _is_not_null(self, x, y=None):
"""Checks the given value `x` is not None
:@param x, y
:@type x, y: mixed
:@return bool
"""
return x is not None
def _is_starts_with(self, data, val):
"""Checks the given string `data` starts with the given string `val`
:@param data
:@param val
:@type data: string
:@type val: string
:@return bool
"""
return data.startswith(val)
def _is_ends_with(self, data, val):
"""Checks the given string `data` ends with the given string `val`
:@param data
:@param val
:@type data: string
:@type val: string
:@return bool
"""
return data.endswith(val)
def _is_contain(self, str, val):
"""Checks the given `val` is exists in the given `string`
:@param str, val
:@type: string/list
:@type val: string
:@return bool
"""
return val in str
|
jgorset/fandjango
|
fandjango/decorators.py
|
facebook_authorization_required
|
python
|
def facebook_authorization_required(redirect_uri=FACEBOOK_AUTHORIZATION_REDIRECT_URL, permissions=None):
def decorator(function):
@wraps(function)
def wrapper(request, *args, **kwargs):
# We know the user has been authenticated via a canvas page if a signed request is set.
canvas = request.facebook is not False and hasattr(request.facebook, "signed_request")
# The user has already authorized the application, but the given view requires
# permissions besides the defaults listed in ``FACEBOOK_APPLICATION_DEFAULT_PERMISSIONS``.
#
# Derive a list of outstanding permissions and prompt the user to grant them.
if request.facebook and request.facebook.user and permissions:
outstanding_permissions = [p for p in permissions if p not in request.facebook.user.permissions]
if outstanding_permissions:
return authorize_application(
request = request,
redirect_uri = redirect_uri or get_post_authorization_redirect_url(request, canvas=canvas),
permissions = outstanding_permissions
)
# The user has not authorized the application yet.
#
# Concatenate the default permissions with permissions required for this particular view.
if not request.facebook or not request.facebook.user:
return authorize_application(
request = request,
redirect_uri = redirect_uri or get_post_authorization_redirect_url(request, canvas=canvas),
permissions = (FACEBOOK_APPLICATION_INITIAL_PERMISSIONS or []) + (permissions or [])
)
return function(request, *args, **kwargs)
return wrapper
if callable(redirect_uri):
function = redirect_uri
redirect_uri = None
return decorator(function)
else:
return decorator
|
Require the user to authorize the application.
:param redirect_uri: A string describing an URL to redirect to after authorization is complete.
If ``None``, redirects to the current URL in the Facebook canvas
(e.g. ``http://apps.facebook.com/myapp/current/path``). Defaults to
``FACEBOOK_AUTHORIZATION_REDIRECT_URL`` (which, in turn, defaults to ``None``).
:param permissions: A list of strings describing Facebook permissions.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/decorators.py#L14-L65
|
[
"def decorator(function):\n @wraps(function)\n def wrapper(request, *args, **kwargs):\n\n # We know the user has been authenticated via a canvas page if a signed request is set.\n canvas = request.facebook is not False and hasattr(request.facebook, \"signed_request\")\n\n # The user has already authorized the application, but the given view requires\n # permissions besides the defaults listed in ``FACEBOOK_APPLICATION_DEFAULT_PERMISSIONS``.\n #\n # Derive a list of outstanding permissions and prompt the user to grant them.\n if request.facebook and request.facebook.user and permissions:\n outstanding_permissions = [p for p in permissions if p not in request.facebook.user.permissions]\n\n if outstanding_permissions:\n return authorize_application(\n request = request,\n redirect_uri = redirect_uri or get_post_authorization_redirect_url(request, canvas=canvas),\n permissions = outstanding_permissions\n )\n\n # The user has not authorized the application yet.\n #\n # Concatenate the default permissions with permissions required for this particular view.\n if not request.facebook or not request.facebook.user:\n return authorize_application(\n request = request,\n redirect_uri = redirect_uri or get_post_authorization_redirect_url(request, canvas=canvas),\n permissions = (FACEBOOK_APPLICATION_INITIAL_PERMISSIONS or []) + (permissions or [])\n )\n\n return function(request, *args, **kwargs)\n return wrapper\n"
] |
from functools import wraps
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.handlers.wsgi import WSGIRequest
from fandjango.utils import get_post_authorization_redirect_url
from fandjango.views import authorize_application
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import FACEBOOK_APPLICATION_INITIAL_PERMISSIONS
from fandjango.settings import FACEBOOK_AUTHORIZATION_REDIRECT_URL
|
jgorset/fandjango
|
fandjango/models.py
|
User.full_name
|
python
|
def full_name(self):
if self.first_name and self.middle_name and self.last_name:
return "%s %s %s" % (self.first_name, self.middle_name, self.last_name)
if self.first_name and self.last_name:
return "%s %s" % (self.first_name, self.last_name)
|
Return the user's first name.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/models.py#L86-L91
| null |
class User(models.Model):
"""
Instances of the User class represent Facebook users who
have authorized the application.
"""
facebook_id = models.BigIntegerField(_('facebook id'), unique=True)
"""An integer describing the user's Facebook ID."""
facebook_username = models.CharField(_('facebook username'), max_length=255, blank=True, null=True)
"""A string describing the user's Facebook username."""
first_name = models.CharField(_('first name'), max_length=255, blank=True, null=True)
"""A string describing the user's first name."""
middle_name = models.CharField(_('middle name'), max_length=255, blank=True, null=True)
"""A string describing the user's middle name."""
last_name = models.CharField(_('last name'), max_length=255, blank=True, null=True)
"""A string describing the user's last name."""
birthday = models.DateField(_('birthday'), blank=True, null=True)
"""A ``datetime`` object describing the user's birthday."""
email = models.CharField(_('email'), max_length=255, blank=True, null=True)
"""A string describing the user's email."""
locale = models.CharField(_('locale'), max_length=255, blank=True, null=True)
"""A string describing the user's locale."""
gender = models.CharField(_('gender'), max_length=255, blank=True, null=True)
"""A string describing the user's gender."""
authorized = models.BooleanField(_('authorized'), default=True)
"""A boolean describing whether the user has currently authorized the application."""
oauth_token = models.OneToOneField('OAuthToken', verbose_name=_('OAuth token'))
"""An ``OAuthToken`` object."""
created_at = models.DateTimeField(_('created at'), auto_now_add=True)
"""A ``datetime`` object describing when the user was registered."""
last_seen_at = models.DateTimeField(_('last seen at'), auto_now_add=True)
"""A ``datetime`` object describing when the user was last seen."""
extra_data = jsonfield.JSONField()
"""A ``JSONField`` object containig all additional facebookdata."""
@property
@property
@cached(days=30)
def picture(self):
"""
A string describing the URL to the user's profile picture.
"""
return requests.get('http://graph.facebook.com/%s/picture' % self.facebook_id).url
@property
def permissions(self):
"""
A list of strings describing `permissions`_ the user has granted your application.
.. _permissions: http://developers.facebook.com/docs/reference/api/permissions/
"""
records = self.graph.get('me/permissions')['data']
permissions = []
for record in records:
if record['status'] == 'granted':
permissions.append(record['permission'])
return permissions
@property
def graph(self):
"""
A ``Facepy.GraphAPI`` instance initialized with the user's access token (See `Facepy`_).
.. _Facepy: http://github.com/jgorset/facepy
"""
return GraphAPI(self.oauth_token.token)
def synchronize(self, graph_data=None):
"""
Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data
"""
profile = graph_data or self.graph.get('me')
self.facebook_username = profile.get('username')
self.first_name = profile.get('first_name')
self.middle_name = profile.get('middle_name')
self.last_name = profile.get('last_name')
self.birthday = datetime.strptime(profile['birthday'], '%m/%d/%Y') if profile.has_key('birthday') else None
self.email = profile.get('email')
self.locale = profile.get('locale')
self.gender = profile.get('gender')
self.extra_data = profile
self.save()
def __unicode__(self):
if self.full_name:
return u'%s' % self.full_name
elif self.facebook_username:
return u'%s' % self.facebook_username
else:
return u'%s' % self.facebook_id
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
|
jgorset/fandjango
|
fandjango/models.py
|
User.permissions
|
python
|
def permissions(self):
records = self.graph.get('me/permissions')['data']
permissions = []
for record in records:
if record['status'] == 'granted':
permissions.append(record['permission'])
return permissions
|
A list of strings describing `permissions`_ the user has granted your application.
.. _permissions: http://developers.facebook.com/docs/reference/api/permissions/
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/models.py#L102-L115
| null |
class User(models.Model):
"""
Instances of the User class represent Facebook users who
have authorized the application.
"""
facebook_id = models.BigIntegerField(_('facebook id'), unique=True)
"""An integer describing the user's Facebook ID."""
facebook_username = models.CharField(_('facebook username'), max_length=255, blank=True, null=True)
"""A string describing the user's Facebook username."""
first_name = models.CharField(_('first name'), max_length=255, blank=True, null=True)
"""A string describing the user's first name."""
middle_name = models.CharField(_('middle name'), max_length=255, blank=True, null=True)
"""A string describing the user's middle name."""
last_name = models.CharField(_('last name'), max_length=255, blank=True, null=True)
"""A string describing the user's last name."""
birthday = models.DateField(_('birthday'), blank=True, null=True)
"""A ``datetime`` object describing the user's birthday."""
email = models.CharField(_('email'), max_length=255, blank=True, null=True)
"""A string describing the user's email."""
locale = models.CharField(_('locale'), max_length=255, blank=True, null=True)
"""A string describing the user's locale."""
gender = models.CharField(_('gender'), max_length=255, blank=True, null=True)
"""A string describing the user's gender."""
authorized = models.BooleanField(_('authorized'), default=True)
"""A boolean describing whether the user has currently authorized the application."""
oauth_token = models.OneToOneField('OAuthToken', verbose_name=_('OAuth token'))
"""An ``OAuthToken`` object."""
created_at = models.DateTimeField(_('created at'), auto_now_add=True)
"""A ``datetime`` object describing when the user was registered."""
last_seen_at = models.DateTimeField(_('last seen at'), auto_now_add=True)
"""A ``datetime`` object describing when the user was last seen."""
extra_data = jsonfield.JSONField()
"""A ``JSONField`` object containig all additional facebookdata."""
@property
def full_name(self):
"""Return the user's first name."""
if self.first_name and self.middle_name and self.last_name:
return "%s %s %s" % (self.first_name, self.middle_name, self.last_name)
if self.first_name and self.last_name:
return "%s %s" % (self.first_name, self.last_name)
@property
@cached(days=30)
def picture(self):
"""
A string describing the URL to the user's profile picture.
"""
return requests.get('http://graph.facebook.com/%s/picture' % self.facebook_id).url
@property
@property
def graph(self):
"""
A ``Facepy.GraphAPI`` instance initialized with the user's access token (See `Facepy`_).
.. _Facepy: http://github.com/jgorset/facepy
"""
return GraphAPI(self.oauth_token.token)
def synchronize(self, graph_data=None):
"""
Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data
"""
profile = graph_data or self.graph.get('me')
self.facebook_username = profile.get('username')
self.first_name = profile.get('first_name')
self.middle_name = profile.get('middle_name')
self.last_name = profile.get('last_name')
self.birthday = datetime.strptime(profile['birthday'], '%m/%d/%Y') if profile.has_key('birthday') else None
self.email = profile.get('email')
self.locale = profile.get('locale')
self.gender = profile.get('gender')
self.extra_data = profile
self.save()
def __unicode__(self):
if self.full_name:
return u'%s' % self.full_name
elif self.facebook_username:
return u'%s' % self.facebook_username
else:
return u'%s' % self.facebook_id
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
|
jgorset/fandjango
|
fandjango/models.py
|
User.synchronize
|
python
|
def synchronize(self, graph_data=None):
profile = graph_data or self.graph.get('me')
self.facebook_username = profile.get('username')
self.first_name = profile.get('first_name')
self.middle_name = profile.get('middle_name')
self.last_name = profile.get('last_name')
self.birthday = datetime.strptime(profile['birthday'], '%m/%d/%Y') if profile.has_key('birthday') else None
self.email = profile.get('email')
self.locale = profile.get('locale')
self.gender = profile.get('gender')
self.extra_data = profile
self.save()
|
Synchronize ``facebook_username``, ``first_name``, ``middle_name``,
``last_name`` and ``birthday`` with Facebook.
:param graph_data: Optional pre-fetched graph data
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/models.py#L126-L144
| null |
class User(models.Model):
"""
Instances of the User class represent Facebook users who
have authorized the application.
"""
facebook_id = models.BigIntegerField(_('facebook id'), unique=True)
"""An integer describing the user's Facebook ID."""
facebook_username = models.CharField(_('facebook username'), max_length=255, blank=True, null=True)
"""A string describing the user's Facebook username."""
first_name = models.CharField(_('first name'), max_length=255, blank=True, null=True)
"""A string describing the user's first name."""
middle_name = models.CharField(_('middle name'), max_length=255, blank=True, null=True)
"""A string describing the user's middle name."""
last_name = models.CharField(_('last name'), max_length=255, blank=True, null=True)
"""A string describing the user's last name."""
birthday = models.DateField(_('birthday'), blank=True, null=True)
"""A ``datetime`` object describing the user's birthday."""
email = models.CharField(_('email'), max_length=255, blank=True, null=True)
"""A string describing the user's email."""
locale = models.CharField(_('locale'), max_length=255, blank=True, null=True)
"""A string describing the user's locale."""
gender = models.CharField(_('gender'), max_length=255, blank=True, null=True)
"""A string describing the user's gender."""
authorized = models.BooleanField(_('authorized'), default=True)
"""A boolean describing whether the user has currently authorized the application."""
oauth_token = models.OneToOneField('OAuthToken', verbose_name=_('OAuth token'))
"""An ``OAuthToken`` object."""
created_at = models.DateTimeField(_('created at'), auto_now_add=True)
"""A ``datetime`` object describing when the user was registered."""
last_seen_at = models.DateTimeField(_('last seen at'), auto_now_add=True)
"""A ``datetime`` object describing when the user was last seen."""
extra_data = jsonfield.JSONField()
"""A ``JSONField`` object containig all additional facebookdata."""
@property
def full_name(self):
"""Return the user's first name."""
if self.first_name and self.middle_name and self.last_name:
return "%s %s %s" % (self.first_name, self.middle_name, self.last_name)
if self.first_name and self.last_name:
return "%s %s" % (self.first_name, self.last_name)
@property
@cached(days=30)
def picture(self):
"""
A string describing the URL to the user's profile picture.
"""
return requests.get('http://graph.facebook.com/%s/picture' % self.facebook_id).url
@property
def permissions(self):
"""
A list of strings describing `permissions`_ the user has granted your application.
.. _permissions: http://developers.facebook.com/docs/reference/api/permissions/
"""
records = self.graph.get('me/permissions')['data']
permissions = []
for record in records:
if record['status'] == 'granted':
permissions.append(record['permission'])
return permissions
@property
def graph(self):
"""
A ``Facepy.GraphAPI`` instance initialized with the user's access token (See `Facepy`_).
.. _Facepy: http://github.com/jgorset/facepy
"""
return GraphAPI(self.oauth_token.token)
def __unicode__(self):
if self.full_name:
return u'%s' % self.full_name
elif self.facebook_username:
return u'%s' % self.facebook_username
else:
return u'%s' % self.facebook_id
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
|
jgorset/fandjango
|
fandjango/models.py
|
OAuthToken.extended
|
python
|
def extended(self):
if self.expires_at:
return self.expires_at - self.issued_at > timedelta(days=30)
else:
return False
|
Determine whether the OAuth token has been extended.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/models.py#L179-L184
| null |
class OAuthToken(models.Model):
"""
Instances of the OAuthToken class are credentials used to query
the Facebook API on behalf of a user.
"""
token = models.TextField(_('token'))
"""A string describing the OAuth token itself."""
issued_at = models.DateTimeField(_('issued at'))
"""A ``datetime`` object describing when the token was issued."""
expires_at = models.DateTimeField(_('expires at'), null=True, blank=True)
"""A ``datetime`` object describing when the token expires (or ``None`` if it doesn't)"""
@property
def expired(self):
"""Determine whether the OAuth token has expired."""
return self.expires_at < now() if self.expires_at else False
@property
def extend(self):
"""Extend the OAuth token."""
graph = GraphAPI()
response = graph.get('oauth/access_token',
client_id = FACEBOOK_APPLICATION_ID,
client_secret = FACEBOOK_APPLICATION_SECRET_KEY,
grant_type = 'fb_exchange_token',
fb_exchange_token = self.token
)
components = parse_qs(response)
self.token = components['access_token'][0]
self.expires_at = now() + timedelta(seconds = int(components['expires'][0]))
self.save()
class Meta:
verbose_name = _('OAuth token')
verbose_name_plural = _('OAuth tokens')
|
jgorset/fandjango
|
fandjango/models.py
|
OAuthToken.extend
|
python
|
def extend(self):
graph = GraphAPI()
response = graph.get('oauth/access_token',
client_id = FACEBOOK_APPLICATION_ID,
client_secret = FACEBOOK_APPLICATION_SECRET_KEY,
grant_type = 'fb_exchange_token',
fb_exchange_token = self.token
)
components = parse_qs(response)
self.token = components['access_token'][0]
self.expires_at = now() + timedelta(seconds = int(components['expires'][0]))
self.save()
|
Extend the OAuth token.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/models.py#L186-L202
|
[
"def now():\n return datetime.now()\n"
] |
class OAuthToken(models.Model):
"""
Instances of the OAuthToken class are credentials used to query
the Facebook API on behalf of a user.
"""
token = models.TextField(_('token'))
"""A string describing the OAuth token itself."""
issued_at = models.DateTimeField(_('issued at'))
"""A ``datetime`` object describing when the token was issued."""
expires_at = models.DateTimeField(_('expires at'), null=True, blank=True)
"""A ``datetime`` object describing when the token expires (or ``None`` if it doesn't)"""
@property
def expired(self):
"""Determine whether the OAuth token has expired."""
return self.expires_at < now() if self.expires_at else False
@property
def extended(self):
"""Determine whether the OAuth token has been extended."""
if self.expires_at:
return self.expires_at - self.issued_at > timedelta(days=30)
else:
return False
class Meta:
verbose_name = _('OAuth token')
verbose_name_plural = _('OAuth tokens')
|
jgorset/fandjango
|
fandjango/middleware.py
|
FacebookMiddleware.process_request
|
python
|
def process_request(self, request):
# User has already been authed by alternate middleware
if hasattr(request, "facebook") and request.facebook:
return
request.facebook = False
if not self.is_valid_path(request):
return
if self.is_access_denied(request):
return authorization_denied_view(request)
# No signed request found in either GET, POST nor COOKIES...
if 'signed_request' not in request.REQUEST and 'signed_request' not in request.COOKIES:
return
# If the request method is POST and its body only contains the signed request,
# chances are it's a request from the Facebook platform and we'll override
# the request method to HTTP GET to rectify their misinterpretation
# of the HTTP standard.
#
# References:
# "POST for Canvas" migration at http://developers.facebook.com/docs/canvas/post/
# "Incorrect use of the HTTP protocol" discussion at http://forum.developers.facebook.net/viewtopic.php?id=93554
if request.method == 'POST' and 'signed_request' in request.POST:
request.POST = QueryDict('')
request.method = 'GET'
request.facebook = Facebook()
try:
request.facebook.signed_request = SignedRequest(
signed_request = request.REQUEST.get('signed_request') or request.COOKIES.get('signed_request'),
application_secret_key = FACEBOOK_APPLICATION_SECRET_KEY
)
except SignedRequest.Error:
request.facebook = False
# Valid signed request and user has authorized the application
if request.facebook \
and request.facebook.signed_request.user.has_authorized_application \
and not request.facebook.signed_request.user.oauth_token.has_expired:
# Initialize a User object and its corresponding OAuth token
try:
user = User.objects.get(facebook_id=request.facebook.signed_request.user.id)
except User.DoesNotExist:
oauth_token = OAuthToken.objects.create(
token = request.facebook.signed_request.user.oauth_token.token,
issued_at = request.facebook.signed_request.user.oauth_token.issued_at.replace(tzinfo=tzlocal()),
expires_at = request.facebook.signed_request.user.oauth_token.expires_at.replace(tzinfo=tzlocal())
)
user = User.objects.create(
facebook_id = request.facebook.signed_request.user.id,
oauth_token = oauth_token
)
user.synchronize()
# Update the user's details and OAuth token
else:
user.last_seen_at = now()
if 'signed_request' in request.REQUEST:
user.authorized = True
if request.facebook.signed_request.user.oauth_token:
user.oauth_token.token = request.facebook.signed_request.user.oauth_token.token
user.oauth_token.issued_at = request.facebook.signed_request.user.oauth_token.issued_at.replace(tzinfo=tzlocal())
user.oauth_token.expires_at = request.facebook.signed_request.user.oauth_token.expires_at.replace(tzinfo=tzlocal())
user.oauth_token.save()
user.save()
if not user.oauth_token.extended:
# Attempt to extend the OAuth token, but ignore exceptions raised by
# bug #102727766518358 in the Facebook Platform.
#
# http://developers.facebook.com/bugs/102727766518358/
try:
user.oauth_token.extend()
except:
pass
request.facebook.user = user
|
Process the signed request.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/middleware.py#L53-L141
|
[
"def is_valid_path(self, request):\n if ENABLED_PATHS and DISABLED_PATHS:\n raise ImproperlyConfigured(\n 'You may configure either FANDJANGO_ENABLED_PATHS '\n 'or FANDJANGO_DISABLED_PATHS, but not both.'\n )\n\n if DISABLED_PATHS and is_disabled_path(request.path):\n return False\n\n if ENABLED_PATHS and not is_enabled_path(request.path):\n return False\n\n return True\n"
] |
class FacebookMiddleware(BaseMiddleware):
"""Middleware for Facebook canvas applications."""
def process_response(self, request, response):
"""
Set compact P3P policies and save signed request to cookie.
P3P is a WC3 standard (see http://www.w3.org/TR/P3P/), and although largely ignored by most
browsers it is considered by IE before accepting third-party cookies (ie. cookies set by
documents in iframes). If they are not set correctly, IE will not set these cookies.
"""
response['P3P'] = 'CP="IDC CURa ADMa OUR IND PHY ONL COM STA"'
if FANDJANGO_CACHE_SIGNED_REQUEST:
if hasattr(request, "facebook") and request.facebook and request.facebook.signed_request:
response.set_cookie('signed_request', request.facebook.signed_request.generate())
else:
response.delete_cookie('signed_request')
return response
|
jgorset/fandjango
|
fandjango/middleware.py
|
FacebookMiddleware.process_response
|
python
|
def process_response(self, request, response):
response['P3P'] = 'CP="IDC CURa ADMa OUR IND PHY ONL COM STA"'
if FANDJANGO_CACHE_SIGNED_REQUEST:
if hasattr(request, "facebook") and request.facebook and request.facebook.signed_request:
response.set_cookie('signed_request', request.facebook.signed_request.generate())
else:
response.delete_cookie('signed_request')
return response
|
Set compact P3P policies and save signed request to cookie.
P3P is a WC3 standard (see http://www.w3.org/TR/P3P/), and although largely ignored by most
browsers it is considered by IE before accepting third-party cookies (ie. cookies set by
documents in iframes). If they are not set correctly, IE will not set these cookies.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/middleware.py#L143-L159
| null |
class FacebookMiddleware(BaseMiddleware):
"""Middleware for Facebook canvas applications."""
def process_request(self, request):
"""Process the signed request."""
# User has already been authed by alternate middleware
if hasattr(request, "facebook") and request.facebook:
return
request.facebook = False
if not self.is_valid_path(request):
return
if self.is_access_denied(request):
return authorization_denied_view(request)
# No signed request found in either GET, POST nor COOKIES...
if 'signed_request' not in request.REQUEST and 'signed_request' not in request.COOKIES:
return
# If the request method is POST and its body only contains the signed request,
# chances are it's a request from the Facebook platform and we'll override
# the request method to HTTP GET to rectify their misinterpretation
# of the HTTP standard.
#
# References:
# "POST for Canvas" migration at http://developers.facebook.com/docs/canvas/post/
# "Incorrect use of the HTTP protocol" discussion at http://forum.developers.facebook.net/viewtopic.php?id=93554
if request.method == 'POST' and 'signed_request' in request.POST:
request.POST = QueryDict('')
request.method = 'GET'
request.facebook = Facebook()
try:
request.facebook.signed_request = SignedRequest(
signed_request = request.REQUEST.get('signed_request') or request.COOKIES.get('signed_request'),
application_secret_key = FACEBOOK_APPLICATION_SECRET_KEY
)
except SignedRequest.Error:
request.facebook = False
# Valid signed request and user has authorized the application
if request.facebook \
and request.facebook.signed_request.user.has_authorized_application \
and not request.facebook.signed_request.user.oauth_token.has_expired:
# Initialize a User object and its corresponding OAuth token
try:
user = User.objects.get(facebook_id=request.facebook.signed_request.user.id)
except User.DoesNotExist:
oauth_token = OAuthToken.objects.create(
token = request.facebook.signed_request.user.oauth_token.token,
issued_at = request.facebook.signed_request.user.oauth_token.issued_at.replace(tzinfo=tzlocal()),
expires_at = request.facebook.signed_request.user.oauth_token.expires_at.replace(tzinfo=tzlocal())
)
user = User.objects.create(
facebook_id = request.facebook.signed_request.user.id,
oauth_token = oauth_token
)
user.synchronize()
# Update the user's details and OAuth token
else:
user.last_seen_at = now()
if 'signed_request' in request.REQUEST:
user.authorized = True
if request.facebook.signed_request.user.oauth_token:
user.oauth_token.token = request.facebook.signed_request.user.oauth_token.token
user.oauth_token.issued_at = request.facebook.signed_request.user.oauth_token.issued_at.replace(tzinfo=tzlocal())
user.oauth_token.expires_at = request.facebook.signed_request.user.oauth_token.expires_at.replace(tzinfo=tzlocal())
user.oauth_token.save()
user.save()
if not user.oauth_token.extended:
# Attempt to extend the OAuth token, but ignore exceptions raised by
# bug #102727766518358 in the Facebook Platform.
#
# http://developers.facebook.com/bugs/102727766518358/
try:
user.oauth_token.extend()
except:
pass
request.facebook.user = user
def process_response(self, request, response):
"""
Set compact P3P policies and save signed request to cookie.
P3P is a WC3 standard (see http://www.w3.org/TR/P3P/), and although largely ignored by most
browsers it is considered by IE before accepting third-party cookies (ie. cookies set by
documents in iframes). If they are not set correctly, IE will not set these cookies.
"""
response['P3P'] = 'CP="IDC CURa ADMa OUR IND PHY ONL COM STA"'
if FANDJANGO_CACHE_SIGNED_REQUEST:
if hasattr(request, "facebook") and request.facebook and request.facebook.signed_request:
response.set_cookie('signed_request', request.facebook.signed_request.generate())
else:
response.delete_cookie('signed_request')
return response
|
jgorset/fandjango
|
fandjango/middleware.py
|
FacebookWebMiddleware.process_request
|
python
|
def process_request(self, request):
# User has already been authed by alternate middleware
if hasattr(request, "facebook") and request.facebook:
return
request.facebook = False
if not self.is_valid_path(request):
return
if self.is_access_denied(request):
return authorization_denied_view(request)
request.facebook = Facebook()
oauth_token = False
# Is there a token cookie already present?
if 'oauth_token' in request.COOKIES:
try:
# Check if the current token is already in DB
oauth_token = OAuthToken.objects.get(token=request.COOKIES['oauth_token'])
except OAuthToken.DoesNotExist:
request.facebook = False
return
# Is there a code in the GET request?
elif 'code' in request.GET:
try:
graph = GraphAPI()
# Exchange code for an access_token
response = graph.get('oauth/access_token',
client_id = FACEBOOK_APPLICATION_ID,
redirect_uri = get_post_authorization_redirect_url(request, canvas=False),
client_secret = FACEBOOK_APPLICATION_SECRET_KEY,
code = request.GET['code'],
)
components = parse_qs(response)
# Save new OAuth-token in DB
oauth_token, new_oauth_token = OAuthToken.objects.get_or_create(
token = components['access_token'][0],
issued_at = now(),
expires_at = now() + timedelta(seconds = int(components['expires'][0]))
)
except GraphAPI.OAuthError:
pass
# There isn't a valid access_token
if not oauth_token or oauth_token.expired:
request.facebook = False
return
# Is there a user already connected to the current token?
try:
user = oauth_token.user
if not user.authorized:
request.facebook = False
return
user.last_seen_at = now()
user.save()
except User.DoesNotExist:
graph = GraphAPI(oauth_token.token)
profile = graph.get('me')
# Either the user already exists and its just a new token, or user and token both are new
try:
user = User.objects.get(facebook_id = profile.get('id'))
if not user.authorized:
if new_oauth_token:
user.last_seen_at = now()
user.authorized = True
else:
request.facebook = False
return
except User.DoesNotExist:
# Create a new user to go with token
user = User.objects.create(
facebook_id = profile.get('id'),
oauth_token = oauth_token
)
user.synchronize(profile)
# Delete old access token if there is any and only if the new one is different
old_oauth_token = None
if user.oauth_token != oauth_token:
old_oauth_token = user.oauth_token
user.oauth_token = oauth_token
user.save()
if old_oauth_token:
old_oauth_token.delete()
if not user.oauth_token.extended:
# Attempt to extend the OAuth token, but ignore exceptions raised by
# bug #102727766518358 in the Facebook Platform.
#
# http://developers.facebook.com/bugs/102727766518358/
try:
user.oauth_token.extend()
except:
pass
request.facebook.user = user
request.facebook.oauth_token = oauth_token
|
Process the web-based auth request.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/middleware.py#L164-L274
|
[
"def is_valid_path(self, request):\n if ENABLED_PATHS and DISABLED_PATHS:\n raise ImproperlyConfigured(\n 'You may configure either FANDJANGO_ENABLED_PATHS '\n 'or FANDJANGO_DISABLED_PATHS, but not both.'\n )\n\n if DISABLED_PATHS and is_disabled_path(request.path):\n return False\n\n if ENABLED_PATHS and not is_enabled_path(request.path):\n return False\n\n return True\n"
] |
class FacebookWebMiddleware(BaseMiddleware):
"""Middleware for Facebook auth on websites."""
def process_request(self, request):
"""Process the web-based auth request."""
# User has already been authed by alternate middleware
if hasattr(request, "facebook") and request.facebook:
return
request.facebook = False
if not self.is_valid_path(request):
return
if self.is_access_denied(request):
return authorization_denied_view(request)
request.facebook = Facebook()
oauth_token = False
# Is there a token cookie already present?
if 'oauth_token' in request.COOKIES:
try:
# Check if the current token is already in DB
oauth_token = OAuthToken.objects.get(token=request.COOKIES['oauth_token'])
except OAuthToken.DoesNotExist:
request.facebook = False
return
# Is there a code in the GET request?
elif 'code' in request.GET:
try:
graph = GraphAPI()
# Exchange code for an access_token
response = graph.get('oauth/access_token',
client_id = FACEBOOK_APPLICATION_ID,
redirect_uri = get_post_authorization_redirect_url(request, canvas=False),
client_secret = FACEBOOK_APPLICATION_SECRET_KEY,
code = request.GET['code'],
)
components = parse_qs(response)
# Save new OAuth-token in DB
oauth_token, new_oauth_token = OAuthToken.objects.get_or_create(
token = components['access_token'][0],
issued_at = now(),
expires_at = now() + timedelta(seconds = int(components['expires'][0]))
)
except GraphAPI.OAuthError:
pass
# There isn't a valid access_token
if not oauth_token or oauth_token.expired:
request.facebook = False
return
# Is there a user already connected to the current token?
try:
user = oauth_token.user
if not user.authorized:
request.facebook = False
return
user.last_seen_at = now()
user.save()
except User.DoesNotExist:
graph = GraphAPI(oauth_token.token)
profile = graph.get('me')
# Either the user already exists and its just a new token, or user and token both are new
try:
user = User.objects.get(facebook_id = profile.get('id'))
if not user.authorized:
if new_oauth_token:
user.last_seen_at = now()
user.authorized = True
else:
request.facebook = False
return
except User.DoesNotExist:
# Create a new user to go with token
user = User.objects.create(
facebook_id = profile.get('id'),
oauth_token = oauth_token
)
user.synchronize(profile)
# Delete old access token if there is any and only if the new one is different
old_oauth_token = None
if user.oauth_token != oauth_token:
old_oauth_token = user.oauth_token
user.oauth_token = oauth_token
user.save()
if old_oauth_token:
old_oauth_token.delete()
if not user.oauth_token.extended:
# Attempt to extend the OAuth token, but ignore exceptions raised by
# bug #102727766518358 in the Facebook Platform.
#
# http://developers.facebook.com/bugs/102727766518358/
try:
user.oauth_token.extend()
except:
pass
request.facebook.user = user
request.facebook.oauth_token = oauth_token
def process_response(self, request, response):
"""
Set compact P3P policies and save auth token to cookie.
P3P is a WC3 standard (see http://www.w3.org/TR/P3P/), and although largely ignored by most
browsers it is considered by IE before accepting third-party cookies (ie. cookies set by
documents in iframes). If they are not set correctly, IE will not set these cookies.
"""
if hasattr(request, "facebook") and request.facebook and request.facebook.oauth_token:
if "code" in request.REQUEST:
""" Remove auth related query params """
path = get_full_path(request, remove_querystrings=['code', 'web_canvas'])
response = HttpResponseRedirect(path)
response.set_cookie('oauth_token', request.facebook.oauth_token.token)
else:
response.delete_cookie('oauth_token')
response['P3P'] = 'CP="IDC CURa ADMa OUR IND PHY ONL COM STA"'
return response
|
jgorset/fandjango
|
fandjango/middleware.py
|
FacebookWebMiddleware.process_response
|
python
|
def process_response(self, request, response):
if hasattr(request, "facebook") and request.facebook and request.facebook.oauth_token:
if "code" in request.REQUEST:
""" Remove auth related query params """
path = get_full_path(request, remove_querystrings=['code', 'web_canvas'])
response = HttpResponseRedirect(path)
response.set_cookie('oauth_token', request.facebook.oauth_token.token)
else:
response.delete_cookie('oauth_token')
response['P3P'] = 'CP="IDC CURa ADMa OUR IND PHY ONL COM STA"'
return response
|
Set compact P3P policies and save auth token to cookie.
P3P is a WC3 standard (see http://www.w3.org/TR/P3P/), and although largely ignored by most
browsers it is considered by IE before accepting third-party cookies (ie. cookies set by
documents in iframes). If they are not set correctly, IE will not set these cookies.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/middleware.py#L277-L297
|
[
"def get_full_path(request, remove_querystrings=[]):\n \"\"\"Gets the current path, removing specified querstrings\"\"\"\n\n path = request.get_full_path()\n for qs in remove_querystrings:\n path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)\n return path\n"
] |
class FacebookWebMiddleware(BaseMiddleware):
"""Middleware for Facebook auth on websites."""
def process_request(self, request):
"""Process the web-based auth request."""
# User has already been authed by alternate middleware
if hasattr(request, "facebook") and request.facebook:
return
request.facebook = False
if not self.is_valid_path(request):
return
if self.is_access_denied(request):
return authorization_denied_view(request)
request.facebook = Facebook()
oauth_token = False
# Is there a token cookie already present?
if 'oauth_token' in request.COOKIES:
try:
# Check if the current token is already in DB
oauth_token = OAuthToken.objects.get(token=request.COOKIES['oauth_token'])
except OAuthToken.DoesNotExist:
request.facebook = False
return
# Is there a code in the GET request?
elif 'code' in request.GET:
try:
graph = GraphAPI()
# Exchange code for an access_token
response = graph.get('oauth/access_token',
client_id = FACEBOOK_APPLICATION_ID,
redirect_uri = get_post_authorization_redirect_url(request, canvas=False),
client_secret = FACEBOOK_APPLICATION_SECRET_KEY,
code = request.GET['code'],
)
components = parse_qs(response)
# Save new OAuth-token in DB
oauth_token, new_oauth_token = OAuthToken.objects.get_or_create(
token = components['access_token'][0],
issued_at = now(),
expires_at = now() + timedelta(seconds = int(components['expires'][0]))
)
except GraphAPI.OAuthError:
pass
# There isn't a valid access_token
if not oauth_token or oauth_token.expired:
request.facebook = False
return
# Is there a user already connected to the current token?
try:
user = oauth_token.user
if not user.authorized:
request.facebook = False
return
user.last_seen_at = now()
user.save()
except User.DoesNotExist:
graph = GraphAPI(oauth_token.token)
profile = graph.get('me')
# Either the user already exists and its just a new token, or user and token both are new
try:
user = User.objects.get(facebook_id = profile.get('id'))
if not user.authorized:
if new_oauth_token:
user.last_seen_at = now()
user.authorized = True
else:
request.facebook = False
return
except User.DoesNotExist:
# Create a new user to go with token
user = User.objects.create(
facebook_id = profile.get('id'),
oauth_token = oauth_token
)
user.synchronize(profile)
# Delete old access token if there is any and only if the new one is different
old_oauth_token = None
if user.oauth_token != oauth_token:
old_oauth_token = user.oauth_token
user.oauth_token = oauth_token
user.save()
if old_oauth_token:
old_oauth_token.delete()
if not user.oauth_token.extended:
# Attempt to extend the OAuth token, but ignore exceptions raised by
# bug #102727766518358 in the Facebook Platform.
#
# http://developers.facebook.com/bugs/102727766518358/
try:
user.oauth_token.extend()
except:
pass
request.facebook.user = user
request.facebook.oauth_token = oauth_token
|
jgorset/fandjango
|
fandjango/utils.py
|
is_disabled_path
|
python
|
def is_disabled_path(path):
for disabled_path in DISABLED_PATHS:
match = re.search(disabled_path, path[1:])
if match:
return True
return False
|
Determine whether or not the path matches one or more paths
in the DISABLED_PATHS setting.
:param path: A string describing the path to be matched.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/utils.py#L17-L28
| null |
import re
from datetime import timedelta
from urlparse import urlparse
from functools import wraps
from django.core.cache import cache
from django.utils.importlib import import_module
from fandjango.settings import FACEBOOK_APPLICATION_CANVAS_URL
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import DISABLED_PATHS
from fandjango.settings import ENABLED_PATHS
from fandjango.settings import AUTHORIZATION_DENIED_VIEW
from fandjango.settings import FANDJANGO_SITE_URL
def is_enabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the ENABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for enabled_path in ENABLED_PATHS:
match = re.search(enabled_path, path[1:])
if match:
return True
return False
def cached_property(**kwargs):
"""Cache the return value of a property."""
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
def authorization_denied_view(request):
"""Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
def get_post_authorization_redirect_url(request, canvas=True):
"""
Determine the URL users should be redirected to upon authorization the application.
If request is non-canvas use user defined site url if set, else the site hostname.
"""
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
def get_full_path(request, remove_querystrings=[]):
"""Gets the current path, removing specified querstrings"""
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
jgorset/fandjango
|
fandjango/utils.py
|
is_enabled_path
|
python
|
def is_enabled_path(path):
for enabled_path in ENABLED_PATHS:
match = re.search(enabled_path, path[1:])
if match:
return True
return False
|
Determine whether or not the path matches one or more paths
in the ENABLED_PATHS setting.
:param path: A string describing the path to be matched.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/utils.py#L30-L41
| null |
import re
from datetime import timedelta
from urlparse import urlparse
from functools import wraps
from django.core.cache import cache
from django.utils.importlib import import_module
from fandjango.settings import FACEBOOK_APPLICATION_CANVAS_URL
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import DISABLED_PATHS
from fandjango.settings import ENABLED_PATHS
from fandjango.settings import AUTHORIZATION_DENIED_VIEW
from fandjango.settings import FANDJANGO_SITE_URL
def is_disabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the DISABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for disabled_path in DISABLED_PATHS:
match = re.search(disabled_path, path[1:])
if match:
return True
return False
def cached_property(**kwargs):
"""Cache the return value of a property."""
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
def authorization_denied_view(request):
"""Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
def get_post_authorization_redirect_url(request, canvas=True):
"""
Determine the URL users should be redirected to upon authorization the application.
If request is non-canvas use user defined site url if set, else the site hostname.
"""
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
def get_full_path(request, remove_querystrings=[]):
"""Gets the current path, removing specified querstrings"""
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
jgorset/fandjango
|
fandjango/utils.py
|
cached_property
|
python
|
def cached_property(**kwargs):
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
|
Cache the return value of a property.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/utils.py#L43-L66
| null |
import re
from datetime import timedelta
from urlparse import urlparse
from functools import wraps
from django.core.cache import cache
from django.utils.importlib import import_module
from fandjango.settings import FACEBOOK_APPLICATION_CANVAS_URL
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import DISABLED_PATHS
from fandjango.settings import ENABLED_PATHS
from fandjango.settings import AUTHORIZATION_DENIED_VIEW
from fandjango.settings import FANDJANGO_SITE_URL
def is_disabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the DISABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for disabled_path in DISABLED_PATHS:
match = re.search(disabled_path, path[1:])
if match:
return True
return False
def is_enabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the ENABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for enabled_path in ENABLED_PATHS:
match = re.search(enabled_path, path[1:])
if match:
return True
return False
def authorization_denied_view(request):
"""Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
def get_post_authorization_redirect_url(request, canvas=True):
"""
Determine the URL users should be redirected to upon authorization the application.
If request is non-canvas use user defined site url if set, else the site hostname.
"""
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
def get_full_path(request, remove_querystrings=[]):
"""Gets the current path, removing specified querstrings"""
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
jgorset/fandjango
|
fandjango/utils.py
|
authorization_denied_view
|
python
|
def authorization_denied_view(request):
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
|
Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/utils.py#L68-L76
| null |
import re
from datetime import timedelta
from urlparse import urlparse
from functools import wraps
from django.core.cache import cache
from django.utils.importlib import import_module
from fandjango.settings import FACEBOOK_APPLICATION_CANVAS_URL
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import DISABLED_PATHS
from fandjango.settings import ENABLED_PATHS
from fandjango.settings import AUTHORIZATION_DENIED_VIEW
from fandjango.settings import FANDJANGO_SITE_URL
def is_disabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the DISABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for disabled_path in DISABLED_PATHS:
match = re.search(disabled_path, path[1:])
if match:
return True
return False
def is_enabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the ENABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for enabled_path in ENABLED_PATHS:
match = re.search(enabled_path, path[1:])
if match:
return True
return False
def cached_property(**kwargs):
"""Cache the return value of a property."""
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
def get_post_authorization_redirect_url(request, canvas=True):
"""
Determine the URL users should be redirected to upon authorization the application.
If request is non-canvas use user defined site url if set, else the site hostname.
"""
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
def get_full_path(request, remove_querystrings=[]):
"""Gets the current path, removing specified querstrings"""
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
jgorset/fandjango
|
fandjango/utils.py
|
get_post_authorization_redirect_url
|
python
|
def get_post_authorization_redirect_url(request, canvas=True):
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
|
Determine the URL users should be redirected to upon authorization the application.
If request is non-canvas use user defined site url if set, else the site hostname.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/utils.py#L78-L105
| null |
import re
from datetime import timedelta
from urlparse import urlparse
from functools import wraps
from django.core.cache import cache
from django.utils.importlib import import_module
from fandjango.settings import FACEBOOK_APPLICATION_CANVAS_URL
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import DISABLED_PATHS
from fandjango.settings import ENABLED_PATHS
from fandjango.settings import AUTHORIZATION_DENIED_VIEW
from fandjango.settings import FANDJANGO_SITE_URL
def is_disabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the DISABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for disabled_path in DISABLED_PATHS:
match = re.search(disabled_path, path[1:])
if match:
return True
return False
def is_enabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the ENABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for enabled_path in ENABLED_PATHS:
match = re.search(enabled_path, path[1:])
if match:
return True
return False
def cached_property(**kwargs):
"""Cache the return value of a property."""
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
def authorization_denied_view(request):
"""Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
def get_full_path(request, remove_querystrings=[]):
"""Gets the current path, removing specified querstrings"""
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
jgorset/fandjango
|
fandjango/utils.py
|
get_full_path
|
python
|
def get_full_path(request, remove_querystrings=[]):
path = request.get_full_path()
for qs in remove_querystrings:
path = re.sub(r'&?' + qs + '=?(.+)?&?', '', path)
return path
|
Gets the current path, removing specified querstrings
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/utils.py#L107-L113
| null |
import re
from datetime import timedelta
from urlparse import urlparse
from functools import wraps
from django.core.cache import cache
from django.utils.importlib import import_module
from fandjango.settings import FACEBOOK_APPLICATION_CANVAS_URL
from fandjango.settings import FACEBOOK_APPLICATION_DOMAIN
from fandjango.settings import FACEBOOK_APPLICATION_NAMESPACE
from fandjango.settings import DISABLED_PATHS
from fandjango.settings import ENABLED_PATHS
from fandjango.settings import AUTHORIZATION_DENIED_VIEW
from fandjango.settings import FANDJANGO_SITE_URL
def is_disabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the DISABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for disabled_path in DISABLED_PATHS:
match = re.search(disabled_path, path[1:])
if match:
return True
return False
def is_enabled_path(path):
"""
Determine whether or not the path matches one or more paths
in the ENABLED_PATHS setting.
:param path: A string describing the path to be matched.
"""
for enabled_path in ENABLED_PATHS:
match = re.search(enabled_path, path[1:])
if match:
return True
return False
def cached_property(**kwargs):
"""Cache the return value of a property."""
def decorator(function):
@wraps(function)
def wrapper(self):
key = 'fandjango.%(model)s.%(property)s_%(pk)s' % {
'model': self.__class__.__name__,
'pk': self.pk,
'property': function.__name__
}
cached_value = cache.get(key)
delta = timedelta(**kwargs)
if cached_value is None:
value = function(self)
cache.set(key, value, delta.days * 86400 + delta.seconds)
else:
value = cached_value
return value
return wrapper
return decorator
def authorization_denied_view(request):
"""Proxy for the view referenced in ``FANDJANGO_AUTHORIZATION_DENIED_VIEW``."""
authorization_denied_module_name = AUTHORIZATION_DENIED_VIEW.rsplit('.', 1)[0]
authorization_denied_view_name = AUTHORIZATION_DENIED_VIEW.split('.')[-1]
authorization_denied_module = import_module(authorization_denied_module_name)
authorization_denied_view = getattr(authorization_denied_module, authorization_denied_view_name)
return authorization_denied_view(request)
def get_post_authorization_redirect_url(request, canvas=True):
"""
Determine the URL users should be redirected to upon authorization the application.
If request is non-canvas use user defined site url if set, else the site hostname.
"""
path = request.get_full_path()
if canvas:
if FACEBOOK_APPLICATION_CANVAS_URL:
path = path.replace(urlparse(FACEBOOK_APPLICATION_CANVAS_URL).path, '')
redirect_uri = 'https://%(domain)s/%(namespace)s%(path)s' % {
'domain': FACEBOOK_APPLICATION_DOMAIN,
'namespace': FACEBOOK_APPLICATION_NAMESPACE,
'path': path
}
else:
if FANDJANGO_SITE_URL:
site_url = FANDJANGO_SITE_URL
path = path.replace(urlparse(site_url).path, '')
else:
protocol = "https" if request.is_secure() else "http"
site_url = "%s://%s" % (protocol, request.get_host())
redirect_uri = site_url + path
return redirect_uri
|
jgorset/fandjango
|
fandjango/views.py
|
authorize_application
|
python
|
def authorize_application(
request,
redirect_uri = 'https://%s/%s' % (FACEBOOK_APPLICATION_DOMAIN, FACEBOOK_APPLICATION_NAMESPACE),
permissions = FACEBOOK_APPLICATION_INITIAL_PERMISSIONS
):
query = {
'client_id': FACEBOOK_APPLICATION_ID,
'redirect_uri': redirect_uri
}
if permissions:
query['scope'] = ', '.join(permissions)
return render(
request = request,
template_name = 'fandjango/authorize_application.html',
dictionary = {
'url': 'https://www.facebook.com/dialog/oauth?%s' % urlencode(query)
},
status = 401
)
|
Redirect the user to authorize the application.
Redirection is done by rendering a JavaScript snippet that redirects the parent
window to the authorization URI, since Facebook will not allow this inside an iframe.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/views.py#L15-L41
| null |
from urllib import urlencode
from django.http import HttpResponse
from django.shortcuts import render
from facepy import SignedRequest
from fandjango.models import User
from fandjango.settings import (
FACEBOOK_APPLICATION_ID, FACEBOOK_APPLICATION_DOMAIN,
FACEBOOK_APPLICATION_NAMESPACE, FACEBOOK_APPLICATION_SECRET_KEY,
FACEBOOK_APPLICATION_INITIAL_PERMISSIONS
)
def authorization_denied(request):
"""
Render a template for users that refuse to authorize the application.
"""
return render(
request = request,
template_name = 'fandjango/authorization_denied.html',
status = 403
)
def deauthorize_application(request):
"""
When a user deauthorizes an application, Facebook sends a HTTP POST request to the application's
"deauthorization callback" URL. This view picks up on requests of this sort and marks the corresponding
users as unauthorized.
"""
if request.facebook:
user = User.objects.get(
facebook_id = request.facebook.signed_request.user.id
)
user.authorized = False
user.save()
return HttpResponse()
else:
return HttpResponse(status=400)
|
jgorset/fandjango
|
fandjango/views.py
|
deauthorize_application
|
python
|
def deauthorize_application(request):
if request.facebook:
user = User.objects.get(
facebook_id = request.facebook.signed_request.user.id
)
user.authorized = False
user.save()
return HttpResponse()
else:
return HttpResponse(status=400)
|
When a user deauthorizes an application, Facebook sends a HTTP POST request to the application's
"deauthorization callback" URL. This view picks up on requests of this sort and marks the corresponding
users as unauthorized.
|
train
|
https://github.com/jgorset/fandjango/blob/01334a76c1d9f0629842aa6830678ae097756551/fandjango/views.py#L53-L69
| null |
from urllib import urlencode
from django.http import HttpResponse
from django.shortcuts import render
from facepy import SignedRequest
from fandjango.models import User
from fandjango.settings import (
FACEBOOK_APPLICATION_ID, FACEBOOK_APPLICATION_DOMAIN,
FACEBOOK_APPLICATION_NAMESPACE, FACEBOOK_APPLICATION_SECRET_KEY,
FACEBOOK_APPLICATION_INITIAL_PERMISSIONS
)
def authorize_application(
request,
redirect_uri = 'https://%s/%s' % (FACEBOOK_APPLICATION_DOMAIN, FACEBOOK_APPLICATION_NAMESPACE),
permissions = FACEBOOK_APPLICATION_INITIAL_PERMISSIONS
):
"""
Redirect the user to authorize the application.
Redirection is done by rendering a JavaScript snippet that redirects the parent
window to the authorization URI, since Facebook will not allow this inside an iframe.
"""
query = {
'client_id': FACEBOOK_APPLICATION_ID,
'redirect_uri': redirect_uri
}
if permissions:
query['scope'] = ', '.join(permissions)
return render(
request = request,
template_name = 'fandjango/authorize_application.html',
dictionary = {
'url': 'https://www.facebook.com/dialog/oauth?%s' % urlencode(query)
},
status = 401
)
def authorization_denied(request):
"""
Render a template for users that refuse to authorize the application.
"""
return render(
request = request,
template_name = 'fandjango/authorization_denied.html',
status = 403
)
|
walkr/oi
|
oi/core.py
|
BaseProgram.new_parser
|
python
|
def new_parser(self):
parser = argparse.ArgumentParser(description=self.description)
parser.add_argument(
'--version', help='show version and exit',
default=False, action='store_true')
parser.add_argument(
'--debug', help='enable debugging',
default=False, action='store_true')
return parser
|
Create a command line argument parser
Add a few default flags, such as --version
for displaying the program version when invoked
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L48-L61
| null |
class BaseProgram(object):
""" Subclass this """
def __init__(self, description, address=None, state=None, workers=None):
self.description = description
self.address = address
self.parser = self.new_parser()
self.state = state or State()
self.workers = workers or []
self.registered = {} # registered commands
def add_command(self, command, function, description=None):
""" Register a new function with a the name `command` and
`description` (which will be shown then help is invoked). """
self.registered[command] = {
'function': function, 'description': description
}
def run(self, args=None):
""" Parse command line arguments if necessary then run program.
By default this method will just take of the --version flag.
The logic for other flags should be handled by your subclass """
args = args or self.parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
if args.version:
print(version.VERSION)
sys.exit(0)
|
walkr/oi
|
oi/core.py
|
BaseProgram.add_command
|
python
|
def add_command(self, command, function, description=None):
self.registered[command] = {
'function': function, 'description': description
}
|
Register a new function with a the name `command` and
`description` (which will be shown then help is invoked).
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L63-L69
| null |
class BaseProgram(object):
""" Subclass this """
def __init__(self, description, address=None, state=None, workers=None):
self.description = description
self.address = address
self.parser = self.new_parser()
self.state = state or State()
self.workers = workers or []
self.registered = {} # registered commands
def new_parser(self):
""" Create a command line argument parser
Add a few default flags, such as --version
for displaying the program version when invoked """
parser = argparse.ArgumentParser(description=self.description)
parser.add_argument(
'--version', help='show version and exit',
default=False, action='store_true')
parser.add_argument(
'--debug', help='enable debugging',
default=False, action='store_true')
return parser
def run(self, args=None):
""" Parse command line arguments if necessary then run program.
By default this method will just take of the --version flag.
The logic for other flags should be handled by your subclass """
args = args or self.parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
if args.version:
print(version.VERSION)
sys.exit(0)
|
walkr/oi
|
oi/core.py
|
BaseProgram.run
|
python
|
def run(self, args=None):
args = args or self.parser.parse_args()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
if args.version:
print(version.VERSION)
sys.exit(0)
|
Parse command line arguments if necessary then run program.
By default this method will just take of the --version flag.
The logic for other flags should be handled by your subclass
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L71-L84
| null |
class BaseProgram(object):
""" Subclass this """
def __init__(self, description, address=None, state=None, workers=None):
self.description = description
self.address = address
self.parser = self.new_parser()
self.state = state or State()
self.workers = workers or []
self.registered = {} # registered commands
def new_parser(self):
""" Create a command line argument parser
Add a few default flags, such as --version
for displaying the program version when invoked """
parser = argparse.ArgumentParser(description=self.description)
parser.add_argument(
'--version', help='show version and exit',
default=False, action='store_true')
parser.add_argument(
'--debug', help='enable debugging',
default=False, action='store_true')
return parser
def add_command(self, command, function, description=None):
""" Register a new function with a the name `command` and
`description` (which will be shown then help is invoked). """
self.registered[command] = {
'function': function, 'description': description
}
|
walkr/oi
|
oi/core.py
|
Program.help_function
|
python
|
def help_function(self, command=None):
if command:
return self.registered[command].get(
'description', 'No help available'
)
return ', '.join(sorted(self.registered))
|
Show help for all available commands or just a single one
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L115-L121
| null |
class Program(BaseProgram):
""" Long running program with a nanoservice endpoint.
`service` - nanoservice Service object
`config` - the configuration parsed from --config <filepath> """
def __init__(self, description, address):
super(Program, self).__init__(description, address)
self.service = Service(address) if address else None
self.config = compat.configparser.ConfigParser()
# Add the flag for parsing configuration file
self.parser.add_argument(
'--config', help='configuration file to use', nargs='?')
if self.service is None:
return
# Add default service worker, which will respond to ctl commands
# Other workers will perform other kind of work, such as
# fetching resources from the web, etc
self.workers.append(worker.ServiceWorker(self.service))
# Add default commands
self.add_command('ping', lambda: 'pong')
self.add_command('help', self.help_function)
def add_command(self, command, function, description=None):
""" Register a new function for command """
super(Program, self).add_command(command, function, description)
self.service.register(command, function)
def run(self, args=None):
""" Parse comand line arguments/flags and run program """
args = args or self.parser.parse_args()
super(Program, self).run(args)
# Read configuration file if any
if args.config is not None:
filepath = args.config
self.config.read(filepath)
# Start workers then wait until they finish work
[w.start() for w in self.workers]
[w.join() for w in self.workers]
|
walkr/oi
|
oi/core.py
|
Program.add_command
|
python
|
def add_command(self, command, function, description=None):
super(Program, self).add_command(command, function, description)
self.service.register(command, function)
|
Register a new function for command
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L123-L126
|
[
"def add_command(self, command, function, description=None):\n \"\"\" Register a new function with a the name `command` and\n `description` (which will be shown then help is invoked). \"\"\"\n\n self.registered[command] = {\n 'function': function, 'description': description\n }\n"
] |
class Program(BaseProgram):
""" Long running program with a nanoservice endpoint.
`service` - nanoservice Service object
`config` - the configuration parsed from --config <filepath> """
def __init__(self, description, address):
super(Program, self).__init__(description, address)
self.service = Service(address) if address else None
self.config = compat.configparser.ConfigParser()
# Add the flag for parsing configuration file
self.parser.add_argument(
'--config', help='configuration file to use', nargs='?')
if self.service is None:
return
# Add default service worker, which will respond to ctl commands
# Other workers will perform other kind of work, such as
# fetching resources from the web, etc
self.workers.append(worker.ServiceWorker(self.service))
# Add default commands
self.add_command('ping', lambda: 'pong')
self.add_command('help', self.help_function)
def help_function(self, command=None):
""" Show help for all available commands or just a single one """
if command:
return self.registered[command].get(
'description', 'No help available'
)
return ', '.join(sorted(self.registered))
def run(self, args=None):
""" Parse comand line arguments/flags and run program """
args = args or self.parser.parse_args()
super(Program, self).run(args)
# Read configuration file if any
if args.config is not None:
filepath = args.config
self.config.read(filepath)
# Start workers then wait until they finish work
[w.start() for w in self.workers]
[w.join() for w in self.workers]
|
walkr/oi
|
oi/core.py
|
Program.run
|
python
|
def run(self, args=None):
args = args or self.parser.parse_args()
super(Program, self).run(args)
# Read configuration file if any
if args.config is not None:
filepath = args.config
self.config.read(filepath)
# Start workers then wait until they finish work
[w.start() for w in self.workers]
[w.join() for w in self.workers]
|
Parse comand line arguments/flags and run program
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L128-L141
|
[
"def run(self, args=None):\n \"\"\" Parse command line arguments if necessary then run program.\n By default this method will just take of the --version flag.\n\n The logic for other flags should be handled by your subclass \"\"\"\n\n args = args or self.parser.parse_args()\n\n if args.debug:\n logging.basicConfig(level=logging.DEBUG)\n\n if args.version:\n print(version.VERSION)\n sys.exit(0)\n"
] |
class Program(BaseProgram):
""" Long running program with a nanoservice endpoint.
`service` - nanoservice Service object
`config` - the configuration parsed from --config <filepath> """
def __init__(self, description, address):
super(Program, self).__init__(description, address)
self.service = Service(address) if address else None
self.config = compat.configparser.ConfigParser()
# Add the flag for parsing configuration file
self.parser.add_argument(
'--config', help='configuration file to use', nargs='?')
if self.service is None:
return
# Add default service worker, which will respond to ctl commands
# Other workers will perform other kind of work, such as
# fetching resources from the web, etc
self.workers.append(worker.ServiceWorker(self.service))
# Add default commands
self.add_command('ping', lambda: 'pong')
self.add_command('help', self.help_function)
def help_function(self, command=None):
""" Show help for all available commands or just a single one """
if command:
return self.registered[command].get(
'description', 'No help available'
)
return ', '.join(sorted(self.registered))
def add_command(self, command, function, description=None):
""" Register a new function for command """
super(Program, self).add_command(command, function, description)
self.service.register(command, function)
|
walkr/oi
|
oi/core.py
|
ClientWrapper.create_client
|
python
|
def create_client(self, addr, timeout):
def make(addr):
c = Client(addr)
c.socket._set_recv_timeout(timeout)
return c
if ',' in addr:
addrs = addr.split(',')
addrs = [a.strip() for a in addrs]
return {a: make(a) for a in addrs}
return make(addr)
|
Create client(s) based on addr
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L151-L163
|
[
"def make(addr):\n c = Client(addr)\n c.socket._set_recv_timeout(timeout)\n return c\n"
] |
class ClientWrapper(object):
""" An wrapper over nanoservice.Client to deal with one or multiple
clients in a similar fasion """
def __init__(self, address, timeout):
self.c = self.create_client(address, timeout)
def _call_single(self, client, command, *args):
""" Call single """
try:
return client.call(command, *args)
except Exception as e:
return None, str(e)
def _call_multi(self, clients, command, *args):
""" Call multi """
responses, errors = {}, {}
for addr, client in clients.items():
res, err = self._call_single(client, command, *args)
responses[addr] = res
errors[addr] = err
return responses, errors
def call(self, command, *args):
""" Call remote service(s) """
if isinstance(self.c, dict):
return self._call_multi(self.c, command, *args)
return self._call_single(self.c, command, *args)
def is_multi(self):
""" Does this object include multiple clients """
return isinstance(self.c, dict)
def close(self):
""" Close socket(s) """
if isinstance(self.c, dict):
for client in self.c.values():
client.sock.close()
return
self.c.socket.close()
|
walkr/oi
|
oi/core.py
|
ClientWrapper._call_single
|
python
|
def _call_single(self, client, command, *args):
try:
return client.call(command, *args)
except Exception as e:
return None, str(e)
|
Call single
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L165-L170
| null |
class ClientWrapper(object):
""" An wrapper over nanoservice.Client to deal with one or multiple
clients in a similar fasion """
def __init__(self, address, timeout):
self.c = self.create_client(address, timeout)
def create_client(self, addr, timeout):
""" Create client(s) based on addr """
def make(addr):
c = Client(addr)
c.socket._set_recv_timeout(timeout)
return c
if ',' in addr:
addrs = addr.split(',')
addrs = [a.strip() for a in addrs]
return {a: make(a) for a in addrs}
return make(addr)
def _call_multi(self, clients, command, *args):
""" Call multi """
responses, errors = {}, {}
for addr, client in clients.items():
res, err = self._call_single(client, command, *args)
responses[addr] = res
errors[addr] = err
return responses, errors
def call(self, command, *args):
""" Call remote service(s) """
if isinstance(self.c, dict):
return self._call_multi(self.c, command, *args)
return self._call_single(self.c, command, *args)
def is_multi(self):
""" Does this object include multiple clients """
return isinstance(self.c, dict)
def close(self):
""" Close socket(s) """
if isinstance(self.c, dict):
for client in self.c.values():
client.sock.close()
return
self.c.socket.close()
|
walkr/oi
|
oi/core.py
|
ClientWrapper._call_multi
|
python
|
def _call_multi(self, clients, command, *args):
responses, errors = {}, {}
for addr, client in clients.items():
res, err = self._call_single(client, command, *args)
responses[addr] = res
errors[addr] = err
return responses, errors
|
Call multi
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L172-L179
|
[
"def _call_single(self, client, command, *args):\n \"\"\" Call single \"\"\"\n try:\n return client.call(command, *args)\n except Exception as e:\n return None, str(e)\n"
] |
class ClientWrapper(object):
""" An wrapper over nanoservice.Client to deal with one or multiple
clients in a similar fasion """
def __init__(self, address, timeout):
self.c = self.create_client(address, timeout)
def create_client(self, addr, timeout):
""" Create client(s) based on addr """
def make(addr):
c = Client(addr)
c.socket._set_recv_timeout(timeout)
return c
if ',' in addr:
addrs = addr.split(',')
addrs = [a.strip() for a in addrs]
return {a: make(a) for a in addrs}
return make(addr)
def _call_single(self, client, command, *args):
""" Call single """
try:
return client.call(command, *args)
except Exception as e:
return None, str(e)
def call(self, command, *args):
""" Call remote service(s) """
if isinstance(self.c, dict):
return self._call_multi(self.c, command, *args)
return self._call_single(self.c, command, *args)
def is_multi(self):
""" Does this object include multiple clients """
return isinstance(self.c, dict)
def close(self):
""" Close socket(s) """
if isinstance(self.c, dict):
for client in self.c.values():
client.sock.close()
return
self.c.socket.close()
|
walkr/oi
|
oi/core.py
|
ClientWrapper.call
|
python
|
def call(self, command, *args):
if isinstance(self.c, dict):
return self._call_multi(self.c, command, *args)
return self._call_single(self.c, command, *args)
|
Call remote service(s)
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L181-L185
|
[
"def _call_single(self, client, command, *args):\n \"\"\" Call single \"\"\"\n try:\n return client.call(command, *args)\n except Exception as e:\n return None, str(e)\n",
"def _call_multi(self, clients, command, *args):\n \"\"\" Call multi \"\"\"\n responses, errors = {}, {}\n for addr, client in clients.items():\n res, err = self._call_single(client, command, *args)\n responses[addr] = res\n errors[addr] = err\n return responses, errors\n"
] |
class ClientWrapper(object):
""" An wrapper over nanoservice.Client to deal with one or multiple
clients in a similar fasion """
def __init__(self, address, timeout):
self.c = self.create_client(address, timeout)
def create_client(self, addr, timeout):
""" Create client(s) based on addr """
def make(addr):
c = Client(addr)
c.socket._set_recv_timeout(timeout)
return c
if ',' in addr:
addrs = addr.split(',')
addrs = [a.strip() for a in addrs]
return {a: make(a) for a in addrs}
return make(addr)
def _call_single(self, client, command, *args):
""" Call single """
try:
return client.call(command, *args)
except Exception as e:
return None, str(e)
def _call_multi(self, clients, command, *args):
""" Call multi """
responses, errors = {}, {}
for addr, client in clients.items():
res, err = self._call_single(client, command, *args)
responses[addr] = res
errors[addr] = err
return responses, errors
def is_multi(self):
""" Does this object include multiple clients """
return isinstance(self.c, dict)
def close(self):
""" Close socket(s) """
if isinstance(self.c, dict):
for client in self.c.values():
client.sock.close()
return
self.c.socket.close()
|
walkr/oi
|
oi/core.py
|
ClientWrapper.close
|
python
|
def close(self):
if isinstance(self.c, dict):
for client in self.c.values():
client.sock.close()
return
self.c.socket.close()
|
Close socket(s)
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L191-L197
| null |
class ClientWrapper(object):
""" An wrapper over nanoservice.Client to deal with one or multiple
clients in a similar fasion """
def __init__(self, address, timeout):
self.c = self.create_client(address, timeout)
def create_client(self, addr, timeout):
""" Create client(s) based on addr """
def make(addr):
c = Client(addr)
c.socket._set_recv_timeout(timeout)
return c
if ',' in addr:
addrs = addr.split(',')
addrs = [a.strip() for a in addrs]
return {a: make(a) for a in addrs}
return make(addr)
def _call_single(self, client, command, *args):
""" Call single """
try:
return client.call(command, *args)
except Exception as e:
return None, str(e)
def _call_multi(self, clients, command, *args):
""" Call multi """
responses, errors = {}, {}
for addr, client in clients.items():
res, err = self._call_single(client, command, *args)
responses[addr] = res
errors[addr] = err
return responses, errors
def call(self, command, *args):
""" Call remote service(s) """
if isinstance(self.c, dict):
return self._call_multi(self.c, command, *args)
return self._call_single(self.c, command, *args)
def is_multi(self):
""" Does this object include multiple clients """
return isinstance(self.c, dict)
|
walkr/oi
|
oi/core.py
|
Response._show
|
python
|
def _show(self, res, err, prefix='', colored=False):
if self.kind is 'local':
what = res if not err else err
print(what)
return
if self.kind is 'remote':
if colored:
red, green, reset = Fore.RED, Fore.GREEN, Fore.RESET
else:
red = green = reset = ''
if err:
what = prefix + red + 'remote err: {}'.format(err) + reset
else:
what = prefix + green + str(res) + reset
print(what)
|
Show result or error
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L210-L227
| null |
class Response(object):
""" A local or remote response for a command """
def __init__(self, kind, res, err, multi=False):
super(Response, self).__init__()
self.kind = kind
self.res = res
self.err = err
self.multi = multi
def show(self):
if self.multi:
for addr in self.res:
self._show(
self.res[addr], self.err[addr],
prefix='- {}: '.format(addr), colored=True
)
return
self._show(self.res, self.err)
|
walkr/oi
|
oi/core.py
|
CtlProgram.call
|
python
|
def call(self, command, *args):
if not command:
return
# Look for local methods first
try:
res = self.registered[command]['function'](self, *args)
return Response('local', res, None)
# Method not found, try remote
except KeyError:
# Execute remote command
res, err = self.client.call(command, *args)
return Response('remote', res, err, self.client.is_multi())
# Local exception
except Exception as e:
return Response('local', res, str(e))
|
Execute local OR remote command and show response
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L267-L287
| null |
class CtlProgram(BaseProgram):
""" The Ctl program
Note:
When a CtlProgram accepts a command it will make a request
to the remote service with that command and any args extracted.
When we add commands via `add_command` method, then those
commands will be executed by our registered function; they will
be not dispatched to the remote service. This is helpful, because
it allows us to register certain local commands, such as `quit`, etc
"""
def __init__(self, description, address, timeout=3000):
super(CtlProgram, self).__init__(description, address)
self.client = ClientWrapper(address, timeout) if address else None
# Add command argument
self.parser.add_argument(
'command', help='command name to execute', nargs='*',
metavar='command')
# Add default commands
self.add_command('quit', lambda p: sys.exit(0), 'quit ctl')
def parse_input(self, text):
""" Parse ctl user input. Double quotes are used
to group together multi words arguments. """
parts = util.split(text)
command = parts[0] if text and parts else None
command = command.lower() if command else None
args = parts[1:] if len(parts) > 1 else []
return (command, args)
def loop(self):
""" Enter loop, read user input then run command. Repeat """
while True:
text = compat.input('ctl > ')
command, args = self.parse_input(text)
if not command:
continue
response = self.call(command, *args)
response.show()
def run(self, args=None, loop=True):
args = self.parser.parse_args()
super(CtlProgram, self).run(args)
# Execute a single command then exit
if args.command:
# command will come as a list (zero or more elements)
# so, extract the first element as the command name
# and the rest will all be positional arguments
command = args.command[0]
args = args.command[1:] if len(args.command) > 1 else []
response = self.call(command, *args)
response.show()
sys.exit(0)
# Enter command loop
if loop:
self.loop()
|
walkr/oi
|
oi/core.py
|
CtlProgram.parse_input
|
python
|
def parse_input(self, text):
parts = util.split(text)
command = parts[0] if text and parts else None
command = command.lower() if command else None
args = parts[1:] if len(parts) > 1 else []
return (command, args)
|
Parse ctl user input. Double quotes are used
to group together multi words arguments.
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L289-L298
| null |
class CtlProgram(BaseProgram):
""" The Ctl program
Note:
When a CtlProgram accepts a command it will make a request
to the remote service with that command and any args extracted.
When we add commands via `add_command` method, then those
commands will be executed by our registered function; they will
be not dispatched to the remote service. This is helpful, because
it allows us to register certain local commands, such as `quit`, etc
"""
def __init__(self, description, address, timeout=3000):
super(CtlProgram, self).__init__(description, address)
self.client = ClientWrapper(address, timeout) if address else None
# Add command argument
self.parser.add_argument(
'command', help='command name to execute', nargs='*',
metavar='command')
# Add default commands
self.add_command('quit', lambda p: sys.exit(0), 'quit ctl')
def call(self, command, *args):
""" Execute local OR remote command and show response """
if not command:
return
# Look for local methods first
try:
res = self.registered[command]['function'](self, *args)
return Response('local', res, None)
# Method not found, try remote
except KeyError:
# Execute remote command
res, err = self.client.call(command, *args)
return Response('remote', res, err, self.client.is_multi())
# Local exception
except Exception as e:
return Response('local', res, str(e))
def loop(self):
""" Enter loop, read user input then run command. Repeat """
while True:
text = compat.input('ctl > ')
command, args = self.parse_input(text)
if not command:
continue
response = self.call(command, *args)
response.show()
def run(self, args=None, loop=True):
args = self.parser.parse_args()
super(CtlProgram, self).run(args)
# Execute a single command then exit
if args.command:
# command will come as a list (zero or more elements)
# so, extract the first element as the command name
# and the rest will all be positional arguments
command = args.command[0]
args = args.command[1:] if len(args.command) > 1 else []
response = self.call(command, *args)
response.show()
sys.exit(0)
# Enter command loop
if loop:
self.loop()
|
walkr/oi
|
oi/core.py
|
CtlProgram.loop
|
python
|
def loop(self):
while True:
text = compat.input('ctl > ')
command, args = self.parse_input(text)
if not command:
continue
response = self.call(command, *args)
response.show()
|
Enter loop, read user input then run command. Repeat
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/core.py#L300-L309
|
[
"def show(self):\n if self.multi:\n for addr in self.res:\n self._show(\n self.res[addr], self.err[addr],\n prefix='- {}: '.format(addr), colored=True\n )\n return\n self._show(self.res, self.err)\n",
"def call(self, command, *args):\n \"\"\" Execute local OR remote command and show response \"\"\"\n\n if not command:\n return\n\n # Look for local methods first\n try:\n res = self.registered[command]['function'](self, *args)\n return Response('local', res, None)\n\n # Method not found, try remote\n except KeyError:\n\n # Execute remote command\n res, err = self.client.call(command, *args)\n return Response('remote', res, err, self.client.is_multi())\n\n # Local exception\n except Exception as e:\n return Response('local', res, str(e))\n",
"def parse_input(self, text):\n \"\"\" Parse ctl user input. Double quotes are used\n to group together multi words arguments. \"\"\"\n\n parts = util.split(text)\n command = parts[0] if text and parts else None\n command = command.lower() if command else None\n args = parts[1:] if len(parts) > 1 else []\n\n return (command, args)\n"
] |
class CtlProgram(BaseProgram):
""" The Ctl program
Note:
When a CtlProgram accepts a command it will make a request
to the remote service with that command and any args extracted.
When we add commands via `add_command` method, then those
commands will be executed by our registered function; they will
be not dispatched to the remote service. This is helpful, because
it allows us to register certain local commands, such as `quit`, etc
"""
def __init__(self, description, address, timeout=3000):
super(CtlProgram, self).__init__(description, address)
self.client = ClientWrapper(address, timeout) if address else None
# Add command argument
self.parser.add_argument(
'command', help='command name to execute', nargs='*',
metavar='command')
# Add default commands
self.add_command('quit', lambda p: sys.exit(0), 'quit ctl')
def call(self, command, *args):
""" Execute local OR remote command and show response """
if not command:
return
# Look for local methods first
try:
res = self.registered[command]['function'](self, *args)
return Response('local', res, None)
# Method not found, try remote
except KeyError:
# Execute remote command
res, err = self.client.call(command, *args)
return Response('remote', res, err, self.client.is_multi())
# Local exception
except Exception as e:
return Response('local', res, str(e))
def parse_input(self, text):
""" Parse ctl user input. Double quotes are used
to group together multi words arguments. """
parts = util.split(text)
command = parts[0] if text and parts else None
command = command.lower() if command else None
args = parts[1:] if len(parts) > 1 else []
return (command, args)
def run(self, args=None, loop=True):
args = self.parser.parse_args()
super(CtlProgram, self).run(args)
# Execute a single command then exit
if args.command:
# command will come as a list (zero or more elements)
# so, extract the first element as the command name
# and the rest will all be positional arguments
command = args.command[0]
args = args.command[1:] if len(args.command) > 1 else []
response = self.call(command, *args)
response.show()
sys.exit(0)
# Enter command loop
if loop:
self.loop()
|
walkr/oi
|
oi/util.py
|
split
|
python
|
def split(text):
# Cleanup text
text = text.strip()
text = re.sub('\s+', ' ', text) # collpse multiple spaces
space, quote, parts = ' ', '"', []
part, quoted = '', False
for char in text:
# Encoutered beginning double quote
if char is quote and quoted is False:
quoted = True
continue
# Encountered the ending double quote
if char is quote and quoted is True:
quoted = False
parts.append(part.strip())
part = ''
continue
# Found space in quoted
if char is space and quoted is True:
part += char
continue
# Found space but not quoted
if char is space:
if part:
parts.append(part)
part = ''
continue
# Found other character
if char is not space:
part += char
continue
if part:
parts.append(part.strip())
return parts
|
Split text into arguments accounting for muti-word arguments
which are double quoted
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/oi/util.py#L6-L51
| null |
# Utility functions that didn't fit anywhere else
import re
|
walkr/oi
|
setup.py
|
read_long_description
|
python
|
def read_long_description(readme_file):
try:
import pypandoc
except (ImportError, OSError) as e:
print('No pypandoc or pandoc: %s' % (e,))
if is_py3:
fh = open(readme_file, encoding='utf-8')
else:
fh = open(readme_file)
long_description = fh.read()
fh.close()
return long_description
else:
return pypandoc.convert(readme_file, 'rst')
|
Read package long description from README file
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/setup.py#L17-L31
| null |
#!/usr/bin/env python
import sys
try:
import setuptools
from setuptools import setup
except ImportError:
setuptools = None
from distutils.core import setup
is_py3 = sys.version_info.major == 3
readme_file = 'README.md'
def read_version():
""" Read package version """
with open('./oi/version.py') as fh:
for line in fh:
if line.startswith('VERSION'):
return line.split('=')[1].strip().strip("'")
setup(
name='oi',
version=read_version(),
packages=['oi'],
author='Tony Walker',
author_email='walkr.walkr@gmail.com',
url='https://github.com/walkr/oi',
license='MIT',
description='A library for writing long running processes '
'with a cli interface',
long_description=read_long_description(readme_file),
install_requires=[
'nose',
'nanomsg',
'nanoservice',
'colorama',
],
dependency_links=[
'git+https://github.com/tonysimpson/nanomsg-python.git@master#egg=nanomsg',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
entry_points={
'console_scripts': [
'oi = oi.script:main',
],
},
)
|
walkr/oi
|
setup.py
|
read_version
|
python
|
def read_version():
with open('./oi/version.py') as fh:
for line in fh:
if line.startswith('VERSION'):
return line.split('=')[1].strip().strip("'")
|
Read package version
|
train
|
https://github.com/walkr/oi/blob/d9d8491d0bc920e493d8f716d6078762b8b2c6d3/setup.py#L34-L39
| null |
#!/usr/bin/env python
import sys
try:
import setuptools
from setuptools import setup
except ImportError:
setuptools = None
from distutils.core import setup
is_py3 = sys.version_info.major == 3
readme_file = 'README.md'
def read_long_description(readme_file):
""" Read package long description from README file """
try:
import pypandoc
except (ImportError, OSError) as e:
print('No pypandoc or pandoc: %s' % (e,))
if is_py3:
fh = open(readme_file, encoding='utf-8')
else:
fh = open(readme_file)
long_description = fh.read()
fh.close()
return long_description
else:
return pypandoc.convert(readme_file, 'rst')
setup(
name='oi',
version=read_version(),
packages=['oi'],
author='Tony Walker',
author_email='walkr.walkr@gmail.com',
url='https://github.com/walkr/oi',
license='MIT',
description='A library for writing long running processes '
'with a cli interface',
long_description=read_long_description(readme_file),
install_requires=[
'nose',
'nanomsg',
'nanoservice',
'colorama',
],
dependency_links=[
'git+https://github.com/tonysimpson/nanomsg-python.git@master#egg=nanomsg',
],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
entry_points={
'console_scripts': [
'oi = oi.script:main',
],
},
)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
Engine.repositories
|
python
|
def repositories(self):
return RepositoriesDataFrame(self.__engine.getRepositories(),
self.session, self.__implicits)
|
Returns a DataFrame with the data about the repositories found at
the specified repositories path in the form of siva files.
>>> repos_df = engine.repositories
:rtype: RepositoriesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L57-L67
| null |
class Engine(object):
"""
This is the entry point to any functionality exposed by the source{d}
Engine. It contains methods to initialize the analysis on top of source
code repositories.
>>> from sourced.engine import Engine
>>> repos_df = Engine(sparkSession, "/path/to/my/repositories").repositories
>>> repos_df.show()
:param session: spark session to use
:type session: pyspark.sql.SparkSession
:param repos_path: path to the folder where siva files are stored
:type repos_path: str
:param repos_format: format of the repositories inside the provided folder.
It can be siva, bare or standard.
:type repos_format: str
:param skip_cleanup: don't delete unpacked siva files after using them
:type skip_cleanup: bool
:param skip_read_errors: skip any error encountered during repository reads
:type skip_read_errors: bool
"""
def __init__(self, session, repos_path, repos_format, skip_cleanup=False, skip_read_errors=False):
self.session = session
self.__jsparkSession = session._jsparkSession
self.session.conf.set('spark.tech.sourced.engine.repositories.path', repos_path)
self.session.conf.set('spark.tech.sourced.engine.repositories.format', repos_format)
self.__jvm = self.session.sparkContext._gateway.jvm
java_import(self.__jvm, 'tech.sourced.engine.Engine')
java_import(self.__jvm, 'tech.sourced.engine.package$')
try:
self.__engine = self.__jvm.tech.sourced.engine.Engine.apply(self.__jsparkSession, repos_path, repos_format)
except TypeError as e:
if 'JavaPackage' in e.args[0]:
raise Exception("package \"tech.sourced:engine:<version>\" cannot be found. Please, provide a jar with the package or install the package using --packages")
else:
raise e
if skip_cleanup:
self.__engine.skipCleanup(True)
if skip_read_errors:
self.__engine.skipReadErrors(True)
self.__implicits = getattr(getattr(self.__jvm.tech.sourced.engine, 'package$'), 'MODULE$')
@property
def blobs(self, repository_ids=[], reference_names=[], commit_hashes=[]):
"""
Retrieves the blobs of a list of repositories, reference names and commit hashes.
So the result will be a DataFrame of all the blobs in the given commits that are
in the given references that belong to the given repositories.
>>> blobs_df = engine.blobs(repo_ids, ref_names, hashes)
Calling this function with no arguments is the same as:
>>> engine.repositories.references.commits.tree_entries.blobs
:param repository_ids: list of repository ids to filter by (optional)
:type repository_ids: list of strings
:param reference_names: list of reference names to filter by (optional)
:type reference_names: list of strings
:param commit_hashes: list of hashes to filter by (optional)
:type commit_hashes: list of strings
:rtype: BlobsDataFrame
"""
if not isinstance(repository_ids, list):
raise Exception("repository_ids must be a list")
if not isinstance(reference_names, list):
raise Exception("reference_names must be a list")
if not isinstance(commit_hashes, list):
raise Exception("commit_hashes must be a list")
return BlobsDataFrame(self.__engine.getBlobs(repository_ids,
reference_names,
commit_hashes),
self.session,
self.__implicits)
def from_metadata(self, db_path, db_name='engine_metadata.db'):
"""
Registers in the current session the views of the MetadataSource so the
data is obtained from the metadata database instead of reading the
repositories with the DefaultSource.
:param db_path: path to the folder that contains the database.
:type db_path: str
:param db_name: name of the database file (engine_metadata.db) by default.
:type db_name: str
:returns: the same instance of the engine
:rtype: Engine
"""
self.__engine.fromMetadata(db_path, db_name)
return self
def from_repositories(self):
"""
Registers in the current session the views of the DefaultSource so the
data is obtained by reading the repositories instead of reading from
the MetadataSource. This has no effect if :method:`fromMetadata` has
not been called before.
:returns: the same instance of the engine
:rtype: Engine
"""
return self.__engine.fromRepositories()
def save_metadata(self, path, db_name='engine_metadata.db'):
"""
Saves all the metadata in a SQLite database on the given path with the given filename
(which if not given is "engine_metadata.db". If the database already exists, it will be
overwritten. The given path must exist and must be a directory, otherwise it will throw
a [[SparkException]].
Saved tables are repositories, references, commits and tree_entries. Blobs are not saved.
:param path: where database with the metadata will be stored.
:param db_name: name of the database file (default is "engine_metadata.db")
:raise Exception: when the given path is not a folder or does not exist.
"""
self.__engine.saveMetadata(path, db_name)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
Engine.blobs
|
python
|
def blobs(self, repository_ids=[], reference_names=[], commit_hashes=[]):
if not isinstance(repository_ids, list):
raise Exception("repository_ids must be a list")
if not isinstance(reference_names, list):
raise Exception("reference_names must be a list")
if not isinstance(commit_hashes, list):
raise Exception("commit_hashes must be a list")
return BlobsDataFrame(self.__engine.getBlobs(repository_ids,
reference_names,
commit_hashes),
self.session,
self.__implicits)
|
Retrieves the blobs of a list of repositories, reference names and commit hashes.
So the result will be a DataFrame of all the blobs in the given commits that are
in the given references that belong to the given repositories.
>>> blobs_df = engine.blobs(repo_ids, ref_names, hashes)
Calling this function with no arguments is the same as:
>>> engine.repositories.references.commits.tree_entries.blobs
:param repository_ids: list of repository ids to filter by (optional)
:type repository_ids: list of strings
:param reference_names: list of reference names to filter by (optional)
:type reference_names: list of strings
:param commit_hashes: list of hashes to filter by (optional)
:type commit_hashes: list of strings
:rtype: BlobsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L70-L103
| null |
class Engine(object):
"""
This is the entry point to any functionality exposed by the source{d}
Engine. It contains methods to initialize the analysis on top of source
code repositories.
>>> from sourced.engine import Engine
>>> repos_df = Engine(sparkSession, "/path/to/my/repositories").repositories
>>> repos_df.show()
:param session: spark session to use
:type session: pyspark.sql.SparkSession
:param repos_path: path to the folder where siva files are stored
:type repos_path: str
:param repos_format: format of the repositories inside the provided folder.
It can be siva, bare or standard.
:type repos_format: str
:param skip_cleanup: don't delete unpacked siva files after using them
:type skip_cleanup: bool
:param skip_read_errors: skip any error encountered during repository reads
:type skip_read_errors: bool
"""
def __init__(self, session, repos_path, repos_format, skip_cleanup=False, skip_read_errors=False):
self.session = session
self.__jsparkSession = session._jsparkSession
self.session.conf.set('spark.tech.sourced.engine.repositories.path', repos_path)
self.session.conf.set('spark.tech.sourced.engine.repositories.format', repos_format)
self.__jvm = self.session.sparkContext._gateway.jvm
java_import(self.__jvm, 'tech.sourced.engine.Engine')
java_import(self.__jvm, 'tech.sourced.engine.package$')
try:
self.__engine = self.__jvm.tech.sourced.engine.Engine.apply(self.__jsparkSession, repos_path, repos_format)
except TypeError as e:
if 'JavaPackage' in e.args[0]:
raise Exception("package \"tech.sourced:engine:<version>\" cannot be found. Please, provide a jar with the package or install the package using --packages")
else:
raise e
if skip_cleanup:
self.__engine.skipCleanup(True)
if skip_read_errors:
self.__engine.skipReadErrors(True)
self.__implicits = getattr(getattr(self.__jvm.tech.sourced.engine, 'package$'), 'MODULE$')
@property
def repositories(self):
"""
Returns a DataFrame with the data about the repositories found at
the specified repositories path in the form of siva files.
>>> repos_df = engine.repositories
:rtype: RepositoriesDataFrame
"""
return RepositoriesDataFrame(self.__engine.getRepositories(),
self.session, self.__implicits)
def from_metadata(self, db_path, db_name='engine_metadata.db'):
"""
Registers in the current session the views of the MetadataSource so the
data is obtained from the metadata database instead of reading the
repositories with the DefaultSource.
:param db_path: path to the folder that contains the database.
:type db_path: str
:param db_name: name of the database file (engine_metadata.db) by default.
:type db_name: str
:returns: the same instance of the engine
:rtype: Engine
"""
self.__engine.fromMetadata(db_path, db_name)
return self
def from_repositories(self):
"""
Registers in the current session the views of the DefaultSource so the
data is obtained by reading the repositories instead of reading from
the MetadataSource. This has no effect if :method:`fromMetadata` has
not been called before.
:returns: the same instance of the engine
:rtype: Engine
"""
return self.__engine.fromRepositories()
def save_metadata(self, path, db_name='engine_metadata.db'):
"""
Saves all the metadata in a SQLite database on the given path with the given filename
(which if not given is "engine_metadata.db". If the database already exists, it will be
overwritten. The given path must exist and must be a directory, otherwise it will throw
a [[SparkException]].
Saved tables are repositories, references, commits and tree_entries. Blobs are not saved.
:param path: where database with the metadata will be stored.
:param db_name: name of the database file (default is "engine_metadata.db")
:raise Exception: when the given path is not a folder or does not exist.
"""
self.__engine.saveMetadata(path, db_name)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
Engine.from_metadata
|
python
|
def from_metadata(self, db_path, db_name='engine_metadata.db'):
self.__engine.fromMetadata(db_path, db_name)
return self
|
Registers in the current session the views of the MetadataSource so the
data is obtained from the metadata database instead of reading the
repositories with the DefaultSource.
:param db_path: path to the folder that contains the database.
:type db_path: str
:param db_name: name of the database file (engine_metadata.db) by default.
:type db_name: str
:returns: the same instance of the engine
:rtype: Engine
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L106-L121
| null |
class Engine(object):
"""
This is the entry point to any functionality exposed by the source{d}
Engine. It contains methods to initialize the analysis on top of source
code repositories.
>>> from sourced.engine import Engine
>>> repos_df = Engine(sparkSession, "/path/to/my/repositories").repositories
>>> repos_df.show()
:param session: spark session to use
:type session: pyspark.sql.SparkSession
:param repos_path: path to the folder where siva files are stored
:type repos_path: str
:param repos_format: format of the repositories inside the provided folder.
It can be siva, bare or standard.
:type repos_format: str
:param skip_cleanup: don't delete unpacked siva files after using them
:type skip_cleanup: bool
:param skip_read_errors: skip any error encountered during repository reads
:type skip_read_errors: bool
"""
def __init__(self, session, repos_path, repos_format, skip_cleanup=False, skip_read_errors=False):
self.session = session
self.__jsparkSession = session._jsparkSession
self.session.conf.set('spark.tech.sourced.engine.repositories.path', repos_path)
self.session.conf.set('spark.tech.sourced.engine.repositories.format', repos_format)
self.__jvm = self.session.sparkContext._gateway.jvm
java_import(self.__jvm, 'tech.sourced.engine.Engine')
java_import(self.__jvm, 'tech.sourced.engine.package$')
try:
self.__engine = self.__jvm.tech.sourced.engine.Engine.apply(self.__jsparkSession, repos_path, repos_format)
except TypeError as e:
if 'JavaPackage' in e.args[0]:
raise Exception("package \"tech.sourced:engine:<version>\" cannot be found. Please, provide a jar with the package or install the package using --packages")
else:
raise e
if skip_cleanup:
self.__engine.skipCleanup(True)
if skip_read_errors:
self.__engine.skipReadErrors(True)
self.__implicits = getattr(getattr(self.__jvm.tech.sourced.engine, 'package$'), 'MODULE$')
@property
def repositories(self):
"""
Returns a DataFrame with the data about the repositories found at
the specified repositories path in the form of siva files.
>>> repos_df = engine.repositories
:rtype: RepositoriesDataFrame
"""
return RepositoriesDataFrame(self.__engine.getRepositories(),
self.session, self.__implicits)
def blobs(self, repository_ids=[], reference_names=[], commit_hashes=[]):
"""
Retrieves the blobs of a list of repositories, reference names and commit hashes.
So the result will be a DataFrame of all the blobs in the given commits that are
in the given references that belong to the given repositories.
>>> blobs_df = engine.blobs(repo_ids, ref_names, hashes)
Calling this function with no arguments is the same as:
>>> engine.repositories.references.commits.tree_entries.blobs
:param repository_ids: list of repository ids to filter by (optional)
:type repository_ids: list of strings
:param reference_names: list of reference names to filter by (optional)
:type reference_names: list of strings
:param commit_hashes: list of hashes to filter by (optional)
:type commit_hashes: list of strings
:rtype: BlobsDataFrame
"""
if not isinstance(repository_ids, list):
raise Exception("repository_ids must be a list")
if not isinstance(reference_names, list):
raise Exception("reference_names must be a list")
if not isinstance(commit_hashes, list):
raise Exception("commit_hashes must be a list")
return BlobsDataFrame(self.__engine.getBlobs(repository_ids,
reference_names,
commit_hashes),
self.session,
self.__implicits)
def from_repositories(self):
"""
Registers in the current session the views of the DefaultSource so the
data is obtained by reading the repositories instead of reading from
the MetadataSource. This has no effect if :method:`fromMetadata` has
not been called before.
:returns: the same instance of the engine
:rtype: Engine
"""
return self.__engine.fromRepositories()
def save_metadata(self, path, db_name='engine_metadata.db'):
"""
Saves all the metadata in a SQLite database on the given path with the given filename
(which if not given is "engine_metadata.db". If the database already exists, it will be
overwritten. The given path must exist and must be a directory, otherwise it will throw
a [[SparkException]].
Saved tables are repositories, references, commits and tree_entries. Blobs are not saved.
:param path: where database with the metadata will be stored.
:param db_name: name of the database file (default is "engine_metadata.db")
:raise Exception: when the given path is not a folder or does not exist.
"""
self.__engine.saveMetadata(path, db_name)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
SourcedDataFrame.__generate_method
|
python
|
def __generate_method(name):
try:
func = getattr(DataFrame, name)
except AttributeError as e:
# PySpark version is too old
def func(self, *args, **kwargs):
raise e
return func
wraps = getattr(functools, "wraps", lambda _: lambda f: f) # py3.4+
@wraps(func)
def _wrapper(self, *args, **kwargs):
dataframe = func(self, *args, **kwargs)
if self.__class__ != SourcedDataFrame \
and isinstance(self, SourcedDataFrame) \
and isinstance(dataframe, DataFrame):
return self.__class__(dataframe._jdf, self._session, self._implicits)
return dataframe
return _wrapper
|
Wraps the DataFrame's original method by name to return the derived class instance.
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L176-L198
| null |
class SourcedDataFrame(DataFrame):
"""
Custom source{d} Engine DataFrame that contains some DataFrame overriden methods and
utilities. This class should not be used directly, please get your SourcedDataFrames
using the provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
DataFrame.__init__(self, jdf, session)
self._session = session
self._implicits = implicits
@property
def _engine_dataframe(self):
return self._implicits.EngineDataFrame(self._jdf)
# The following code wraps all the methods of DataFrame as of 2.3
alias = __generate_method("alias")
checkpoint = __generate_method("checkpoint")
coalesce = __generate_method("coalesce")
crossJoin = __generate_method("crossJoin")
crosstab = __generate_method("crosstab")
describe = __generate_method("describe")
distinct = __generate_method("distinct")
dropDuplicates = __generate_method("dropDuplicates")
drop_duplicates = dropDuplicates
drop = __generate_method("drop")
dropna = __generate_method("dropna")
fillna = __generate_method("fillna")
filter = __generate_method("filter")
freqItems = __generate_method("freqItems")
hint = __generate_method("hint")
intersect = __generate_method("intersect")
join = __generate_method("join")
limit = __generate_method("limit")
randomSplit = __generate_method("randomSplit")
repartition = __generate_method("repartition")
replace = __generate_method("replace")
sampleBy = __generate_method("sampleBy")
sample = __generate_method("sample")
selectExpr = __generate_method("selectExpr")
select = __generate_method("select")
sort = __generate_method("sort")
orderBy = sort
sortWithinPartitions = __generate_method("sortWithinPartitions")
subtract = __generate_method("subtract")
summary = __generate_method("summary")
toDF = __generate_method("toDF")
unionByName = __generate_method("unionByName")
union = __generate_method("union")
where = filter
withColumn = __generate_method("withColumn")
withColumnRenamed = __generate_method("withColumnRenamed")
withWatermark = __generate_method("withWatermark")
__generate_method = staticmethod(__generate_method) # to make IntelliSense happy
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
RepositoriesDataFrame.references
|
python
|
def references(self):
return ReferencesDataFrame(self._engine_dataframe.getReferences(),
self._session, self._implicits)
|
Returns the joined DataFrame of references and repositories.
>>> refs_df = repos_df.references
:rtype: ReferencesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L261-L270
| null |
class RepositoriesDataFrame(SourcedDataFrame):
"""
DataFrame containing repositories.
This class should not be instantiated directly, please get your RepositoriesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of the
current repositories.
>>> remote_refs_df = repos_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame references to only contain those rows whose reference is HEAD.
>>> heads_df = repos_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences().getHEAD(),
self._session, self._implicits)
@property
def master_ref(self):
"""
Filters the current DataFrame references to only contain those rows whose reference is master.
>>> master_df = repos_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences().getHEAD(),
self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
RepositoriesDataFrame.remote_references
|
python
|
def remote_references(self):
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
|
Returns a new DataFrame with only the remote references of the
current repositories.
>>> remote_refs_df = repos_df.remote_references
:rtype: ReferencesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L274-L284
| null |
class RepositoriesDataFrame(SourcedDataFrame):
"""
DataFrame containing repositories.
This class should not be instantiated directly, please get your RepositoriesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def references(self):
"""
Returns the joined DataFrame of references and repositories.
>>> refs_df = repos_df.references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences(),
self._session, self._implicits)
@property
@property
def head_ref(self):
"""
Filters the current DataFrame references to only contain those rows whose reference is HEAD.
>>> heads_df = repos_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences().getHEAD(),
self._session, self._implicits)
@property
def master_ref(self):
"""
Filters the current DataFrame references to only contain those rows whose reference is master.
>>> master_df = repos_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences().getHEAD(),
self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
RepositoriesDataFrame.master_ref
|
python
|
def master_ref(self):
return ReferencesDataFrame(self._engine_dataframe.getReferences().getHEAD(),
self._session, self._implicits)
|
Filters the current DataFrame references to only contain those rows whose reference is master.
>>> master_df = repos_df.master_ref
:rtype: ReferencesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L301-L310
| null |
class RepositoriesDataFrame(SourcedDataFrame):
"""
DataFrame containing repositories.
This class should not be instantiated directly, please get your RepositoriesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def references(self):
"""
Returns the joined DataFrame of references and repositories.
>>> refs_df = repos_df.references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences(),
self._session, self._implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of the
current repositories.
>>> remote_refs_df = repos_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame references to only contain those rows whose reference is HEAD.
>>> heads_df = repos_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getReferences().getHEAD(),
self._session, self._implicits)
@property
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
ReferencesDataFrame.head_ref
|
python
|
def head_ref(self):
return ReferencesDataFrame(self._engine_dataframe.getHEAD(),
self._session, self._implicits)
|
Filters the current DataFrame to only contain those rows whose reference is HEAD.
>>> heads_df = refs_df.head_ref
:rtype: ReferencesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L346-L355
| null |
class ReferencesDataFrame(SourcedDataFrame):
"""
DataFrame with references.
This class should not be instantiated directly, please get your ReferencesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of all the current
references.
>>> remote_refs_df = refs_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
@property
def master_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is master.
>>> master_df = refs_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getMaster(),
self._session, self._implicits)
return self.ref('refs/heads/master')
def ref(self, ref):
"""
Filters the current DataFrame to only contain those rows whose reference is the given
reference name.
>>> heads_df = refs_df.ref('refs/heads/HEAD')
:param ref: Reference to get
:type ref: str
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self.filter(self.name == ref)._jdf,
self._session, self._implicits)
@property
def all_reference_commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame, with all of the commits
in all references.
>>> commits_df = refs_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
@property
def commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame. It just returns
the last commit in a reference (aka the current state).
>>> commits_df = refs_df.commits
If you want all commits from the references, use the `all_reference_commits` method,
but take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
>>> commits_df = refs_df.all_reference_commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getCommits(), self._session, self._implicits)
@property
def blobs(self):
"""
Returns this DataFrame joined with the blobs DataSource.
>>> blobs_df = refs_df.blobs
:rtype: BlobsDataFrame
"""
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
ReferencesDataFrame.master_ref
|
python
|
def master_ref(self):
return ReferencesDataFrame(self._engine_dataframe.getMaster(),
self._session, self._implicits)
return self.ref('refs/heads/master')
|
Filters the current DataFrame to only contain those rows whose reference is master.
>>> master_df = refs_df.master_ref
:rtype: ReferencesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L359-L369
| null |
class ReferencesDataFrame(SourcedDataFrame):
"""
DataFrame with references.
This class should not be instantiated directly, please get your ReferencesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of all the current
references.
>>> remote_refs_df = refs_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is HEAD.
>>> heads_df = refs_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getHEAD(),
self._session, self._implicits)
@property
def ref(self, ref):
"""
Filters the current DataFrame to only contain those rows whose reference is the given
reference name.
>>> heads_df = refs_df.ref('refs/heads/HEAD')
:param ref: Reference to get
:type ref: str
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self.filter(self.name == ref)._jdf,
self._session, self._implicits)
@property
def all_reference_commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame, with all of the commits
in all references.
>>> commits_df = refs_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
@property
def commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame. It just returns
the last commit in a reference (aka the current state).
>>> commits_df = refs_df.commits
If you want all commits from the references, use the `all_reference_commits` method,
but take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
>>> commits_df = refs_df.all_reference_commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getCommits(), self._session, self._implicits)
@property
def blobs(self):
"""
Returns this DataFrame joined with the blobs DataSource.
>>> blobs_df = refs_df.blobs
:rtype: BlobsDataFrame
"""
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
ReferencesDataFrame.ref
|
python
|
def ref(self, ref):
return ReferencesDataFrame(self.filter(self.name == ref)._jdf,
self._session, self._implicits)
|
Filters the current DataFrame to only contain those rows whose reference is the given
reference name.
>>> heads_df = refs_df.ref('refs/heads/HEAD')
:param ref: Reference to get
:type ref: str
:rtype: ReferencesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L372-L384
| null |
class ReferencesDataFrame(SourcedDataFrame):
"""
DataFrame with references.
This class should not be instantiated directly, please get your ReferencesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of all the current
references.
>>> remote_refs_df = refs_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is HEAD.
>>> heads_df = refs_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getHEAD(),
self._session, self._implicits)
@property
def master_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is master.
>>> master_df = refs_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getMaster(),
self._session, self._implicits)
return self.ref('refs/heads/master')
@property
def all_reference_commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame, with all of the commits
in all references.
>>> commits_df = refs_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
@property
def commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame. It just returns
the last commit in a reference (aka the current state).
>>> commits_df = refs_df.commits
If you want all commits from the references, use the `all_reference_commits` method,
but take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
>>> commits_df = refs_df.all_reference_commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getCommits(), self._session, self._implicits)
@property
def blobs(self):
"""
Returns this DataFrame joined with the blobs DataSource.
>>> blobs_df = refs_df.blobs
:rtype: BlobsDataFrame
"""
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
ReferencesDataFrame.all_reference_commits
|
python
|
def all_reference_commits(self):
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
|
Returns the current DataFrame joined with the commits DataFrame, with all of the commits
in all references.
>>> commits_df = refs_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L388-L404
| null |
class ReferencesDataFrame(SourcedDataFrame):
"""
DataFrame with references.
This class should not be instantiated directly, please get your ReferencesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of all the current
references.
>>> remote_refs_df = refs_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is HEAD.
>>> heads_df = refs_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getHEAD(),
self._session, self._implicits)
@property
def master_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is master.
>>> master_df = refs_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getMaster(),
self._session, self._implicits)
return self.ref('refs/heads/master')
def ref(self, ref):
"""
Filters the current DataFrame to only contain those rows whose reference is the given
reference name.
>>> heads_df = refs_df.ref('refs/heads/HEAD')
:param ref: Reference to get
:type ref: str
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self.filter(self.name == ref)._jdf,
self._session, self._implicits)
@property
@property
def commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame. It just returns
the last commit in a reference (aka the current state).
>>> commits_df = refs_df.commits
If you want all commits from the references, use the `all_reference_commits` method,
but take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
>>> commits_df = refs_df.all_reference_commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getCommits(), self._session, self._implicits)
@property
def blobs(self):
"""
Returns this DataFrame joined with the blobs DataSource.
>>> blobs_df = refs_df.blobs
:rtype: BlobsDataFrame
"""
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
ReferencesDataFrame.commits
|
python
|
def commits(self):
return CommitsDataFrame(self._engine_dataframe.getCommits(), self._session, self._implicits)
|
Returns the current DataFrame joined with the commits DataFrame. It just returns
the last commit in a reference (aka the current state).
>>> commits_df = refs_df.commits
If you want all commits from the references, use the `all_reference_commits` method,
but take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
>>> commits_df = refs_df.all_reference_commits
:rtype: CommitsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L408-L423
| null |
class ReferencesDataFrame(SourcedDataFrame):
"""
DataFrame with references.
This class should not be instantiated directly, please get your ReferencesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of all the current
references.
>>> remote_refs_df = refs_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is HEAD.
>>> heads_df = refs_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getHEAD(),
self._session, self._implicits)
@property
def master_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is master.
>>> master_df = refs_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getMaster(),
self._session, self._implicits)
return self.ref('refs/heads/master')
def ref(self, ref):
"""
Filters the current DataFrame to only contain those rows whose reference is the given
reference name.
>>> heads_df = refs_df.ref('refs/heads/HEAD')
:param ref: Reference to get
:type ref: str
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self.filter(self.name == ref)._jdf,
self._session, self._implicits)
@property
def all_reference_commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame, with all of the commits
in all references.
>>> commits_df = refs_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
@property
@property
def blobs(self):
"""
Returns this DataFrame joined with the blobs DataSource.
>>> blobs_df = refs_df.blobs
:rtype: BlobsDataFrame
"""
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
ReferencesDataFrame.blobs
|
python
|
def blobs(self):
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session, self._implicits)
|
Returns this DataFrame joined with the blobs DataSource.
>>> blobs_df = refs_df.blobs
:rtype: BlobsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L427-L435
| null |
class ReferencesDataFrame(SourcedDataFrame):
"""
DataFrame with references.
This class should not be instantiated directly, please get your ReferencesDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def remote_references(self):
"""
Returns a new DataFrame with only the remote references of all the current
references.
>>> remote_refs_df = refs_df.remote_references
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getRemoteReferences(),
self._session, self._implicits)
@property
def head_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is HEAD.
>>> heads_df = refs_df.head_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getHEAD(),
self._session, self._implicits)
@property
def master_ref(self):
"""
Filters the current DataFrame to only contain those rows whose reference is master.
>>> master_df = refs_df.master_ref
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self._engine_dataframe.getMaster(),
self._session, self._implicits)
return self.ref('refs/heads/master')
def ref(self, ref):
"""
Filters the current DataFrame to only contain those rows whose reference is the given
reference name.
>>> heads_df = refs_df.ref('refs/heads/HEAD')
:param ref: Reference to get
:type ref: str
:rtype: ReferencesDataFrame
"""
return ReferencesDataFrame(self.filter(self.name == ref)._jdf,
self._session, self._implicits)
@property
def all_reference_commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame, with all of the commits
in all references.
>>> commits_df = refs_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
@property
def commits(self):
"""
Returns the current DataFrame joined with the commits DataFrame. It just returns
the last commit in a reference (aka the current state).
>>> commits_df = refs_df.commits
If you want all commits from the references, use the `all_reference_commits` method,
but take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
>>> commits_df = refs_df.all_reference_commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getCommits(), self._session, self._implicits)
@property
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
CommitsDataFrame.tree_entries
|
python
|
def tree_entries(self):
return TreeEntriesDataFrame(self._engine_dataframe.getTreeEntries(), self._session, self._implicits)
|
Returns this DataFrame joined with the tree entries DataSource.
>>> entries_df = commits_df.tree_entries
:rtype: TreeEntriesDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L476-L484
| null |
class CommitsDataFrame(SourcedDataFrame):
"""
DataFrame with commits data.
This class should not be instantiated directly, please get your CommitsDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
@property
def all_reference_commits(self):
"""
Returns all the commits in all references.
>>> all_commits_df = commits_df.all_reference_commits
Take into account that getting all the commits will lead to a lot of repeated tree
entries and blobs, thus making your query very slow.
Most of the time, you just want the HEAD commit of each reference:
>>> commits_df = refs_df.commits
:rtype: CommitsDataFrame
"""
return CommitsDataFrame(self._engine_dataframe.getAllReferenceCommits(), self._session, self._implicits)
@property
@property
def blobs(self):
"""
Returns a new DataFrame with the blob associated to each tree entry of the commit.
>>> > blobs_df = commits_df.blobs
:rtype: BlobsDataFrame
"""
return BlobsDataFrame(self._engine_dataframe.getBlobs(), self._session,
self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
BlobsDataFrame.classify_languages
|
python
|
def classify_languages(self):
return BlobsWithLanguageDataFrame(self._engine_dataframe.classifyLanguages(),
self._session, self._implicits)
|
Returns a new DataFrame with the language data of any blob added to
its row.
>>> blobs_lang_df = blobs_df.classify_languages
:rtype: BlobsWithLanguageDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L549-L559
| null |
class BlobsDataFrame(SourcedDataFrame):
"""
DataFrame containing blobs data.
This class should not be instantiated directly, please get your BlobsDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
def extract_uasts(self):
"""
Returns a new DataFrame with the parsed UAST data of any blob added to
its row.
>>> blobs_df.extract_uasts
:rtype: UASTsDataFrame
"""
return UASTsDataFrame(self._engine_dataframe.extractUASTs(),
self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
BlobsDataFrame.extract_uasts
|
python
|
def extract_uasts(self):
return UASTsDataFrame(self._engine_dataframe.extractUASTs(),
self._session, self._implicits)
|
Returns a new DataFrame with the parsed UAST data of any blob added to
its row.
>>> blobs_df.extract_uasts
:rtype: UASTsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L562-L572
| null |
class BlobsDataFrame(SourcedDataFrame):
"""
DataFrame containing blobs data.
This class should not be instantiated directly, please get your BlobsDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
def classify_languages(self):
"""
Returns a new DataFrame with the language data of any blob added to
its row.
>>> blobs_lang_df = blobs_df.classify_languages
:rtype: BlobsWithLanguageDataFrame
"""
return BlobsWithLanguageDataFrame(self._engine_dataframe.classifyLanguages(),
self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
UASTsDataFrame.query_uast
|
python
|
def query_uast(self, query, query_col='uast', output_col='result'):
return UASTsDataFrame(self._engine_dataframe.queryUAST(query,
query_col,
output_col),
self._session, self._implicits)
|
Queries the UAST of a file with the given query to get specific nodes.
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]').collect()
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]', 'foo', 'bar')
:param query: xpath query
:type query: str
:param query_col: column containing the list of nodes to query
:type query_col: str
:param output_col: column to place the result of the query
:type output_col: str
:rtype: UASTsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L624-L642
| null |
class UASTsDataFrame(SourcedDataFrame):
"""
DataFrame containing UAST data.
This class should not be instantiated directly, please get your UASTsDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
def extract_tokens(self, input_col='result', output_col='tokens'):
"""
Extracts the tokens from UAST nodes.
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]').extract_tokens().collect()
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]', output_col='foo').extract_tokens('foo', 'bar')
:param input_col: column containing the list of nodes to extract tokens from
:type input_col: str
:param output_col: column to place the resultant tokens
:type output_col: str
:rtype: UASTsDataFrame
"""
return UASTsDataFrame(self._engine_dataframe.extractTokens(input_col, output_col),
self._session, self._implicits)
|
src-d/jgit-spark-connector
|
python/sourced/engine/engine.py
|
UASTsDataFrame.extract_tokens
|
python
|
def extract_tokens(self, input_col='result', output_col='tokens'):
return UASTsDataFrame(self._engine_dataframe.extractTokens(input_col, output_col),
self._session, self._implicits)
|
Extracts the tokens from UAST nodes.
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]').extract_tokens().collect()
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]', output_col='foo').extract_tokens('foo', 'bar')
:param input_col: column containing the list of nodes to extract tokens from
:type input_col: str
:param output_col: column to place the resultant tokens
:type output_col: str
:rtype: UASTsDataFrame
|
train
|
https://github.com/src-d/jgit-spark-connector/blob/79d05a0bcf0da435685d6118828a8884e2fe4b94/python/sourced/engine/engine.py#L645-L659
| null |
class UASTsDataFrame(SourcedDataFrame):
"""
DataFrame containing UAST data.
This class should not be instantiated directly, please get your UASTsDataFrame using the
provided methods.
:param jdf: Java DataFrame
:type jdf: py4j.java_gateway.JavaObject
:param session: Spark Session to use
:type session: pyspark.sql.SparkSession
:param implicits: Implicits object from Scala
:type implicits: py4j.java_gateway.JavaObject
"""
def __init__(self, jdf, session, implicits):
SourcedDataFrame.__init__(self, jdf, session, implicits)
def query_uast(self, query, query_col='uast', output_col='result'):
"""
Queries the UAST of a file with the given query to get specific nodes.
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]').collect()
>>> rows = uasts_df.query_uast('//*[@roleIdentifier]', 'foo', 'bar')
:param query: xpath query
:type query: str
:param query_col: column containing the list of nodes to query
:type query_col: str
:param output_col: column to place the result of the query
:type output_col: str
:rtype: UASTsDataFrame
"""
return UASTsDataFrame(self._engine_dataframe.queryUAST(query,
query_col,
output_col),
self._session, self._implicits)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.init_git_version
|
python
|
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
|
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L85-L132
| null |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def collect_prs_info(self):
"""Collect all pending merge PRs info.
:returns: mapping of PRs by state
"""
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
def show_closed_prs(self):
"""Log only closed PRs."""
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
def show_all_prs(self):
"""Log all PRs grouped by state."""
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.query_remote_ref
|
python
|
def query_remote_ref(self, remote, ref):
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
|
Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L134-L151
|
[
"def log_call(self, cmd, callwith=subprocess.check_call,\n log_level=logging.DEBUG, **kw):\n \"\"\"Wrap a subprocess call with logging\n :param meth: the calling method to use.\n \"\"\"\n logger.log(log_level, \"%s> call %r\", self.cwd, cmd)\n ret = callwith(cmd, **kw)\n if callwith == subprocess.check_output:\n ret = console_to_str(ret)\n return ret\n"
] |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def collect_prs_info(self):
"""Collect all pending merge PRs info.
:returns: mapping of PRs by state
"""
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
def show_closed_prs(self):
"""Log only closed PRs."""
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
def show_all_prs(self):
"""Log all PRs grouped by state."""
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.log_call
|
python
|
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
|
Wrap a subprocess call with logging
:param meth: the calling method to use.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L153-L162
|
[
"def console_to_str(s):\n \"\"\" From pypa/pip project, pip.backwardwardcompat. License MIT. \"\"\"\n try:\n return s.decode(console_encoding)\n except UnicodeDecodeError:\n return s.decode('utf_8')\n except AttributeError: # for tests, #13\n return s\n"
] |
class Repo(object):
    """Aggregate several git remotes and merges into a single target branch.

    NOTE(review): methods here call ``self.log_call``, which is not defined
    in this copy of the class; it is expected to wrap subprocess calls with
    logging and honor a ``callwith`` keyword — confirm against the full file.
    """

    # Cached result of parsing ``git --version``, shared by all instances.
    _git_version = None

    def __init__(self, cwd, remotes, merges, target,
                 shell_command_after=None, fetch_all=False, defaults=None,
                 force=False):
        """Initialize a git repository aggregator

        :param cwd: path to the directory where to initialize the repository
        :param remotes: list of remote linked to the repository. A remote is
            a dict {'name': '', 'url': ''}
        :param merges: list of merge to apply to build the aggregated
            repository. A merge is a dict {'remote': '', 'ref': ''}
        :param target: the aggregation target (a dict providing at least the
            'branch' and 'remote' keys)
        :param shell_command_after: an optional list of shell command to
            execute after the aggregation
        :param fetch_all:
            Can be an iterable (recommended: ``frozenset``) that yields names
            of remotes where all refs should be fetched, or ``True`` to do it
            for every configured remote.
        :param defaults:
            Collection of default parameters to be passed to git.
        :param bool force:
            When ``False``, it will stop if repo is dirty.
        """
        self.cwd = cwd
        self.remotes = remotes
        if fetch_all is True:
            # fetch everything from every configured remote
            self.fetch_all = frozenset(r["name"] for r in remotes)
        else:
            self.fetch_all = fetch_all or frozenset()
        self.merges = merges
        self.target = target
        self.shell_command_after = shell_command_after or []
        self.defaults = defaults or dict()
        self.force = force

    @property
    def git_version(self):
        """Locally installed git version as a tuple (parsed once, cached)."""
        cls = self.__class__
        version = cls._git_version
        if version is not None:
            return version
        return cls.init_git_version(
            console_to_str(subprocess.check_output(
                ['git', '--version'])))

    @classmethod
    def init_git_version(cls, v_str):
        r"""Parse git version string and store the resulting tuple on self.

        :returns: the parsed version tuple

        Only the first 3 digits are kept. This is good enough for the few
        version dependent cases we need, and coarse enough to avoid
        more complicated parsing.

        Some real-life examples::

          >>> Repo.init_git_version('git version 1.8.5.3')
          (1, 8, 5)
          >>> Repo.init_git_version('git version 1.7.2.5')
          (1, 7, 2)

        Seen on MacOSX (not on MacPorts)::

          >>> Repo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
          (1, 8, 5)

        Seen on Windows (Tortoise Git)::

          >>> Repo.init_git_version('git version 1.8.4.msysgit.0')
          (1, 8, 4)

        A compiled version::

          >>> Repo.init_git_version('git version 2.0.3.2.g996b0fd')
          (2, 0, 3)

        Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:

          >>> Repo.init_git_version('git version 1.7.9\nhub version 1.11.0')
          (1, 7, 9)

        This one does not exist, allowing us to prove that this method
        actually governs the :attr:`git_version` property

          >>> Repo.init_git_version('git version 0.0.666')
          (0, 0, 666)
          >>> Repo('', [], [], None).git_version
          (0, 0, 666)

        Expected exceptions::

          >>> try: Repo.init_git_version('invalid')
          ... except ValueError: pass

        After playing with it, we must reset it so that tests can run with
        the proper detected one, if needed::

          >>> Repo.init_git_version(None)
        """
        if v_str is None:
            cls._git_version = None
            return
        v_str = v_str.strip()
        try:
            version = cls._git_version = tuple(
                int(x) for x in v_str.split()[2].split('.')[:3])
        except Exception:
            raise ValueError("Could not parse git version output %r. Please "
                             "report this" % v_str)
        return version

    def query_remote_ref(self, remote, ref):
        """Query remote repo about given ref.

        :return: ``('tag', sha)`` if ref is a tag in remote
                 ``('branch', sha)`` if ref is branch (aka "head") in remote
                 ``(None, ref)`` if ref does not exist in remote. This happens
                 notably if ref if a commit sha (they can't be queried)
        """
        out = self.log_call(['git', 'ls-remote', remote, ref],
                            cwd=self.cwd,
                            callwith=subprocess.check_output).strip()
        for sha, fullref in (line.split() for line in out.splitlines()):
            if fullref == 'refs/heads/' + ref:
                return 'branch', sha
            elif fullref == 'refs/tags/' + ref:
                return 'tag', sha
            elif fullref == ref and ref == 'HEAD':
                return 'HEAD', sha
        return None, ref

    def aggregate(self):
        """ Aggregate all merges into the target branch

        If the target_dir doesn't exist, create an empty git repo otherwise
        clean it, add all remotes , and merge all merges.
        """
        logger.info('Start aggregation of %s', self.cwd)
        target_dir = self.cwd
        is_new = not os.path.exists(target_dir)
        if is_new:
            self.init_repository(target_dir)
        self._switch_to_branch(self.target['branch'])
        for r in self.remotes:
            self._set_remote(**r)
        self.fetch()
        merges = self.merges
        if not is_new:
            # reset to the first merge
            origin = merges[0]
            merges = merges[1:]
            self._reset_to(origin["remote"], origin["ref"])
        for merge in merges:
            self._merge(merge)
        self._execute_shell_command_after()
        logger.info('End aggregation of %s', self.cwd)

    def init_repository(self, target_dir):
        """Create an empty git repository at *target_dir*."""
        logger.info('Init empty git repository in %s', target_dir)
        self.log_call(['git', 'init', target_dir])

    def fetch(self):
        """Fetch every remote referenced by the configured merges."""
        basecmd = ("git", "fetch")
        logger.info("Fetching required remotes")
        for merge in self.merges:
            cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
            if merge["remote"] not in self.fetch_all:
                # not fetching everything from this remote: restrict to
                # the merged ref
                cmd += (merge["ref"],)
            self.log_call(cmd, cwd=self.cwd)

    def push(self):
        """Force-push the target branch to the target remote."""
        remote = self.target['remote']
        branch = self.target['branch']
        logger.info("Push %s to %s", branch, remote)
        self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)

    def _check_status(self):
        """Check repo status and except if dirty."""
        logger.info('Checking repo status')
        status = self.log_call(
            ['git', 'status', '--porcelain'],
            callwith=subprocess.check_output,
            cwd=self.cwd,
        )
        if status:
            raise DirtyException(status)

    def _fetch_options(self, merge):
        """Get the fetch options from the given merge dict."""
        cmd = tuple()
        for option in FETCH_DEFAULTS:
            value = merge.get(option, self.defaults.get(option))
            if value:
                cmd += ("--%s" % option, str(value))
        return cmd

    def _reset_to(self, remote, ref):
        """Hard-reset the working copy to *ref* of *remote* and clean it."""
        if not self.force:
            self._check_status()
        logger.info('Reset branch to %s %s', remote, ref)
        rtype, sha = self.query_remote_ref(remote, ref)
        if rtype is None and not ishex(ref):
            raise GitAggregatorException(
                'Could not reset %s to %s. No commit found for %s '
                % (remote, ref, ref))
        cmd = ['git', 'reset', '--hard', sha]
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd.insert(2, '--quiet')
        self.log_call(cmd, cwd=self.cwd)
        self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)

    def _switch_to_branch(self, branch_name):
        """Check out *branch_name*, creating or resetting it (``-B``)."""
        logger.info("Switch to branch %s", branch_name)
        self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)

    def _execute_shell_command_after(self):
        """Run every configured post-aggregation shell command in the repo."""
        logger.info('Execute shell after commands')
        for command in self.shell_command_after:
            self.log_call(command, shell=True, cwd=self.cwd)

    def _merge(self, merge):
        """Pull ``merge['ref']`` from ``merge['remote']`` into the checkout."""
        remote, ref = merge["remote"], merge["ref"]
        logger.info("Pull %s, %s", remote, ref)
        args = ["git", "pull"]
        # --edit and --no-edit appear with Git 1.7.10
        # see Documentation/RelNotes/1.7.10.txt of Git
        # (https://git.kernel.org/cgit/git/git.git/tree)
        if self.git_version >= (1, 7, 10):
            args.append('--no-edit')
        if logger.getEffectiveLevel() != logging.DEBUG:
            args.append('--quiet')
        args.extend(self._fetch_options(merge))
        args.extend((remote, ref))
        self.log_call(tuple(args), cwd=self.cwd)

    def _get_remotes(self):
        """Return a mapping of remote name -> url for the repository.

        :raises NotImplementedError: when a remote uses different urls for
            push and fetch, since aggregation assumes a single url per remote.
        """
        lines = self.log_call(
            ['git', 'remote', '-v'],
            callwith=subprocess.check_output,
            cwd=self.cwd).splitlines()
        remotes = {}
        for line in lines:
            name, url = line.split('\t')
            # drop the trailing "(fetch)" / "(push)" annotation
            url = url.split(' ')[0]
            v = remotes.setdefault(name, url)
            if v != url:
                raise NotImplementedError(
                    'Different urls for push and fetch for remote %s\n'
                    '%s != %s' % (name, url, v)
                )
        return remotes

    def _set_remote(self, name, url):
        """Add remote to the repository. It's equivalent to the command
        git remote add <name> <url>

        If the remote already exists with an other url, it's removed
        and added again
        """
        remotes = self._get_remotes()
        existing_url = remotes.get(name)
        if existing_url == url:
            logger.info('Remote already exists %s <%s>', name, url)
            return
        if not existing_url:
            logger.info('Adding remote %s <%s>', name, url)
            self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
        else:
            # url changed: re-create the remote with the new url
            logger.info('Replacing remote %s <%s> -> <%s>',
                        name, existing_url, url)
            self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
            self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)

    def _github_api_get(self, path):
        """GET a GitHub REST API *path*, authenticated when possible.

        When ``GITHUB_TOKEN`` is set, the token is sent in the
        ``Authorization`` header: GitHub removed support for the
        ``access_token`` query parameter, and a header also keeps the
        token out of urls and logs.
        """
        url = 'https://api.github.com' + path
        headers = {}
        token = os.environ.get('GITHUB_TOKEN')
        if token:
            headers['Authorization'] = 'token ' + token
        return requests.get(url, headers=headers)

    def collect_prs_info(self):
        """Collect all pending merge PRs info.

        :returns: mapping of PRs by state
        """
        # NB: the dot of ".git" must be escaped, otherwise e.g. "barXgit"
        # would be truncated to "bar" in the captured repo name.
        REPO_RE = re.compile(
            r'^(https://github.com/|git@github.com:)'
            r'(?P<owner>.*?)/(?P<repo>.*?)(\.git)?$')
        PULL_RE = re.compile(
            r'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
        remotes = {r['name']: r['url'] for r in self.remotes}
        all_prs = {}
        for merge in self.merges:
            remote = merge['remote']
            ref = merge['ref']
            repo_url = remotes[remote]
            repo_mo = REPO_RE.match(repo_url)
            if not repo_mo:
                logger.debug('%s is not a github repo', repo_url)
                continue
            pull_mo = PULL_RE.match(ref)
            if not pull_mo:
                logger.debug('%s is not a github pull request', ref)
                continue
            pr_info = {
                'owner': repo_mo.group('owner'),
                'repo': repo_mo.group('repo'),
                'pr': pull_mo.group('pr'),
            }
            pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
            pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
            pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
            r = self._github_api_get('/repos/{path}'.format(**pr_info))
            if r.status_code != 200:
                logger.warning(
                    'Could not get status of {path}. '
                    'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
                )
                continue
            data = r.json()  # parse the response body only once
            pr_info['state'] = data.get('state')
            pr_info['merged'] = (
                'merged' if data.get('merged') else 'not merged'
            )
            all_prs.setdefault(pr_info['state'], []).append(pr_info)
        return all_prs

    def show_closed_prs(self):
        """Log only closed PRs."""
        for pr_info in self.collect_prs_info().get('closed', []):
            logger.info(
                '{url} in state {state} ({merged})'.format(**pr_info)
            )

    def show_all_prs(self):
        """Log all PRs grouped by state."""
        for prs in self.collect_prs_info().values():
            for pr_info in prs:
                logger.info(
                    '{url} in state {state} ({merged})'.format(**pr_info)
                )
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.aggregate
|
python
|
def aggregate(self):
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
|
Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L164-L189
|
[
"def init_repository(self, target_dir):\n logger.info('Init empty git repository in %s', target_dir)\n self.log_call(['git', 'init', target_dir])\n",
"def fetch(self):\n basecmd = (\"git\", \"fetch\")\n logger.info(\"Fetching required remotes\")\n for merge in self.merges:\n cmd = basecmd + self._fetch_options(merge) + (merge[\"remote\"],)\n if merge[\"remote\"] not in self.fetch_all:\n cmd += (merge[\"ref\"],)\n self.log_call(cmd, cwd=self.cwd)\n",
"def _reset_to(self, remote, ref):\n if not self.force:\n self._check_status()\n logger.info('Reset branch to %s %s', remote, ref)\n rtype, sha = self.query_remote_ref(remote, ref)\n if rtype is None and not ishex(ref):\n raise GitAggregatorException(\n 'Could not reset %s to %s. No commit found for %s '\n % (remote, ref, ref))\n cmd = ['git', 'reset', '--hard', sha]\n if logger.getEffectiveLevel() != logging.DEBUG:\n cmd.insert(2, '--quiet')\n self.log_call(cmd, cwd=self.cwd)\n self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)\n",
"def _switch_to_branch(self, branch_name):\n # check if the branch already exists\n logger.info(\"Switch to branch %s\", branch_name)\n self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)\n",
"def _execute_shell_command_after(self):\n logger.info('Execute shell after commands')\n for cmd in self.shell_command_after:\n self.log_call(cmd, shell=True, cwd=self.cwd)\n",
"def _merge(self, merge):\n logger.info(\"Pull %s, %s\", merge[\"remote\"], merge[\"ref\"])\n cmd = (\"git\", \"pull\")\n if self.git_version >= (1, 7, 10):\n # --edit and --no-edit appear with Git 1.7.10\n # see Documentation/RelNotes/1.7.10.txt of Git\n # (https://git.kernel.org/cgit/git/git.git/tree)\n cmd += ('--no-edit',)\n if logger.getEffectiveLevel() != logging.DEBUG:\n cmd += ('--quiet',)\n cmd += self._fetch_options(merge) + (merge[\"remote\"], merge[\"ref\"])\n self.log_call(cmd, cwd=self.cwd)\n",
"def _set_remote(self, name, url):\n \"\"\"Add remote to the repository. It's equivalent to the command\n git remote add <name> <url>\n\n If the remote already exists with an other url, it's removed\n and added aggain\n \"\"\"\n remotes = self._get_remotes()\n exising_url = remotes.get(name)\n if exising_url == url:\n logger.info('Remote already exists %s <%s>', name, url)\n return\n if not exising_url:\n logger.info('Adding remote %s <%s>', name, url)\n self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)\n else:\n logger.info('Remote remote %s <%s> -> <%s>',\n name, exising_url, url)\n self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)\n self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)\n"
] |
class Repo(object):
    """Aggregate several git remotes and merges into a single target branch."""

    # Cached result of parsing ``git --version``, shared by all instances.
    _git_version = None

    def __init__(self, cwd, remotes, merges, target,
                 shell_command_after=None, fetch_all=False, defaults=None,
                 force=False):
        """Initialize a git repository aggregator

        :param cwd: path to the directory where to initialize the repository
        :param remotes: list of remote linked to the repository. A remote is
            a dict {'name': '', 'url': ''}
        :param merges: list of merge to apply to build the aggregated
            repository. A merge is a dict {'remote': '', 'ref': ''}
        :param target: the aggregation target (a dict providing at least the
            'branch' and 'remote' keys)
        :param shell_command_after: an optional list of shell command to
            execute after the aggregation
        :param fetch_all:
            Can be an iterable (recommended: ``frozenset``) that yields names
            of remotes where all refs should be fetched, or ``True`` to do it
            for every configured remote.
        :param defaults:
            Collection of default parameters to be passed to git.
        :param bool force:
            When ``False``, it will stop if repo is dirty.
        """
        self.cwd = cwd
        self.remotes = remotes
        if fetch_all is True:
            # fetch everything from every configured remote
            self.fetch_all = frozenset(r["name"] for r in remotes)
        else:
            self.fetch_all = fetch_all or frozenset()
        self.merges = merges
        self.target = target
        self.shell_command_after = shell_command_after or []
        self.defaults = defaults or dict()
        self.force = force

    @property
    def git_version(self):
        """Locally installed git version as a tuple (parsed once, cached)."""
        cls = self.__class__
        version = cls._git_version
        if version is not None:
            return version
        return cls.init_git_version(
            console_to_str(subprocess.check_output(
                ['git', '--version'])))

    @classmethod
    def init_git_version(cls, v_str):
        r"""Parse git version string and store the resulting tuple on self.

        :returns: the parsed version tuple

        Only the first 3 digits are kept. This is good enough for the few
        version dependent cases we need, and coarse enough to avoid
        more complicated parsing.

        Some real-life examples::

          >>> Repo.init_git_version('git version 1.8.5.3')
          (1, 8, 5)
          >>> Repo.init_git_version('git version 1.7.2.5')
          (1, 7, 2)

        Seen on MacOSX (not on MacPorts)::

          >>> Repo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
          (1, 8, 5)

        Seen on Windows (Tortoise Git)::

          >>> Repo.init_git_version('git version 1.8.4.msysgit.0')
          (1, 8, 4)

        A compiled version::

          >>> Repo.init_git_version('git version 2.0.3.2.g996b0fd')
          (2, 0, 3)

        Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:

          >>> Repo.init_git_version('git version 1.7.9\nhub version 1.11.0')
          (1, 7, 9)

        This one does not exist, allowing us to prove that this method
        actually governs the :attr:`git_version` property

          >>> Repo.init_git_version('git version 0.0.666')
          (0, 0, 666)
          >>> Repo('', [], [], None).git_version
          (0, 0, 666)

        Expected exceptions::

          >>> try: Repo.init_git_version('invalid')
          ... except ValueError: pass

        After playing with it, we must reset it so that tests can run with
        the proper detected one, if needed::

          >>> Repo.init_git_version(None)
        """
        if v_str is None:
            cls._git_version = None
            return
        v_str = v_str.strip()
        try:
            version = cls._git_version = tuple(
                int(x) for x in v_str.split()[2].split('.')[:3])
        except Exception:
            raise ValueError("Could not parse git version output %r. Please "
                             "report this" % v_str)
        return version

    def query_remote_ref(self, remote, ref):
        """Query remote repo about given ref.

        :return: ``('tag', sha)`` if ref is a tag in remote
                 ``('branch', sha)`` if ref is branch (aka "head") in remote
                 ``(None, ref)`` if ref does not exist in remote. This happens
                 notably if ref if a commit sha (they can't be queried)
        """
        out = self.log_call(['git', 'ls-remote', remote, ref],
                            cwd=self.cwd,
                            callwith=subprocess.check_output).strip()
        for sha, fullref in (line.split() for line in out.splitlines()):
            if fullref == 'refs/heads/' + ref:
                return 'branch', sha
            elif fullref == 'refs/tags/' + ref:
                return 'tag', sha
            elif fullref == ref and ref == 'HEAD':
                return 'HEAD', sha
        return None, ref

    def log_call(self, cmd, callwith=subprocess.check_call,
                 log_level=logging.DEBUG, **kw):
        """Wrap a subprocess call with logging

        :param callwith: the subprocess function to use for the call.
        """
        logger.log(log_level, "%s> call %r", self.cwd, cmd)
        ret = callwith(cmd, **kw)
        if callwith is subprocess.check_output:
            # decode the captured console bytes to str for callers
            ret = console_to_str(ret)
        return ret

    def init_repository(self, target_dir):
        """Create an empty git repository at *target_dir*."""
        logger.info('Init empty git repository in %s', target_dir)
        self.log_call(['git', 'init', target_dir])

    def fetch(self):
        """Fetch every remote referenced by the configured merges."""
        basecmd = ("git", "fetch")
        logger.info("Fetching required remotes")
        for merge in self.merges:
            cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
            if merge["remote"] not in self.fetch_all:
                # not fetching everything from this remote: restrict to
                # the merged ref
                cmd += (merge["ref"],)
            self.log_call(cmd, cwd=self.cwd)

    def push(self):
        """Force-push the target branch to the target remote."""
        remote = self.target['remote']
        branch = self.target['branch']
        logger.info("Push %s to %s", branch, remote)
        self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)

    def _check_status(self):
        """Check repo status and except if dirty."""
        logger.info('Checking repo status')
        status = self.log_call(
            ['git', 'status', '--porcelain'],
            callwith=subprocess.check_output,
            cwd=self.cwd,
        )
        if status:
            raise DirtyException(status)

    def _fetch_options(self, merge):
        """Get the fetch options from the given merge dict."""
        cmd = tuple()
        for option in FETCH_DEFAULTS:
            value = merge.get(option, self.defaults.get(option))
            if value:
                cmd += ("--%s" % option, str(value))
        return cmd

    def _reset_to(self, remote, ref):
        """Hard-reset the working copy to *ref* of *remote* and clean it."""
        if not self.force:
            self._check_status()
        logger.info('Reset branch to %s %s', remote, ref)
        rtype, sha = self.query_remote_ref(remote, ref)
        if rtype is None and not ishex(ref):
            raise GitAggregatorException(
                'Could not reset %s to %s. No commit found for %s '
                % (remote, ref, ref))
        cmd = ['git', 'reset', '--hard', sha]
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd.insert(2, '--quiet')
        self.log_call(cmd, cwd=self.cwd)
        self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)

    def _switch_to_branch(self, branch_name):
        """Check out *branch_name*, creating or resetting it (``-B``)."""
        logger.info("Switch to branch %s", branch_name)
        self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)

    def _execute_shell_command_after(self):
        """Run every configured post-aggregation shell command in the repo."""
        logger.info('Execute shell after commands')
        for command in self.shell_command_after:
            self.log_call(command, shell=True, cwd=self.cwd)

    def _merge(self, merge):
        """Pull ``merge['ref']`` from ``merge['remote']`` into the checkout."""
        remote, ref = merge["remote"], merge["ref"]
        logger.info("Pull %s, %s", remote, ref)
        args = ["git", "pull"]
        # --edit and --no-edit appear with Git 1.7.10
        # see Documentation/RelNotes/1.7.10.txt of Git
        # (https://git.kernel.org/cgit/git/git.git/tree)
        if self.git_version >= (1, 7, 10):
            args.append('--no-edit')
        if logger.getEffectiveLevel() != logging.DEBUG:
            args.append('--quiet')
        args.extend(self._fetch_options(merge))
        args.extend((remote, ref))
        self.log_call(tuple(args), cwd=self.cwd)

    def _get_remotes(self):
        """Return a mapping of remote name -> url for the repository.

        :raises NotImplementedError: when a remote uses different urls for
            push and fetch, since aggregation assumes a single url per remote.
        """
        lines = self.log_call(
            ['git', 'remote', '-v'],
            callwith=subprocess.check_output,
            cwd=self.cwd).splitlines()
        remotes = {}
        for line in lines:
            name, url = line.split('\t')
            # drop the trailing "(fetch)" / "(push)" annotation
            url = url.split(' ')[0]
            v = remotes.setdefault(name, url)
            if v != url:
                raise NotImplementedError(
                    'Different urls for push and fetch for remote %s\n'
                    '%s != %s' % (name, url, v)
                )
        return remotes

    def _set_remote(self, name, url):
        """Add remote to the repository. It's equivalent to the command
        git remote add <name> <url>

        If the remote already exists with an other url, it's removed
        and added again
        """
        remotes = self._get_remotes()
        existing_url = remotes.get(name)
        if existing_url == url:
            logger.info('Remote already exists %s <%s>', name, url)
            return
        if not existing_url:
            logger.info('Adding remote %s <%s>', name, url)
            self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
        else:
            # url changed: re-create the remote with the new url
            logger.info('Replacing remote %s <%s> -> <%s>',
                        name, existing_url, url)
            self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
            self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)

    def _github_api_get(self, path):
        """GET a GitHub REST API *path*, authenticated when possible.

        When ``GITHUB_TOKEN`` is set, the token is sent in the
        ``Authorization`` header: GitHub removed support for the
        ``access_token`` query parameter, and a header also keeps the
        token out of urls and logs.
        """
        url = 'https://api.github.com' + path
        headers = {}
        token = os.environ.get('GITHUB_TOKEN')
        if token:
            headers['Authorization'] = 'token ' + token
        return requests.get(url, headers=headers)

    def collect_prs_info(self):
        """Collect all pending merge PRs info.

        :returns: mapping of PRs by state
        """
        # NB: the dot of ".git" must be escaped, otherwise e.g. "barXgit"
        # would be truncated to "bar" in the captured repo name.
        REPO_RE = re.compile(
            r'^(https://github.com/|git@github.com:)'
            r'(?P<owner>.*?)/(?P<repo>.*?)(\.git)?$')
        PULL_RE = re.compile(
            r'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
        remotes = {r['name']: r['url'] for r in self.remotes}
        all_prs = {}
        for merge in self.merges:
            remote = merge['remote']
            ref = merge['ref']
            repo_url = remotes[remote]
            repo_mo = REPO_RE.match(repo_url)
            if not repo_mo:
                logger.debug('%s is not a github repo', repo_url)
                continue
            pull_mo = PULL_RE.match(ref)
            if not pull_mo:
                logger.debug('%s is not a github pull request', ref)
                continue
            pr_info = {
                'owner': repo_mo.group('owner'),
                'repo': repo_mo.group('repo'),
                'pr': pull_mo.group('pr'),
            }
            pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
            pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
            pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
            r = self._github_api_get('/repos/{path}'.format(**pr_info))
            if r.status_code != 200:
                logger.warning(
                    'Could not get status of {path}. '
                    'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
                )
                continue
            data = r.json()  # parse the response body only once
            pr_info['state'] = data.get('state')
            pr_info['merged'] = (
                'merged' if data.get('merged') else 'not merged'
            )
            all_prs.setdefault(pr_info['state'], []).append(pr_info)
        return all_prs

    def show_closed_prs(self):
        """Log only closed PRs."""
        for pr_info in self.collect_prs_info().get('closed', []):
            logger.info(
                '{url} in state {state} ({merged})'.format(**pr_info)
            )

    def show_all_prs(self):
        """Log all PRs grouped by state."""
        for prs in self.collect_prs_info().values():
            for pr_info in prs:
                logger.info(
                    '{url} in state {state} ({merged})'.format(**pr_info)
                )
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo._check_status
|
python
|
def _check_status(self):
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
|
Check repo status and except if dirty.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L210-L219
| null |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
    """Check out *branch_name*, creating or resetting it as needed."""
    # ``checkout -B`` creates the branch if missing, resets it otherwise
    logger.info("Switch to branch %s", branch_name)
    self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
    """Run the configured post-aggregation shell commands in the repo."""
    logger.info('Execute shell after commands')
    for cmd in self.shell_command_after:
        # commands are user-supplied shell strings, hence shell=True
        self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
    """Pull (fetch + merge) ``merge['ref']`` from ``merge['remote']``.

    Runs ``git pull`` with the merge's fetch options; suppresses the
    commit-message editor on git >= 1.7.10 and stays quiet unless
    debug logging is enabled.
    """
    logger.info("Pull %s, %s", merge["remote"], merge["ref"])
    cmd = ("git", "pull")
    if self.git_version >= (1, 7, 10):
        # --edit and --no-edit appear with Git 1.7.10
        # see Documentation/RelNotes/1.7.10.txt of Git
        # (https://git.kernel.org/cgit/git/git.git/tree)
        cmd += ('--no-edit',)
    if logger.getEffectiveLevel() != logging.DEBUG:
        cmd += ('--quiet',)
    cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
    self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
    """GET the github API *path*, authenticating with $GITHUB_TOKEN if set.

    The token is sent in the ``Authorization`` header: query-string
    ``access_token`` authentication was deprecated and then removed
    by the GitHub REST API, and appending ``?access_token=`` also
    broke paths that already carried a query string.

    :returns: the ``requests`` response object.
    """
    url = 'https://api.github.com' + path
    headers = {}
    token = os.environ.get('GITHUB_TOKEN')
    if token:
        headers['Authorization'] = 'token ' + token
    return requests.get(url, headers=headers)
def collect_prs_info(self):
    """Collect info about all pending merges that are github pull requests.

    Merges whose remote is not a github repository, or whose ref is
    not a pull-request head, are skipped.

    :returns: mapping of PR state -> list of PR info dicts with keys
        ``owner``, ``repo``, ``pr``, ``path``, ``url``, ``shortcut``,
        ``state`` and ``merged``.
    """
    # fix: '.' was unescaped, so e.g. "fooXgit" matched like "foo.git"
    REPO_RE = re.compile(
        r'^(https://github.com/|git@github.com:)'
        r'(?P<owner>.*?)/(?P<repo>.*?)(\.git)?$')
    PULL_RE = re.compile(
        r'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
    remotes = {r['name']: r['url'] for r in self.remotes}
    all_prs = {}
    for merge in self.merges:
        remote = merge['remote']
        ref = merge['ref']
        repo_url = remotes[remote]
        repo_mo = REPO_RE.match(repo_url)
        if not repo_mo:
            logger.debug('%s is not a github repo', repo_url)
            continue
        pull_mo = PULL_RE.match(ref)
        if not pull_mo:
            # fix: message previously read "pull reqeust"
            logger.debug('%s is not a github pull request', ref)
            continue
        pr_info = {
            'owner': repo_mo.group('owner'),
            'repo': repo_mo.group('repo'),
            'pr': pull_mo.group('pr'),
        }
        pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
        pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
        pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
        r = self._github_api_get('/repos/{path}'.format(**pr_info))
        if r.status_code != 200:
            logger.warning(
                'Could not get status of {path}. '
                'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
            )
            continue
        data = r.json()  # parse the API response only once
        pr_info['state'] = data.get('state')
        pr_info['merged'] = (
            '' if data.get('merged') else 'not '
        ) + 'merged'
        all_prs.setdefault(pr_info['state'], []).append(pr_info)
    return all_prs
def show_closed_prs(self):
    """Log only the pending-merge PRs that are in the closed state."""
    closed = self.collect_prs_info().get('closed', [])
    for info in closed:
        logger.info(
            '{url} in state {state} ({merged})'.format(**info)
        )
def show_all_prs(self):
    """Log every pending-merge PR, whatever its state."""
    for prs in self.collect_prs_info().values():
        for info in prs:
            logger.info(
                '{url} in state {state} ({merged})'.format(**info)
            )
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo._fetch_options
|
python
|
def _fetch_options(self, merge):
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
|
Get the fetch options from the given merge dict.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L221-L228
| null |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def collect_prs_info(self):
"""Collect all pending merge PRs info.
:returns: mapping of PRs by state
"""
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
def show_closed_prs(self):
"""Log only closed PRs."""
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
def show_all_prs(self):
"""Log all PRs grouped by state."""
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo._set_remote
|
python
|
def _set_remote(self, name, url):
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
|
Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with another url, it's removed
and added again
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L285-L304
|
[
"def log_call(self, cmd, callwith=subprocess.check_call,\n log_level=logging.DEBUG, **kw):\n \"\"\"Wrap a subprocess call with logging\n :param meth: the calling method to use.\n \"\"\"\n logger.log(log_level, \"%s> call %r\", self.cwd, cmd)\n ret = callwith(cmd, **kw)\n if callwith == subprocess.check_output:\n ret = console_to_str(ret)\n return ret\n",
"def _get_remotes(self):\n lines = self.log_call(\n ['git', 'remote', '-v'],\n callwith=subprocess.check_output,\n cwd=self.cwd).splitlines()\n remotes = {}\n for line in lines:\n name, url = line.split('\\t')\n url = url.split(' ')[0]\n v = remotes.setdefault(name, url)\n if v != url:\n raise NotImplementedError(\n 'Different urls gor push and fetch for remote %s\\n'\n '%s != %s' % (name, url, v)\n )\n return remotes\n"
] |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def collect_prs_info(self):
"""Collect all pending merge PRs info.
:returns: mapping of PRs by state
"""
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
def show_closed_prs(self):
"""Log only closed PRs."""
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
def show_all_prs(self):
"""Log all PRs grouped by state."""
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.collect_prs_info
|
python
|
def collect_prs_info(self):
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
|
Collect all pending merge PRs info.
:returns: mapping of PRs by state
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L313-L357
|
[
"def _github_api_get(self, path):\n url = 'https://api.github.com' + path\n token = os.environ.get('GITHUB_TOKEN')\n if token:\n url += '?access_token=' + token\n return requests.get(url)\n"
] |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def show_closed_prs(self):
"""Log only closed PRs."""
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
def show_all_prs(self):
"""Log all PRs grouped by state."""
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.show_closed_prs
|
python
|
def show_closed_prs(self):
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
Log only closed PRs.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L359-L365
|
[
"def collect_prs_info(self):\n \"\"\"Collect all pending merge PRs info.\n\n :returns: mapping of PRs by state\n \"\"\"\n REPO_RE = re.compile(\n '^(https://github.com/|git@github.com:)'\n '(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')\n PULL_RE = re.compile(\n '^(refs/)?pull/(?P<pr>[0-9]+)/head$')\n remotes = {r['name']: r['url'] for r in self.remotes}\n all_prs = {}\n for merge in self.merges:\n remote = merge['remote']\n ref = merge['ref']\n repo_url = remotes[remote]\n repo_mo = REPO_RE.match(repo_url)\n if not repo_mo:\n logger.debug('%s is not a github repo', repo_url)\n continue\n pull_mo = PULL_RE.match(ref)\n if not pull_mo:\n logger.debug('%s is not a github pull reqeust', ref)\n continue\n pr_info = {\n 'owner': repo_mo.group('owner'),\n 'repo': repo_mo.group('repo'),\n 'pr': pull_mo.group('pr'),\n }\n pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)\n pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)\n pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)\n r = self._github_api_get('/repos/{path}'.format(**pr_info))\n if r.status_code != 200:\n logger.warning(\n 'Could not get status of {path}. '\n 'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)\n )\n continue\n pr_info['state'] = r.json().get('state')\n pr_info['merged'] = (\n not r.json().get('merged') and 'not ' or ''\n ) + 'merged'\n all_prs.setdefault(pr_info['state'], []).append(pr_info)\n return all_prs\n"
] |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def collect_prs_info(self):
"""Collect all pending merge PRs info.
:returns: mapping of PRs by state
"""
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
def show_all_prs(self):
"""Log all PRs grouped by state."""
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/repo.py
|
Repo.show_all_prs
|
python
|
def show_all_prs(self):
for __, prs in self.collect_prs_info().items():
for pr_info in prs:
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
Log all PRs grouped by state.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/repo.py#L367-L373
|
[
"def collect_prs_info(self):\n \"\"\"Collect all pending merge PRs info.\n\n :returns: mapping of PRs by state\n \"\"\"\n REPO_RE = re.compile(\n '^(https://github.com/|git@github.com:)'\n '(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')\n PULL_RE = re.compile(\n '^(refs/)?pull/(?P<pr>[0-9]+)/head$')\n remotes = {r['name']: r['url'] for r in self.remotes}\n all_prs = {}\n for merge in self.merges:\n remote = merge['remote']\n ref = merge['ref']\n repo_url = remotes[remote]\n repo_mo = REPO_RE.match(repo_url)\n if not repo_mo:\n logger.debug('%s is not a github repo', repo_url)\n continue\n pull_mo = PULL_RE.match(ref)\n if not pull_mo:\n logger.debug('%s is not a github pull reqeust', ref)\n continue\n pr_info = {\n 'owner': repo_mo.group('owner'),\n 'repo': repo_mo.group('repo'),\n 'pr': pull_mo.group('pr'),\n }\n pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)\n pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)\n pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)\n r = self._github_api_get('/repos/{path}'.format(**pr_info))\n if r.status_code != 200:\n logger.warning(\n 'Could not get status of {path}. '\n 'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)\n )\n continue\n pr_info['state'] = r.json().get('state')\n pr_info['merged'] = (\n not r.json().get('merged') and 'not ' or ''\n ) + 'merged'\n all_prs.setdefault(pr_info['state'], []).append(pr_info)\n return all_prs\n"
] |
class Repo(object):
_git_version = None
def __init__(self, cwd, remotes, merges, target,
shell_command_after=None, fetch_all=False, defaults=None,
force=False):
"""Initialize a git repository aggregator
:param cwd: path to the directory where to initialize the repository
:param remotes: list of remote linked to the repository. A remote is
a dict {'name': '', 'url': ''}
:param: merges list of merge to apply to build the aggregated
repository. A merge is a dict {'remote': '', 'ref': ''}
:param target:
:param shell_command_after: an optional list of shell command to
execute after the aggregation
:param fetch_all:
Can be an iterable (recommended: ``frozenset``) that yields names
of remotes where all refs should be fetched, or ``True`` to do it
for every configured remote.
:param defaults:
Collection of default parameters to be passed to git.
:param bool force:
When ``False``, it will stop if repo is dirty.
"""
self.cwd = cwd
self.remotes = remotes
if fetch_all is True:
self.fetch_all = frozenset(r["name"] for r in remotes)
else:
self.fetch_all = fetch_all or frozenset()
self.merges = merges
self.target = target
self.shell_command_after = shell_command_after or []
self.defaults = defaults or dict()
self.force = force
@property
def git_version(self):
cls = self.__class__
version = cls._git_version
if version is not None:
return version
return cls.init_git_version(
console_to_str(subprocess.check_output(
['git', '--version'])))
@classmethod
def init_git_version(cls, v_str):
r"""Parse git version string and store the resulting tuple on self.
:returns: the parsed version tuple
Only the first 3 digits are kept. This is good enough for the few
version dependent cases we need, and coarse enough to avoid
more complicated parsing.
Some real-life examples::
>>> GitRepo.init_git_version('git version 1.8.5.3')
(1, 8, 5)
>>> GitRepo.init_git_version('git version 1.7.2.5')
(1, 7, 2)
Seen on MacOSX (not on MacPorts)::
>>> GitRepo.init_git_version('git version 1.8.5.2 (Apple Git-48)')
(1, 8, 5)
Seen on Windows (Tortoise Git)::
>>> GitRepo.init_git_version('git version 1.8.4.msysgit.0')
(1, 8, 4)
A compiled version::
>>> GitRepo.init_git_version('git version 2.0.3.2.g996b0fd')
(2, 0, 3)
Rewrapped by `hub <https://hub.github.com/>`_, it has two lines:
>>> GitRepo.init_git_version('git version 1.7.9\nhub version 1.11.0')
(1, 7, 9)
This one does not exist, allowing us to prove that this method
actually governs the :attr:`git_version` property
>>> GitRepo.init_git_version('git version 0.0.666')
(0, 0, 666)
>>> GitRepo('', '').git_version
(0, 0, 666)
Expected exceptions::
>>> try: GitRepo.init_git_version('invalid')
... except ValueError: pass
After playing with it, we must reset it so that tests can run with
the proper detected one, if needed::
>>> GitRepo.init_git_version(None)
"""
if v_str is None:
cls._git_version = None
return
v_str = v_str.strip()
try:
version = cls._git_version = tuple(
int(x) for x in v_str.split()[2].split('.')[:3])
except Exception:
raise ValueError("Could not parse git version output %r. Please "
"report this" % v_str)
return version
def query_remote_ref(self, remote, ref):
"""Query remote repo about given ref.
:return: ``('tag', sha)`` if ref is a tag in remote
``('branch', sha)`` if ref is branch (aka "head") in remote
``(None, ref)`` if ref does not exist in remote. This happens
notably if ref if a commit sha (they can't be queried)
"""
out = self.log_call(['git', 'ls-remote', remote, ref],
cwd=self.cwd,
callwith=subprocess.check_output).strip()
for sha, fullref in (l.split() for l in out.splitlines()):
if fullref == 'refs/heads/' + ref:
return 'branch', sha
elif fullref == 'refs/tags/' + ref:
return 'tag', sha
elif fullref == ref and ref == 'HEAD':
return 'HEAD', sha
return None, ref
def log_call(self, cmd, callwith=subprocess.check_call,
log_level=logging.DEBUG, **kw):
"""Wrap a subprocess call with logging
:param meth: the calling method to use.
"""
logger.log(log_level, "%s> call %r", self.cwd, cmd)
ret = callwith(cmd, **kw)
if callwith == subprocess.check_output:
ret = console_to_str(ret)
return ret
def aggregate(self):
""" Aggregate all merges into the target branch
If the target_dir doesn't exist, create an empty git repo otherwise
clean it, add all remotes , and merge all merges.
"""
logger.info('Start aggregation of %s', self.cwd)
target_dir = self.cwd
is_new = not os.path.exists(target_dir)
if is_new:
self.init_repository(target_dir)
self._switch_to_branch(self.target['branch'])
for r in self.remotes:
self._set_remote(**r)
self.fetch()
merges = self.merges
if not is_new:
# reset to the first merge
origin = merges[0]
merges = merges[1:]
self._reset_to(origin["remote"], origin["ref"])
for merge in merges:
self._merge(merge)
self._execute_shell_command_after()
logger.info('End aggregation of %s', self.cwd)
def init_repository(self, target_dir):
logger.info('Init empty git repository in %s', target_dir)
self.log_call(['git', 'init', target_dir])
def fetch(self):
basecmd = ("git", "fetch")
logger.info("Fetching required remotes")
for merge in self.merges:
cmd = basecmd + self._fetch_options(merge) + (merge["remote"],)
if merge["remote"] not in self.fetch_all:
cmd += (merge["ref"],)
self.log_call(cmd, cwd=self.cwd)
def push(self):
remote = self.target['remote']
branch = self.target['branch']
logger.info("Push %s to %s", branch, remote)
self.log_call(['git', 'push', '-f', remote, branch], cwd=self.cwd)
def _check_status(self):
"""Check repo status and except if dirty."""
logger.info('Checking repo status')
status = self.log_call(
['git', 'status', '--porcelain'],
callwith=subprocess.check_output,
cwd=self.cwd,
)
if status:
raise DirtyException(status)
def _fetch_options(self, merge):
"""Get the fetch options from the given merge dict."""
cmd = tuple()
for option in FETCH_DEFAULTS:
value = merge.get(option, self.defaults.get(option))
if value:
cmd += ("--%s" % option, str(value))
return cmd
def _reset_to(self, remote, ref):
if not self.force:
self._check_status()
logger.info('Reset branch to %s %s', remote, ref)
rtype, sha = self.query_remote_ref(remote, ref)
if rtype is None and not ishex(ref):
raise GitAggregatorException(
'Could not reset %s to %s. No commit found for %s '
% (remote, ref, ref))
cmd = ['git', 'reset', '--hard', sha]
if logger.getEffectiveLevel() != logging.DEBUG:
cmd.insert(2, '--quiet')
self.log_call(cmd, cwd=self.cwd)
self.log_call(['git', 'clean', '-ffd'], cwd=self.cwd)
def _switch_to_branch(self, branch_name):
# check if the branch already exists
logger.info("Switch to branch %s", branch_name)
self.log_call(['git', 'checkout', '-B', branch_name], cwd=self.cwd)
def _execute_shell_command_after(self):
logger.info('Execute shell after commands')
for cmd in self.shell_command_after:
self.log_call(cmd, shell=True, cwd=self.cwd)
def _merge(self, merge):
logger.info("Pull %s, %s", merge["remote"], merge["ref"])
cmd = ("git", "pull")
if self.git_version >= (1, 7, 10):
# --edit and --no-edit appear with Git 1.7.10
# see Documentation/RelNotes/1.7.10.txt of Git
# (https://git.kernel.org/cgit/git/git.git/tree)
cmd += ('--no-edit',)
if logger.getEffectiveLevel() != logging.DEBUG:
cmd += ('--quiet',)
cmd += self._fetch_options(merge) + (merge["remote"], merge["ref"])
self.log_call(cmd, cwd=self.cwd)
def _get_remotes(self):
lines = self.log_call(
['git', 'remote', '-v'],
callwith=subprocess.check_output,
cwd=self.cwd).splitlines()
remotes = {}
for line in lines:
name, url = line.split('\t')
url = url.split(' ')[0]
v = remotes.setdefault(name, url)
if v != url:
raise NotImplementedError(
'Different urls gor push and fetch for remote %s\n'
'%s != %s' % (name, url, v)
)
return remotes
def _set_remote(self, name, url):
"""Add remote to the repository. It's equivalent to the command
git remote add <name> <url>
If the remote already exists with an other url, it's removed
and added aggain
"""
remotes = self._get_remotes()
exising_url = remotes.get(name)
if exising_url == url:
logger.info('Remote already exists %s <%s>', name, url)
return
if not exising_url:
logger.info('Adding remote %s <%s>', name, url)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
else:
logger.info('Remote remote %s <%s> -> <%s>',
name, exising_url, url)
self.log_call(['git', 'remote', 'rm', name], cwd=self.cwd)
self.log_call(['git', 'remote', 'add', name, url], cwd=self.cwd)
def _github_api_get(self, path):
url = 'https://api.github.com' + path
token = os.environ.get('GITHUB_TOKEN')
if token:
url += '?access_token=' + token
return requests.get(url)
def collect_prs_info(self):
"""Collect all pending merge PRs info.
:returns: mapping of PRs by state
"""
REPO_RE = re.compile(
'^(https://github.com/|git@github.com:)'
'(?P<owner>.*?)/(?P<repo>.*?)(.git)?$')
PULL_RE = re.compile(
'^(refs/)?pull/(?P<pr>[0-9]+)/head$')
remotes = {r['name']: r['url'] for r in self.remotes}
all_prs = {}
for merge in self.merges:
remote = merge['remote']
ref = merge['ref']
repo_url = remotes[remote]
repo_mo = REPO_RE.match(repo_url)
if not repo_mo:
logger.debug('%s is not a github repo', repo_url)
continue
pull_mo = PULL_RE.match(ref)
if not pull_mo:
logger.debug('%s is not a github pull reqeust', ref)
continue
pr_info = {
'owner': repo_mo.group('owner'),
'repo': repo_mo.group('repo'),
'pr': pull_mo.group('pr'),
}
pr_info['path'] = '{owner}/{repo}/pulls/{pr}'.format(**pr_info)
pr_info['url'] = 'https://github.com/{path}'.format(**pr_info)
pr_info['shortcut'] = '{owner}/{repo}#{pr}'.format(**pr_info)
r = self._github_api_get('/repos/{path}'.format(**pr_info))
if r.status_code != 200:
logger.warning(
'Could not get status of {path}. '
'Reason: {r.status_code} {r.reason}'.format(r=r, **pr_info)
)
continue
pr_info['state'] = r.json().get('state')
pr_info['merged'] = (
not r.json().get('merged') and 'not ' or ''
) + 'merged'
all_prs.setdefault(pr_info['state'], []).append(pr_info)
return all_prs
def show_closed_prs(self):
"""Log only closed PRs."""
all_prs = self.collect_prs_info()
for pr_info in all_prs.get('closed', []):
logger.info(
'{url} in state {state} ({merged})'.format(**pr_info)
)
|
acsone/git-aggregator
|
git_aggregator/config.py
|
get_repos
|
python
|
def get_repos(config, force=False):
repo_list = []
for directory, repo_data in config.items():
if not os.path.isabs(directory):
directory = os.path.abspath(directory)
repo_dict = {
'cwd': directory,
'defaults': repo_data.get('defaults', dict()),
'force': force,
}
remote_names = set()
if 'remotes' in repo_data:
repo_dict['remotes'] = []
remotes_data = repo_data['remotes'] or {}
for remote_name, url in remotes_data.items():
if not url:
raise ConfigException(
'%s: No url defined for remote %s.' %
(directory, remote_name))
remote_dict = {
'name': remote_name,
'url': url
}
repo_dict['remotes'].append(remote_dict)
remote_names.add(remote_name)
if not remote_names:
raise ConfigException(
'%s: You should at least define one remote.' % directory)
else:
raise ConfigException('%s: remotes is not defined.' % directory)
if 'merges' in repo_data:
merges = []
merge_data = repo_data.get('merges') or []
for merge in merge_data:
try:
# Assume parts is a str
parts = merge.split(' ')
if len(parts) != 2:
raise ConfigException(
'%s: Merge must be formatted as '
'"remote_name ref".' % directory)
merge = {
"remote": parts[0],
"ref": parts[1],
}
except AttributeError:
# Parts is a dict
try:
merge["remote"] = str(merge["remote"])
merge["ref"] = str(merge["ref"])
except KeyError:
raise ConfigException(
'%s: Merge lacks mandatory '
'`remote` or `ref` keys.' % directory)
# Check remote is available
if merge["remote"] not in remote_names:
raise ConfigException(
'%s: Merge remote %s not defined in remotes.' %
(directory, merge["remote"]))
merges.append(merge)
repo_dict['merges'] = merges
if not merges:
raise ConfigException(
'%s: You should at least define one merge.' % directory)
else:
raise ConfigException(
'%s: merges is not defined.' % directory)
# Only fetch required remotes by default
repo_dict["fetch_all"] = repo_data.get("fetch_all", False)
if isinstance(repo_dict["fetch_all"], string_types):
repo_dict["fetch_all"] = frozenset((repo_dict["fetch_all"],))
elif isinstance(repo_dict["fetch_all"], list):
repo_dict["fetch_all"] = frozenset(repo_dict["fetch_all"])
if 'target' not in repo_data:
raise ConfigException('%s: No target defined.' % directory)
parts = (repo_data.get('target') or "") .split(' ')
if len(parts) != 2:
raise ConfigException(
'%s: Target must be formatted as '
'"remote_name branch_name"' % directory)
remote_name, branch = repo_data.get('target').split(' ')
if remote_name not in remote_names:
raise ConfigException(
'%s: Target remote %s not defined in remotes.' %
(directory, remote_name))
repo_dict['target'] = {
'remote': remote_name,
'branch': branch,
}
commands = []
if 'shell_command_after' in repo_data:
cmds = repo_data['shell_command_after']
# if str: turn to list
if cmds:
if isinstance(cmds, string_types):
cmds = [cmds]
commands = cmds
repo_dict['shell_command_after'] = commands
repo_list.append(repo_dict)
return repo_list
|
Return a :py:obj:`list` list of repos from config file.
:param config: the repos config in :py:class:`dict` format.
:param bool force: Force aggregate dirty repos or not.
:type config: dict
:rtype: list
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/config.py#L17-L123
| null |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
from string import Template
import kaptan
from .exception import ConfigException
from ._compat import string_types
log = logging.getLogger(__name__)
def load_config(config, expand_env=False, force=False):
"""Return repos from a directory and fnmatch. Not recursive.
:param config: paths to config file
:type config: str
:param expand_env: True to expand environment varialbes in the config.
:type expand_env: bool
:param bool force: True to aggregate even if repo is dirty.
:returns: expanded config dict item
:rtype: iter(dict)
"""
if not os.path.exists(config):
raise ConfigException('Unable to find configuration file: %s' % config)
file_extension = os.path.splitext(config)[1][1:]
conf = kaptan.Kaptan(handler=kaptan.HANDLER_EXT.get(file_extension))
if expand_env:
with open(config, 'r') as file_handler:
config = Template(file_handler.read())
config = config.substitute(os.environ)
conf.import_config(config)
return get_repos(conf.export('dict') or {}, force)
|
acsone/git-aggregator
|
git_aggregator/config.py
|
load_config
|
python
|
def load_config(config, expand_env=False, force=False):
if not os.path.exists(config):
raise ConfigException('Unable to find configuration file: %s' % config)
file_extension = os.path.splitext(config)[1][1:]
conf = kaptan.Kaptan(handler=kaptan.HANDLER_EXT.get(file_extension))
if expand_env:
with open(config, 'r') as file_handler:
config = Template(file_handler.read())
config = config.substitute(os.environ)
conf.import_config(config)
return get_repos(conf.export('dict') or {}, force)
|
Return repos from a directory and fnmatch. Not recursive.
:param config: paths to config file
:type config: str
:param expand_env: True to expand environment varialbes in the config.
:type expand_env: bool
:param bool force: True to aggregate even if repo is dirty.
:returns: expanded config dict item
:rtype: iter(dict)
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/config.py#L126-L149
|
[
"def get_repos(config, force=False):\n \"\"\"Return a :py:obj:`list` list of repos from config file.\n :param config: the repos config in :py:class:`dict` format.\n :param bool force: Force aggregate dirty repos or not.\n :type config: dict\n :rtype: list\n \"\"\"\n repo_list = []\n for directory, repo_data in config.items():\n if not os.path.isabs(directory):\n directory = os.path.abspath(directory)\n repo_dict = {\n 'cwd': directory,\n 'defaults': repo_data.get('defaults', dict()),\n 'force': force,\n }\n remote_names = set()\n if 'remotes' in repo_data:\n repo_dict['remotes'] = []\n remotes_data = repo_data['remotes'] or {}\n for remote_name, url in remotes_data.items():\n if not url:\n raise ConfigException(\n '%s: No url defined for remote %s.' %\n (directory, remote_name))\n remote_dict = {\n 'name': remote_name,\n 'url': url\n }\n repo_dict['remotes'].append(remote_dict)\n remote_names.add(remote_name)\n if not remote_names:\n raise ConfigException(\n '%s: You should at least define one remote.' % directory)\n else:\n raise ConfigException('%s: remotes is not defined.' % directory)\n if 'merges' in repo_data:\n merges = []\n merge_data = repo_data.get('merges') or []\n for merge in merge_data:\n try:\n # Assume parts is a str\n parts = merge.split(' ')\n if len(parts) != 2:\n raise ConfigException(\n '%s: Merge must be formatted as '\n '\"remote_name ref\".' % directory)\n merge = {\n \"remote\": parts[0],\n \"ref\": parts[1],\n }\n except AttributeError:\n # Parts is a dict\n try:\n merge[\"remote\"] = str(merge[\"remote\"])\n merge[\"ref\"] = str(merge[\"ref\"])\n except KeyError:\n raise ConfigException(\n '%s: Merge lacks mandatory '\n '`remote` or `ref` keys.' % directory)\n # Check remote is available\n if merge[\"remote\"] not in remote_names:\n raise ConfigException(\n '%s: Merge remote %s not defined in remotes.' 
%\n (directory, merge[\"remote\"]))\n merges.append(merge)\n repo_dict['merges'] = merges\n if not merges:\n raise ConfigException(\n '%s: You should at least define one merge.' % directory)\n else:\n raise ConfigException(\n '%s: merges is not defined.' % directory)\n # Only fetch required remotes by default\n repo_dict[\"fetch_all\"] = repo_data.get(\"fetch_all\", False)\n if isinstance(repo_dict[\"fetch_all\"], string_types):\n repo_dict[\"fetch_all\"] = frozenset((repo_dict[\"fetch_all\"],))\n elif isinstance(repo_dict[\"fetch_all\"], list):\n repo_dict[\"fetch_all\"] = frozenset(repo_dict[\"fetch_all\"])\n if 'target' not in repo_data:\n raise ConfigException('%s: No target defined.' % directory)\n parts = (repo_data.get('target') or \"\") .split(' ')\n if len(parts) != 2:\n raise ConfigException(\n '%s: Target must be formatted as '\n '\"remote_name branch_name\"' % directory)\n\n remote_name, branch = repo_data.get('target').split(' ')\n if remote_name not in remote_names:\n raise ConfigException(\n '%s: Target remote %s not defined in remotes.' %\n (directory, remote_name))\n repo_dict['target'] = {\n 'remote': remote_name,\n 'branch': branch,\n }\n commands = []\n if 'shell_command_after' in repo_data:\n cmds = repo_data['shell_command_after']\n # if str: turn to list\n if cmds:\n if isinstance(cmds, string_types):\n cmds = [cmds]\n commands = cmds\n repo_dict['shell_command_after'] = commands\n repo_list.append(repo_dict)\n return repo_list\n"
] |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
from string import Template
import kaptan
from .exception import ConfigException
from ._compat import string_types
log = logging.getLogger(__name__)
def get_repos(config, force=False):
"""Return a :py:obj:`list` list of repos from config file.
:param config: the repos config in :py:class:`dict` format.
:param bool force: Force aggregate dirty repos or not.
:type config: dict
:rtype: list
"""
repo_list = []
for directory, repo_data in config.items():
if not os.path.isabs(directory):
directory = os.path.abspath(directory)
repo_dict = {
'cwd': directory,
'defaults': repo_data.get('defaults', dict()),
'force': force,
}
remote_names = set()
if 'remotes' in repo_data:
repo_dict['remotes'] = []
remotes_data = repo_data['remotes'] or {}
for remote_name, url in remotes_data.items():
if not url:
raise ConfigException(
'%s: No url defined for remote %s.' %
(directory, remote_name))
remote_dict = {
'name': remote_name,
'url': url
}
repo_dict['remotes'].append(remote_dict)
remote_names.add(remote_name)
if not remote_names:
raise ConfigException(
'%s: You should at least define one remote.' % directory)
else:
raise ConfigException('%s: remotes is not defined.' % directory)
if 'merges' in repo_data:
merges = []
merge_data = repo_data.get('merges') or []
for merge in merge_data:
try:
# Assume parts is a str
parts = merge.split(' ')
if len(parts) != 2:
raise ConfigException(
'%s: Merge must be formatted as '
'"remote_name ref".' % directory)
merge = {
"remote": parts[0],
"ref": parts[1],
}
except AttributeError:
# Parts is a dict
try:
merge["remote"] = str(merge["remote"])
merge["ref"] = str(merge["ref"])
except KeyError:
raise ConfigException(
'%s: Merge lacks mandatory '
'`remote` or `ref` keys.' % directory)
# Check remote is available
if merge["remote"] not in remote_names:
raise ConfigException(
'%s: Merge remote %s not defined in remotes.' %
(directory, merge["remote"]))
merges.append(merge)
repo_dict['merges'] = merges
if not merges:
raise ConfigException(
'%s: You should at least define one merge.' % directory)
else:
raise ConfigException(
'%s: merges is not defined.' % directory)
# Only fetch required remotes by default
repo_dict["fetch_all"] = repo_data.get("fetch_all", False)
if isinstance(repo_dict["fetch_all"], string_types):
repo_dict["fetch_all"] = frozenset((repo_dict["fetch_all"],))
elif isinstance(repo_dict["fetch_all"], list):
repo_dict["fetch_all"] = frozenset(repo_dict["fetch_all"])
if 'target' not in repo_data:
raise ConfigException('%s: No target defined.' % directory)
parts = (repo_data.get('target') or "") .split(' ')
if len(parts) != 2:
raise ConfigException(
'%s: Target must be formatted as '
'"remote_name branch_name"' % directory)
remote_name, branch = repo_data.get('target').split(' ')
if remote_name not in remote_names:
raise ConfigException(
'%s: Target remote %s not defined in remotes.' %
(directory, remote_name))
repo_dict['target'] = {
'remote': remote_name,
'branch': branch,
}
commands = []
if 'shell_command_after' in repo_data:
cmds = repo_data['shell_command_after']
# if str: turn to list
if cmds:
if isinstance(cmds, string_types):
cmds = [cmds]
commands = cmds
repo_dict['shell_command_after'] = commands
repo_list.append(repo_dict)
return repo_list
|
acsone/git-aggregator
|
git_aggregator/main.py
|
setup_logger
|
python
|
def setup_logger(log=None, level=logging.INFO):
if not log:
log = logging.getLogger()
if not log.handlers:
channel = logging.StreamHandler()
if level == logging.DEBUG:
channel.setFormatter(DebugLogFormatter())
else:
channel.setFormatter(LogFormatter())
log.setLevel(level)
log.addHandler(channel)
|
Setup logging for CLI use.
:param log: instance of logger
:type log: :py:class:`Logger`
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/main.py#L45-L60
| null |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
import sys
import threading
import traceback
try:
from Queue import Queue, Empty as EmptyQueue
except ImportError:
from queue import Queue, Empty as EmptyQueue
import argparse
import argcomplete
import fnmatch
from .utils import ThreadNameKeeper
from .log import DebugLogFormatter
from .log import LogFormatter
from .config import load_config
from .repo import Repo
logger = logging.getLogger(__name__)
_LOG_LEVEL_STRINGS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
def _log_level_string_to_int(log_level_string):
if log_level_string not in _LOG_LEVEL_STRINGS:
message = 'invalid choice: {0} (choose from {1})'.format(
log_level_string, _LOG_LEVEL_STRINGS)
raise argparse.ArgumentTypeError(message)
log_level_int = getattr(logging, log_level_string, logging.INFO)
# check the logging log_level_choices have not changed from our expected
# values
assert isinstance(log_level_int, int)
return log_level_int
def get_parser():
"""Return :py:class:`argparse.ArgumentParser` instance for CLI."""
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
main_parser.add_argument(
'-c', '--config',
dest='config',
type=str,
nargs='?',
help='Pull the latest repositories from config(s)'
).completer = argcomplete.completers.FilesCompleter(
allowednames=('.yaml', '.yml', '.json'), directories=False
)
main_parser.add_argument(
'-p', '--push',
dest='do_push',
action='store_true', default=False,
help='Push result to target',
)
main_parser.add_argument(
'-d', '--dirmatch',
dest='dirmatch',
type=str,
nargs='?',
help='Pull only from the directories. Accepts fnmatch(1)'
'by commands'
)
main_parser.add_argument(
'--log-level',
default='INFO',
dest='log_level',
type=_log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
main_parser.add_argument(
'-e', '--expand-env',
dest='expand_env',
default=False,
action='store_true',
help='Expand environment variables in configuration file',
)
main_parser.add_argument(
'-f', '--force',
dest='force',
default=False,
action='store_true',
help='Force cleanup and aggregation on dirty repositories.',
)
main_parser.add_argument(
'-j', '--jobs',
dest='jobs',
default=1,
type=int,
help='Amount of processes to use when aggregating repos. '
'This is useful when there are a lot of large repos. '
'Set `1` or less to disable multiprocessing (default).',
)
main_parser.add_argument(
'command',
nargs='?',
default='aggregate',
help='aggregate (default): run the aggregation process.\n'
'show-all-prs: show GitHub pull requests in merge sections\n'
' such pull requests are indentified as having\n'
' a github.com remote and a\n'
' refs/pull/NNN/head ref in the merge section.\n'
'show-closed-prs: show pull requests that are not open anymore.\n'
)
return main_parser
def main():
"""Main CLI application."""
parser = get_parser()
argcomplete.autocomplete(parser, always_complete_options=False)
args = parser.parse_args()
setup_logger(
level=args.log_level
)
try:
if args.config and \
args.command in \
('aggregate', 'show-closed-prs', 'show-all-prs'):
run(args)
else:
parser.print_help()
except KeyboardInterrupt:
pass
def match_dir(cwd, dirmatch=None):
if not dirmatch:
return True
return (fnmatch.fnmatch(cwd, dirmatch) or
fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or
os.path.relpath(cwd) == os.path.relpath(dirmatch))
def load_aggregate(args):
"""Load YAML and JSON configs and begin creating / updating , aggregating
and pushing the repos (deprecated in favor or run())"""
repos = load_config(args.config, args.expand_env)
dirmatch = args.dirmatch
for repo_dict in repos:
r = Repo(**repo_dict)
logger.debug('%s' % r)
if not match_dir(r.cwd, dirmatch):
logger.info("Skip %s", r.cwd)
continue
r.aggregate()
if args.do_push:
r.push()
def aggregate_repo(repo, args, sem, err_queue):
"""Aggregate one repo according to the args.
Args:
repo (Repo): The repository to aggregate.
args (argparse.Namespace): CLI arguments.
"""
try:
logger.debug('%s' % repo)
dirmatch = args.dirmatch
if not match_dir(repo.cwd, dirmatch):
logger.info("Skip %s", repo.cwd)
return
if args.command == 'aggregate':
repo.aggregate()
if args.do_push:
repo.push()
elif args.command == 'show-closed-prs':
repo.show_closed_prs()
elif args.command == 'show-all-prs':
repo.show_all_prs()
except Exception:
err_queue.put_nowait(sys.exc_info())
finally:
sem.release()
def run(args):
"""Load YAML and JSON configs and run the command specified
in args.command"""
repos = load_config(args.config, args.expand_env, args.force)
jobs = max(args.jobs, 1)
threads = []
sem = threading.Semaphore(jobs)
err_queue = Queue()
for repo_dict in repos:
if not err_queue.empty():
break
sem.acquire()
r = Repo(**repo_dict)
tname = os.path.basename(repo_dict['cwd'])
if jobs > 1:
t = threading.Thread(
target=aggregate_repo, args=(r, args, sem, err_queue))
t.daemon = True
t.name = tname
threads.append(t)
t.start()
else:
with ThreadNameKeeper():
threading.current_thread().name = tname
aggregate_repo(r, args, sem, err_queue)
for t in threads:
t.join()
if not err_queue.empty():
while True:
try:
exc_type, exc_obj, exc_trace = err_queue.get_nowait()
except EmptyQueue:
break
traceback.print_exception(exc_type, exc_obj, exc_trace)
sys.exit(1)
|
acsone/git-aggregator
|
git_aggregator/main.py
|
get_parser
|
python
|
def get_parser():
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
main_parser.add_argument(
'-c', '--config',
dest='config',
type=str,
nargs='?',
help='Pull the latest repositories from config(s)'
).completer = argcomplete.completers.FilesCompleter(
allowednames=('.yaml', '.yml', '.json'), directories=False
)
main_parser.add_argument(
'-p', '--push',
dest='do_push',
action='store_true', default=False,
help='Push result to target',
)
main_parser.add_argument(
'-d', '--dirmatch',
dest='dirmatch',
type=str,
nargs='?',
help='Pull only from the directories. Accepts fnmatch(1)'
'by commands'
)
main_parser.add_argument(
'--log-level',
default='INFO',
dest='log_level',
type=_log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
main_parser.add_argument(
'-e', '--expand-env',
dest='expand_env',
default=False,
action='store_true',
help='Expand environment variables in configuration file',
)
main_parser.add_argument(
'-f', '--force',
dest='force',
default=False,
action='store_true',
help='Force cleanup and aggregation on dirty repositories.',
)
main_parser.add_argument(
'-j', '--jobs',
dest='jobs',
default=1,
type=int,
help='Amount of processes to use when aggregating repos. '
'This is useful when there are a lot of large repos. '
'Set `1` or less to disable multiprocessing (default).',
)
main_parser.add_argument(
'command',
nargs='?',
default='aggregate',
help='aggregate (default): run the aggregation process.\n'
'show-all-prs: show GitHub pull requests in merge sections\n'
' such pull requests are indentified as having\n'
' a github.com remote and a\n'
' refs/pull/NNN/head ref in the merge section.\n'
'show-closed-prs: show pull requests that are not open anymore.\n'
)
return main_parser
|
Return :py:class:`argparse.ArgumentParser` instance for CLI.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/main.py#L63-L139
| null |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
import sys
import threading
import traceback
try:
from Queue import Queue, Empty as EmptyQueue
except ImportError:
from queue import Queue, Empty as EmptyQueue
import argparse
import argcomplete
import fnmatch
from .utils import ThreadNameKeeper
from .log import DebugLogFormatter
from .log import LogFormatter
from .config import load_config
from .repo import Repo
logger = logging.getLogger(__name__)
_LOG_LEVEL_STRINGS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
def _log_level_string_to_int(log_level_string):
if log_level_string not in _LOG_LEVEL_STRINGS:
message = 'invalid choice: {0} (choose from {1})'.format(
log_level_string, _LOG_LEVEL_STRINGS)
raise argparse.ArgumentTypeError(message)
log_level_int = getattr(logging, log_level_string, logging.INFO)
# check the logging log_level_choices have not changed from our expected
# values
assert isinstance(log_level_int, int)
return log_level_int
def setup_logger(log=None, level=logging.INFO):
"""Setup logging for CLI use.
:param log: instance of logger
:type log: :py:class:`Logger`
"""
if not log:
log = logging.getLogger()
if not log.handlers:
channel = logging.StreamHandler()
if level == logging.DEBUG:
channel.setFormatter(DebugLogFormatter())
else:
channel.setFormatter(LogFormatter())
log.setLevel(level)
log.addHandler(channel)
def main():
"""Main CLI application."""
parser = get_parser()
argcomplete.autocomplete(parser, always_complete_options=False)
args = parser.parse_args()
setup_logger(
level=args.log_level
)
try:
if args.config and \
args.command in \
('aggregate', 'show-closed-prs', 'show-all-prs'):
run(args)
else:
parser.print_help()
except KeyboardInterrupt:
pass
def match_dir(cwd, dirmatch=None):
if not dirmatch:
return True
return (fnmatch.fnmatch(cwd, dirmatch) or
fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or
os.path.relpath(cwd) == os.path.relpath(dirmatch))
def load_aggregate(args):
"""Load YAML and JSON configs and begin creating / updating , aggregating
and pushing the repos (deprecated in favor or run())"""
repos = load_config(args.config, args.expand_env)
dirmatch = args.dirmatch
for repo_dict in repos:
r = Repo(**repo_dict)
logger.debug('%s' % r)
if not match_dir(r.cwd, dirmatch):
logger.info("Skip %s", r.cwd)
continue
r.aggregate()
if args.do_push:
r.push()
def aggregate_repo(repo, args, sem, err_queue):
"""Aggregate one repo according to the args.
Args:
repo (Repo): The repository to aggregate.
args (argparse.Namespace): CLI arguments.
"""
try:
logger.debug('%s' % repo)
dirmatch = args.dirmatch
if not match_dir(repo.cwd, dirmatch):
logger.info("Skip %s", repo.cwd)
return
if args.command == 'aggregate':
repo.aggregate()
if args.do_push:
repo.push()
elif args.command == 'show-closed-prs':
repo.show_closed_prs()
elif args.command == 'show-all-prs':
repo.show_all_prs()
except Exception:
err_queue.put_nowait(sys.exc_info())
finally:
sem.release()
def run(args):
"""Load YAML and JSON configs and run the command specified
in args.command"""
repos = load_config(args.config, args.expand_env, args.force)
jobs = max(args.jobs, 1)
threads = []
sem = threading.Semaphore(jobs)
err_queue = Queue()
for repo_dict in repos:
if not err_queue.empty():
break
sem.acquire()
r = Repo(**repo_dict)
tname = os.path.basename(repo_dict['cwd'])
if jobs > 1:
t = threading.Thread(
target=aggregate_repo, args=(r, args, sem, err_queue))
t.daemon = True
t.name = tname
threads.append(t)
t.start()
else:
with ThreadNameKeeper():
threading.current_thread().name = tname
aggregate_repo(r, args, sem, err_queue)
for t in threads:
t.join()
if not err_queue.empty():
while True:
try:
exc_type, exc_obj, exc_trace = err_queue.get_nowait()
except EmptyQueue:
break
traceback.print_exception(exc_type, exc_obj, exc_trace)
sys.exit(1)
|
acsone/git-aggregator
|
git_aggregator/main.py
|
main
|
python
|
def main():
parser = get_parser()
argcomplete.autocomplete(parser, always_complete_options=False)
args = parser.parse_args()
setup_logger(
level=args.log_level
)
try:
if args.config and \
args.command in \
('aggregate', 'show-closed-prs', 'show-all-prs'):
run(args)
else:
parser.print_help()
except KeyboardInterrupt:
pass
|
Main CLI application.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/main.py#L142-L163
|
[
"def run(args):\n \"\"\"Load YAML and JSON configs and run the command specified\n in args.command\"\"\"\n\n repos = load_config(args.config, args.expand_env, args.force)\n\n jobs = max(args.jobs, 1)\n threads = []\n sem = threading.Semaphore(jobs)\n err_queue = Queue()\n\n for repo_dict in repos:\n if not err_queue.empty():\n break\n\n sem.acquire()\n r = Repo(**repo_dict)\n tname = os.path.basename(repo_dict['cwd'])\n\n if jobs > 1:\n t = threading.Thread(\n target=aggregate_repo, args=(r, args, sem, err_queue))\n t.daemon = True\n t.name = tname\n threads.append(t)\n t.start()\n else:\n with ThreadNameKeeper():\n threading.current_thread().name = tname\n aggregate_repo(r, args, sem, err_queue)\n\n for t in threads:\n t.join()\n\n if not err_queue.empty():\n while True:\n try:\n exc_type, exc_obj, exc_trace = err_queue.get_nowait()\n except EmptyQueue:\n break\n traceback.print_exception(exc_type, exc_obj, exc_trace)\n sys.exit(1)\n",
"def setup_logger(log=None, level=logging.INFO):\n \"\"\"Setup logging for CLI use.\n :param log: instance of logger\n :type log: :py:class:`Logger`\n \"\"\"\n if not log:\n log = logging.getLogger()\n if not log.handlers:\n channel = logging.StreamHandler()\n if level == logging.DEBUG:\n channel.setFormatter(DebugLogFormatter())\n else:\n channel.setFormatter(LogFormatter())\n\n log.setLevel(level)\n log.addHandler(channel)\n",
"def get_parser():\n \"\"\"Return :py:class:`argparse.ArgumentParser` instance for CLI.\"\"\"\n\n main_parser = argparse.ArgumentParser(\n formatter_class=argparse.RawTextHelpFormatter)\n\n main_parser.add_argument(\n '-c', '--config',\n dest='config',\n type=str,\n nargs='?',\n help='Pull the latest repositories from config(s)'\n ).completer = argcomplete.completers.FilesCompleter(\n allowednames=('.yaml', '.yml', '.json'), directories=False\n )\n\n main_parser.add_argument(\n '-p', '--push',\n dest='do_push',\n action='store_true', default=False,\n help='Push result to target',\n )\n\n main_parser.add_argument(\n '-d', '--dirmatch',\n dest='dirmatch',\n type=str,\n nargs='?',\n help='Pull only from the directories. Accepts fnmatch(1)'\n 'by commands'\n )\n main_parser.add_argument(\n '--log-level',\n default='INFO',\n dest='log_level',\n type=_log_level_string_to_int,\n nargs='?',\n help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))\n\n main_parser.add_argument(\n '-e', '--expand-env',\n dest='expand_env',\n default=False,\n action='store_true',\n help='Expand environment variables in configuration file',\n )\n main_parser.add_argument(\n '-f', '--force',\n dest='force',\n default=False,\n action='store_true',\n help='Force cleanup and aggregation on dirty repositories.',\n )\n\n main_parser.add_argument(\n '-j', '--jobs',\n dest='jobs',\n default=1,\n type=int,\n help='Amount of processes to use when aggregating repos. '\n 'This is useful when there are a lot of large repos. 
'\n 'Set `1` or less to disable multiprocessing (default).',\n )\n\n main_parser.add_argument(\n 'command',\n nargs='?',\n default='aggregate',\n help='aggregate (default): run the aggregation process.\\n'\n 'show-all-prs: show GitHub pull requests in merge sections\\n'\n ' such pull requests are indentified as having\\n'\n ' a github.com remote and a\\n'\n ' refs/pull/NNN/head ref in the merge section.\\n'\n 'show-closed-prs: show pull requests that are not open anymore.\\n'\n )\n\n return main_parser\n"
] |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
import sys
import threading
import traceback
try:
from Queue import Queue, Empty as EmptyQueue
except ImportError:
from queue import Queue, Empty as EmptyQueue
import argparse
import argcomplete
import fnmatch
from .utils import ThreadNameKeeper
from .log import DebugLogFormatter
from .log import LogFormatter
from .config import load_config
from .repo import Repo
logger = logging.getLogger(__name__)
_LOG_LEVEL_STRINGS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
def _log_level_string_to_int(log_level_string):
if log_level_string not in _LOG_LEVEL_STRINGS:
message = 'invalid choice: {0} (choose from {1})'.format(
log_level_string, _LOG_LEVEL_STRINGS)
raise argparse.ArgumentTypeError(message)
log_level_int = getattr(logging, log_level_string, logging.INFO)
# check the logging log_level_choices have not changed from our expected
# values
assert isinstance(log_level_int, int)
return log_level_int
def setup_logger(log=None, level=logging.INFO):
"""Setup logging for CLI use.
:param log: instance of logger
:type log: :py:class:`Logger`
"""
if not log:
log = logging.getLogger()
if not log.handlers:
channel = logging.StreamHandler()
if level == logging.DEBUG:
channel.setFormatter(DebugLogFormatter())
else:
channel.setFormatter(LogFormatter())
log.setLevel(level)
log.addHandler(channel)
def get_parser():
"""Return :py:class:`argparse.ArgumentParser` instance for CLI."""
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
main_parser.add_argument(
'-c', '--config',
dest='config',
type=str,
nargs='?',
help='Pull the latest repositories from config(s)'
).completer = argcomplete.completers.FilesCompleter(
allowednames=('.yaml', '.yml', '.json'), directories=False
)
main_parser.add_argument(
'-p', '--push',
dest='do_push',
action='store_true', default=False,
help='Push result to target',
)
main_parser.add_argument(
'-d', '--dirmatch',
dest='dirmatch',
type=str,
nargs='?',
help='Pull only from the directories. Accepts fnmatch(1)'
'by commands'
)
main_parser.add_argument(
'--log-level',
default='INFO',
dest='log_level',
type=_log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
main_parser.add_argument(
'-e', '--expand-env',
dest='expand_env',
default=False,
action='store_true',
help='Expand environment variables in configuration file',
)
main_parser.add_argument(
'-f', '--force',
dest='force',
default=False,
action='store_true',
help='Force cleanup and aggregation on dirty repositories.',
)
main_parser.add_argument(
'-j', '--jobs',
dest='jobs',
default=1,
type=int,
help='Amount of processes to use when aggregating repos. '
'This is useful when there are a lot of large repos. '
'Set `1` or less to disable multiprocessing (default).',
)
main_parser.add_argument(
'command',
nargs='?',
default='aggregate',
help='aggregate (default): run the aggregation process.\n'
'show-all-prs: show GitHub pull requests in merge sections\n'
' such pull requests are indentified as having\n'
' a github.com remote and a\n'
' refs/pull/NNN/head ref in the merge section.\n'
'show-closed-prs: show pull requests that are not open anymore.\n'
)
return main_parser
def match_dir(cwd, dirmatch=None):
if not dirmatch:
return True
return (fnmatch.fnmatch(cwd, dirmatch) or
fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or
os.path.relpath(cwd) == os.path.relpath(dirmatch))
def load_aggregate(args):
"""Load YAML and JSON configs and begin creating / updating , aggregating
and pushing the repos (deprecated in favor or run())"""
repos = load_config(args.config, args.expand_env)
dirmatch = args.dirmatch
for repo_dict in repos:
r = Repo(**repo_dict)
logger.debug('%s' % r)
if not match_dir(r.cwd, dirmatch):
logger.info("Skip %s", r.cwd)
continue
r.aggregate()
if args.do_push:
r.push()
def aggregate_repo(repo, args, sem, err_queue):
"""Aggregate one repo according to the args.
Args:
repo (Repo): The repository to aggregate.
args (argparse.Namespace): CLI arguments.
"""
try:
logger.debug('%s' % repo)
dirmatch = args.dirmatch
if not match_dir(repo.cwd, dirmatch):
logger.info("Skip %s", repo.cwd)
return
if args.command == 'aggregate':
repo.aggregate()
if args.do_push:
repo.push()
elif args.command == 'show-closed-prs':
repo.show_closed_prs()
elif args.command == 'show-all-prs':
repo.show_all_prs()
except Exception:
err_queue.put_nowait(sys.exc_info())
finally:
sem.release()
def run(args):
"""Load YAML and JSON configs and run the command specified
in args.command"""
repos = load_config(args.config, args.expand_env, args.force)
jobs = max(args.jobs, 1)
threads = []
sem = threading.Semaphore(jobs)
err_queue = Queue()
for repo_dict in repos:
if not err_queue.empty():
break
sem.acquire()
r = Repo(**repo_dict)
tname = os.path.basename(repo_dict['cwd'])
if jobs > 1:
t = threading.Thread(
target=aggregate_repo, args=(r, args, sem, err_queue))
t.daemon = True
t.name = tname
threads.append(t)
t.start()
else:
with ThreadNameKeeper():
threading.current_thread().name = tname
aggregate_repo(r, args, sem, err_queue)
for t in threads:
t.join()
if not err_queue.empty():
while True:
try:
exc_type, exc_obj, exc_trace = err_queue.get_nowait()
except EmptyQueue:
break
traceback.print_exception(exc_type, exc_obj, exc_trace)
sys.exit(1)
|
acsone/git-aggregator
|
git_aggregator/main.py
|
load_aggregate
|
python
|
def load_aggregate(args):
repos = load_config(args.config, args.expand_env)
dirmatch = args.dirmatch
for repo_dict in repos:
r = Repo(**repo_dict)
logger.debug('%s' % r)
if not match_dir(r.cwd, dirmatch):
logger.info("Skip %s", r.cwd)
continue
r.aggregate()
if args.do_push:
r.push()
|
Load YAML and JSON configs and begin creating / updating , aggregating
and pushing the repos (deprecated in favor or run())
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/main.py#L174-L187
|
[
"def load_config(config, expand_env=False, force=False):\n \"\"\"Return repos from a directory and fnmatch. Not recursive.\n\n :param config: paths to config file\n :type config: str\n :param expand_env: True to expand environment varialbes in the config.\n :type expand_env: bool\n :param bool force: True to aggregate even if repo is dirty.\n :returns: expanded config dict item\n :rtype: iter(dict)\n \"\"\"\n if not os.path.exists(config):\n raise ConfigException('Unable to find configuration file: %s' % config)\n\n file_extension = os.path.splitext(config)[1][1:]\n conf = kaptan.Kaptan(handler=kaptan.HANDLER_EXT.get(file_extension))\n\n if expand_env:\n with open(config, 'r') as file_handler:\n config = Template(file_handler.read())\n config = config.substitute(os.environ)\n\n conf.import_config(config)\n return get_repos(conf.export('dict') or {}, force)\n",
"def match_dir(cwd, dirmatch=None):\n if not dirmatch:\n return True\n return (fnmatch.fnmatch(cwd, dirmatch) or\n fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or\n os.path.relpath(cwd) == os.path.relpath(dirmatch))\n"
] |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
import sys
import threading
import traceback
try:
from Queue import Queue, Empty as EmptyQueue
except ImportError:
from queue import Queue, Empty as EmptyQueue
import argparse
import argcomplete
import fnmatch
from .utils import ThreadNameKeeper
from .log import DebugLogFormatter
from .log import LogFormatter
from .config import load_config
from .repo import Repo
logger = logging.getLogger(__name__)
_LOG_LEVEL_STRINGS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
def _log_level_string_to_int(log_level_string):
if log_level_string not in _LOG_LEVEL_STRINGS:
message = 'invalid choice: {0} (choose from {1})'.format(
log_level_string, _LOG_LEVEL_STRINGS)
raise argparse.ArgumentTypeError(message)
log_level_int = getattr(logging, log_level_string, logging.INFO)
# check the logging log_level_choices have not changed from our expected
# values
assert isinstance(log_level_int, int)
return log_level_int
def setup_logger(log=None, level=logging.INFO):
"""Setup logging for CLI use.
:param log: instance of logger
:type log: :py:class:`Logger`
"""
if not log:
log = logging.getLogger()
if not log.handlers:
channel = logging.StreamHandler()
if level == logging.DEBUG:
channel.setFormatter(DebugLogFormatter())
else:
channel.setFormatter(LogFormatter())
log.setLevel(level)
log.addHandler(channel)
def get_parser():
"""Return :py:class:`argparse.ArgumentParser` instance for CLI."""
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
main_parser.add_argument(
'-c', '--config',
dest='config',
type=str,
nargs='?',
help='Pull the latest repositories from config(s)'
).completer = argcomplete.completers.FilesCompleter(
allowednames=('.yaml', '.yml', '.json'), directories=False
)
main_parser.add_argument(
'-p', '--push',
dest='do_push',
action='store_true', default=False,
help='Push result to target',
)
main_parser.add_argument(
'-d', '--dirmatch',
dest='dirmatch',
type=str,
nargs='?',
help='Pull only from the directories. Accepts fnmatch(1)'
'by commands'
)
main_parser.add_argument(
'--log-level',
default='INFO',
dest='log_level',
type=_log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
main_parser.add_argument(
'-e', '--expand-env',
dest='expand_env',
default=False,
action='store_true',
help='Expand environment variables in configuration file',
)
main_parser.add_argument(
'-f', '--force',
dest='force',
default=False,
action='store_true',
help='Force cleanup and aggregation on dirty repositories.',
)
main_parser.add_argument(
'-j', '--jobs',
dest='jobs',
default=1,
type=int,
help='Amount of processes to use when aggregating repos. '
'This is useful when there are a lot of large repos. '
'Set `1` or less to disable multiprocessing (default).',
)
main_parser.add_argument(
'command',
nargs='?',
default='aggregate',
help='aggregate (default): run the aggregation process.\n'
'show-all-prs: show GitHub pull requests in merge sections\n'
' such pull requests are indentified as having\n'
' a github.com remote and a\n'
' refs/pull/NNN/head ref in the merge section.\n'
'show-closed-prs: show pull requests that are not open anymore.\n'
)
return main_parser
def main():
"""Main CLI application."""
parser = get_parser()
argcomplete.autocomplete(parser, always_complete_options=False)
args = parser.parse_args()
setup_logger(
level=args.log_level
)
try:
if args.config and \
args.command in \
('aggregate', 'show-closed-prs', 'show-all-prs'):
run(args)
else:
parser.print_help()
except KeyboardInterrupt:
pass
def match_dir(cwd, dirmatch=None):
if not dirmatch:
return True
return (fnmatch.fnmatch(cwd, dirmatch) or
fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or
os.path.relpath(cwd) == os.path.relpath(dirmatch))
def aggregate_repo(repo, args, sem, err_queue):
"""Aggregate one repo according to the args.
Args:
repo (Repo): The repository to aggregate.
args (argparse.Namespace): CLI arguments.
"""
try:
logger.debug('%s' % repo)
dirmatch = args.dirmatch
if not match_dir(repo.cwd, dirmatch):
logger.info("Skip %s", repo.cwd)
return
if args.command == 'aggregate':
repo.aggregate()
if args.do_push:
repo.push()
elif args.command == 'show-closed-prs':
repo.show_closed_prs()
elif args.command == 'show-all-prs':
repo.show_all_prs()
except Exception:
err_queue.put_nowait(sys.exc_info())
finally:
sem.release()
def run(args):
"""Load YAML and JSON configs and run the command specified
in args.command"""
repos = load_config(args.config, args.expand_env, args.force)
jobs = max(args.jobs, 1)
threads = []
sem = threading.Semaphore(jobs)
err_queue = Queue()
for repo_dict in repos:
if not err_queue.empty():
break
sem.acquire()
r = Repo(**repo_dict)
tname = os.path.basename(repo_dict['cwd'])
if jobs > 1:
t = threading.Thread(
target=aggregate_repo, args=(r, args, sem, err_queue))
t.daemon = True
t.name = tname
threads.append(t)
t.start()
else:
with ThreadNameKeeper():
threading.current_thread().name = tname
aggregate_repo(r, args, sem, err_queue)
for t in threads:
t.join()
if not err_queue.empty():
while True:
try:
exc_type, exc_obj, exc_trace = err_queue.get_nowait()
except EmptyQueue:
break
traceback.print_exception(exc_type, exc_obj, exc_trace)
sys.exit(1)
|
acsone/git-aggregator
|
git_aggregator/main.py
|
aggregate_repo
|
python
|
def aggregate_repo(repo, args, sem, err_queue):
try:
logger.debug('%s' % repo)
dirmatch = args.dirmatch
if not match_dir(repo.cwd, dirmatch):
logger.info("Skip %s", repo.cwd)
return
if args.command == 'aggregate':
repo.aggregate()
if args.do_push:
repo.push()
elif args.command == 'show-closed-prs':
repo.show_closed_prs()
elif args.command == 'show-all-prs':
repo.show_all_prs()
except Exception:
err_queue.put_nowait(sys.exc_info())
finally:
sem.release()
|
Aggregate one repo according to the args.
Args:
repo (Repo): The repository to aggregate.
args (argparse.Namespace): CLI arguments.
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/main.py#L190-L214
|
[
"def match_dir(cwd, dirmatch=None):\n if not dirmatch:\n return True\n return (fnmatch.fnmatch(cwd, dirmatch) or\n fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or\n os.path.relpath(cwd) == os.path.relpath(dirmatch))\n"
] |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
import sys
import threading
import traceback
try:
from Queue import Queue, Empty as EmptyQueue
except ImportError:
from queue import Queue, Empty as EmptyQueue
import argparse
import argcomplete
import fnmatch
from .utils import ThreadNameKeeper
from .log import DebugLogFormatter
from .log import LogFormatter
from .config import load_config
from .repo import Repo
logger = logging.getLogger(__name__)
_LOG_LEVEL_STRINGS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
def _log_level_string_to_int(log_level_string):
if log_level_string not in _LOG_LEVEL_STRINGS:
message = 'invalid choice: {0} (choose from {1})'.format(
log_level_string, _LOG_LEVEL_STRINGS)
raise argparse.ArgumentTypeError(message)
log_level_int = getattr(logging, log_level_string, logging.INFO)
# check the logging log_level_choices have not changed from our expected
# values
assert isinstance(log_level_int, int)
return log_level_int
def setup_logger(log=None, level=logging.INFO):
"""Setup logging for CLI use.
:param log: instance of logger
:type log: :py:class:`Logger`
"""
if not log:
log = logging.getLogger()
if not log.handlers:
channel = logging.StreamHandler()
if level == logging.DEBUG:
channel.setFormatter(DebugLogFormatter())
else:
channel.setFormatter(LogFormatter())
log.setLevel(level)
log.addHandler(channel)
def get_parser():
"""Return :py:class:`argparse.ArgumentParser` instance for CLI."""
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
main_parser.add_argument(
'-c', '--config',
dest='config',
type=str,
nargs='?',
help='Pull the latest repositories from config(s)'
).completer = argcomplete.completers.FilesCompleter(
allowednames=('.yaml', '.yml', '.json'), directories=False
)
main_parser.add_argument(
'-p', '--push',
dest='do_push',
action='store_true', default=False,
help='Push result to target',
)
main_parser.add_argument(
'-d', '--dirmatch',
dest='dirmatch',
type=str,
nargs='?',
help='Pull only from the directories. Accepts fnmatch(1)'
'by commands'
)
main_parser.add_argument(
'--log-level',
default='INFO',
dest='log_level',
type=_log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
main_parser.add_argument(
'-e', '--expand-env',
dest='expand_env',
default=False,
action='store_true',
help='Expand environment variables in configuration file',
)
main_parser.add_argument(
'-f', '--force',
dest='force',
default=False,
action='store_true',
help='Force cleanup and aggregation on dirty repositories.',
)
main_parser.add_argument(
'-j', '--jobs',
dest='jobs',
default=1,
type=int,
help='Amount of processes to use when aggregating repos. '
'This is useful when there are a lot of large repos. '
'Set `1` or less to disable multiprocessing (default).',
)
main_parser.add_argument(
'command',
nargs='?',
default='aggregate',
help='aggregate (default): run the aggregation process.\n'
'show-all-prs: show GitHub pull requests in merge sections\n'
' such pull requests are indentified as having\n'
' a github.com remote and a\n'
' refs/pull/NNN/head ref in the merge section.\n'
'show-closed-prs: show pull requests that are not open anymore.\n'
)
return main_parser
def main():
"""Main CLI application."""
parser = get_parser()
argcomplete.autocomplete(parser, always_complete_options=False)
args = parser.parse_args()
setup_logger(
level=args.log_level
)
try:
if args.config and \
args.command in \
('aggregate', 'show-closed-prs', 'show-all-prs'):
run(args)
else:
parser.print_help()
except KeyboardInterrupt:
pass
def match_dir(cwd, dirmatch=None):
if not dirmatch:
return True
return (fnmatch.fnmatch(cwd, dirmatch) or
fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or
os.path.relpath(cwd) == os.path.relpath(dirmatch))
def load_aggregate(args):
"""Load YAML and JSON configs and begin creating / updating , aggregating
and pushing the repos (deprecated in favor or run())"""
repos = load_config(args.config, args.expand_env)
dirmatch = args.dirmatch
for repo_dict in repos:
r = Repo(**repo_dict)
logger.debug('%s' % r)
if not match_dir(r.cwd, dirmatch):
logger.info("Skip %s", r.cwd)
continue
r.aggregate()
if args.do_push:
r.push()
def run(args):
"""Load YAML and JSON configs and run the command specified
in args.command"""
repos = load_config(args.config, args.expand_env, args.force)
jobs = max(args.jobs, 1)
threads = []
sem = threading.Semaphore(jobs)
err_queue = Queue()
for repo_dict in repos:
if not err_queue.empty():
break
sem.acquire()
r = Repo(**repo_dict)
tname = os.path.basename(repo_dict['cwd'])
if jobs > 1:
t = threading.Thread(
target=aggregate_repo, args=(r, args, sem, err_queue))
t.daemon = True
t.name = tname
threads.append(t)
t.start()
else:
with ThreadNameKeeper():
threading.current_thread().name = tname
aggregate_repo(r, args, sem, err_queue)
for t in threads:
t.join()
if not err_queue.empty():
while True:
try:
exc_type, exc_obj, exc_trace = err_queue.get_nowait()
except EmptyQueue:
break
traceback.print_exception(exc_type, exc_obj, exc_trace)
sys.exit(1)
|
acsone/git-aggregator
|
git_aggregator/main.py
|
run
|
python
|
def run(args):
repos = load_config(args.config, args.expand_env, args.force)
jobs = max(args.jobs, 1)
threads = []
sem = threading.Semaphore(jobs)
err_queue = Queue()
for repo_dict in repos:
if not err_queue.empty():
break
sem.acquire()
r = Repo(**repo_dict)
tname = os.path.basename(repo_dict['cwd'])
if jobs > 1:
t = threading.Thread(
target=aggregate_repo, args=(r, args, sem, err_queue))
t.daemon = True
t.name = tname
threads.append(t)
t.start()
else:
with ThreadNameKeeper():
threading.current_thread().name = tname
aggregate_repo(r, args, sem, err_queue)
for t in threads:
t.join()
if not err_queue.empty():
while True:
try:
exc_type, exc_obj, exc_trace = err_queue.get_nowait()
except EmptyQueue:
break
traceback.print_exception(exc_type, exc_obj, exc_trace)
sys.exit(1)
|
Load YAML and JSON configs and run the command specified
in args.command
|
train
|
https://github.com/acsone/git-aggregator/blob/8631b0e64f9e8ce1857b21adeddb890ebd8469a6/git_aggregator/main.py#L217-L258
|
[
"def load_config(config, expand_env=False, force=False):\n \"\"\"Return repos from a directory and fnmatch. Not recursive.\n\n :param config: paths to config file\n :type config: str\n :param expand_env: True to expand environment varialbes in the config.\n :type expand_env: bool\n :param bool force: True to aggregate even if repo is dirty.\n :returns: expanded config dict item\n :rtype: iter(dict)\n \"\"\"\n if not os.path.exists(config):\n raise ConfigException('Unable to find configuration file: %s' % config)\n\n file_extension = os.path.splitext(config)[1][1:]\n conf = kaptan.Kaptan(handler=kaptan.HANDLER_EXT.get(file_extension))\n\n if expand_env:\n with open(config, 'r') as file_handler:\n config = Template(file_handler.read())\n config = config.substitute(os.environ)\n\n conf.import_config(config)\n return get_repos(conf.export('dict') or {}, force)\n",
"def aggregate_repo(repo, args, sem, err_queue):\n \"\"\"Aggregate one repo according to the args.\n\n Args:\n repo (Repo): The repository to aggregate.\n args (argparse.Namespace): CLI arguments.\n \"\"\"\n try:\n logger.debug('%s' % repo)\n dirmatch = args.dirmatch\n if not match_dir(repo.cwd, dirmatch):\n logger.info(\"Skip %s\", repo.cwd)\n return\n if args.command == 'aggregate':\n repo.aggregate()\n if args.do_push:\n repo.push()\n elif args.command == 'show-closed-prs':\n repo.show_closed_prs()\n elif args.command == 'show-all-prs':\n repo.show_all_prs()\n except Exception:\n err_queue.put_nowait(sys.exc_info())\n finally:\n sem.release()\n"
] |
# -*- coding: utf-8 -*-
# © 2015 ACSONE SA/NV
# License AGPLv3 (http://www.gnu.org/licenses/agpl-3.0-standalone.html)
import logging
import os
import sys
import threading
import traceback
try:
from Queue import Queue, Empty as EmptyQueue
except ImportError:
from queue import Queue, Empty as EmptyQueue
import argparse
import argcomplete
import fnmatch
from .utils import ThreadNameKeeper
from .log import DebugLogFormatter
from .log import LogFormatter
from .config import load_config
from .repo import Repo
logger = logging.getLogger(__name__)
_LOG_LEVEL_STRINGS = ['CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG']
def _log_level_string_to_int(log_level_string):
if log_level_string not in _LOG_LEVEL_STRINGS:
message = 'invalid choice: {0} (choose from {1})'.format(
log_level_string, _LOG_LEVEL_STRINGS)
raise argparse.ArgumentTypeError(message)
log_level_int = getattr(logging, log_level_string, logging.INFO)
# check the logging log_level_choices have not changed from our expected
# values
assert isinstance(log_level_int, int)
return log_level_int
def setup_logger(log=None, level=logging.INFO):
"""Setup logging for CLI use.
:param log: instance of logger
:type log: :py:class:`Logger`
"""
if not log:
log = logging.getLogger()
if not log.handlers:
channel = logging.StreamHandler()
if level == logging.DEBUG:
channel.setFormatter(DebugLogFormatter())
else:
channel.setFormatter(LogFormatter())
log.setLevel(level)
log.addHandler(channel)
def get_parser():
"""Return :py:class:`argparse.ArgumentParser` instance for CLI."""
main_parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
main_parser.add_argument(
'-c', '--config',
dest='config',
type=str,
nargs='?',
help='Pull the latest repositories from config(s)'
).completer = argcomplete.completers.FilesCompleter(
allowednames=('.yaml', '.yml', '.json'), directories=False
)
main_parser.add_argument(
'-p', '--push',
dest='do_push',
action='store_true', default=False,
help='Push result to target',
)
main_parser.add_argument(
'-d', '--dirmatch',
dest='dirmatch',
type=str,
nargs='?',
help='Pull only from the directories. Accepts fnmatch(1)'
'by commands'
)
main_parser.add_argument(
'--log-level',
default='INFO',
dest='log_level',
type=_log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))
main_parser.add_argument(
'-e', '--expand-env',
dest='expand_env',
default=False,
action='store_true',
help='Expand environment variables in configuration file',
)
main_parser.add_argument(
'-f', '--force',
dest='force',
default=False,
action='store_true',
help='Force cleanup and aggregation on dirty repositories.',
)
main_parser.add_argument(
'-j', '--jobs',
dest='jobs',
default=1,
type=int,
help='Amount of processes to use when aggregating repos. '
'This is useful when there are a lot of large repos. '
'Set `1` or less to disable multiprocessing (default).',
)
main_parser.add_argument(
'command',
nargs='?',
default='aggregate',
help='aggregate (default): run the aggregation process.\n'
'show-all-prs: show GitHub pull requests in merge sections\n'
' such pull requests are indentified as having\n'
' a github.com remote and a\n'
' refs/pull/NNN/head ref in the merge section.\n'
'show-closed-prs: show pull requests that are not open anymore.\n'
)
return main_parser
def main():
"""Main CLI application."""
parser = get_parser()
argcomplete.autocomplete(parser, always_complete_options=False)
args = parser.parse_args()
setup_logger(
level=args.log_level
)
try:
if args.config and \
args.command in \
('aggregate', 'show-closed-prs', 'show-all-prs'):
run(args)
else:
parser.print_help()
except KeyboardInterrupt:
pass
def match_dir(cwd, dirmatch=None):
if not dirmatch:
return True
return (fnmatch.fnmatch(cwd, dirmatch) or
fnmatch.fnmatch(os.path.relpath(cwd), dirmatch) or
os.path.relpath(cwd) == os.path.relpath(dirmatch))
def load_aggregate(args):
"""Load YAML and JSON configs and begin creating / updating , aggregating
and pushing the repos (deprecated in favor or run())"""
repos = load_config(args.config, args.expand_env)
dirmatch = args.dirmatch
for repo_dict in repos:
r = Repo(**repo_dict)
logger.debug('%s' % r)
if not match_dir(r.cwd, dirmatch):
logger.info("Skip %s", r.cwd)
continue
r.aggregate()
if args.do_push:
r.push()
def aggregate_repo(repo, args, sem, err_queue):
"""Aggregate one repo according to the args.
Args:
repo (Repo): The repository to aggregate.
args (argparse.Namespace): CLI arguments.
"""
try:
logger.debug('%s' % repo)
dirmatch = args.dirmatch
if not match_dir(repo.cwd, dirmatch):
logger.info("Skip %s", repo.cwd)
return
if args.command == 'aggregate':
repo.aggregate()
if args.do_push:
repo.push()
elif args.command == 'show-closed-prs':
repo.show_closed_prs()
elif args.command == 'show-all-prs':
repo.show_all_prs()
except Exception:
err_queue.put_nowait(sys.exc_info())
finally:
sem.release()
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.figure
|
python
|
def figure(bgcolor=(1,1,1), size=(1000,1000)):
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
|
Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L32-L44
| null |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.show
|
python
|
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
|
Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L48-L72
|
[
"def clf():\n \"\"\"Clear the current figure\n \"\"\"\n Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)\n Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.render
|
python
|
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
|
Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L76-L106
|
[
"def clf():\n \"\"\"Clear the current figure\n \"\"\"\n Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)\n Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.save
|
python
|
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
|
Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L110-L143
|
[
"def clf():\n \"\"\"Clear the current figure\n \"\"\"\n Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)\n Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.save_loop
|
python
|
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
|
Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L146-L170
|
[
"def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):\n \"\"\"Save frames from the viewer out to a file.\n\n Parameters\n ----------\n filename : str\n The filename in which to save the output image. If more than one frame,\n should have extension .gif.\n n_frames : int\n Number of frames to render. If more than one, the scene will animate.\n axis : (3,) float or None\n If present, the animation will rotate about the given axis in world coordinates.\n Otherwise, the animation will rotate in azimuth.\n clf : bool\n If true, the Visualizer is cleared after rendering the figure.\n kwargs : dict\n Other keyword arguments for the SceneViewer instance.\n \"\"\"\n if n_frames >1 and os.path.splitext(filename)[1] != '.gif':\n raise ValueError('Expected .gif file for multiple-frame save.')\n v = SceneViewer(Visualizer3D._scene,\n size=Visualizer3D._init_size,\n animate=(n_frames > 1),\n animate_axis=axis,\n max_frames=n_frames,\n **kwargs)\n data = [m.data for m in v.saved_frames]\n if len(data) > 1:\n imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)\n else:\n imageio.imwrite(filename, data[0])\n\n if clf:\n Visualizer3D.clf()\n",
"def clf():\n \"\"\"Clear the current figure\n \"\"\"\n Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)\n Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.clf
|
python
|
def clf():
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
|
Clear the current figure
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L173-L177
| null |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.points
|
python
|
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L209-L287
| null |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.mesh
|
python
|
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L290-L325
| null |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.mesh_stable_pose
|
python
|
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
|
Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L329-L371
|
[
"def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):\n \"\"\"Scatter a point cloud in pose T_points_world.\n\n Parameters\n ----------\n points : autolab_core.BagOfPoints or (n,3) float\n The point set to visualize.\n T_points_world : autolab_core.RigidTransform\n Pose of points, specified as a transformation from point frame to world frame.\n color : (3,) or (n,3) float\n Color of whole cloud or per-point colors\n scale : float\n Radius of each point.\n n_cuts : int\n Number of longitude/latitude lines on sphere points.\n subsample : int\n Parameter of subsampling to display fewer points.\n name : str\n A name for the object to be added.\n \"\"\"\n if isinstance(points, BagOfPoints):\n if points.dim != 3:\n raise ValueError('BagOfPoints must have dimension 3xN!')\n else:\n if type(points) is not np.ndarray:\n raise ValueError('Points visualizer expects BagOfPoints or numpy array!')\n if len(points.shape) == 1:\n points = points[:,np.newaxis].T\n if len(points.shape) != 2 or points.shape[1] != 3:\n raise ValueError('Numpy array of points must have dimension (N,3)')\n frame = 'points'\n if T_points_world:\n frame = T_points_world.from_frame\n points = PointCloud(points.T, frame=frame)\n\n color = np.array(color)\n if subsample is not None:\n num_points = points.num_points\n points, inds = points.subsample(subsample, random=random)\n if color.shape[0] == num_points and color.shape[1] == 3:\n color = color[inds,:]\n\n # transform into world frame\n if points.frame != 'world':\n if T_points_world is None:\n T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')\n points_world = T_points_world * points\n else:\n points_world = points\n\n point_data = points_world.data\n if len(point_data.shape) == 1:\n point_data = point_data[:,np.newaxis]\n point_data = point_data.T\n\n mpcolor = color\n if len(color.shape) > 1:\n mpcolor = color[0]\n mp = MaterialProperties(\n color = 
np.array(mpcolor),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.0,\n alpha = 10.0,\n smooth=True\n )\n\n # For each point, create a sphere of the specified color and size.\n sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])\n raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))\n raw_pose_data[3::4, :3] = point_data\n\n instcolor = None\n if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:\n instcolor = color\n obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)\n",
"def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),\n style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):\n \"\"\"Visualize a 3D triangular mesh.\n\n Parameters\n ----------\n mesh : trimesh.Trimesh\n The mesh to visualize.\n T_mesh_world : autolab_core.RigidTransform\n The pose of the mesh, specified as a transformation from mesh frame to world frame.\n style : str\n Triangular mesh style, either 'surface' or 'wireframe'.\n smooth : bool\n If true, the mesh is smoothed before rendering.\n color : 3-tuple\n Color tuple.\n name : str\n A name for the object to be added.\n \"\"\"\n if not isinstance(mesh, trimesh.Trimesh):\n raise ValueError('Must provide a trimesh.Trimesh object')\n\n mp = MaterialProperties(\n color = np.array(color),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.1,\n alpha = 10.0,\n smooth=smooth,\n wireframe=(style == 'wireframe')\n )\n\n obj = SceneObject(mesh, T_mesh_world, mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)\n",
"def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):\n \"\"\"Plot a table mesh in 3D.\n\n Parameters\n ----------\n T_table_world : autolab_core.RigidTransform\n Pose of table relative to world.\n dim : float\n The side-length for the table.\n color : 3-tuple\n Color tuple.\n \"\"\"\n\n table_vertices = np.array([[ dim, dim, 0],\n [ dim, -dim, 0],\n [-dim, dim, 0],\n [-dim, -dim, 0]]).astype('float')\n table_tris = np.array([[0, 1, 2], [1, 2, 3]])\n table_mesh = trimesh.Trimesh(table_vertices, table_tris)\n table_mesh.apply_transform(T_table_world.matrix)\n Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.pose
|
python
|
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
|
Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L374-L398
|
[
"def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):\n \"\"\"Scatter a point cloud in pose T_points_world.\n\n Parameters\n ----------\n points : autolab_core.BagOfPoints or (n,3) float\n The point set to visualize.\n T_points_world : autolab_core.RigidTransform\n Pose of points, specified as a transformation from point frame to world frame.\n color : (3,) or (n,3) float\n Color of whole cloud or per-point colors\n scale : float\n Radius of each point.\n n_cuts : int\n Number of longitude/latitude lines on sphere points.\n subsample : int\n Parameter of subsampling to display fewer points.\n name : str\n A name for the object to be added.\n \"\"\"\n if isinstance(points, BagOfPoints):\n if points.dim != 3:\n raise ValueError('BagOfPoints must have dimension 3xN!')\n else:\n if type(points) is not np.ndarray:\n raise ValueError('Points visualizer expects BagOfPoints or numpy array!')\n if len(points.shape) == 1:\n points = points[:,np.newaxis].T\n if len(points.shape) != 2 or points.shape[1] != 3:\n raise ValueError('Numpy array of points must have dimension (N,3)')\n frame = 'points'\n if T_points_world:\n frame = T_points_world.from_frame\n points = PointCloud(points.T, frame=frame)\n\n color = np.array(color)\n if subsample is not None:\n num_points = points.num_points\n points, inds = points.subsample(subsample, random=random)\n if color.shape[0] == num_points and color.shape[1] == 3:\n color = color[inds,:]\n\n # transform into world frame\n if points.frame != 'world':\n if T_points_world is None:\n T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')\n points_world = T_points_world * points\n else:\n points_world = points\n\n point_data = points_world.data\n if len(point_data.shape) == 1:\n point_data = point_data[:,np.newaxis]\n point_data = point_data.T\n\n mpcolor = color\n if len(color.shape) > 1:\n mpcolor = color[0]\n mp = MaterialProperties(\n color = 
np.array(mpcolor),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.0,\n alpha = 10.0,\n smooth=True\n )\n\n # For each point, create a sphere of the specified color and size.\n sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])\n raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))\n raw_pose_data[3::4, :3] = point_data\n\n instcolor = None\n if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:\n instcolor = color\n obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)\n",
"def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):\n \"\"\"Plot a 3d curve through a set of points using tubes.\n\n Parameters\n ----------\n points : (n,3) float\n A series of 3D points that define a curve in space.\n color : (3,) float\n The color of the tube.\n tube_radius : float\n Radius of tube representing curve.\n n_components : int\n The number of edges in each polygon representing the tube.\n name : str\n A name for the object to be added.\n \"\"\"\n points = np.asanyarray(points)\n mp = MaterialProperties(\n color = np.array(color),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.0,\n alpha = 10.0,\n smooth=True\n )\n\n # Generate circular polygon\n vec = np.array([0,1]) * tube_radius\n angle = np.pi * 2.0 / n_components\n rotmat = np.array([\n [np.cos(angle), -np.sin(angle)],\n [np.sin(angle), np.cos(angle)]\n ])\n perim = []\n for i in range(n_components):\n perim.append(vec)\n vec = np.dot(rotmat, vec)\n poly = Polygon(perim)\n\n # Sweep it out along the path\n mesh = trimesh.creation.sweep_polygon(poly, points)\n obj = SceneObject(mesh, material=mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
"""Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
"""
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.table
|
python
|
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
|
Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L401-L421
|
[
"def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),\n style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):\n \"\"\"Visualize a 3D triangular mesh.\n\n Parameters\n ----------\n mesh : trimesh.Trimesh\n The mesh to visualize.\n T_mesh_world : autolab_core.RigidTransform\n The pose of the mesh, specified as a transformation from mesh frame to world frame.\n style : str\n Triangular mesh style, either 'surface' or 'wireframe'.\n smooth : bool\n If true, the mesh is smoothed before rendering.\n color : 3-tuple\n Color tuple.\n name : str\n A name for the object to be added.\n \"\"\"\n if not isinstance(mesh, trimesh.Trimesh):\n raise ValueError('Must provide a trimesh.Trimesh object')\n\n mp = MaterialProperties(\n color = np.array(color),\n k_a = 0.5,\n k_d = 0.3,\n k_s = 0.1,\n alpha = 10.0,\n smooth=smooth,\n wireframe=(style == 'wireframe')\n )\n\n obj = SceneObject(mesh, T_mesh_world, mp)\n if name is None:\n name = str(uuid.uuid4())\n Visualizer3D._scene.add_object(name, obj)\n"
] |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
    """Save frames from the viewer out to a file.

    Parameters
    ----------
    filename : str
        The filename in which to save the output image. If more than one
        frame, should have extension .gif.
    n_frames : int
        Number of frames to render. If more than one, the scene animates.
    axis : (3,) float or None
        If present, the animation rotates about this axis in world
        coordinates; otherwise it rotates in azimuth.
    clf : bool
        If true, the Visualizer is cleared after rendering the figure.
    kwargs : dict
        Other keyword arguments for the SceneViewer instance.
    """
    # Multi-frame output is only supported as an animated GIF.
    if n_frames > 1 and os.path.splitext(filename)[1] != '.gif':
        raise ValueError('Expected .gif file for multiple-frame save.')
    viewer = SceneViewer(Visualizer3D._scene,
                         size=Visualizer3D._init_size,
                         animate=(n_frames > 1),
                         animate_axis=axis,
                         max_frames=n_frames,
                         **kwargs)
    frames = [image.data for image in viewer.saved_frames]
    if len(frames) > 1:
        imageio.mimwrite(filename, frames, fps=viewer._animate_rate,
                         palettesize=128, subrectangles=True)
    else:
        imageio.imwrite(filename, frames[0])
    if clf:
        Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
    """Off-screen save a GIF of one full rotation about the scene.

    Parameters
    ----------
    filename : str
        The filename in which to save the output image (should have
        extension .gif).
    framerate : int
        The frame rate at which to animate motion.
    time : float
        The number of seconds for one rotation.
    axis : (3,) float or None
        If present, the animation rotates about this axis in world
        coordinates; otherwise it rotates in azimuth.
    clf : bool
        If true, the Visualizer is cleared after rendering the figure.
    kwargs : dict
        Other keyword arguments for the SceneViewer instance.
    """
    # Cast to int: framerate * time is a float when time is (e.g.) 3.0,
    # but the frame count passed down as max_frames must be integral.
    n_frames = int(framerate * time)
    az = 2.0 * np.pi / n_frames
    # save() already clears the figure when clf=True, so no second
    # Visualizer3D.clf() call is needed here.
    Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
                      animate_rate=framerate, animate_az=az)
@staticmethod
def clf():
    """Clear the current figure, preserving its background color."""
    bg = Visualizer3D._scene.background_color
    fresh = Scene(background_color=bg)
    fresh.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
    Visualizer3D._scene = fresh
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
    """Return the visualizer's object keys.

    Returns
    -------
    list of str
        The keys for the visualizer's objects.
    """
    scene = Visualizer3D._scene
    return scene.objects.keys()
@staticmethod
def get_object(name):
    """Return the SceneObject registered under the given name.

    Returns
    -------
    meshrender.SceneObject
        The corresponding SceneObject.
    """
    scene = Visualizer3D._scene
    return scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
    """Scatter a point cloud in pose T_points_world.

    Parameters
    ----------
    points : autolab_core.BagOfPoints or (n,3) float
        The point set to visualize.
    T_points_world : autolab_core.RigidTransform
        Pose of points, specified as a transformation from point frame
        to world frame.
    color : (3,) or (n,3) float
        Color of whole cloud or per-point colors.
    scale : float
        Radius of each point.
    n_cuts : int
        Number of longitude/latitude lines on sphere points.
    subsample : int
        Parameter of subsampling to display fewer points.
    random : bool
        Whether subsampling picks points at random.
    name : str
        A name for the object to be added.
    """
    if isinstance(points, BagOfPoints):
        if points.dim != 3:
            raise ValueError('BagOfPoints must have dimension 3xN!')
    else:
        if type(points) is not np.ndarray:
            raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
        if len(points.shape) == 1:
            points = points[:,np.newaxis].T
        if len(points.shape) != 2 or points.shape[1] != 3:
            raise ValueError('Numpy array of points must have dimension (N,3)')
        frame = 'points'
        # Compare against None explicitly: truth-testing a RigidTransform
        # object is not a reliable presence check.
        if T_points_world is not None:
            frame = T_points_world.from_frame
        points = PointCloud(points.T, frame=frame)

    color = np.array(color)
    if subsample is not None:
        num_points = points.num_points
        points, inds = points.subsample(subsample, random=random)
        # Guard on ndim: a single (3,) color would otherwise raise
        # IndexError on color.shape[1].
        if color.ndim == 2 and color.shape[0] == num_points and color.shape[1] == 3:
            color = color[inds,:]

    # Transform the cloud into world frame if it isn't already there.
    if points.frame != 'world':
        if T_points_world is None:
            T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
        points_world = T_points_world * points
    else:
        points_world = points

    point_data = points_world.data
    if len(point_data.shape) == 1:
        point_data = point_data[:,np.newaxis]
    point_data = point_data.T

    # Material color is the single color, or the first of per-point colors.
    mpcolor = color
    if len(color.shape) > 1:
        mpcolor = color[0]
    mp = MaterialProperties(
        color = np.array(mpcolor),
        k_a = 0.5,
        k_d = 0.3,
        k_s = 0.0,
        alpha = 10.0,
        smooth=True
    )

    # One template sphere, instanced once per point; every 4th row of the
    # stacked 4x4 poses carries that instance's translation.
    sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
    raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
    raw_pose_data[3::4, :3] = point_data
    instcolor = None
    if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
        instcolor = color
    obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
    if name is None:
        name = str(uuid.uuid4())
    Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
         style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
    """Visualize a 3D triangular mesh.

    Parameters
    ----------
    mesh : trimesh.Trimesh
        The mesh to visualize.
    T_mesh_world : autolab_core.RigidTransform
        The pose of the mesh, specified as a transformation from mesh
        frame to world frame.
    style : str
        Triangular mesh style, either 'surface' or 'wireframe'.
    smooth : bool
        If true, the mesh is smoothed before rendering.
    color : 3-tuple
        Color tuple.
    name : str
        A name for the object to be added.
    """
    if not isinstance(mesh, trimesh.Trimesh):
        raise ValueError('Must provide a trimesh.Trimesh object')
    material = MaterialProperties(
        color=np.array(color),
        k_a=0.5,
        k_d=0.3,
        k_s=0.1,
        alpha=10.0,
        smooth=smooth,
        wireframe=(style == 'wireframe')
    )
    key = name if name is not None else str(uuid.uuid4())
    Visualizer3D._scene.add_object(key, SceneObject(mesh, T_mesh_world, material))
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
                     T_table_world=RigidTransform(from_frame='table', to_frame='world'),
                     style='wireframe', smooth=False, color=(0.5,0.5,0.5),
                     dim=0.15, plot_table=True, plot_com=False, name=None):
    """Visualize a mesh in a stable pose.

    Parameters
    ----------
    mesh : trimesh.Trimesh
        The mesh to visualize.
    T_obj_table : autolab_core.RigidTransform
        Pose of object relative to table.
    T_table_world : autolab_core.RigidTransform
        Pose of table relative to world.
    style : str
        Triangular mesh style, either 'surface' or 'wireframe'.
    smooth : bool
        If true, the mesh is smoothed before rendering.
    color : 3-tuple
        Color tuple.
    dim : float
        The side-length for the table.
    plot_table : bool
        If true, a table is visualized as well.
    plot_com : bool
        If true, a ball is visualized at the object's center of mass.
    name : str
        A name for the object to be added.

    Returns
    -------
    autolab_core.RigidTransform
        The pose of the mesh in world frame.
    """
    # Compose table-relative pose into a world-frame pose for the object.
    T_obj_world = T_table_world * T_obj_table.as_frames('obj', 'table')
    Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth,
                      color=color, name=name)
    if plot_table:
        Visualizer3D.table(T_table_world, dim=dim)
    if plot_com:
        Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'),
                            T_obj_world, scale=0.01)
    return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
    """Plot a 3D pose as a set of axes (x red, y green, z blue).

    Parameters
    ----------
    T_frame_world : autolab_core.RigidTransform
        The pose relative to world coordinates.
    alpha : float
        Length of plotted x,y,z axes.
    tube_radius : float
        Radius of plotted x,y,z axes.
    center_scale : float
        Radius of the pose's origin ball.
    """
    rot = T_frame_world.rotation
    origin = T_frame_world.translation
    # Origin marker first, then one colored tube per rotation column.
    Visualizer3D.points(origin, color=(1,1,1), scale=center_scale)
    for i, axis_color in enumerate([(1,0,0), (0,1,0), (0,0,1)]):
        segment = np.array([origin, origin + alpha * rot[:,i]])
        Visualizer3D.plot3d(segment, color=axis_color, tube_radius=tube_radius)
@staticmethod
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
    """Plot a 3d curve through a set of points using tubes.

    Parameters
    ----------
    points : (n,3) float
        A series of 3D points that define a curve in space.
    color : (3,) float
        The color of the tube.
    tube_radius : float
        Radius of tube representing curve.
    n_components : int
        The number of edges in each polygon representing the tube.
    name : str
        A name for the object to be added.
    """
    # NOTE: the original had @staticmethod applied twice; the double wrap
    # makes the attribute uncallable via the class on Python < 3.10.
    points = np.asanyarray(points)
    mp = MaterialProperties(
        color = np.array(color),
        k_a = 0.5,
        k_d = 0.3,
        k_s = 0.0,
        alpha = 10.0,
        smooth=True
    )
    # Generate a regular n-gon approximating a circle of radius tube_radius.
    vec = np.array([0,1]) * tube_radius
    angle = np.pi * 2.0 / n_components
    rotmat = np.array([
        [np.cos(angle), -np.sin(angle)],
        [np.sin(angle), np.cos(angle)]
    ])
    perim = []
    for _ in range(n_components):
        perim.append(vec)
        vec = np.dot(rotmat, vec)
    poly = Polygon(perim)
    # Sweep the polygon along the path to form the tube mesh.
    mesh = trimesh.creation.sweep_polygon(poly, points)
    obj = SceneObject(mesh, material=mp)
    if name is None:
        name = str(uuid.uuid4())
    Visualizer3D._scene.add_object(name, obj)
|
BerkeleyAutomation/visualization
|
visualization/visualizer3d.py
|
Visualizer3D.plot3d
|
python
|
def plot3d(points, color=(0.5, 0.5, 0.5), tube_radius=0.005, n_components=30, name=None):
points = np.asanyarray(points)
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# Generate circular polygon
vec = np.array([0,1]) * tube_radius
angle = np.pi * 2.0 / n_components
rotmat = np.array([
[np.cos(angle), -np.sin(angle)],
[np.sin(angle), np.cos(angle)]
])
perim = []
for i in range(n_components):
perim.append(vec)
vec = np.dot(rotmat, vec)
poly = Polygon(perim)
# Sweep it out along the path
mesh = trimesh.creation.sweep_polygon(poly, points)
obj = SceneObject(mesh, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
|
Plot a 3d curve through a set of points using tubes.
Parameters
----------
points : (n,3) float
A series of 3D points that define a curve in space.
color : (3,) float
The color of the tube.
tube_radius : float
Radius of tube representing curve.
n_components : int
The number of edges in each polygon representing the tube.
name : str
A name for the object to be added.
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer3d.py#L424-L468
| null |
class Visualizer3D:
"""
Class containing static methods for visualization.
The interface is styled after pyplot.
Should be thought of as a namespace rather than a class.
"""
_scene = Scene(background_color=np.array([1.0, 1.0, 1.0]))
_scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
_init_size = np.array([640,480])
_save_directory = None
@staticmethod
def figure(bgcolor=(1,1,1), size=(1000,1000)):
"""Create a blank figure.
Parameters
----------
bgcolor : (3,) float
Color of the background with values in [0,1].
size : (2,) int
Width and height of the figure in pixels.
"""
Visualizer3D._scene = Scene(background_color=np.array(bgcolor))
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
Visualizer3D._init_size = np.array(size)
@staticmethod
def show(animate=False, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Display the current figure and enable interaction.
Parameters
----------
animate : bool
Whether or not to animate the scene.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after showing the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
x = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=animate,
animate_axis=axis,
save_directory=Visualizer3D._save_directory,
**kwargs)
if x.save_directory:
Visualizer3D._save_directory = x.save_directory
if clf:
Visualizer3D.clf()
@staticmethod
def render(n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Render frames from the viewer.
Parameters
----------
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
Returns
-------
list of perception.ColorImage
A list of ColorImages rendered from the viewer.
"""
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
if clf:
Visualizer3D.clf()
return v.saved_frames
@staticmethod
def save(filename, n_frames=1, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Save frames from the viewer out to a file.
Parameters
----------
filename : str
The filename in which to save the output image. If more than one frame,
should have extension .gif.
n_frames : int
Number of frames to render. If more than one, the scene will animate.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
if n_frames >1 and os.path.splitext(filename)[1] != '.gif':
raise ValueError('Expected .gif file for multiple-frame save.')
v = SceneViewer(Visualizer3D._scene,
size=Visualizer3D._init_size,
animate=(n_frames > 1),
animate_axis=axis,
max_frames=n_frames,
**kwargs)
data = [m.data for m in v.saved_frames]
if len(data) > 1:
imageio.mimwrite(filename, data, fps=v._animate_rate, palettesize=128, subrectangles=True)
else:
imageio.imwrite(filename, data[0])
if clf:
Visualizer3D.clf()
@staticmethod
def save_loop(filename, framerate=30, time=3.0, axis=np.array([0.,0.,1.]), clf=True, **kwargs):
"""Off-screen save a GIF of one rotation about the scene.
Parameters
----------
filename : str
The filename in which to save the output image (should have extension .gif)
framerate : int
The frame rate at which to animate motion.
time : float
The number of seconds for one rotation.
axis : (3,) float or None
If present, the animation will rotate about the given axis in world coordinates.
Otherwise, the animation will rotate in azimuth.
clf : bool
If true, the Visualizer is cleared after rendering the figure.
kwargs : dict
Other keyword arguments for the SceneViewer instance.
"""
n_frames = framerate * time
az = 2.0 * np.pi / n_frames
Visualizer3D.save(filename, n_frames=n_frames, axis=axis, clf=clf,
animate_rate=framerate, animate_az=az)
if clf:
Visualizer3D.clf()
@staticmethod
def clf():
"""Clear the current figure
"""
Visualizer3D._scene = Scene(background_color=Visualizer3D._scene.background_color)
Visualizer3D._scene.ambient_light = AmbientLight(color=[1.0, 1.0, 1.0], strength=1.0)
@staticmethod
def close(*args, **kwargs):
"""Close the current figure
"""
pass
@staticmethod
def get_object_keys():
"""Return the visualizer's object keys.
Returns
-------
list of str
The keys for the visualizer's objects.
"""
return Visualizer3D._scene.objects.keys()
@staticmethod
def get_object(name):
"""Return a SceneObject corresponding to the given name.
Returns
-------
meshrender.SceneObject
The corresponding SceneObject.
"""
return Visualizer3D._scene.objects[name]
@staticmethod
def points(points, T_points_world=None, color=np.array([0,1,0]), scale=0.01, n_cuts=20, subsample=None, random=False, name=None):
"""Scatter a point cloud in pose T_points_world.
Parameters
----------
points : autolab_core.BagOfPoints or (n,3) float
The point set to visualize.
T_points_world : autolab_core.RigidTransform
Pose of points, specified as a transformation from point frame to world frame.
color : (3,) or (n,3) float
Color of whole cloud or per-point colors
scale : float
Radius of each point.
n_cuts : int
Number of longitude/latitude lines on sphere points.
subsample : int
Parameter of subsampling to display fewer points.
name : str
A name for the object to be added.
"""
if isinstance(points, BagOfPoints):
if points.dim != 3:
raise ValueError('BagOfPoints must have dimension 3xN!')
else:
if type(points) is not np.ndarray:
raise ValueError('Points visualizer expects BagOfPoints or numpy array!')
if len(points.shape) == 1:
points = points[:,np.newaxis].T
if len(points.shape) != 2 or points.shape[1] != 3:
raise ValueError('Numpy array of points must have dimension (N,3)')
frame = 'points'
if T_points_world:
frame = T_points_world.from_frame
points = PointCloud(points.T, frame=frame)
color = np.array(color)
if subsample is not None:
num_points = points.num_points
points, inds = points.subsample(subsample, random=random)
if color.shape[0] == num_points and color.shape[1] == 3:
color = color[inds,:]
# transform into world frame
if points.frame != 'world':
if T_points_world is None:
T_points_world = RigidTransform(from_frame=points.frame, to_frame='world')
points_world = T_points_world * points
else:
points_world = points
point_data = points_world.data
if len(point_data.shape) == 1:
point_data = point_data[:,np.newaxis]
point_data = point_data.T
mpcolor = color
if len(color.shape) > 1:
mpcolor = color[0]
mp = MaterialProperties(
color = np.array(mpcolor),
k_a = 0.5,
k_d = 0.3,
k_s = 0.0,
alpha = 10.0,
smooth=True
)
# For each point, create a sphere of the specified color and size.
sphere = trimesh.creation.uv_sphere(scale, [n_cuts, n_cuts])
raw_pose_data = np.tile(np.eye(4), (points.num_points, 1))
raw_pose_data[3::4, :3] = point_data
instcolor = None
if color.ndim == 2 and color.shape[0] == points.num_points and color.shape[1] == 3:
instcolor = color
obj = InstancedSceneObject(sphere, raw_pose_data=raw_pose_data, colors=instcolor, material=mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh(mesh, T_mesh_world=RigidTransform(from_frame='obj', to_frame='world'),
style='surface', smooth=False, color=(0.5,0.5,0.5), name=None):
"""Visualize a 3D triangular mesh.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_mesh_world : autolab_core.RigidTransform
The pose of the mesh, specified as a transformation from mesh frame to world frame.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
name : str
A name for the object to be added.
"""
if not isinstance(mesh, trimesh.Trimesh):
raise ValueError('Must provide a trimesh.Trimesh object')
mp = MaterialProperties(
color = np.array(color),
k_a = 0.5,
k_d = 0.3,
k_s = 0.1,
alpha = 10.0,
smooth=smooth,
wireframe=(style == 'wireframe')
)
obj = SceneObject(mesh, T_mesh_world, mp)
if name is None:
name = str(uuid.uuid4())
Visualizer3D._scene.add_object(name, obj)
@staticmethod
def mesh_stable_pose(mesh, T_obj_table,
T_table_world=RigidTransform(from_frame='table', to_frame='world'),
style='wireframe', smooth=False, color=(0.5,0.5,0.5),
dim=0.15, plot_table=True, plot_com=False, name=None):
"""Visualize a mesh in a stable pose.
Parameters
----------
mesh : trimesh.Trimesh
The mesh to visualize.
T_obj_table : autolab_core.RigidTransform
Pose of object relative to table.
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
style : str
Triangular mesh style, either 'surface' or 'wireframe'.
smooth : bool
If true, the mesh is smoothed before rendering.
color : 3-tuple
Color tuple.
dim : float
The side-length for the table.
plot_table : bool
If true, a table is visualized as well.
plot_com : bool
If true, a ball is visualized at the object's center of mass.
name : str
A name for the object to be added.
Returns
-------
autolab_core.RigidTransform
The pose of the mesh in world frame.
"""
T_obj_table = T_obj_table.as_frames('obj', 'table')
T_obj_world = T_table_world * T_obj_table
Visualizer3D.mesh(mesh, T_obj_world, style=style, smooth=smooth, color=color, name=name)
if plot_table:
Visualizer3D.table(T_table_world, dim=dim)
if plot_com:
Visualizer3D.points(Point(np.array(mesh.center_mass), 'obj'), T_obj_world, scale=0.01)
return T_obj_world
@staticmethod
def pose(T_frame_world, alpha=0.1, tube_radius=0.005, center_scale=0.01):
"""Plot a 3D pose as a set of axes (x red, y green, z blue).
Parameters
----------
T_frame_world : autolab_core.RigidTransform
The pose relative to world coordinates.
alpha : float
Length of plotted x,y,z axes.
tube_radius : float
Radius of plotted x,y,z axes.
center_scale : float
Radius of the pose's origin ball.
"""
R = T_frame_world.rotation
t = T_frame_world.translation
x_axis_tf = np.array([t, t + alpha * R[:,0]])
y_axis_tf = np.array([t, t + alpha * R[:,1]])
z_axis_tf = np.array([t, t + alpha * R[:,2]])
Visualizer3D.points(t, color=(1,1,1), scale=center_scale)
Visualizer3D.plot3d(x_axis_tf, color=(1,0,0), tube_radius=tube_radius)
Visualizer3D.plot3d(y_axis_tf, color=(0,1,0), tube_radius=tube_radius)
Visualizer3D.plot3d(z_axis_tf, color=(0,0,1), tube_radius=tube_radius)
@staticmethod
def table(T_table_world=RigidTransform(from_frame='table', to_frame='world'), dim=0.16, color=(0,0,0)):
"""Plot a table mesh in 3D.
Parameters
----------
T_table_world : autolab_core.RigidTransform
Pose of table relative to world.
dim : float
The side-length for the table.
color : 3-tuple
Color tuple.
"""
table_vertices = np.array([[ dim, dim, 0],
[ dim, -dim, 0],
[-dim, dim, 0],
[-dim, -dim, 0]]).astype('float')
table_tris = np.array([[0, 1, 2], [1, 2, 3]])
table_mesh = trimesh.Trimesh(table_vertices, table_tris)
table_mesh.apply_transform(T_table_world.matrix)
Visualizer3D.mesh(table_mesh, style='surface', smooth=True, color=color)
@staticmethod
|
BerkeleyAutomation/visualization
|
visualization/visualizer2d.py
|
Visualizer2D.figure
|
python
|
def figure(size=(8,8), *args, **kwargs):
return plt.figure(figsize=size, *args, **kwargs)
|
Creates a figure.
Parameters
----------
size : 2-tuple
size of the view window in inches
args : list
args of mayavi figure
kwargs : list
keyword args of mayavi figure
Returns
-------
pyplot figure
the current figure
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L14-L31
| null |
class Visualizer2D:
@staticmethod
@staticmethod
def show(filename=None, *args, **kwargs):
""" Show the current figure.
Parameters
----------
filename : :obj:`str`
filename to save the image to, for auto-saving
"""
if filename is None:
plt.show(*args, **kwargs)
else:
plt.savefig(filename, *args, **kwargs)
@staticmethod
def clf(*args, **kwargs):
""" Clear the current figure """
plt.clf(*args, **kwargs)
@staticmethod
def gca(*args, **kwargs):
""" Get the current axes """
return plt.gca(*args, **kwargs)
@staticmethod
def xlim(*args, **kwargs):
""" Set the x limits of the current figure """
plt.xlim(*args, **kwargs)
@staticmethod
def ylim(*args, **kwargs):
""" Set the y limits the current figure """
plt.ylim(*args, **kwargs)
@staticmethod
def savefig(*args, **kwargs):
""" Save the current figure """
plt.savefig(*args, **kwargs)
@staticmethod
def colorbar(*args, **kwargs):
""" Adds a colorbar to the current figure """
plt.colorbar(*args, **kwargs)
@staticmethod
def subplot(*args, **kwargs):
""" Creates a subplot in the current figure """
plt.subplot(*args, **kwargs)
@staticmethod
def title(*args, **kwargs):
""" Creates a title in the current figure """
plt.title(*args, **kwargs)
@staticmethod
def suptitle(*args, **kwargs):
""" Creates a title in the current figure """
plt.suptitle(*args, **kwargs)
@staticmethod
def xlabel(*args, **kwargs):
""" Creates an x axis label in the current figure """
plt.xlabel(*args, **kwargs)
@staticmethod
def ylabel(*args, **kwargs):
""" Creates an y axis label in the current figure """
plt.ylabel(*args, **kwargs)
@staticmethod
def legend(*args, **kwargs):
""" Creates a legend for the current figure """
plt.legend(*args, **kwargs)
@staticmethod
def scatter(*args, **kwargs):
""" Scatters points """
plt.scatter(*args, **kwargs)
@staticmethod
def plot(*args, **kwargs):
""" Plots lines """
plt.plot(*args, **kwargs)
@staticmethod
def imshow(image, auto_subplot=False, **kwargs):
""" Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
"""
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
@staticmethod
def box(b, line_width=2, color='g', style='-'):
""" Draws a box on the current plot.
Parameters
----------
b : :obj:`autolab_core.Box`
box to draw
line_width : int
width of lines on side of box
color : :obj:`str`
color of box
style : :obj:`str`
style of lines to draw
"""
if not isinstance(b, Box):
raise ValueError('Input must be of type Box')
# get min pixels
min_i = b.min_pt[1]
min_j = b.min_pt[0]
max_i = b.max_pt[1]
max_j = b.max_pt[0]
top_left = np.array([min_i, min_j])
top_right = np.array([max_i, min_j])
bottom_left = np.array([min_i, max_j])
bottom_right = np.array([max_i, max_j])
# create lines
left = np.c_[top_left, bottom_left].T
right = np.c_[top_right, bottom_right].T
top = np.c_[top_left, top_right].T
bottom = np.c_[bottom_left, bottom_right].T
# plot lines
plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)
@staticmethod
def contour(c, subsample=1, size=10, color='g'):
""" Draws a contour on the current plot by scattering points.
Parameters
----------
c : :obj:`autolab_core.Contour`
contour to draw
subsample : int
subsample rate for boundary pixels
size : int
size of scattered points
color : :obj:`str`
color of box
"""
if not isinstance(c, Contour):
raise ValueError('Input must be of type Contour')
for i in range(c.num_pixels)[0::subsample]:
plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)
@staticmethod
def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
arrow_width = 1, jaw_len=3, jaw_width = 1.0,
grasp_center_size=1, grasp_center_thickness=2.5,
grasp_center_style='+', grasp_axis_width=1,
grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
"""
Plots a 2D grasp with arrow and jaw style using matplotlib
Parameters
----------
grasp : :obj:`Grasp2D`
2D grasp to plot
width : float
width, in pixels, of the grasp (overrides Grasp2D.width_px)
color : :obj:`str`
color of plotted grasp
arrow_len : float
length of arrow body
arrow_head_len : float
length of arrow head
arrow_head_width : float
width of arrow head
arrow_width : float
width of arrow body
jaw_len : float
length of jaw line
jaw_width : float
line width of jaw line
grasp_center_thickness : float
thickness of grasp center
grasp_center_style : :obj:`str`
style of center of grasp
grasp_axis_width : float
line width of grasp axis
grasp_axis_style : :obj:`str`
style of grasp axis line
show_center : bool
whether or not to plot the grasp center
show_axis : bool
whether or not to plot the grasp axis
"""
# set vars for suction
skip_jaws = False
if not hasattr(grasp, 'width'):
grasp_center_style = '.'
grasp_center_size = 50
plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
if hasattr(grasp, 'orientation'):
axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
p = grasp.center.data + alpha * axis
line = np.c_[grasp.center.data, p]
plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
return
# plot grasp center
if show_center:
plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)
if skip_jaws:
return
# compute axis and jaw locations
axis = grasp.axis
width_px = width
if width_px is None and hasattr(grasp, 'width_px'):
width_px = grasp.width_px
g1 = grasp.center.data - (float(width_px) / 2) * axis
g2 = grasp.center.data + (float(width_px) / 2) * axis
g1p = g1 - scale * arrow_len * axis # start location of grasp jaw 1
g2p = g2 + scale * arrow_len * axis # start location of grasp jaw 2
# plot grasp axis
if show_axis:
plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color, linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)
# direction of jaw line
jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])
# length of arrow
alpha = scale*(arrow_len - arrow_head_len)
# plot first jaw
g1_line = np.c_[g1p, g1 - scale*arrow_head_len*axis].T
plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)
# plot second jaw
g2_line = np.c_[g2p, g2 + scale*arrow_head_len*axis].T
plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
BerkeleyAutomation/visualization
|
visualization/visualizer2d.py
|
Visualizer2D.show
|
python
|
def show(filename=None, *args, **kwargs):
if filename is None:
plt.show(*args, **kwargs)
else:
plt.savefig(filename, *args, **kwargs)
|
Show the current figure.
Parameters
----------
filename : :obj:`str`
filename to save the image to, for auto-saving
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L34-L45
| null |
class Visualizer2D:
@staticmethod
def figure(size=(8,8), *args, **kwargs):
""" Creates a figure.
Parameters
----------
size : 2-tuple
size of the view window in inches
args : list
args of mayavi figure
kwargs : list
keyword args of mayavi figure
Returns
-------
pyplot figure
the current figure
"""
return plt.figure(figsize=size, *args, **kwargs)
@staticmethod
@staticmethod
def clf(*args, **kwargs):
""" Clear the current figure """
plt.clf(*args, **kwargs)
@staticmethod
def gca(*args, **kwargs):
""" Get the current axes """
return plt.gca(*args, **kwargs)
@staticmethod
def xlim(*args, **kwargs):
""" Set the x limits of the current figure """
plt.xlim(*args, **kwargs)
@staticmethod
def ylim(*args, **kwargs):
""" Set the y limits the current figure """
plt.ylim(*args, **kwargs)
@staticmethod
def savefig(*args, **kwargs):
""" Save the current figure """
plt.savefig(*args, **kwargs)
@staticmethod
def colorbar(*args, **kwargs):
""" Adds a colorbar to the current figure """
plt.colorbar(*args, **kwargs)
@staticmethod
def subplot(*args, **kwargs):
""" Creates a subplot in the current figure """
plt.subplot(*args, **kwargs)
@staticmethod
def title(*args, **kwargs):
""" Creates a title in the current figure """
plt.title(*args, **kwargs)
@staticmethod
def suptitle(*args, **kwargs):
""" Creates a title in the current figure """
plt.suptitle(*args, **kwargs)
@staticmethod
def xlabel(*args, **kwargs):
""" Creates an x axis label in the current figure """
plt.xlabel(*args, **kwargs)
@staticmethod
def ylabel(*args, **kwargs):
""" Creates an y axis label in the current figure """
plt.ylabel(*args, **kwargs)
@staticmethod
def legend(*args, **kwargs):
""" Creates a legend for the current figure """
plt.legend(*args, **kwargs)
@staticmethod
def scatter(*args, **kwargs):
""" Scatters points """
plt.scatter(*args, **kwargs)
@staticmethod
def plot(*args, **kwargs):
""" Plots lines """
plt.plot(*args, **kwargs)
@staticmethod
def imshow(image, auto_subplot=False, **kwargs):
""" Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
"""
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
@staticmethod
def box(b, line_width=2, color='g', style='-'):
""" Draws a box on the current plot.
Parameters
----------
b : :obj:`autolab_core.Box`
box to draw
line_width : int
width of lines on side of box
color : :obj:`str`
color of box
style : :obj:`str`
style of lines to draw
"""
if not isinstance(b, Box):
raise ValueError('Input must be of type Box')
# get min pixels
min_i = b.min_pt[1]
min_j = b.min_pt[0]
max_i = b.max_pt[1]
max_j = b.max_pt[0]
top_left = np.array([min_i, min_j])
top_right = np.array([max_i, min_j])
bottom_left = np.array([min_i, max_j])
bottom_right = np.array([max_i, max_j])
# create lines
left = np.c_[top_left, bottom_left].T
right = np.c_[top_right, bottom_right].T
top = np.c_[top_left, top_right].T
bottom = np.c_[bottom_left, bottom_right].T
# plot lines
plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)
@staticmethod
def contour(c, subsample=1, size=10, color='g'):
""" Draws a contour on the current plot by scattering points.
Parameters
----------
c : :obj:`autolab_core.Contour`
contour to draw
subsample : int
subsample rate for boundary pixels
size : int
size of scattered points
color : :obj:`str`
color of box
"""
if not isinstance(c, Contour):
raise ValueError('Input must be of type Contour')
for i in range(c.num_pixels)[0::subsample]:
plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)
@staticmethod
def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
arrow_width = 1, jaw_len=3, jaw_width = 1.0,
grasp_center_size=1, grasp_center_thickness=2.5,
grasp_center_style='+', grasp_axis_width=1,
grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
"""
Plots a 2D grasp with arrow and jaw style using matplotlib
Parameters
----------
grasp : :obj:`Grasp2D`
2D grasp to plot
width : float
width, in pixels, of the grasp (overrides Grasp2D.width_px)
color : :obj:`str`
color of plotted grasp
arrow_len : float
length of arrow body
arrow_head_len : float
length of arrow head
arrow_head_width : float
width of arrow head
arrow_width : float
width of arrow body
jaw_len : float
length of jaw line
jaw_width : float
line width of jaw line
grasp_center_thickness : float
thickness of grasp center
grasp_center_style : :obj:`str`
style of center of grasp
grasp_axis_width : float
line width of grasp axis
grasp_axis_style : :obj:`str`
style of grasp axis line
show_center : bool
whether or not to plot the grasp center
show_axis : bool
whether or not to plot the grasp axis
"""
# set vars for suction
skip_jaws = False
if not hasattr(grasp, 'width'):
grasp_center_style = '.'
grasp_center_size = 50
plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
if hasattr(grasp, 'orientation'):
axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
p = grasp.center.data + alpha * axis
line = np.c_[grasp.center.data, p]
plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
return
# plot grasp center
if show_center:
plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)
if skip_jaws:
return
# compute axis and jaw locations
axis = grasp.axis
width_px = width
if width_px is None and hasattr(grasp, 'width_px'):
width_px = grasp.width_px
g1 = grasp.center.data - (float(width_px) / 2) * axis
g2 = grasp.center.data + (float(width_px) / 2) * axis
g1p = g1 - scale * arrow_len * axis # start location of grasp jaw 1
g2p = g2 + scale * arrow_len * axis # start location of grasp jaw 2
# plot grasp axis
if show_axis:
plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color, linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)
# direction of jaw line
jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])
# length of arrow
alpha = scale*(arrow_len - arrow_head_len)
# plot first jaw
g1_line = np.c_[g1p, g1 - scale*arrow_head_len*axis].T
plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)
# plot second jaw
g2_line = np.c_[g2p, g2 + scale*arrow_head_len*axis].T
plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
BerkeleyAutomation/visualization
|
visualization/visualizer2d.py
|
Visualizer2D.imshow
|
python
|
def imshow(image, auto_subplot=False, **kwargs):
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
|
Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L118-L151
| null |
class Visualizer2D:
@staticmethod
def figure(size=(8,8), *args, **kwargs):
""" Creates a figure.
Parameters
----------
size : 2-tuple
size of the view window in inches
args : list
args of mayavi figure
kwargs : list
keyword args of mayavi figure
Returns
-------
pyplot figure
the current figure
"""
return plt.figure(figsize=size, *args, **kwargs)
@staticmethod
def show(filename=None, *args, **kwargs):
""" Show the current figure.
Parameters
----------
filename : :obj:`str`
filename to save the image to, for auto-saving
"""
if filename is None:
plt.show(*args, **kwargs)
else:
plt.savefig(filename, *args, **kwargs)
@staticmethod
def clf(*args, **kwargs):
""" Clear the current figure """
plt.clf(*args, **kwargs)
@staticmethod
def gca(*args, **kwargs):
""" Get the current axes """
return plt.gca(*args, **kwargs)
@staticmethod
def xlim(*args, **kwargs):
""" Set the x limits of the current figure """
plt.xlim(*args, **kwargs)
@staticmethod
def ylim(*args, **kwargs):
""" Set the y limits the current figure """
plt.ylim(*args, **kwargs)
@staticmethod
def savefig(*args, **kwargs):
""" Save the current figure """
plt.savefig(*args, **kwargs)
@staticmethod
def colorbar(*args, **kwargs):
""" Adds a colorbar to the current figure """
plt.colorbar(*args, **kwargs)
@staticmethod
def subplot(*args, **kwargs):
""" Creates a subplot in the current figure """
plt.subplot(*args, **kwargs)
@staticmethod
def title(*args, **kwargs):
""" Creates a title in the current figure """
plt.title(*args, **kwargs)
@staticmethod
def suptitle(*args, **kwargs):
""" Creates a title in the current figure """
plt.suptitle(*args, **kwargs)
@staticmethod
def xlabel(*args, **kwargs):
""" Creates an x axis label in the current figure """
plt.xlabel(*args, **kwargs)
@staticmethod
def ylabel(*args, **kwargs):
""" Creates an y axis label in the current figure """
plt.ylabel(*args, **kwargs)
@staticmethod
def legend(*args, **kwargs):
""" Creates a legend for the current figure """
plt.legend(*args, **kwargs)
@staticmethod
def scatter(*args, **kwargs):
""" Scatters points """
plt.scatter(*args, **kwargs)
@staticmethod
def plot(*args, **kwargs):
""" Plots lines """
plt.plot(*args, **kwargs)
@staticmethod
@staticmethod
def box(b, line_width=2, color='g', style='-'):
""" Draws a box on the current plot.
Parameters
----------
b : :obj:`autolab_core.Box`
box to draw
line_width : int
width of lines on side of box
color : :obj:`str`
color of box
style : :obj:`str`
style of lines to draw
"""
if not isinstance(b, Box):
raise ValueError('Input must be of type Box')
# get min pixels
min_i = b.min_pt[1]
min_j = b.min_pt[0]
max_i = b.max_pt[1]
max_j = b.max_pt[0]
top_left = np.array([min_i, min_j])
top_right = np.array([max_i, min_j])
bottom_left = np.array([min_i, max_j])
bottom_right = np.array([max_i, max_j])
# create lines
left = np.c_[top_left, bottom_left].T
right = np.c_[top_right, bottom_right].T
top = np.c_[top_left, top_right].T
bottom = np.c_[bottom_left, bottom_right].T
# plot lines
plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)
@staticmethod
def contour(c, subsample=1, size=10, color='g'):
""" Draws a contour on the current plot by scattering points.
Parameters
----------
c : :obj:`autolab_core.Contour`
contour to draw
subsample : int
subsample rate for boundary pixels
size : int
size of scattered points
color : :obj:`str`
color of box
"""
if not isinstance(c, Contour):
raise ValueError('Input must be of type Contour')
for i in range(c.num_pixels)[0::subsample]:
plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)
@staticmethod
def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
arrow_width = 1, jaw_len=3, jaw_width = 1.0,
grasp_center_size=1, grasp_center_thickness=2.5,
grasp_center_style='+', grasp_axis_width=1,
grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
"""
Plots a 2D grasp with arrow and jaw style using matplotlib
Parameters
----------
grasp : :obj:`Grasp2D`
2D grasp to plot
width : float
width, in pixels, of the grasp (overrides Grasp2D.width_px)
color : :obj:`str`
color of plotted grasp
arrow_len : float
length of arrow body
arrow_head_len : float
length of arrow head
arrow_head_width : float
width of arrow head
arrow_width : float
width of arrow body
jaw_len : float
length of jaw line
jaw_width : float
line width of jaw line
grasp_center_thickness : float
thickness of grasp center
grasp_center_style : :obj:`str`
style of center of grasp
grasp_axis_width : float
line width of grasp axis
grasp_axis_style : :obj:`str`
style of grasp axis line
show_center : bool
whether or not to plot the grasp center
show_axis : bool
whether or not to plot the grasp axis
"""
# set vars for suction
skip_jaws = False
if not hasattr(grasp, 'width'):
grasp_center_style = '.'
grasp_center_size = 50
plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
if hasattr(grasp, 'orientation'):
axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
p = grasp.center.data + alpha * axis
line = np.c_[grasp.center.data, p]
plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
return
# plot grasp center
if show_center:
plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)
if skip_jaws:
return
# compute axis and jaw locations
axis = grasp.axis
width_px = width
if width_px is None and hasattr(grasp, 'width_px'):
width_px = grasp.width_px
g1 = grasp.center.data - (float(width_px) / 2) * axis
g2 = grasp.center.data + (float(width_px) / 2) * axis
g1p = g1 - scale * arrow_len * axis # start location of grasp jaw 1
g2p = g2 + scale * arrow_len * axis # start location of grasp jaw 2
# plot grasp axis
if show_axis:
plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color, linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)
# direction of jaw line
jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])
# length of arrow
alpha = scale*(arrow_len - arrow_head_len)
# plot first jaw
g1_line = np.c_[g1p, g1 - scale*arrow_head_len*axis].T
plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)
# plot second jaw
g2_line = np.c_[g2p, g2 + scale*arrow_head_len*axis].T
plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
BerkeleyAutomation/visualization
|
visualization/visualizer2d.py
|
Visualizer2D.box
|
python
|
def box(b, line_width=2, color='g', style='-'):
if not isinstance(b, Box):
raise ValueError('Input must be of type Box')
# get min pixels
min_i = b.min_pt[1]
min_j = b.min_pt[0]
max_i = b.max_pt[1]
max_j = b.max_pt[0]
top_left = np.array([min_i, min_j])
top_right = np.array([max_i, min_j])
bottom_left = np.array([min_i, max_j])
bottom_right = np.array([max_i, max_j])
# create lines
left = np.c_[top_left, bottom_left].T
right = np.c_[top_right, bottom_right].T
top = np.c_[top_left, top_right].T
bottom = np.c_[bottom_left, bottom_right].T
# plot lines
plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)
|
Draws a box on the current plot.
Parameters
----------
b : :obj:`autolab_core.Box`
box to draw
line_width : int
width of lines on side of box
color : :obj:`str`
color of box
style : :obj:`str`
style of lines to draw
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L154-L191
| null |
class Visualizer2D:
@staticmethod
def figure(size=(8,8), *args, **kwargs):
""" Creates a figure.
Parameters
----------
size : 2-tuple
size of the view window in inches
args : list
args of mayavi figure
kwargs : list
keyword args of mayavi figure
Returns
-------
pyplot figure
the current figure
"""
return plt.figure(figsize=size, *args, **kwargs)
@staticmethod
def show(filename=None, *args, **kwargs):
""" Show the current figure.
Parameters
----------
filename : :obj:`str`
filename to save the image to, for auto-saving
"""
if filename is None:
plt.show(*args, **kwargs)
else:
plt.savefig(filename, *args, **kwargs)
@staticmethod
def clf(*args, **kwargs):
""" Clear the current figure """
plt.clf(*args, **kwargs)
@staticmethod
def gca(*args, **kwargs):
""" Get the current axes """
return plt.gca(*args, **kwargs)
@staticmethod
def xlim(*args, **kwargs):
""" Set the x limits of the current figure """
plt.xlim(*args, **kwargs)
@staticmethod
def ylim(*args, **kwargs):
""" Set the y limits the current figure """
plt.ylim(*args, **kwargs)
@staticmethod
def savefig(*args, **kwargs):
""" Save the current figure """
plt.savefig(*args, **kwargs)
@staticmethod
def colorbar(*args, **kwargs):
""" Adds a colorbar to the current figure """
plt.colorbar(*args, **kwargs)
@staticmethod
def subplot(*args, **kwargs):
""" Creates a subplot in the current figure """
plt.subplot(*args, **kwargs)
@staticmethod
def title(*args, **kwargs):
""" Creates a title in the current figure """
plt.title(*args, **kwargs)
@staticmethod
def suptitle(*args, **kwargs):
""" Creates a title in the current figure """
plt.suptitle(*args, **kwargs)
@staticmethod
def xlabel(*args, **kwargs):
""" Creates an x axis label in the current figure """
plt.xlabel(*args, **kwargs)
@staticmethod
def ylabel(*args, **kwargs):
""" Creates an y axis label in the current figure """
plt.ylabel(*args, **kwargs)
@staticmethod
def legend(*args, **kwargs):
""" Creates a legend for the current figure """
plt.legend(*args, **kwargs)
@staticmethod
def scatter(*args, **kwargs):
""" Scatters points """
plt.scatter(*args, **kwargs)
@staticmethod
def plot(*args, **kwargs):
""" Plots lines """
plt.plot(*args, **kwargs)
@staticmethod
def imshow(image, auto_subplot=False, **kwargs):
""" Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
"""
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
@staticmethod
def box(b, line_width=2, color='g', style='-'):
""" Draws a box on the current plot.
Parameters
----------
b : :obj:`autolab_core.Box`
box to draw
line_width : int
width of lines on side of box
color : :obj:`str`
color of box
style : :obj:`str`
style of lines to draw
"""
if not isinstance(b, Box):
raise ValueError('Input must be of type Box')
# get min pixels
min_i = b.min_pt[1]
min_j = b.min_pt[0]
max_i = b.max_pt[1]
max_j = b.max_pt[0]
top_left = np.array([min_i, min_j])
top_right = np.array([max_i, min_j])
bottom_left = np.array([min_i, max_j])
bottom_right = np.array([max_i, max_j])
# create lines
left = np.c_[top_left, bottom_left].T
right = np.c_[top_right, bottom_right].T
top = np.c_[top_left, top_right].T
bottom = np.c_[bottom_left, bottom_right].T
# plot lines
plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)
@staticmethod
def contour(c, subsample=1, size=10, color='g'):
""" Draws a contour on the current plot by scattering points.
Parameters
----------
c : :obj:`autolab_core.Contour`
contour to draw
subsample : int
subsample rate for boundary pixels
size : int
size of scattered points
color : :obj:`str`
color of box
"""
if not isinstance(c, Contour):
raise ValueError('Input must be of type Contour')
for i in range(c.num_pixels)[0::subsample]:
plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)
@staticmethod
def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
arrow_width = 1, jaw_len=3, jaw_width = 1.0,
grasp_center_size=1, grasp_center_thickness=2.5,
grasp_center_style='+', grasp_axis_width=1,
grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
"""
Plots a 2D grasp with arrow and jaw style using matplotlib
Parameters
----------
grasp : :obj:`Grasp2D`
2D grasp to plot
width : float
width, in pixels, of the grasp (overrides Grasp2D.width_px)
color : :obj:`str`
color of plotted grasp
arrow_len : float
length of arrow body
arrow_head_len : float
length of arrow head
arrow_head_width : float
width of arrow head
arrow_width : float
width of arrow body
jaw_len : float
length of jaw line
jaw_width : float
line width of jaw line
grasp_center_thickness : float
thickness of grasp center
grasp_center_style : :obj:`str`
style of center of grasp
grasp_axis_width : float
line width of grasp axis
grasp_axis_style : :obj:`str`
style of grasp axis line
show_center : bool
whether or not to plot the grasp center
show_axis : bool
whether or not to plot the grasp axis
"""
# set vars for suction
skip_jaws = False
if not hasattr(grasp, 'width'):
grasp_center_style = '.'
grasp_center_size = 50
plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
if hasattr(grasp, 'orientation'):
axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
p = grasp.center.data + alpha * axis
line = np.c_[grasp.center.data, p]
plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
return
# plot grasp center
if show_center:
plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)
if skip_jaws:
return
# compute axis and jaw locations
axis = grasp.axis
width_px = width
if width_px is None and hasattr(grasp, 'width_px'):
width_px = grasp.width_px
g1 = grasp.center.data - (float(width_px) / 2) * axis
g2 = grasp.center.data + (float(width_px) / 2) * axis
g1p = g1 - scale * arrow_len * axis # start location of grasp jaw 1
g2p = g2 + scale * arrow_len * axis # start location of grasp jaw 2
# plot grasp axis
if show_axis:
plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color, linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)
# direction of jaw line
jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])
# length of arrow
alpha = scale*(arrow_len - arrow_head_len)
# plot first jaw
g1_line = np.c_[g1p, g1 - scale*arrow_head_len*axis].T
plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)
# plot second jaw
g2_line = np.c_[g2p, g2 + scale*arrow_head_len*axis].T
plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
BerkeleyAutomation/visualization
|
visualization/visualizer2d.py
|
Visualizer2D.contour
|
python
|
def contour(c, subsample=1, size=10, color='g'):
if not isinstance(c, Contour):
raise ValueError('Input must be of type Contour')
for i in range(c.num_pixels)[0::subsample]:
plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)
|
Draws a contour on the current plot by scattering points.
Parameters
----------
c : :obj:`autolab_core.Contour`
contour to draw
subsample : int
subsample rate for boundary pixels
size : int
size of scattered points
color : :obj:`str`
color of box
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L194-L212
| null |
class Visualizer2D:
@staticmethod
def figure(size=(8,8), *args, **kwargs):
""" Creates a figure.
Parameters
----------
size : 2-tuple
size of the view window in inches
args : list
args of mayavi figure
kwargs : list
keyword args of mayavi figure
Returns
-------
pyplot figure
the current figure
"""
return plt.figure(figsize=size, *args, **kwargs)
@staticmethod
def show(filename=None, *args, **kwargs):
""" Show the current figure.
Parameters
----------
filename : :obj:`str`
filename to save the image to, for auto-saving
"""
if filename is None:
plt.show(*args, **kwargs)
else:
plt.savefig(filename, *args, **kwargs)
@staticmethod
def clf(*args, **kwargs):
""" Clear the current figure """
plt.clf(*args, **kwargs)
@staticmethod
def gca(*args, **kwargs):
""" Get the current axes """
return plt.gca(*args, **kwargs)
@staticmethod
def xlim(*args, **kwargs):
""" Set the x limits of the current figure """
plt.xlim(*args, **kwargs)
@staticmethod
def ylim(*args, **kwargs):
""" Set the y limits the current figure """
plt.ylim(*args, **kwargs)
@staticmethod
def savefig(*args, **kwargs):
""" Save the current figure """
plt.savefig(*args, **kwargs)
@staticmethod
def colorbar(*args, **kwargs):
""" Adds a colorbar to the current figure """
plt.colorbar(*args, **kwargs)
@staticmethod
def subplot(*args, **kwargs):
""" Creates a subplot in the current figure """
plt.subplot(*args, **kwargs)
@staticmethod
def title(*args, **kwargs):
""" Creates a title in the current figure """
plt.title(*args, **kwargs)
@staticmethod
def suptitle(*args, **kwargs):
""" Creates a title in the current figure """
plt.suptitle(*args, **kwargs)
@staticmethod
def xlabel(*args, **kwargs):
""" Creates an x axis label in the current figure """
plt.xlabel(*args, **kwargs)
@staticmethod
def ylabel(*args, **kwargs):
""" Creates an y axis label in the current figure """
plt.ylabel(*args, **kwargs)
@staticmethod
def legend(*args, **kwargs):
""" Creates a legend for the current figure """
plt.legend(*args, **kwargs)
@staticmethod
def scatter(*args, **kwargs):
""" Scatters points """
plt.scatter(*args, **kwargs)
@staticmethod
def plot(*args, **kwargs):
""" Plots lines """
plt.plot(*args, **kwargs)
@staticmethod
def imshow(image, auto_subplot=False, **kwargs):
""" Displays an image.
Parameters
----------
image : :obj:`perception.Image`
image to display
auto_subplot : bool
whether or not to automatically subplot for multi-channel images e.g. rgbd
"""
if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
plt.imshow(image.data, **kwargs)
elif isinstance(image, DepthImage):
plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
elif isinstance(image, RgbdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.color.data, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.color.data, **kwargs)
elif isinstance(image, GdImage):
if auto_subplot:
plt.subplot(1,2,1)
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
plt.subplot(1,2,2)
plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
else:
plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
plt.axis('off')
@staticmethod
def box(b, line_width=2, color='g', style='-'):
""" Draws a box on the current plot.
Parameters
----------
b : :obj:`autolab_core.Box`
box to draw
line_width : int
width of lines on side of box
color : :obj:`str`
color of box
style : :obj:`str`
style of lines to draw
"""
if not isinstance(b, Box):
raise ValueError('Input must be of type Box')
# get min pixels
min_i = b.min_pt[1]
min_j = b.min_pt[0]
max_i = b.max_pt[1]
max_j = b.max_pt[0]
top_left = np.array([min_i, min_j])
top_right = np.array([max_i, min_j])
bottom_left = np.array([min_i, max_j])
bottom_right = np.array([max_i, max_j])
# create lines
left = np.c_[top_left, bottom_left].T
right = np.c_[top_right, bottom_right].T
top = np.c_[top_left, top_right].T
bottom = np.c_[bottom_left, bottom_right].T
# plot lines
plt.plot(left[:,0], left[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(right[:,0], right[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(top[:,0], top[:,1], linewidth=line_width, color=color, linestyle=style)
plt.plot(bottom[:,0], bottom[:,1], linewidth=line_width, color=color, linestyle=style)
@staticmethod
def contour(c, subsample=1, size=10, color='g'):
""" Draws a contour on the current plot by scattering points.
Parameters
----------
c : :obj:`autolab_core.Contour`
contour to draw
subsample : int
subsample rate for boundary pixels
size : int
size of scattered points
color : :obj:`str`
color of box
"""
if not isinstance(c, Contour):
raise ValueError('Input must be of type Contour')
for i in range(c.num_pixels)[0::subsample]:
plt.scatter(c.boundary_pixels[i,1], c.boundary_pixels[i,0], s=size, c=color)
@staticmethod
def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
          arrow_width = 1, jaw_len=3, jaw_width = 1.0,
          grasp_center_size=1, grasp_center_thickness=2.5,
          grasp_center_style='+', grasp_axis_width=1,
          grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
    """
    Plots a 2D grasp with arrow and jaw style using matplotlib

    Parameters
    ----------
    grasp : :obj:`Grasp2D`
        2D grasp to plot
    width : float
        width, in pixels, of the grasp (overrides Grasp2D.width_px)
    color : :obj:`str`
        color of plotted grasp
    arrow_len : float
        length of arrow body
    arrow_head_len : float
        length of arrow head
    arrow_head_width : float
        width of arrow head
    arrow_width : float
        width of arrow body
    jaw_len : float
        length of jaw line
    jaw_width : float
        line width of jaw line
    grasp_center_size : float
        marker size for the grasp center
    grasp_center_thickness : float
        thickness of grasp center
    grasp_center_style : :obj:`str`
        style of center of grasp
    grasp_axis_width : float
        line width of grasp axis
    grasp_axis_style : :obj:`str`
        style of grasp axis line
    line_width : float
        unused; retained for backward compatibility with existing callers
    alpha : float
        length of the axis line drawn for suction grasps
    show_center : bool
        whether or not to plot the grasp center
    show_axis : bool
        whether or not to plot the grasp axis
    scale : float
        global scale factor applied to the drawn geometry
    """
    # suction-style grasps (no 'width' attribute) have no opposing jaws:
    # draw a dot at the center and, when an approach orientation is
    # available, a short axis line of length alpha
    if not hasattr(grasp, 'width'):
        grasp_center_style = '.'
        grasp_center_size = 50
        plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
        if hasattr(grasp, 'orientation'):
            axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
            p = grasp.center.data + alpha * axis
            line = np.c_[grasp.center.data, p]
            plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
            plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
        return

    # plot grasp center
    if show_center:
        plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style,
                 mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)

    # compute axis and jaw locations
    axis = grasp.axis
    width_px = width
    if width_px is None and hasattr(grasp, 'width_px'):
        width_px = grasp.width_px
    # NOTE(review): if width is None and the grasp lacks width_px,
    # float(None) below raises TypeError -- unchanged from the original
    g1 = grasp.center.data - (float(width_px) / 2) * axis
    g2 = grasp.center.data + (float(width_px) / 2) * axis
    g1p = g1 - scale * arrow_len * axis  # start location of grasp jaw 1
    g2p = g2 + scale * arrow_len * axis  # start location of grasp jaw 2

    # plot grasp axis
    if show_axis:
        plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color,
                 linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)

    # direction of jaw line (perpendicular to the grasp axis)
    jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])

    # length of the arrow body, head excluded (deliberately reuses alpha)
    alpha = scale*(arrow_len - arrow_head_len)

    # plot first jaw: approach arrow plus jaw line
    plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width,
              head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
    jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
    plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)

    # plot second jaw (arrow points back toward the center)
    plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width,
              head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
    jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
    plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
BerkeleyAutomation/visualization
|
visualization/visualizer2d.py
|
Visualizer2D.grasp
|
python
|
def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
arrow_width = 1, jaw_len=3, jaw_width = 1.0,
grasp_center_size=1, grasp_center_thickness=2.5,
grasp_center_style='+', grasp_axis_width=1,
grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
# set vars for suction
skip_jaws = False
if not hasattr(grasp, 'width'):
grasp_center_style = '.'
grasp_center_size = 50
plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
if hasattr(grasp, 'orientation'):
axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
p = grasp.center.data + alpha * axis
line = np.c_[grasp.center.data, p]
plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
return
# plot grasp center
if show_center:
plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)
if skip_jaws:
return
# compute axis and jaw locations
axis = grasp.axis
width_px = width
if width_px is None and hasattr(grasp, 'width_px'):
width_px = grasp.width_px
g1 = grasp.center.data - (float(width_px) / 2) * axis
g2 = grasp.center.data + (float(width_px) / 2) * axis
g1p = g1 - scale * arrow_len * axis # start location of grasp jaw 1
g2p = g2 + scale * arrow_len * axis # start location of grasp jaw 2
# plot grasp axis
if show_axis:
plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color, linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)
# direction of jaw line
jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])
# length of arrow
alpha = scale*(arrow_len - arrow_head_len)
# plot first jaw
g1_line = np.c_[g1p, g1 - scale*arrow_head_len*axis].T
plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)
# plot second jaw
g2_line = np.c_[g2p, g2 + scale*arrow_head_len*axis].T
plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width, head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
Plots a 2D grasp with arrow and jaw style using matplotlib
Parameters
----------
grasp : :obj:`Grasp2D`
2D grasp to plot
width : float
width, in pixels, of the grasp (overrides Grasp2D.width_px)
color : :obj:`str`
color of plotted grasp
arrow_len : float
length of arrow body
arrow_head_len : float
length of arrow head
arrow_head_width : float
width of arrow head
arrow_width : float
width of arrow body
jaw_len : float
length of jaw line
jaw_width : float
line width of jaw line
grasp_center_thickness : float
thickness of grasp center
grasp_center_style : :obj:`str`
style of center of grasp
grasp_axis_width : float
line width of grasp axis
grasp_axis_style : :obj:`str`
style of grasp axis line
show_center : bool
whether or not to plot the grasp center
show_axis : bool
whether or not to plot the grasp axis
|
train
|
https://github.com/BerkeleyAutomation/visualization/blob/f8d038cc65c78f841ef27f99fb2a638f44fa72b6/visualization/visualizer2d.py#L215-L308
| null |
class Visualizer2D:
    """Static convenience wrapper around matplotlib's pyplot interface, plus
    helpers for drawing boxes, contours, and 2D grasps on the current axes."""

    @staticmethod
    def figure(size=(8,8), *args, **kwargs):
        """ Creates a figure.

        Parameters
        ----------
        size : 2-tuple
            size of the view window in inches
        args : list
            args of matplotlib figure
        kwargs : list
            keyword args of matplotlib figure

        Returns
        -------
        pyplot figure
            the current figure
        """
        return plt.figure(figsize=size, *args, **kwargs)

    @staticmethod
    def show(filename=None, *args, **kwargs):
        """ Show the current figure, or save it to file if a filename is given.

        Parameters
        ----------
        filename : :obj:`str`
            filename to save the image to, for auto-saving
        """
        if filename is None:
            plt.show(*args, **kwargs)
        else:
            plt.savefig(filename, *args, **kwargs)

    @staticmethod
    def clf(*args, **kwargs):
        """ Clear the current figure """
        plt.clf(*args, **kwargs)

    @staticmethod
    def gca(*args, **kwargs):
        """ Get the current axes """
        return plt.gca(*args, **kwargs)

    @staticmethod
    def xlim(*args, **kwargs):
        """ Set the x limits of the current figure """
        plt.xlim(*args, **kwargs)

    @staticmethod
    def ylim(*args, **kwargs):
        """ Set the y limits of the current figure """
        plt.ylim(*args, **kwargs)

    @staticmethod
    def savefig(*args, **kwargs):
        """ Save the current figure """
        plt.savefig(*args, **kwargs)

    @staticmethod
    def colorbar(*args, **kwargs):
        """ Adds a colorbar to the current figure """
        plt.colorbar(*args, **kwargs)

    @staticmethod
    def subplot(*args, **kwargs):
        """ Creates a subplot in the current figure """
        plt.subplot(*args, **kwargs)

    @staticmethod
    def title(*args, **kwargs):
        """ Creates a title in the current figure """
        plt.title(*args, **kwargs)

    @staticmethod
    def suptitle(*args, **kwargs):
        """ Creates a super-title for the current figure """
        plt.suptitle(*args, **kwargs)

    @staticmethod
    def xlabel(*args, **kwargs):
        """ Creates an x axis label in the current figure """
        plt.xlabel(*args, **kwargs)

    @staticmethod
    def ylabel(*args, **kwargs):
        """ Creates a y axis label in the current figure """
        plt.ylabel(*args, **kwargs)

    @staticmethod
    def legend(*args, **kwargs):
        """ Creates a legend for the current figure """
        plt.legend(*args, **kwargs)

    @staticmethod
    def scatter(*args, **kwargs):
        """ Scatters points """
        plt.scatter(*args, **kwargs)

    @staticmethod
    def plot(*args, **kwargs):
        """ Plots lines """
        plt.plot(*args, **kwargs)

    @staticmethod
    def imshow(image, auto_subplot=False, **kwargs):
        """ Displays an image.

        Parameters
        ----------
        image : :obj:`perception.Image`
            image to display
        auto_subplot : bool
            whether or not to automatically subplot for multi-channel images e.g. rgbd
        """
        if isinstance(image, BinaryImage) or isinstance(image, GrayscaleImage):
            plt.imshow(image.data, cmap=plt.cm.gray, **kwargs)
        elif isinstance(image, ColorImage) or isinstance(image, SegmentationImage):
            plt.imshow(image.data, **kwargs)
        elif isinstance(image, DepthImage):
            # reversed gray colormap so nearer pixels render brighter
            plt.imshow(image.data, cmap=plt.cm.gray_r, **kwargs)
        elif isinstance(image, RgbdImage):
            if auto_subplot:
                plt.subplot(1,2,1)
                plt.imshow(image.color.data, **kwargs)
                plt.axis('off')
                plt.subplot(1,2,2)
                plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
            else:
                plt.imshow(image.color.data, **kwargs)
        elif isinstance(image, GdImage):
            if auto_subplot:
                plt.subplot(1,2,1)
                plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
                plt.axis('off')
                plt.subplot(1,2,2)
                plt.imshow(image.depth.data, cmap=plt.cm.gray_r, **kwargs)
            else:
                plt.imshow(image.gray.data, cmap=plt.cm.gray, **kwargs)
        plt.axis('off')

    @staticmethod
    def box(b, line_width=2, color='g', style='-'):
        """ Draws a box on the current plot.

        Parameters
        ----------
        b : :obj:`autolab_core.Box`
            box to draw
        line_width : int
            width of lines on side of box
        color : :obj:`str`
            color of box
        style : :obj:`str`
            style of lines to draw
        """
        if not isinstance(b, Box):
            raise ValueError('Input must be of type Box')
        # corner pixel coordinates; note min_pt/max_pt are stored (j, i)
        min_i = b.min_pt[1]
        min_j = b.min_pt[0]
        max_i = b.max_pt[1]
        max_j = b.max_pt[0]
        top_left = np.array([min_i, min_j])
        top_right = np.array([max_i, min_j])
        bottom_left = np.array([min_i, max_j])
        bottom_right = np.array([max_i, max_j])
        # draw the four sides as 2-point polylines (left, right, top, bottom)
        for start, end in ((top_left, bottom_left),
                           (top_right, bottom_right),
                           (top_left, top_right),
                           (bottom_left, bottom_right)):
            segment = np.c_[start, end].T
            plt.plot(segment[:, 0], segment[:, 1], linewidth=line_width,
                     color=color, linestyle=style)

    @staticmethod
    def contour(c, subsample=1, size=10, color='g'):
        """ Draws a contour on the current plot by scattering points.

        Parameters
        ----------
        c : :obj:`autolab_core.Contour`
            contour to draw
        subsample : int
            subsample rate for boundary pixels
        size : int
            size of scattered points
        color : :obj:`str`
            color of scattered points
        """
        if not isinstance(c, Contour):
            raise ValueError('Input must be of type Contour')
        # scatter all subsampled boundary pixels in one call rather than one
        # matplotlib call (and one artist) per pixel; boundary_pixels rows
        # are (row, col) and scatter expects (x=col, y=row)
        pixels = c.boundary_pixels[0::subsample]
        plt.scatter(pixels[:, 1], pixels[:, 0], s=size, c=color)

    @staticmethod
    def grasp(grasp, width=None, color='r', arrow_len=4, arrow_head_len = 2, arrow_head_width = 3,
              arrow_width = 1, jaw_len=3, jaw_width = 1.0,
              grasp_center_size=1, grasp_center_thickness=2.5,
              grasp_center_style='+', grasp_axis_width=1,
              grasp_axis_style='--', line_width=1.0, alpha=50, show_center=True, show_axis=False, scale=1.0):
        """
        Plots a 2D grasp with arrow and jaw style using matplotlib

        Parameters
        ----------
        grasp : :obj:`Grasp2D`
            2D grasp to plot
        width : float
            width, in pixels, of the grasp (overrides Grasp2D.width_px)
        color : :obj:`str`
            color of plotted grasp
        arrow_len : float
            length of arrow body
        arrow_head_len : float
            length of arrow head
        arrow_head_width : float
            width of arrow head
        arrow_width : float
            width of arrow body
        jaw_len : float
            length of jaw line
        jaw_width : float
            line width of jaw line
        grasp_center_size : float
            marker size for the grasp center
        grasp_center_thickness : float
            thickness of grasp center
        grasp_center_style : :obj:`str`
            style of center of grasp
        grasp_axis_width : float
            line width of grasp axis
        grasp_axis_style : :obj:`str`
            style of grasp axis line
        line_width : float
            unused; retained for backward compatibility with existing callers
        alpha : float
            length of the axis line drawn for suction grasps
        show_center : bool
            whether or not to plot the grasp center
        show_axis : bool
            whether or not to plot the grasp axis
        scale : float
            global scale factor applied to the drawn geometry
        """
        # suction-style grasps (no 'width' attribute) have no opposing jaws:
        # draw a dot at the center and, when an approach orientation is
        # available, a short axis line of length alpha
        if not hasattr(grasp, 'width'):
            grasp_center_style = '.'
            grasp_center_size = 50
            plt.scatter(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style, s=scale*grasp_center_size)
            if hasattr(grasp, 'orientation'):
                axis = np.array([np.cos(grasp.angle), np.sin(grasp.angle)])
                p = grasp.center.data + alpha * axis
                line = np.c_[grasp.center.data, p]
                plt.plot(line[0,:], line[1,:], color=color, linewidth=scale*grasp_axis_width)
                plt.scatter(p[0], p[1], c=color, marker=grasp_center_style, s=scale*grasp_center_size)
            return

        # plot grasp center
        if show_center:
            plt.plot(grasp.center.x, grasp.center.y, c=color, marker=grasp_center_style,
                     mew=scale*grasp_center_thickness, ms=scale*grasp_center_size)

        # compute axis and jaw locations
        axis = grasp.axis
        width_px = width
        if width_px is None and hasattr(grasp, 'width_px'):
            width_px = grasp.width_px
        # NOTE(review): if width is None and the grasp lacks width_px,
        # float(None) below raises TypeError -- unchanged from the original
        g1 = grasp.center.data - (float(width_px) / 2) * axis
        g2 = grasp.center.data + (float(width_px) / 2) * axis
        g1p = g1 - scale * arrow_len * axis  # start location of grasp jaw 1
        g2p = g2 + scale * arrow_len * axis  # start location of grasp jaw 2

        # plot grasp axis
        if show_axis:
            plt.plot([g1[0], g2[0]], [g1[1], g2[1]], color=color,
                     linewidth=scale*grasp_axis_width, linestyle=grasp_axis_style)

        # direction of jaw line (perpendicular to the grasp axis)
        jaw_dir = scale * jaw_len * np.array([axis[1], -axis[0]])

        # length of the arrow body, head excluded (deliberately reuses alpha)
        alpha = scale*(arrow_len - arrow_head_len)

        # plot first jaw: approach arrow plus jaw line
        plt.arrow(g1p[0], g1p[1], alpha*axis[0], alpha*axis[1], width=scale*arrow_width,
                  head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
        jaw_line1 = np.c_[g1 + jaw_dir, g1 - jaw_dir].T
        plt.plot(jaw_line1[:,0], jaw_line1[:,1], linewidth=scale*jaw_width, c=color)

        # plot second jaw (arrow points back toward the center)
        plt.arrow(g2p[0], g2p[1], -alpha*axis[0], -alpha*axis[1], width=scale*arrow_width,
                  head_width=scale*arrow_head_width, head_length=scale*arrow_head_len, fc=color, ec=color)
        jaw_line2 = np.c_[g2 + jaw_dir, g2 - jaw_dir].T
        plt.plot(jaw_line2[:,0], jaw_line2[:,1], linewidth=scale*jaw_width, c=color)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.