id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
6,400
denisenkom/pytds
src/pytds/tds_types.py
TdsTypeInferrer.from_value
def from_value(self, value):
    """
    Infer a TDS type from a Python value.

    :param value: value from which to infer TDS type
    :return: An instance of a subclass of :class:`BaseType`
    """
    # None carries no type information of its own; represent it as a
    # one-character NVARCHAR so it can hold any NULL.
    if value is None:
        return NVarCharType(size=1)
    return self._from_class_value(value, type(value))
python
def from_value(self, value): if value is None: sql_type = NVarCharType(size=1) else: sql_type = self._from_class_value(value, type(value)) return sql_type
[ "def", "from_value", "(", "self", ",", "value", ")", ":", "if", "value", "is", "None", ":", "sql_type", "=", "NVarCharType", "(", "size", "=", "1", ")", "else", ":", "sql_type", "=", "self", ".", "_from_class_value", "(", "value", ",", "type", "(", "...
Function infers TDS type from Python value. :param value: value from which to infer TDS type :return: An instance of subclass of :class:`BaseType`
[ "Function", "infers", "TDS", "type", "from", "Python", "value", "." ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/tds_types.py#L2609-L2619
6,401
denisenkom/pytds
src/pytds/__init__.py
dict_row_strategy
def dict_row_strategy(column_names):
    """
    Build a row factory that returns result rows as dictionaries.

    Empty column names are replaced by their positional index so every
    value in a row remains addressable.
    """
    keys = [name if name else idx for idx, name in enumerate(column_names)]

    def row_factory(row):
        return dict(zip(keys, row))

    return row_factory
python
def dict_row_strategy(column_names): # replace empty column names with indices column_names = [(name or idx) for idx, name in enumerate(column_names)] def row_factory(row): return dict(zip(column_names, row)) return row_factory
[ "def", "dict_row_strategy", "(", "column_names", ")", ":", "# replace empty column names with indices", "column_names", "=", "[", "(", "name", "or", "idx", ")", "for", "idx", ",", "name", "in", "enumerate", "(", "column_names", ")", "]", "def", "row_factory", "(...
Dict row strategy, rows returned as dictionaries
[ "Dict", "row", "strategy", "rows", "returned", "as", "dictionaries" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L88-L97
6,402
denisenkom/pytds
src/pytds/__init__.py
namedtuple_row_strategy
def namedtuple_row_strategy(column_names):
    """
    Build a row factory that returns result rows as named tuples.

    Column names that are not valid Python identifiers are replaced
    with ``col<number>_`` placeholders.
    """
    import collections

    safe_names = []
    for idx, name in enumerate(column_names):
        safe_names.append(name if is_valid_identifier(name) else 'col%s_' % idx)
    row_class = collections.namedtuple('Row', safe_names)

    def row_factory(row):
        return row_class(*row)

    return row_factory
python
def namedtuple_row_strategy(column_names): import collections # replace empty column names with placeholders column_names = [name if is_valid_identifier(name) else 'col%s_' % idx for idx, name in enumerate(column_names)] row_class = collections.namedtuple('Row', column_names) def row_factory(row): return row_class(*row) return row_factory
[ "def", "namedtuple_row_strategy", "(", "column_names", ")", ":", "import", "collections", "# replace empty column names with placeholders", "column_names", "=", "[", "name", "if", "is_valid_identifier", "(", "name", ")", "else", "'col%s_'", "%", "idx", "for", "idx", "...
Namedtuple row strategy, rows returned as named tuples Column names that are not valid Python identifiers will be replaced with col<number>_
[ "Namedtuple", "row", "strategy", "rows", "returned", "as", "named", "tuples" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L104-L118
6,403
denisenkom/pytds
src/pytds/__init__.py
recordtype_row_strategy
def recordtype_row_strategy(column_names):
    """
    Build a row factory that returns result rows as mutable record types.

    Column names that are not valid Python identifiers are replaced
    with ``col<number>_`` placeholders.
    """
    # Both providers are optional dependencies; prefer namedlist.
    try:
        from namedlist import namedlist as recordtype
    except ImportError:
        from recordtype import recordtype

    safe_names = [name if is_valid_identifier(name) else 'col%s_' % idx
                  for idx, name in enumerate(column_names)]
    base_row_class = recordtype('Row', safe_names)

    class Row(base_row_class):
        # Custom extension adding integer and slice indexing on top of
        # the attribute access the record type already provides.
        def __getitem__(self, index):
            if isinstance(index, slice):
                return tuple(getattr(self, x) for x in self.__slots__[index])
            return getattr(self, self.__slots__[index])

        def __setitem__(self, index, value):
            setattr(self, self.__slots__[index], value)

    def row_factory(row):
        return Row(*row)

    return row_factory
python
def recordtype_row_strategy(column_names): try: from namedlist import namedlist as recordtype # optional dependency except ImportError: from recordtype import recordtype # optional dependency # replace empty column names with placeholders column_names = [name if is_valid_identifier(name) else 'col%s_' % idx for idx, name in enumerate(column_names)] recordtype_row_class = recordtype('Row', column_names) # custom extension class that supports indexing class Row(recordtype_row_class): def __getitem__(self, index): if isinstance(index, slice): return tuple(getattr(self, x) for x in self.__slots__[index]) return getattr(self, self.__slots__[index]) def __setitem__(self, index, value): setattr(self, self.__slots__[index], value) def row_factory(row): return Row(*row) return row_factory
[ "def", "recordtype_row_strategy", "(", "column_names", ")", ":", "try", ":", "from", "namedlist", "import", "namedlist", "as", "recordtype", "# optional dependency", "except", "ImportError", ":", "from", "recordtype", "import", "recordtype", "# optional dependency", "# ...
Recordtype row strategy, rows returned as recordtypes Column names that are not valid Python identifiers will be replaced with col<number>_
[ "Recordtype", "row", "strategy", "rows", "returned", "as", "recordtypes" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L121-L148
6,404
denisenkom/pytds
src/pytds/__init__.py
_get_servers_deque
def _get_servers_deque(servers, database):
    """
    Return the deque of servers for the given tuple of servers and
    database name.

    The deque keeps the active server at the beginning; if the first
    server is not accessible at the moment the deque is rotated: the
    second server moves to the first position, the third to the second,
    and so on, with the previously first server moving to the end.
    This lets the last successful server be remembered between calls to
    the connect function.
    """
    # setdefault creates and caches the deque on first use and returns
    # the cached one afterwards, replacing the membership-test-then-store
    # dance with a single lookup.
    return _servers_deques.setdefault((servers, database), deque(servers))
python
def _get_servers_deque(servers, database): key = (servers, database) if key not in _servers_deques: _servers_deques[key] = deque(servers) return _servers_deques[key]
[ "def", "_get_servers_deque", "(", "servers", ",", "database", ")", ":", "key", "=", "(", "servers", ",", "database", ")", "if", "key", "not", "in", "_servers_deques", ":", "_servers_deques", "[", "key", "]", "=", "deque", "(", "servers", ")", "return", "...
Returns deque of servers for given tuple of servers and database name. This deque has the active server at the beginning; if the first server is not accessible at the moment, the deque will be rotated: the second server will be moved to the first position, the third to the second position, etc., and the previously first server will be moved to the last position. This allows the last successful server to be remembered between calls to the connect function.
[ "Returns", "deque", "of", "servers", "for", "given", "tuple", "of", "servers", "and", "database", "name", ".", "This", "deque", "have", "active", "server", "at", "the", "begining", "if", "first", "server", "is", "not", "accessible", "at", "the", "moment", ...
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L1107-L1121
6,405
denisenkom/pytds
src/pytds/__init__.py
_parse_connection_string
def _parse_connection_string(connstr): """ MSSQL style connection string parser Returns normalized dictionary of connection string parameters """ res = {} for item in connstr.split(';'): item = item.strip() if not item: continue key, value = item.split('=', 1) key = key.strip().lower().replace(' ', '_') value = value.strip() res[key] = value return res
python
def _parse_connection_string(connstr): res = {} for item in connstr.split(';'): item = item.strip() if not item: continue key, value = item.split('=', 1) key = key.strip().lower().replace(' ', '_') value = value.strip() res[key] = value return res
[ "def", "_parse_connection_string", "(", "connstr", ")", ":", "res", "=", "{", "}", "for", "item", "in", "connstr", ".", "split", "(", "';'", ")", ":", "item", "=", "item", ".", "strip", "(", ")", "if", "not", "item", ":", "continue", "key", ",", "v...
MSSQL style connection string parser Returns normalized dictionary of connection string parameters
[ "MSSQL", "style", "connection", "string", "parser" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L1124-L1139
6,406
denisenkom/pytds
src/pytds/__init__.py
Connection.commit
def commit(self):
    """
    Commit the transaction which is currently in progress.

    No-op when autocommit is enabled or when no transaction is active.
    """
    self._assert_open()
    # Short-circuit keeps the original check order: autocommit first,
    # then the absence of an active TDS 7.2 transaction.
    if self._autocommit or not self._conn.tds72_transaction:
        return
    self._main_cursor._commit(cont=True, isolation_level=self._isolation_level)
python
def commit(self): self._assert_open() if self._autocommit: return if not self._conn.tds72_transaction: return self._main_cursor._commit(cont=True, isolation_level=self._isolation_level)
[ "def", "commit", "(", "self", ")", ":", "self", ".", "_assert_open", "(", ")", "if", "self", ".", "_autocommit", ":", "return", "if", "not", "self", ".", "_conn", ".", "tds72_transaction", ":", "return", "self", ".", "_main_cursor", ".", "_commit", "(", ...
Commit transaction which is currently in progress.
[ "Commit", "transaction", "which", "is", "currently", "in", "progress", "." ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L410-L419
6,407
denisenkom/pytds
src/pytds/__init__.py
Connection.cursor
def cursor(self):
    """
    Return cursor object that can be used to make queries and fetch
    results from the database.

    With MARS enabled each cursor gets its own session on the shared
    connection; without MARS the single main session is reused.
    """
    self._assert_open()
    if self.mars_enabled:
        in_tran = self._conn.tds72_transaction
        if in_tran and self._dirty:
            # Inside a dirty transaction a dropped connection loses
            # transaction state, so close and propagate the error.
            try:
                return _MarsCursor(self,
                                   self._conn.create_session(self._tzinfo_factory),
                                   self._tzinfo_factory)
            except (socket.error, OSError) as e:
                self._conn.close()
                raise
        else:
            try:
                return _MarsCursor(self,
                                   self._conn.create_session(self._tzinfo_factory),
                                   self._tzinfo_factory)
            except (socket.error, OSError) as e:
                # Broken pipe / connection reset mean the connection died
                # silently; anything else is a real error to surface.
                if e.errno not in (errno.EPIPE, errno.ECONNRESET):
                    raise
                self._conn.close()
            except ClosedConnectionError:
                pass
            # The connection died outside a transaction: reconnect
            # (via _assert_open) and retry creating the session once.
            self._assert_open()
            return _MarsCursor(self,
                               self._conn.create_session(self._tzinfo_factory),
                               self._tzinfo_factory)
    else:
        return Cursor(self, self._conn.main_session, self._tzinfo_factory)
python
def cursor(self): self._assert_open() if self.mars_enabled: in_tran = self._conn.tds72_transaction if in_tran and self._dirty: try: return _MarsCursor(self, self._conn.create_session(self._tzinfo_factory), self._tzinfo_factory) except (socket.error, OSError) as e: self._conn.close() raise else: try: return _MarsCursor(self, self._conn.create_session(self._tzinfo_factory), self._tzinfo_factory) except (socket.error, OSError) as e: if e.errno not in (errno.EPIPE, errno.ECONNRESET): raise self._conn.close() except ClosedConnectionError: pass self._assert_open() return _MarsCursor(self, self._conn.create_session(self._tzinfo_factory), self._tzinfo_factory) else: return Cursor(self, self._conn.main_session, self._tzinfo_factory)
[ "def", "cursor", "(", "self", ")", ":", "self", ".", "_assert_open", "(", ")", "if", "self", ".", "mars_enabled", ":", "in_tran", "=", "self", ".", "_conn", ".", "tds72_transaction", "if", "in_tran", "and", "self", ".", "_dirty", ":", "try", ":", "retu...
Return cursor object that can be used to make queries and fetch results from the database.
[ "Return", "cursor", "object", "that", "can", "be", "used", "to", "make", "queries", "and", "fetch", "results", "from", "the", "database", "." ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L421-L455
6,408
denisenkom/pytds
src/pytds/__init__.py
Connection.rollback
def rollback(self):
    """
    Roll back transaction which is currently in progress.

    Best-effort: if the connection is already gone the server will have
    rolled the transaction back itself, so disconnect errors are
    swallowed rather than raised.
    """
    try:
        if self._autocommit:
            return
        if not self._conn or not self._conn.is_connected():
            return
        if not self._conn.tds72_transaction:
            return
        self._main_cursor._rollback(cont=True,
                                    isolation_level=self._isolation_level)
    except socket.error as e:
        # A reset/broken connection during rollback is acceptable — the
        # transaction dies with the connection; other socket errors are
        # surfaced after closing the connection.
        if e.errno in (errno.ENETRESET, errno.ECONNRESET, errno.EPIPE):
            return
        self._conn.close()
        raise
    except ClosedConnectionError:
        pass
python
def rollback(self): try: if self._autocommit: return if not self._conn or not self._conn.is_connected(): return if not self._conn.tds72_transaction: return self._main_cursor._rollback(cont=True, isolation_level=self._isolation_level) except socket.error as e: if e.errno in (errno.ENETRESET, errno.ECONNRESET, errno.EPIPE): return self._conn.close() raise except ClosedConnectionError: pass
[ "def", "rollback", "(", "self", ")", ":", "try", ":", "if", "self", ".", "_autocommit", ":", "return", "if", "not", "self", ".", "_conn", "or", "not", "self", ".", "_conn", ".", "is_connected", "(", ")", ":", "return", "if", "not", "self", ".", "_c...
Roll back transaction which is currently in progress.
[ "Roll", "back", "transaction", "which", "is", "currently", "in", "progress", "." ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L457-L479
6,409
denisenkom/pytds
src/pytds/__init__.py
Connection.close
def close(self):
    """
    Close connection to an MS SQL Server.

    Frees the underlying connection, or returns it to the pool when
    pooling is enabled.  Can be called more than once in a row; no
    exception is raised in that case.
    """
    conn = self._conn
    if conn:
        if self._pooling:
            # Keep the live connection around for reuse rather than
            # tearing it down.
            _connection_pool.add(self._key, (conn, self._main_cursor._session))
        else:
            conn.close()
    self._active_cursor = None
    self._main_cursor = None
    self._conn = None
    self._closed = True
python
def close(self): if self._conn: if self._pooling: _connection_pool.add(self._key, (self._conn, self._main_cursor._session)) else: self._conn.close() self._active_cursor = None self._main_cursor = None self._conn = None self._closed = True
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_conn", ":", "if", "self", ".", "_pooling", ":", "_connection_pool", ".", "add", "(", "self", ".", "_key", ",", "(", "self", ".", "_conn", ",", "self", ".", "_main_cursor", ".", "_session", ...
Close connection to an MS SQL Server. This function tries to close the connection and free all memory used. It can be called more than once in a row. No exception is raised in this case.
[ "Close", "connection", "to", "an", "MS", "SQL", "Server", "." ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L481-L496
6,410
denisenkom/pytds
src/pytds/__init__.py
Cursor.get_proc_return_status
def get_proc_return_status(self):
    """
    Return the status of the last stored procedure, or ``None`` when no
    status is available.
    """
    session = self._session
    if session is None:
        return None
    if not session.has_status:
        # The status token may not have been read yet; scan pending
        # results for it before giving up.
        session.find_return_status()
    return session.ret_status if session.has_status else None
python
def get_proc_return_status(self): if self._session is None: return None if not self._session.has_status: self._session.find_return_status() return self._session.ret_status if self._session.has_status else None
[ "def", "get_proc_return_status", "(", "self", ")", ":", "if", "self", ".", "_session", "is", "None", ":", "return", "None", "if", "not", "self", ".", "_session", ".", "has_status", ":", "self", ".", "_session", ".", "find_return_status", "(", ")", "return"...
Last stored proc result
[ "Last", "stored", "proc", "result" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L617-L624
6,411
denisenkom/pytds
src/pytds/__init__.py
Cursor.cancel
def cancel(self):
    """
    Cancel the currently executing statement, if any.
    """
    connection = self._assert_open()
    # The cursor must own the connection before a cancel request is sent.
    connection._try_activate_cursor(self)
    self._session.cancel_if_pending()
python
def cancel(self): conn = self._assert_open() conn._try_activate_cursor(self) self._session.cancel_if_pending()
[ "def", "cancel", "(", "self", ")", ":", "conn", "=", "self", ".", "_assert_open", "(", ")", "conn", ".", "_try_activate_cursor", "(", "self", ")", "self", ".", "_session", ".", "cancel_if_pending", "(", ")" ]
Cancel current statement
[ "Cancel", "current", "statement" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L626-L631
6,412
denisenkom/pytds
src/pytds/__init__.py
Cursor.execute
def execute(self, operation, params=()):
    """
    Execute the query.

    :param operation: SQL statement
    :type operation: str
    :returns: this cursor (for compatibility with pyodbc)
    """
    connection = self._assert_open()
    connection._try_activate_cursor(self)
    self._execute(operation, params)
    return self
python
def execute(self, operation, params=()): conn = self._assert_open() conn._try_activate_cursor(self) self._execute(operation, params) # for compatibility with pyodbc return self
[ "def", "execute", "(", "self", ",", "operation", ",", "params", "=", "(", ")", ")", ":", "conn", "=", "self", ".", "_assert_open", "(", ")", "conn", ".", "_try_activate_cursor", "(", "self", ")", "self", ".", "_execute", "(", "operation", ",", "params"...
Execute the query :param operation: SQL statement :type operation: str
[ "Execute", "the", "query" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L723-L733
6,413
denisenkom/pytds
src/pytds/__init__.py
Cursor.execute_scalar
def execute_scalar(self, query_string, params=None):
    """
    Execute a query and return the first column of its first row, or
    ``None`` when the query produced no rows.

    Any pending results or rows are silently discarded before the query
    is sent.  Python formatting is accepted exactly as in
    ``execute_query()``.  Useful for single-value queries such as
    ``conn.execute_scalar('SELECT COUNT(*) FROM employees')``; this
    works like ``iter(conn).next()[0]`` and remaining rows, if any, can
    still be iterated afterwards.
    """
    self.execute(query_string, params)
    first_row = self.fetchone()
    return first_row[0] if first_row else None
python
def execute_scalar(self, query_string, params=None): self.execute(query_string, params) row = self.fetchone() if not row: return None return row[0]
[ "def", "execute_scalar", "(", "self", ",", "query_string", ",", "params", "=", "None", ")", ":", "self", ".", "execute", "(", "query_string", ",", "params", ")", "row", "=", "self", ".", "fetchone", "(", ")", "if", "not", "row", ":", "return", "None", ...
This method sends a query to the MS SQL Server to which this object instance is connected, then returns first column of first row from result. An exception is raised on failure. If there are pending results or rows prior to executing this command, they are silently discarded. This method accepts Python formatting. Please see execute_query() for details. This method is useful if you want just a single value, as in: ``conn.execute_scalar('SELECT COUNT(*) FROM employees')`` This method works in the same way as ``iter(conn).next()[0]``. Remaining rows, if any, can still be iterated after calling this method.
[ "This", "method", "sends", "a", "query", "to", "the", "MS", "SQL", "Server", "to", "which", "this", "object", "instance", "is", "connected", "then", "returns", "first", "column", "of", "first", "row", "from", "result", ".", "An", "exception", "is", "raised...
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L761-L785
6,414
denisenkom/pytds
src/pytds/__init__.py
Cursor.fetchone
def fetchone(self):
    """
    Fetch the next row, or return ``None`` if there are no more rows.
    """
    raw = self._session.fetchone()
    # Raw rows are wrapped by the configured row factory (tuple, dict,
    # namedtuple, ...) before being handed to the caller.
    return self._row_factory(raw) if raw else None
python
def fetchone(self): row = self._session.fetchone() if row: return self._row_factory(row)
[ "def", "fetchone", "(", "self", ")", ":", "row", "=", "self", ".", "_session", ".", "fetchone", "(", ")", "if", "row", ":", "return", "self", ".", "_row_factory", "(", "row", ")" ]
Fetches next row, or ``None`` if there are no more rows
[ "Fetches", "next", "row", "or", "None", "if", "there", "are", "no", "more", "rows" ]
7d875cab29134afdef719406831c1c6a0d7af48a
https://github.com/denisenkom/pytds/blob/7d875cab29134afdef719406831c1c6a0d7af48a/src/pytds/__init__.py#L849-L854
6,415
galaxyproject/pulsar
pulsar/managers/base/__init__.py
JobDirectory.calculate_path
def calculate_path(self, remote_path, input_type):
    """
    Verify ``remote_path`` is in the directory for ``input_type`` inputs
    and create the directory if needed.
    """
    directory, allow_nested_files = self._directory_for_file_type(input_type)
    return get_mapped_file(directory, remote_path,
                           allow_nested_files=allow_nested_files)
python
def calculate_path(self, remote_path, input_type): directory, allow_nested_files = self._directory_for_file_type(input_type) path = get_mapped_file(directory, remote_path, allow_nested_files=allow_nested_files) return path
[ "def", "calculate_path", "(", "self", ",", "remote_path", ",", "input_type", ")", ":", "directory", ",", "allow_nested_files", "=", "self", ".", "_directory_for_file_type", "(", "input_type", ")", "path", "=", "get_mapped_file", "(", "directory", ",", "remote_path...
Verify remote_path is in directory for input_type inputs and create directory if needed.
[ "Verify", "remote_path", "is", "in", "directory", "for", "input_type", "inputs", "and", "create", "directory", "if", "needed", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/base/__init__.py#L231-L237
6,416
galaxyproject/pulsar
pulsar/client/client.py
BaseJobClient.setup
def setup(self, tool_id=None, tool_version=None, preserve_galaxy_python_environment=None): """ Setup remote Pulsar server to run this job. """ setup_args = {"job_id": self.job_id} if tool_id: setup_args["tool_id"] = tool_id if tool_version: setup_args["tool_version"] = tool_version if preserve_galaxy_python_environment: setup_args["preserve_galaxy_python_environment"] = preserve_galaxy_python_environment return self.setup_handler.setup(**setup_args)
python
def setup(self, tool_id=None, tool_version=None, preserve_galaxy_python_environment=None): setup_args = {"job_id": self.job_id} if tool_id: setup_args["tool_id"] = tool_id if tool_version: setup_args["tool_version"] = tool_version if preserve_galaxy_python_environment: setup_args["preserve_galaxy_python_environment"] = preserve_galaxy_python_environment return self.setup_handler.setup(**setup_args)
[ "def", "setup", "(", "self", ",", "tool_id", "=", "None", ",", "tool_version", "=", "None", ",", "preserve_galaxy_python_environment", "=", "None", ")", ":", "setup_args", "=", "{", "\"job_id\"", ":", "self", ".", "job_id", "}", "if", "tool_id", ":", "setu...
Setup remote Pulsar server to run this job.
[ "Setup", "remote", "Pulsar", "server", "to", "run", "this", "job", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/client.py#L66-L77
6,417
galaxyproject/pulsar
pulsar/client/client.py
JobClient.launch
def launch(self, command_line, dependencies_description=None, env=None, remote_staging=None, job_config=None):
    """
    Queue up the execution of the supplied `command_line` on the remote
    server.  Called launch for historical reasons, should be renamed to
    enqueue or something like that.

    **Parameters**

    command_line : str
        Command to execute.
    """
    # NOTE: env/remote_staging previously defaulted to mutable lists
    # ([]); None defaults avoid the shared-mutable-default pitfall and
    # behave identically under every truthiness test below.
    launch_params = dict(command_line=command_line, job_id=self.job_id)
    submit_params_dict = submit_params(self.destination_params)
    if submit_params_dict:
        launch_params['params'] = json_dumps(submit_params_dict)
    if dependencies_description:
        launch_params['dependencies_description'] = json_dumps(dependencies_description.to_dict())
    if env:
        launch_params['env'] = json_dumps(env)
    if remote_staging:
        launch_params['remote_staging'] = json_dumps(remote_staging)
    if job_config and 'touch_outputs' in job_config:
        # Message clients pass the entire job config; only the
        # touch_outputs portion travels with the submit request.
        launch_params['submit_extras'] = json_dumps({'touch_outputs': job_config['touch_outputs']})
    if job_config and self.setup_handler.local:
        # Setup not yet called, job properties were inferred from
        # destination arguments.  Hence, must have Pulsar setup job
        # before queueing.
        setup_params = _setup_params_from_job_config(job_config)
        launch_params['setup_params'] = json_dumps(setup_params)
    return self._raw_execute("submit", launch_params)
python
def launch(self, command_line, dependencies_description=None, env=[], remote_staging=[], job_config=None): launch_params = dict(command_line=command_line, job_id=self.job_id) submit_params_dict = submit_params(self.destination_params) if submit_params_dict: launch_params['params'] = json_dumps(submit_params_dict) if dependencies_description: launch_params['dependencies_description'] = json_dumps(dependencies_description.to_dict()) if env: launch_params['env'] = json_dumps(env) if remote_staging: launch_params['remote_staging'] = json_dumps(remote_staging) if job_config and 'touch_outputs' in job_config: # message clients pass the entire job config launch_params['submit_extras'] = json_dumps({'touch_outputs': job_config['touch_outputs']}) if job_config and self.setup_handler.local: # Setup not yet called, job properties were inferred from # destination arguments. Hence, must have Pulsar setup job # before queueing. setup_params = _setup_params_from_job_config(job_config) launch_params['setup_params'] = json_dumps(setup_params) return self._raw_execute("submit", launch_params)
[ "def", "launch", "(", "self", ",", "command_line", ",", "dependencies_description", "=", "None", ",", "env", "=", "[", "]", ",", "remote_staging", "=", "[", "]", ",", "job_config", "=", "None", ")", ":", "launch_params", "=", "dict", "(", "command_line", ...
Queue up the execution of the supplied `command_line` on the remote server. Called launch for historical reasons, should be renamed to enqueue or something like that. **Parameters** command_line : str Command to execute.
[ "Queue", "up", "the", "execution", "of", "the", "supplied", "command_line", "on", "the", "remote", "server", ".", "Called", "launch", "for", "historical", "reasons", "should", "be", "renamed", "to", "enqueue", "or", "something", "like", "that", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/client.py#L102-L133
6,418
galaxyproject/pulsar
pulsar/client/action_mapper.py
FileActionMapper.__process_action
def __process_action(self, action, file_type):
    """
    Extension point to populate extra action information after an action
    has been created.
    """
    # Actions opt in to each kind of post-processing via marker
    # attributes; absent attributes default to no-op.
    if getattr(action, "inject_url", False):
        self.__inject_url(action, file_type)
    if getattr(action, "inject_ssh_properties", False):
        self.__inject_ssh_properties(action)
python
def __process_action(self, action, file_type): if getattr(action, "inject_url", False): self.__inject_url(action, file_type) if getattr(action, "inject_ssh_properties", False): self.__inject_ssh_properties(action)
[ "def", "__process_action", "(", "self", ",", "action", ",", "file_type", ")", ":", "if", "getattr", "(", "action", ",", "\"inject_url\"", ",", "False", ")", ":", "self", ".", "__inject_url", "(", "action", ",", "file_type", ")", "if", "getattr", "(", "ac...
Extension point to populate extra action information after an action has been created.
[ "Extension", "point", "to", "populate", "extra", "action", "information", "after", "an", "action", "has", "been", "created", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/action_mapper.py#L235-L242
6,419
galaxyproject/pulsar
pulsar/managers/queued_external_drmaa.py
_handle_default
def _handle_default(value, script_name): """ There are two potential variants of these scripts, the Bash scripts that are meant to be run within PULSAR_ROOT for older-style installs and the binaries created by setup.py as part of a proper pulsar installation. This method first looks for the newer style variant of these scripts and returns the full path to them if needed and falls back to the bash scripts if these cannot be found. """ if value: return value installed_script = which("pulsar-%s" % script_name.replace("_", "-")) if installed_script: return installed_script else: return "scripts/%s.bash" % script_name
python
def _handle_default(value, script_name): if value: return value installed_script = which("pulsar-%s" % script_name.replace("_", "-")) if installed_script: return installed_script else: return "scripts/%s.bash" % script_name
[ "def", "_handle_default", "(", "value", ",", "script_name", ")", ":", "if", "value", ":", "return", "value", "installed_script", "=", "which", "(", "\"pulsar-%s\"", "%", "script_name", ".", "replace", "(", "\"_\"", ",", "\"-\"", ")", ")", "if", "installed_sc...
There are two potential variants of these scripts, the Bash scripts that are meant to be run within PULSAR_ROOT for older-style installs and the binaries created by setup.py as part of a proper pulsar installation. This method first looks for the newer style variant of these scripts and returns the full path to them if needed and falls back to the bash scripts if these cannot be found.
[ "There", "are", "two", "potential", "variants", "of", "these", "scripts", "the", "Bash", "scripts", "that", "are", "meant", "to", "be", "run", "within", "PULSAR_ROOT", "for", "older", "-", "style", "installs", "and", "the", "binaries", "created", "by", "setu...
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/queued_external_drmaa.py#L91-L108
6,420
galaxyproject/pulsar
pulsar/client/staging/up.py
JobInputs.rewrite_paths
def rewrite_paths(self, local_path, remote_path):
    """
    Rewrite references to `local_path` with `remote_path` in job inputs
    (both the command line and the config files).
    """
    self.__rewrite_command_line(local_path, remote_path)
    self.__rewrite_config_files(local_path, remote_path)
python
def rewrite_paths(self, local_path, remote_path): self.__rewrite_command_line(local_path, remote_path) self.__rewrite_config_files(local_path, remote_path)
[ "def", "rewrite_paths", "(", "self", ",", "local_path", ",", "remote_path", ")", ":", "self", ".", "__rewrite_command_line", "(", "local_path", ",", "remote_path", ")", "self", ".", "__rewrite_config_files", "(", "local_path", ",", "remote_path", ")" ]
Rewrite references to `local_path` with `remote_path` in job inputs.
[ "Rewrite", "references", "to", "local_path", "with", "remote_path", "in", "job", "inputs", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/staging/up.py#L387-L392
6,421
galaxyproject/pulsar
pulsar/client/staging/up.py
TransferTracker.rewrite_input_paths
def rewrite_input_paths(self): """ For each file that has been transferred and renamed, updated command_line and configfiles to reflect that rewrite. """ for local_path, remote_path in self.file_renames.items(): self.job_inputs.rewrite_paths(local_path, remote_path)
python
def rewrite_input_paths(self): for local_path, remote_path in self.file_renames.items(): self.job_inputs.rewrite_paths(local_path, remote_path)
[ "def", "rewrite_input_paths", "(", "self", ")", ":", "for", "local_path", ",", "remote_path", "in", "self", ".", "file_renames", ".", "items", "(", ")", ":", "self", ".", "job_inputs", ".", "rewrite_paths", "(", "local_path", ",", "remote_path", ")" ]
For each file that has been transferred and renamed, updated command_line and configfiles to reflect that rewrite.
[ "For", "each", "file", "that", "has", "been", "transferred", "and", "renamed", "updated", "command_line", "and", "configfiles", "to", "reflect", "that", "rewrite", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/staging/up.py#L475-L481
6,422
galaxyproject/pulsar
pulsar/client/amqp_exchange.py
PulsarExchange.__get_payload
def __get_payload(self, uuid, failed): """Retry reading a message from the publish_uuid_store once, delete on the second failure.""" # Caller should have the publish_uuid_store lock try: return self.publish_uuid_store[uuid] except Exception as exc: msg = "Failed to load payload from publish store for UUID %s, %s: %s" if uuid in failed: log.error(msg, uuid, "discarding", str(exc)) self.__discard_publish_uuid(uuid, failed) else: log.error(msg, uuid, "will try agan", str(exc)) failed.add(uuid) return None
python
def __get_payload(self, uuid, failed): # Caller should have the publish_uuid_store lock try: return self.publish_uuid_store[uuid] except Exception as exc: msg = "Failed to load payload from publish store for UUID %s, %s: %s" if uuid in failed: log.error(msg, uuid, "discarding", str(exc)) self.__discard_publish_uuid(uuid, failed) else: log.error(msg, uuid, "will try agan", str(exc)) failed.add(uuid) return None
[ "def", "__get_payload", "(", "self", ",", "uuid", ",", "failed", ")", ":", "# Caller should have the publish_uuid_store lock", "try", ":", "return", "self", ".", "publish_uuid_store", "[", "uuid", "]", "except", "Exception", "as", "exc", ":", "msg", "=", "\"Fail...
Retry reading a message from the publish_uuid_store once, delete on the second failure.
[ "Retry", "reading", "a", "message", "from", "the", "publish_uuid_store", "once", "delete", "on", "the", "second", "failure", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/amqp_exchange.py#L239-L252
6,423
galaxyproject/pulsar
pulsar/manager_endpoint_util.py
__job_complete_dict
def __job_complete_dict(complete_status, manager, job_id): """ Build final dictionary describing completed job for consumption by Pulsar client. """ return_code = manager.return_code(job_id) if return_code == PULSAR_UNKNOWN_RETURN_CODE: return_code = None stdout_contents = manager.stdout_contents(job_id).decode("utf-8") stderr_contents = manager.stderr_contents(job_id).decode("utf-8") job_directory = manager.job_directory(job_id) as_dict = dict( job_id=job_id, complete="true", # Is this still used or is it legacy. status=complete_status, returncode=return_code, stdout=stdout_contents, stderr=stderr_contents, working_directory=job_directory.working_directory(), metadata_directory=job_directory.metadata_directory(), working_directory_contents=job_directory.working_directory_contents(), metadata_directory_contents=job_directory.metadata_directory_contents(), outputs_directory_contents=job_directory.outputs_directory_contents(), system_properties=manager.system_properties(), pulsar_version=pulsar_version, ) return as_dict
python
def __job_complete_dict(complete_status, manager, job_id): return_code = manager.return_code(job_id) if return_code == PULSAR_UNKNOWN_RETURN_CODE: return_code = None stdout_contents = manager.stdout_contents(job_id).decode("utf-8") stderr_contents = manager.stderr_contents(job_id).decode("utf-8") job_directory = manager.job_directory(job_id) as_dict = dict( job_id=job_id, complete="true", # Is this still used or is it legacy. status=complete_status, returncode=return_code, stdout=stdout_contents, stderr=stderr_contents, working_directory=job_directory.working_directory(), metadata_directory=job_directory.metadata_directory(), working_directory_contents=job_directory.working_directory_contents(), metadata_directory_contents=job_directory.metadata_directory_contents(), outputs_directory_contents=job_directory.outputs_directory_contents(), system_properties=manager.system_properties(), pulsar_version=pulsar_version, ) return as_dict
[ "def", "__job_complete_dict", "(", "complete_status", ",", "manager", ",", "job_id", ")", ":", "return_code", "=", "manager", ".", "return_code", "(", "job_id", ")", "if", "return_code", "==", "PULSAR_UNKNOWN_RETURN_CODE", ":", "return_code", "=", "None", "stdout_...
Build final dictionary describing completed job for consumption by Pulsar client.
[ "Build", "final", "dictionary", "describing", "completed", "job", "for", "consumption", "by", "Pulsar", "client", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/manager_endpoint_util.py#L29-L54
6,424
galaxyproject/pulsar
pulsar/manager_endpoint_util.py
submit_job
def submit_job(manager, job_config): """ Launch new job from specified config. May have been previously 'setup' if 'setup_params' in job_config is empty. """ # job_config is raw dictionary from JSON (from MQ or HTTP endpoint). job_id = job_config.get('job_id') try: command_line = job_config.get('command_line') setup_params = job_config.get('setup_params', {}) force_setup = job_config.get('setup') remote_staging = job_config.get('remote_staging', {}) dependencies_description = job_config.get('dependencies_description', None) env = job_config.get('env', []) submit_params = job_config.get('submit_params', {}) touch_outputs = job_config.get('touch_outputs', []) job_config = None if setup_params or force_setup: input_job_id = setup_params.get("job_id", job_id) tool_id = setup_params.get("tool_id", None) tool_version = setup_params.get("tool_version", None) use_metadata = setup_params.get("use_metadata", False) job_config = setup_job( manager, input_job_id, tool_id, tool_version, use_metadata, ) if job_config is not None: job_directory = job_config["job_directory"] jobs_directory = os.path.abspath(os.path.join(job_directory, os.pardir)) command_line = command_line.replace('__PULSAR_JOBS_DIRECTORY__', jobs_directory) # TODO: Handle __PULSAR_JOB_DIRECTORY__ config files, metadata files, etc... manager.touch_outputs(job_id, touch_outputs) launch_config = { "remote_staging": remote_staging, "command_line": command_line, "dependencies_description": dependencies_description, "submit_params": submit_params, "env": env, "setup_params": setup_params, } manager.preprocess_and_launch(job_id, launch_config) except Exception: manager.handle_failure_before_launch(job_id) raise
python
def submit_job(manager, job_config): # job_config is raw dictionary from JSON (from MQ or HTTP endpoint). job_id = job_config.get('job_id') try: command_line = job_config.get('command_line') setup_params = job_config.get('setup_params', {}) force_setup = job_config.get('setup') remote_staging = job_config.get('remote_staging', {}) dependencies_description = job_config.get('dependencies_description', None) env = job_config.get('env', []) submit_params = job_config.get('submit_params', {}) touch_outputs = job_config.get('touch_outputs', []) job_config = None if setup_params or force_setup: input_job_id = setup_params.get("job_id", job_id) tool_id = setup_params.get("tool_id", None) tool_version = setup_params.get("tool_version", None) use_metadata = setup_params.get("use_metadata", False) job_config = setup_job( manager, input_job_id, tool_id, tool_version, use_metadata, ) if job_config is not None: job_directory = job_config["job_directory"] jobs_directory = os.path.abspath(os.path.join(job_directory, os.pardir)) command_line = command_line.replace('__PULSAR_JOBS_DIRECTORY__', jobs_directory) # TODO: Handle __PULSAR_JOB_DIRECTORY__ config files, metadata files, etc... manager.touch_outputs(job_id, touch_outputs) launch_config = { "remote_staging": remote_staging, "command_line": command_line, "dependencies_description": dependencies_description, "submit_params": submit_params, "env": env, "setup_params": setup_params, } manager.preprocess_and_launch(job_id, launch_config) except Exception: manager.handle_failure_before_launch(job_id) raise
[ "def", "submit_job", "(", "manager", ",", "job_config", ")", ":", "# job_config is raw dictionary from JSON (from MQ or HTTP endpoint).", "job_id", "=", "job_config", ".", "get", "(", "'job_id'", ")", "try", ":", "command_line", "=", "job_config", ".", "get", "(", "...
Launch new job from specified config. May have been previously 'setup' if 'setup_params' in job_config is empty.
[ "Launch", "new", "job", "from", "specified", "config", ".", "May", "have", "been", "previously", "setup", "if", "setup_params", "in", "job_config", "is", "empty", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/manager_endpoint_util.py#L57-L105
6,425
galaxyproject/pulsar
pulsar/core.py
PulsarApp.__setup_tool_config
def __setup_tool_config(self, conf): """ Setups toolbox object and authorization mechanism based on supplied toolbox_path. """ tool_config_files = conf.get("tool_config_files", None) if not tool_config_files: # For compatibity with Galaxy, allow tool_config_file # option name. tool_config_files = conf.get("tool_config_file", None) toolbox = None if tool_config_files: toolbox = ToolBox(tool_config_files) else: log.info(NOT_WHITELIST_WARNING) self.toolbox = toolbox self.authorizer = get_authorizer(toolbox)
python
def __setup_tool_config(self, conf): tool_config_files = conf.get("tool_config_files", None) if not tool_config_files: # For compatibity with Galaxy, allow tool_config_file # option name. tool_config_files = conf.get("tool_config_file", None) toolbox = None if tool_config_files: toolbox = ToolBox(tool_config_files) else: log.info(NOT_WHITELIST_WARNING) self.toolbox = toolbox self.authorizer = get_authorizer(toolbox)
[ "def", "__setup_tool_config", "(", "self", ",", "conf", ")", ":", "tool_config_files", "=", "conf", ".", "get", "(", "\"tool_config_files\"", ",", "None", ")", "if", "not", "tool_config_files", ":", "# For compatibity with Galaxy, allow tool_config_file", "# option name...
Setups toolbox object and authorization mechanism based on supplied toolbox_path.
[ "Setups", "toolbox", "object", "and", "authorization", "mechanism", "based", "on", "supplied", "toolbox_path", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/core.py#L67-L83
6,426
galaxyproject/pulsar
pulsar/core.py
PulsarApp.only_manager
def only_manager(self): """Convience accessor for tests and contexts with sole manager.""" assert len(self.managers) == 1, MULTIPLE_MANAGERS_MESSAGE return list(self.managers.values())[0]
python
def only_manager(self): assert len(self.managers) == 1, MULTIPLE_MANAGERS_MESSAGE return list(self.managers.values())[0]
[ "def", "only_manager", "(", "self", ")", ":", "assert", "len", "(", "self", ".", "managers", ")", "==", "1", ",", "MULTIPLE_MANAGERS_MESSAGE", "return", "list", "(", "self", ".", "managers", ".", "values", "(", ")", ")", "[", "0", "]" ]
Convience accessor for tests and contexts with sole manager.
[ "Convience", "accessor", "for", "tests", "and", "contexts", "with", "sole", "manager", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/core.py#L138-L141
6,427
galaxyproject/pulsar
pulsar/web/wsgi.py
app_factory
def app_factory(global_conf, **local_conf): """ Returns the Pulsar WSGI application. """ configuration_file = global_conf.get("__file__", None) webapp = init_webapp(ini_path=configuration_file, local_conf=local_conf) return webapp
python
def app_factory(global_conf, **local_conf): configuration_file = global_conf.get("__file__", None) webapp = init_webapp(ini_path=configuration_file, local_conf=local_conf) return webapp
[ "def", "app_factory", "(", "global_conf", ",", "*", "*", "local_conf", ")", ":", "configuration_file", "=", "global_conf", ".", "get", "(", "\"__file__\"", ",", "None", ")", "webapp", "=", "init_webapp", "(", "ini_path", "=", "configuration_file", ",", "local_...
Returns the Pulsar WSGI application.
[ "Returns", "the", "Pulsar", "WSGI", "application", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/web/wsgi.py#L14-L20
6,428
galaxyproject/pulsar
tools/install_venv.py
check_dependencies
def check_dependencies(): """Make sure virtualenv is in the path.""" print 'Checking dependencies...' if not HAS_VIRTUALENV: print 'Virtual environment not found.' # Try installing it via easy_install... if HAS_EASY_INSTALL: print 'Installing virtualenv via easy_install...', run_command(['easy_install', 'virtualenv'], die_message='easy_install failed to install virtualenv' '\ndevelopment requires virtualenv, please' ' install it using your favorite tool') if not run_command(['which', 'virtualenv']): die('ERROR: virtualenv not found in path.\n\ndevelopment ' ' requires virtualenv, please install it using your' ' favorite package management tool and ensure' ' virtualenv is in your path') print 'virtualenv installation done.' else: die('easy_install not found.\n\nInstall easy_install' ' (python-setuptools in ubuntu) or virtualenv by hand,' ' then rerun.') print 'dependency check done.'
python
def check_dependencies(): print 'Checking dependencies...' if not HAS_VIRTUALENV: print 'Virtual environment not found.' # Try installing it via easy_install... if HAS_EASY_INSTALL: print 'Installing virtualenv via easy_install...', run_command(['easy_install', 'virtualenv'], die_message='easy_install failed to install virtualenv' '\ndevelopment requires virtualenv, please' ' install it using your favorite tool') if not run_command(['which', 'virtualenv']): die('ERROR: virtualenv not found in path.\n\ndevelopment ' ' requires virtualenv, please install it using your' ' favorite package management tool and ensure' ' virtualenv is in your path') print 'virtualenv installation done.' else: die('easy_install not found.\n\nInstall easy_install' ' (python-setuptools in ubuntu) or virtualenv by hand,' ' then rerun.') print 'dependency check done.'
[ "def", "check_dependencies", "(", ")", ":", "print", "'Checking dependencies...'", "if", "not", "HAS_VIRTUALENV", ":", "print", "'Virtual environment not found.'", "# Try installing it via easy_install...", "if", "HAS_EASY_INSTALL", ":", "print", "'Installing virtualenv via easy_...
Make sure virtualenv is in the path.
[ "Make", "sure", "virtualenv", "is", "in", "the", "path", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/tools/install_venv.py#L66-L89
6,429
galaxyproject/pulsar
pulsar/client/manager.py
ClientManager.get_client
def get_client(self, destination_params, job_id, **kwargs): """Build a client given specific destination parameters and job_id.""" destination_params = _parse_destination_params(destination_params) destination_params.update(**kwargs) job_manager_interface_class = self.job_manager_interface_class job_manager_interface_args = dict(destination_params=destination_params, **self.job_manager_interface_args) job_manager_interface = job_manager_interface_class(**job_manager_interface_args) return self.client_class(destination_params, job_id, job_manager_interface, **self.extra_client_kwds)
python
def get_client(self, destination_params, job_id, **kwargs): destination_params = _parse_destination_params(destination_params) destination_params.update(**kwargs) job_manager_interface_class = self.job_manager_interface_class job_manager_interface_args = dict(destination_params=destination_params, **self.job_manager_interface_args) job_manager_interface = job_manager_interface_class(**job_manager_interface_args) return self.client_class(destination_params, job_id, job_manager_interface, **self.extra_client_kwds)
[ "def", "get_client", "(", "self", ",", "destination_params", ",", "job_id", ",", "*", "*", "kwargs", ")", ":", "destination_params", "=", "_parse_destination_params", "(", "destination_params", ")", "destination_params", ".", "update", "(", "*", "*", "kwargs", "...
Build a client given specific destination parameters and job_id.
[ "Build", "a", "client", "given", "specific", "destination", "parameters", "and", "job_id", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/manager.py#L83-L90
6,430
galaxyproject/pulsar
pulsar/managers/util/cli/__init__.py
CliInterface.get_plugins
def get_plugins(self, shell_params, job_params): """ Return shell and job interface defined by and configured via specified params. """ shell = self.get_shell_plugin(shell_params) job_interface = self.get_job_interface(job_params) return shell, job_interface
python
def get_plugins(self, shell_params, job_params): shell = self.get_shell_plugin(shell_params) job_interface = self.get_job_interface(job_params) return shell, job_interface
[ "def", "get_plugins", "(", "self", ",", "shell_params", ",", "job_params", ")", ":", "shell", "=", "self", ".", "get_shell_plugin", "(", "shell_params", ")", "job_interface", "=", "self", ".", "get_job_interface", "(", "job_params", ")", "return", "shell", ","...
Return shell and job interface defined by and configured via specified params.
[ "Return", "shell", "and", "job", "interface", "defined", "by", "and", "configured", "via", "specified", "params", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/cli/__init__.py#L49-L56
6,431
galaxyproject/pulsar
pulsar/cache/util.py
atomicish_move
def atomicish_move(source, destination, tmp_suffix="_TMP"): """Move source to destination without risk of partial moves. > from tempfile import mkdtemp > from os.path import join, exists > temp_dir = mkdtemp() > source = join(temp_dir, "the_source") > destination = join(temp_dir, "the_dest") > open(source, "wb").write(b"Hello World!") > assert exists(source) > assert not exists(destination) > atomicish_move(source, destination) > assert not exists(source) > assert exists(destination) """ destination_dir = os.path.dirname(destination) destination_name = os.path.basename(destination) temp_destination = os.path.join(destination_dir, "%s%s" % (destination_name, tmp_suffix)) shutil.move(source, temp_destination) os.rename(temp_destination, destination)
python
def atomicish_move(source, destination, tmp_suffix="_TMP"): destination_dir = os.path.dirname(destination) destination_name = os.path.basename(destination) temp_destination = os.path.join(destination_dir, "%s%s" % (destination_name, tmp_suffix)) shutil.move(source, temp_destination) os.rename(temp_destination, destination)
[ "def", "atomicish_move", "(", "source", ",", "destination", ",", "tmp_suffix", "=", "\"_TMP\"", ")", ":", "destination_dir", "=", "os", ".", "path", ".", "dirname", "(", "destination", ")", "destination_name", "=", "os", ".", "path", ".", "basename", "(", ...
Move source to destination without risk of partial moves. > from tempfile import mkdtemp > from os.path import join, exists > temp_dir = mkdtemp() > source = join(temp_dir, "the_source") > destination = join(temp_dir, "the_dest") > open(source, "wb").write(b"Hello World!") > assert exists(source) > assert not exists(destination) > atomicish_move(source, destination) > assert not exists(source) > assert exists(destination)
[ "Move", "source", "to", "destination", "without", "risk", "of", "partial", "moves", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/cache/util.py#L8-L27
6,432
galaxyproject/pulsar
pulsar/managers/util/env.py
env_to_statement
def env_to_statement(env): ''' Return the abstraction description of an environment variable definition into a statement for shell script. >>> env_to_statement(dict(name='X', value='Y')) 'X="Y"; export X' >>> env_to_statement(dict(name='X', value='Y', raw=True)) 'X=Y; export X' >>> env_to_statement(dict(name='X', value='"A","B","C"')) 'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X' >>> env_to_statement(dict(file="Y")) '. "Y"' >>> env_to_statement(dict(file="'RAW $FILE'", raw=True)) ". 'RAW $FILE'" >>> # Source file takes precedence >>> env_to_statement(dict(name='X', value='"A","B","C"', file="S")) '. "S"' >>> env_to_statement(dict(execute="module load java/1.5.1")) 'module load java/1.5.1' ''' source_file = env.get('file', None) if source_file: return '. %s' % __escape(source_file, env) execute = env.get('execute', None) if execute: return execute name = env['name'] value = __escape(env['value'], env) return '%s=%s; export %s' % (name, value, name)
python
def env_to_statement(env): ''' Return the abstraction description of an environment variable definition into a statement for shell script. >>> env_to_statement(dict(name='X', value='Y')) 'X="Y"; export X' >>> env_to_statement(dict(name='X', value='Y', raw=True)) 'X=Y; export X' >>> env_to_statement(dict(name='X', value='"A","B","C"')) 'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X' >>> env_to_statement(dict(file="Y")) '. "Y"' >>> env_to_statement(dict(file="'RAW $FILE'", raw=True)) ". 'RAW $FILE'" >>> # Source file takes precedence >>> env_to_statement(dict(name='X', value='"A","B","C"', file="S")) '. "S"' >>> env_to_statement(dict(execute="module load java/1.5.1")) 'module load java/1.5.1' ''' source_file = env.get('file', None) if source_file: return '. %s' % __escape(source_file, env) execute = env.get('execute', None) if execute: return execute name = env['name'] value = __escape(env['value'], env) return '%s=%s; export %s' % (name, value, name)
[ "def", "env_to_statement", "(", "env", ")", ":", "source_file", "=", "env", ".", "get", "(", "'file'", ",", "None", ")", "if", "source_file", ":", "return", "'. %s'", "%", "__escape", "(", "source_file", ",", "env", ")", "execute", "=", "env", ".", "ge...
Return the abstraction description of an environment variable definition into a statement for shell script. >>> env_to_statement(dict(name='X', value='Y')) 'X="Y"; export X' >>> env_to_statement(dict(name='X', value='Y', raw=True)) 'X=Y; export X' >>> env_to_statement(dict(name='X', value='"A","B","C"')) 'X="\\\\"A\\\\",\\\\"B\\\\",\\\\"C\\\\""; export X' >>> env_to_statement(dict(file="Y")) '. "Y"' >>> env_to_statement(dict(file="'RAW $FILE'", raw=True)) ". 'RAW $FILE'" >>> # Source file takes precedence >>> env_to_statement(dict(name='X', value='"A","B","C"', file="S")) '. "S"' >>> env_to_statement(dict(execute="module load java/1.5.1")) 'module load java/1.5.1'
[ "Return", "the", "abstraction", "description", "of", "an", "environment", "variable", "definition", "into", "a", "statement", "for", "shell", "script", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/env.py#L5-L33
6,433
galaxyproject/pulsar
pulsar/util/__init__.py
copy_to_temp
def copy_to_temp(object): """ Copy file-like object to temp file and return path. """ temp_file = NamedTemporaryFile(delete=False) _copy_and_close(object, temp_file) return temp_file.name
python
def copy_to_temp(object): temp_file = NamedTemporaryFile(delete=False) _copy_and_close(object, temp_file) return temp_file.name
[ "def", "copy_to_temp", "(", "object", ")", ":", "temp_file", "=", "NamedTemporaryFile", "(", "delete", "=", "False", ")", "_copy_and_close", "(", "object", ",", "temp_file", ")", "return", "temp_file", ".", "name" ]
Copy file-like object to temp file and return path.
[ "Copy", "file", "-", "like", "object", "to", "temp", "file", "and", "return", "path", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/util/__init__.py#L27-L34
6,434
galaxyproject/pulsar
pulsar/managers/util/condor/__init__.py
build_submit_description
def build_submit_description(executable, output, error, user_log, query_params): """ Build up the contents of a condor submit description file. >>> submit_args = dict(executable='/path/to/script', output='o', error='e', user_log='ul') >>> submit_args['query_params'] = dict() >>> default_description = build_submit_description(**submit_args) >>> assert 'executable = /path/to/script' in default_description >>> assert 'output = o' in default_description >>> assert 'error = e' in default_description >>> assert 'queue' in default_description >>> assert 'universe = vanilla' in default_description >>> assert 'universe = standard' not in default_description >>> submit_args['query_params'] = dict(universe='standard') >>> std_description = build_submit_description(**submit_args) >>> assert 'universe = vanilla' not in std_description >>> assert 'universe = standard' in std_description """ all_query_params = DEFAULT_QUERY_CLASSAD.copy() all_query_params.update(query_params) submit_description = [] for key, value in all_query_params.items(): submit_description.append('%s = %s' % (key, value)) submit_description.append('executable = ' + executable) submit_description.append('output = ' + output) submit_description.append('error = ' + error) submit_description.append('log = ' + user_log) submit_description.append('queue') return '\n'.join(submit_description)
python
def build_submit_description(executable, output, error, user_log, query_params): all_query_params = DEFAULT_QUERY_CLASSAD.copy() all_query_params.update(query_params) submit_description = [] for key, value in all_query_params.items(): submit_description.append('%s = %s' % (key, value)) submit_description.append('executable = ' + executable) submit_description.append('output = ' + output) submit_description.append('error = ' + error) submit_description.append('log = ' + user_log) submit_description.append('queue') return '\n'.join(submit_description)
[ "def", "build_submit_description", "(", "executable", ",", "output", ",", "error", ",", "user_log", ",", "query_params", ")", ":", "all_query_params", "=", "DEFAULT_QUERY_CLASSAD", ".", "copy", "(", ")", "all_query_params", ".", "update", "(", "query_params", ")",...
Build up the contents of a condor submit description file. >>> submit_args = dict(executable='/path/to/script', output='o', error='e', user_log='ul') >>> submit_args['query_params'] = dict() >>> default_description = build_submit_description(**submit_args) >>> assert 'executable = /path/to/script' in default_description >>> assert 'output = o' in default_description >>> assert 'error = e' in default_description >>> assert 'queue' in default_description >>> assert 'universe = vanilla' in default_description >>> assert 'universe = standard' not in default_description >>> submit_args['query_params'] = dict(universe='standard') >>> std_description = build_submit_description(**submit_args) >>> assert 'universe = vanilla' not in std_description >>> assert 'universe = standard' in std_description
[ "Build", "up", "the", "contents", "of", "a", "condor", "submit", "description", "file", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/condor/__init__.py#L39-L68
6,435
galaxyproject/pulsar
pulsar/managers/util/condor/__init__.py
condor_submit
def condor_submit(submit_file): """ Submit a condor job described by the given file. Parse an external id for the submission or return None and a reason for the failure. """ external_id = None try: submit = Popen(('condor_submit', submit_file), stdout=PIPE, stderr=STDOUT) message, _ = submit.communicate() if submit.returncode == 0: external_id = parse_external_id(message, type='condor') else: message = PROBLEM_PARSING_EXTERNAL_ID except Exception as e: message = str(e) return external_id, message
python
def condor_submit(submit_file): external_id = None try: submit = Popen(('condor_submit', submit_file), stdout=PIPE, stderr=STDOUT) message, _ = submit.communicate() if submit.returncode == 0: external_id = parse_external_id(message, type='condor') else: message = PROBLEM_PARSING_EXTERNAL_ID except Exception as e: message = str(e) return external_id, message
[ "def", "condor_submit", "(", "submit_file", ")", ":", "external_id", "=", "None", "try", ":", "submit", "=", "Popen", "(", "(", "'condor_submit'", ",", "submit_file", ")", ",", "stdout", "=", "PIPE", ",", "stderr", "=", "STDOUT", ")", "message", ",", "_"...
Submit a condor job described by the given file. Parse an external id for the submission or return None and a reason for the failure.
[ "Submit", "a", "condor", "job", "described", "by", "the", "given", "file", ".", "Parse", "an", "external", "id", "for", "the", "submission", "or", "return", "None", "and", "a", "reason", "for", "the", "failure", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/condor/__init__.py#L71-L86
6,436
galaxyproject/pulsar
pulsar/managers/util/condor/__init__.py
condor_stop
def condor_stop(external_id): """ Stop running condor job and return a failure_message if this fails. """ failure_message = None try: check_call(('condor_rm', external_id)) except CalledProcessError: failure_message = "condor_rm failed" except Exception as e: "error encountered calling condor_rm: %s" % e return failure_message
python
def condor_stop(external_id): failure_message = None try: check_call(('condor_rm', external_id)) except CalledProcessError: failure_message = "condor_rm failed" except Exception as e: "error encountered calling condor_rm: %s" % e return failure_message
[ "def", "condor_stop", "(", "external_id", ")", ":", "failure_message", "=", "None", "try", ":", "check_call", "(", "(", "'condor_rm'", ",", "external_id", ")", ")", "except", "CalledProcessError", ":", "failure_message", "=", "\"condor_rm failed\"", "except", "Exc...
Stop running condor job and return a failure_message if this fails.
[ "Stop", "running", "condor", "job", "and", "return", "a", "failure_message", "if", "this", "fails", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/condor/__init__.py#L89-L101
6,437
galaxyproject/pulsar
pulsar/locks.py
LockManager.get_lock
def get_lock(self, path): """ Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not. """ if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock
python
def get_lock(self, path): if self.lockfile: return self.lockfile.LockFile(path) else: with self.job_locks_lock: if path not in self.job_locks: lock = threading.Lock() self.job_locks[path] = lock else: lock = self.job_locks[path] return lock
[ "def", "get_lock", "(", "self", ",", "path", ")", ":", "if", "self", ".", "lockfile", ":", "return", "self", ".", "lockfile", ".", "LockFile", "(", "path", ")", "else", ":", "with", "self", ".", "job_locks_lock", ":", "if", "path", "not", "in", "self...
Get a job lock corresponding to the path - assumes parent directory exists but the file itself does not.
[ "Get", "a", "job", "lock", "corresponding", "to", "the", "path", "-", "assumes", "parent", "directory", "exists", "but", "the", "file", "itself", "does", "not", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/locks.py#L23-L36
6,438
galaxyproject/pulsar
pulsar/client/setup_handler.py
build
def build(client, destination_args): """ Build a SetupHandler object for client from destination parameters. """ # Have defined a remote job directory, lets do the setup locally. if client.job_directory: handler = LocalSetupHandler(client, destination_args) else: handler = RemoteSetupHandler(client) return handler
python
def build(client, destination_args): # Have defined a remote job directory, lets do the setup locally. if client.job_directory: handler = LocalSetupHandler(client, destination_args) else: handler = RemoteSetupHandler(client) return handler
[ "def", "build", "(", "client", ",", "destination_args", ")", ":", "# Have defined a remote job directory, lets do the setup locally.", "if", "client", ".", "job_directory", ":", "handler", "=", "LocalSetupHandler", "(", "client", ",", "destination_args", ")", "else", ":...
Build a SetupHandler object for client from destination parameters.
[ "Build", "a", "SetupHandler", "object", "for", "client", "from", "destination", "parameters", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/setup_handler.py#L10-L18
6,439
galaxyproject/pulsar
pulsar/managers/util/drmaa/__init__.py
DrmaaSession.run_job
def run_job(self, **kwds): """ Create a DRMAA job template, populate with specified properties, run the job, and return the external_job_id. """ template = DrmaaSession.session.createJobTemplate() try: for key in kwds: setattr(template, key, kwds[key]) with DrmaaSession.session_lock: return DrmaaSession.session.runJob(template) finally: DrmaaSession.session.deleteJobTemplate(template)
python
def run_job(self, **kwds): template = DrmaaSession.session.createJobTemplate() try: for key in kwds: setattr(template, key, kwds[key]) with DrmaaSession.session_lock: return DrmaaSession.session.runJob(template) finally: DrmaaSession.session.deleteJobTemplate(template)
[ "def", "run_job", "(", "self", ",", "*", "*", "kwds", ")", ":", "template", "=", "DrmaaSession", ".", "session", ".", "createJobTemplate", "(", ")", "try", ":", "for", "key", "in", "kwds", ":", "setattr", "(", "template", ",", "key", ",", "kwds", "["...
Create a DRMAA job template, populate with specified properties, run the job, and return the external_job_id.
[ "Create", "a", "DRMAA", "job", "template", "populate", "with", "specified", "properties", "run", "the", "job", "and", "return", "the", "external_job_id", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/drmaa/__init__.py#L57-L69
6,440
galaxyproject/pulsar
pulsar/client/destination.py
url_to_destination_params
def url_to_destination_params(url): """Convert a legacy runner URL to a job destination >>> params_simple = url_to_destination_params("http://localhost:8913/") >>> params_simple["url"] 'http://localhost:8913/' >>> params_simple["private_token"] is None True >>> advanced_url = "https://1234x@example.com:8914/managers/longqueue" >>> params_advanced = url_to_destination_params(advanced_url) >>> params_advanced["url"] 'https://example.com:8914/managers/longqueue/' >>> params_advanced["private_token"] '1234x' >>> runner_url = "pulsar://http://localhost:8913/" >>> runner_params = url_to_destination_params(runner_url) >>> runner_params['url'] 'http://localhost:8913/' """ if url.startswith("pulsar://"): url = url[len("pulsar://"):] if not url.endswith("/"): url += "/" # Check for private token embedded in the URL. A URL of the form # https://moo@cow:8913 will try to contact https://cow:8913 # with a private key of moo private_token_format = "https?://(.*)@.*/?" private_token_match = match(private_token_format, url) private_token = None if private_token_match: private_token = private_token_match.group(1) url = url.replace("%s@" % private_token, '', 1) destination_args = {"url": url, "private_token": private_token} return destination_args
python
def url_to_destination_params(url): if url.startswith("pulsar://"): url = url[len("pulsar://"):] if not url.endswith("/"): url += "/" # Check for private token embedded in the URL. A URL of the form # https://moo@cow:8913 will try to contact https://cow:8913 # with a private key of moo private_token_format = "https?://(.*)@.*/?" private_token_match = match(private_token_format, url) private_token = None if private_token_match: private_token = private_token_match.group(1) url = url.replace("%s@" % private_token, '', 1) destination_args = {"url": url, "private_token": private_token} return destination_args
[ "def", "url_to_destination_params", "(", "url", ")", ":", "if", "url", ".", "startswith", "(", "\"pulsar://\"", ")", ":", "url", "=", "url", "[", "len", "(", "\"pulsar://\"", ")", ":", "]", "if", "not", "url", ".", "endswith", "(", "\"/\"", ")", ":", ...
Convert a legacy runner URL to a job destination >>> params_simple = url_to_destination_params("http://localhost:8913/") >>> params_simple["url"] 'http://localhost:8913/' >>> params_simple["private_token"] is None True >>> advanced_url = "https://1234x@example.com:8914/managers/longqueue" >>> params_advanced = url_to_destination_params(advanced_url) >>> params_advanced["url"] 'https://example.com:8914/managers/longqueue/' >>> params_advanced["private_token"] '1234x' >>> runner_url = "pulsar://http://localhost:8913/" >>> runner_params = url_to_destination_params(runner_url) >>> runner_params['url'] 'http://localhost:8913/'
[ "Convert", "a", "legacy", "runner", "URL", "to", "a", "job", "destination" ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/destination.py#L9-L48
6,441
galaxyproject/pulsar
pulsar/util/pastescript/serve.py
ensure_port_cleanup
def ensure_port_cleanup(bound_addresses, maxtries=30, sleeptime=2): """ This makes sure any open ports are closed. Does this by connecting to them until they give connection refused. Servers should call like:: import paste.script ensure_port_cleanup([80, 443]) """ atexit.register(_cleanup_ports, bound_addresses, maxtries=maxtries, sleeptime=sleeptime)
python
def ensure_port_cleanup(bound_addresses, maxtries=30, sleeptime=2): atexit.register(_cleanup_ports, bound_addresses, maxtries=maxtries, sleeptime=sleeptime)
[ "def", "ensure_port_cleanup", "(", "bound_addresses", ",", "maxtries", "=", "30", ",", "sleeptime", "=", "2", ")", ":", "atexit", ".", "register", "(", "_cleanup_ports", ",", "bound_addresses", ",", "maxtries", "=", "maxtries", ",", "sleeptime", "=", "sleeptim...
This makes sure any open ports are closed. Does this by connecting to them until they give connection refused. Servers should call like:: import paste.script ensure_port_cleanup([80, 443])
[ "This", "makes", "sure", "any", "open", "ports", "are", "closed", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/util/pastescript/serve.py#L963-L974
6,442
galaxyproject/pulsar
pulsar/util/pastescript/serve.py
Command.standard_parser
def standard_parser(cls, verbose=True, interactive=False, no_interactive=False, simulate=False, quiet=False, overwrite=False): """ Create a standard ``OptionParser`` instance. Typically used like:: class MyCommand(Command): parser = Command.standard_parser() Subclasses may redefine ``standard_parser``, so use the nearest superclass's class method. """ parser = BoolOptionParser() if verbose: parser.add_option('-v', '--verbose', action='count', dest='verbose', default=0) if quiet: parser.add_option('-q', '--quiet', action='count', dest='quiet', default=0) if no_interactive: parser.add_option('--no-interactive', action="count", dest="no_interactive", default=0) if interactive: parser.add_option('-i', '--interactive', action='count', dest='interactive', default=0) if simulate: parser.add_option('-n', '--simulate', action='store_true', dest='simulate', default=False) if overwrite: parser.add_option('-f', '--overwrite', dest="overwrite", action="store_true", help="Overwrite files (warnings will be emitted for non-matching files otherwise)") return parser
python
def standard_parser(cls, verbose=True, interactive=False, no_interactive=False, simulate=False, quiet=False, overwrite=False): parser = BoolOptionParser() if verbose: parser.add_option('-v', '--verbose', action='count', dest='verbose', default=0) if quiet: parser.add_option('-q', '--quiet', action='count', dest='quiet', default=0) if no_interactive: parser.add_option('--no-interactive', action="count", dest="no_interactive", default=0) if interactive: parser.add_option('-i', '--interactive', action='count', dest='interactive', default=0) if simulate: parser.add_option('-n', '--simulate', action='store_true', dest='simulate', default=False) if overwrite: parser.add_option('-f', '--overwrite', dest="overwrite", action="store_true", help="Overwrite files (warnings will be emitted for non-matching files otherwise)") return parser
[ "def", "standard_parser", "(", "cls", ",", "verbose", "=", "True", ",", "interactive", "=", "False", ",", "no_interactive", "=", "False", ",", "simulate", "=", "False", ",", "quiet", "=", "False", ",", "overwrite", "=", "False", ")", ":", "parser", "=", ...
Create a standard ``OptionParser`` instance. Typically used like:: class MyCommand(Command): parser = Command.standard_parser() Subclasses may redefine ``standard_parser``, so use the nearest superclass's class method.
[ "Create", "a", "standard", "OptionParser", "instance", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/util/pastescript/serve.py#L256-L304
6,443
galaxyproject/pulsar
pulsar/util/pastescript/serve.py
Command.quote_first_command_arg
def quote_first_command_arg(self, arg): """ There's a bug in Windows when running an executable that's located inside a path with a space in it. This method handles that case, or on non-Windows systems or an executable with no spaces, it just leaves well enough alone. """ if (sys.platform != 'win32' or ' ' not in arg): # Problem does not apply: return arg try: import win32api except ImportError: raise ValueError( "The executable %r contains a space, and in order to " "handle this issue you must have the win32api module " "installed" % arg) arg = win32api.GetShortPathName(arg) return arg
python
def quote_first_command_arg(self, arg): if (sys.platform != 'win32' or ' ' not in arg): # Problem does not apply: return arg try: import win32api except ImportError: raise ValueError( "The executable %r contains a space, and in order to " "handle this issue you must have the win32api module " "installed" % arg) arg = win32api.GetShortPathName(arg) return arg
[ "def", "quote_first_command_arg", "(", "self", ",", "arg", ")", ":", "if", "(", "sys", ".", "platform", "!=", "'win32'", "or", "' '", "not", "in", "arg", ")", ":", "# Problem does not apply:", "return", "arg", "try", ":", "import", "win32api", "except", "I...
There's a bug in Windows when running an executable that's located inside a path with a space in it. This method handles that case, or on non-Windows systems or an executable with no spaces, it just leaves well enough alone.
[ "There", "s", "a", "bug", "in", "Windows", "when", "running", "an", "executable", "that", "s", "located", "inside", "a", "path", "with", "a", "space", "in", "it", ".", "This", "method", "handles", "that", "case", "or", "on", "non", "-", "Windows", "sys...
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/util/pastescript/serve.py#L308-L327
6,444
galaxyproject/pulsar
pulsar/util/pastescript/serve.py
Command.logging_file_config
def logging_file_config(self, config_file): """ Setup logging via the logging module's fileConfig function with the specified ``config_file``, if applicable. ConfigParser defaults are specified for the special ``__file__`` and ``here`` variables, similar to PasteDeploy config loading. """ parser = ConfigParser.ConfigParser() parser.read([config_file]) if parser.has_section('loggers'): config_file = os.path.abspath(config_file) fileConfig(config_file, dict(__file__=config_file, here=os.path.dirname(config_file)))
python
def logging_file_config(self, config_file): parser = ConfigParser.ConfigParser() parser.read([config_file]) if parser.has_section('loggers'): config_file = os.path.abspath(config_file) fileConfig(config_file, dict(__file__=config_file, here=os.path.dirname(config_file)))
[ "def", "logging_file_config", "(", "self", ",", "config_file", ")", ":", "parser", "=", "ConfigParser", ".", "ConfigParser", "(", ")", "parser", ".", "read", "(", "[", "config_file", "]", ")", "if", "parser", ".", "has_section", "(", "'loggers'", ")", ":",...
Setup logging via the logging module's fileConfig function with the specified ``config_file``, if applicable. ConfigParser defaults are specified for the special ``__file__`` and ``here`` variables, similar to PasteDeploy config loading.
[ "Setup", "logging", "via", "the", "logging", "module", "s", "fileConfig", "function", "with", "the", "specified", "config_file", "if", "applicable", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/util/pastescript/serve.py#L345-L358
6,445
galaxyproject/pulsar
pulsar/client/staging/down.py
finish_job
def finish_job(client, cleanup_job, job_completed_normally, client_outputs, pulsar_outputs): """Process for "un-staging" a complete Pulsar job. This function is responsible for downloading results from remote server and cleaning up Pulsar staging directory (if needed.) """ collection_failure_exceptions = [] if job_completed_normally: output_collector = ClientOutputCollector(client) action_mapper = FileActionMapper(client) results_stager = ResultsCollector(output_collector, action_mapper, client_outputs, pulsar_outputs) collection_failure_exceptions = results_stager.collect() _clean(collection_failure_exceptions, cleanup_job, client) return collection_failure_exceptions
python
def finish_job(client, cleanup_job, job_completed_normally, client_outputs, pulsar_outputs): collection_failure_exceptions = [] if job_completed_normally: output_collector = ClientOutputCollector(client) action_mapper = FileActionMapper(client) results_stager = ResultsCollector(output_collector, action_mapper, client_outputs, pulsar_outputs) collection_failure_exceptions = results_stager.collect() _clean(collection_failure_exceptions, cleanup_job, client) return collection_failure_exceptions
[ "def", "finish_job", "(", "client", ",", "cleanup_job", ",", "job_completed_normally", ",", "client_outputs", ",", "pulsar_outputs", ")", ":", "collection_failure_exceptions", "=", "[", "]", "if", "job_completed_normally", ":", "output_collector", "=", "ClientOutputColl...
Process for "un-staging" a complete Pulsar job. This function is responsible for downloading results from remote server and cleaning up Pulsar staging directory (if needed.)
[ "Process", "for", "un", "-", "staging", "a", "complete", "Pulsar", "job", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/staging/down.py#L13-L26
6,446
galaxyproject/pulsar
pulsar/managers/base/base_drmaa.py
BaseDrmaaManager.shutdown
def shutdown(self, timeout=None): """Cleanup DRMAA session and call shutdown of parent.""" try: super(BaseDrmaaManager, self).shutdown(timeout) except Exception: pass self.drmaa_session.close()
python
def shutdown(self, timeout=None): try: super(BaseDrmaaManager, self).shutdown(timeout) except Exception: pass self.drmaa_session.close()
[ "def", "shutdown", "(", "self", ",", "timeout", "=", "None", ")", ":", "try", ":", "super", "(", "BaseDrmaaManager", ",", "self", ")", ".", "shutdown", "(", "timeout", ")", "except", "Exception", ":", "pass", "self", ".", "drmaa_session", ".", "close", ...
Cleanup DRMAA session and call shutdown of parent.
[ "Cleanup", "DRMAA", "session", "and", "call", "shutdown", "of", "parent", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/base/base_drmaa.py#L31-L37
6,447
galaxyproject/pulsar
pulsar/cache/__init__.py
Cache.cache_file
def cache_file(self, local_path, ip, path): """ Move a file from a temporary staging area into the cache. """ destination = self.__destination(ip, path) atomicish_move(local_path, destination)
python
def cache_file(self, local_path, ip, path): destination = self.__destination(ip, path) atomicish_move(local_path, destination)
[ "def", "cache_file", "(", "self", ",", "local_path", ",", "ip", ",", "path", ")", ":", "destination", "=", "self", ".", "__destination", "(", "ip", ",", "path", ")", "atomicish_move", "(", "local_path", ",", "destination", ")" ]
Move a file from a temporary staging area into the cache.
[ "Move", "a", "file", "from", "a", "temporary", "staging", "area", "into", "the", "cache", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/cache/__init__.py#L41-L46
6,448
galaxyproject/pulsar
pulsar/manager_factory.py
build_managers
def build_managers(app, conf): """ Takes in a config file as outlined in job_managers.ini.sample and builds a dictionary of job manager objects from them. """ # Load default options from config file that apply to all # managers. default_options = _get_default_options(conf) manager_descriptions = ManagerDescriptions() if "job_managers_config" in conf: job_managers_config = conf.get("job_managers_config", None) _populate_manager_descriptions_from_ini(manager_descriptions, job_managers_config) elif "managers" in conf: for manager_name, manager_options in conf["managers"].items(): manager_description = ManagerDescription.from_dict(manager_options, manager_name) manager_descriptions.add(manager_description) elif "manager" in conf: manager_description = ManagerDescription.from_dict(conf["manager"]) manager_descriptions.add(manager_description) else: manager_descriptions.add(ManagerDescription()) manager_classes = _get_managers_dict() managers = {} for manager_name, manager_description in manager_descriptions.descriptions.items(): manager_options = dict(default_options) manager_options.update(manager_description.manager_options) manager_class = manager_classes[manager_description.manager_type] manager = _build_manager(manager_class, app, manager_name, manager_options) managers[manager_name] = manager return managers
python
def build_managers(app, conf): # Load default options from config file that apply to all # managers. default_options = _get_default_options(conf) manager_descriptions = ManagerDescriptions() if "job_managers_config" in conf: job_managers_config = conf.get("job_managers_config", None) _populate_manager_descriptions_from_ini(manager_descriptions, job_managers_config) elif "managers" in conf: for manager_name, manager_options in conf["managers"].items(): manager_description = ManagerDescription.from_dict(manager_options, manager_name) manager_descriptions.add(manager_description) elif "manager" in conf: manager_description = ManagerDescription.from_dict(conf["manager"]) manager_descriptions.add(manager_description) else: manager_descriptions.add(ManagerDescription()) manager_classes = _get_managers_dict() managers = {} for manager_name, manager_description in manager_descriptions.descriptions.items(): manager_options = dict(default_options) manager_options.update(manager_description.manager_options) manager_class = manager_classes[manager_description.manager_type] manager = _build_manager(manager_class, app, manager_name, manager_options) managers[manager_name] = manager return managers
[ "def", "build_managers", "(", "app", ",", "conf", ")", ":", "# Load default options from config file that apply to all", "# managers.", "default_options", "=", "_get_default_options", "(", "conf", ")", "manager_descriptions", "=", "ManagerDescriptions", "(", ")", "if", "\...
Takes in a config file as outlined in job_managers.ini.sample and builds a dictionary of job manager objects from them.
[ "Takes", "in", "a", "config", "file", "as", "outlined", "in", "job_managers", ".", "ini", ".", "sample", "and", "builds", "a", "dictionary", "of", "job", "manager", "objects", "from", "them", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/manager_factory.py#L17-L50
6,449
galaxyproject/pulsar
pulsar/util/pastescript/loadwsgi.py
fix_type_error
def fix_type_error(exc_info, callable, varargs, kwargs): """ Given an exception, this will test if the exception was due to a signature error, and annotate the error with better information if so. Usage:: try: val = callable(*args, **kw) except TypeError: exc_info = fix_type_error(None, callable, args, kw) raise exc_info[0], exc_info[1], exc_info[2] """ if exc_info is None: exc_info = sys.exc_info() if (exc_info[0] != TypeError or str(exc_info[1]).find('arguments') == -1 or getattr(exc_info[1], '_type_error_fixed', False)): return exc_info exc_info[1]._type_error_fixed = True argspec = inspect.formatargspec(*inspect.getargspec(callable)) args = ', '.join(map(_short_repr, varargs)) if kwargs and args: args += ', ' if kwargs: kwargs = kwargs.items() kwargs.sort() args += ', '.join(['%s=...' % n for n, v in kwargs]) gotspec = '(%s)' % args msg = '%s; got %s, wanted %s' % (exc_info[1], gotspec, argspec) exc_info[1].args = (msg,) return exc_info
python
def fix_type_error(exc_info, callable, varargs, kwargs): if exc_info is None: exc_info = sys.exc_info() if (exc_info[0] != TypeError or str(exc_info[1]).find('arguments') == -1 or getattr(exc_info[1], '_type_error_fixed', False)): return exc_info exc_info[1]._type_error_fixed = True argspec = inspect.formatargspec(*inspect.getargspec(callable)) args = ', '.join(map(_short_repr, varargs)) if kwargs and args: args += ', ' if kwargs: kwargs = kwargs.items() kwargs.sort() args += ', '.join(['%s=...' % n for n, v in kwargs]) gotspec = '(%s)' % args msg = '%s; got %s, wanted %s' % (exc_info[1], gotspec, argspec) exc_info[1].args = (msg,) return exc_info
[ "def", "fix_type_error", "(", "exc_info", ",", "callable", ",", "varargs", ",", "kwargs", ")", ":", "if", "exc_info", "is", "None", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "(", "exc_info", "[", "0", "]", "!=", "TypeError", "or", ...
Given an exception, this will test if the exception was due to a signature error, and annotate the error with better information if so. Usage:: try: val = callable(*args, **kw) except TypeError: exc_info = fix_type_error(None, callable, args, kw) raise exc_info[0], exc_info[1], exc_info[2]
[ "Given", "an", "exception", "this", "will", "test", "if", "the", "exception", "was", "due", "to", "a", "signature", "error", "and", "annotate", "the", "error", "with", "better", "information", "if", "so", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/util/pastescript/loadwsgi.py#L50-L82
6,450
galaxyproject/pulsar
pulsar/client/job_directory.py
RemoteJobDirectory.calculate_path
def calculate_path(self, remote_relative_path, input_type): """ Only for used by Pulsar client, should override for managers to enforce security and make the directory if needed. """ directory, allow_nested_files = self._directory_for_file_type(input_type) return self.path_helper.remote_join(directory, remote_relative_path)
python
def calculate_path(self, remote_relative_path, input_type): directory, allow_nested_files = self._directory_for_file_type(input_type) return self.path_helper.remote_join(directory, remote_relative_path)
[ "def", "calculate_path", "(", "self", ",", "remote_relative_path", ",", "input_type", ")", ":", "directory", ",", "allow_nested_files", "=", "self", ".", "_directory_for_file_type", "(", "input_type", ")", "return", "self", ".", "path_helper", ".", "remote_join", ...
Only for used by Pulsar client, should override for managers to enforce security and make the directory if needed.
[ "Only", "for", "used", "by", "Pulsar", "client", "should", "override", "for", "managers", "to", "enforce", "security", "and", "make", "the", "directory", "if", "needed", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/job_directory.py#L71-L76
6,451
galaxyproject/pulsar
pulsar/managers/stateful.py
StatefulManagerProxy.get_status
def get_status(self, job_id): """ Compute status used proxied manager and handle state transitions and track additional state information needed. """ job_directory = self._proxied_manager.job_directory(job_id) with job_directory.lock("status"): proxy_status, state_change = self.__proxy_status(job_directory, job_id) if state_change == "to_complete": self.__deactivate(job_id, proxy_status) elif state_change == "to_running": self.__state_change_callback(status.RUNNING, job_id) return self.__status(job_directory, proxy_status)
python
def get_status(self, job_id): job_directory = self._proxied_manager.job_directory(job_id) with job_directory.lock("status"): proxy_status, state_change = self.__proxy_status(job_directory, job_id) if state_change == "to_complete": self.__deactivate(job_id, proxy_status) elif state_change == "to_running": self.__state_change_callback(status.RUNNING, job_id) return self.__status(job_directory, proxy_status)
[ "def", "get_status", "(", "self", ",", "job_id", ")", ":", "job_directory", "=", "self", ".", "_proxied_manager", ".", "job_directory", "(", "job_id", ")", "with", "job_directory", ".", "lock", "(", "\"status\"", ")", ":", "proxy_status", ",", "state_change", ...
Compute status used proxied manager and handle state transitions and track additional state information needed.
[ "Compute", "status", "used", "proxied", "manager", "and", "handle", "state", "transitions", "and", "track", "additional", "state", "information", "needed", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/stateful.py#L137-L150
6,452
galaxyproject/pulsar
pulsar/managers/stateful.py
StatefulManagerProxy.__proxy_status
def __proxy_status(self, job_directory, job_id): """ Determine state with proxied job manager and if this job needs to be marked as deactivated (this occurs when job first returns a complete status from proxy. """ state_change = None if job_directory.has_metadata(JOB_FILE_PREPROCESSING_FAILED): proxy_status = status.FAILED job_directory.store_metadata(JOB_FILE_FINAL_STATUS, proxy_status) state_change = "to_complete" elif not job_directory.has_metadata(JOB_FILE_PREPROCESSED): proxy_status = status.PREPROCESSING elif job_directory.has_metadata(JOB_FILE_FINAL_STATUS): proxy_status = job_directory.load_metadata(JOB_FILE_FINAL_STATUS) else: proxy_status = self._proxied_manager.get_status(job_id) if proxy_status == status.RUNNING: if not job_directory.has_metadata(JOB_METADATA_RUNNING): job_directory.store_metadata(JOB_METADATA_RUNNING, True) state_change = "to_running" elif proxy_status in [status.COMPLETE, status.CANCELLED]: job_directory.store_metadata(JOB_FILE_FINAL_STATUS, proxy_status) state_change = "to_complete" return proxy_status, state_change
python
def __proxy_status(self, job_directory, job_id): state_change = None if job_directory.has_metadata(JOB_FILE_PREPROCESSING_FAILED): proxy_status = status.FAILED job_directory.store_metadata(JOB_FILE_FINAL_STATUS, proxy_status) state_change = "to_complete" elif not job_directory.has_metadata(JOB_FILE_PREPROCESSED): proxy_status = status.PREPROCESSING elif job_directory.has_metadata(JOB_FILE_FINAL_STATUS): proxy_status = job_directory.load_metadata(JOB_FILE_FINAL_STATUS) else: proxy_status = self._proxied_manager.get_status(job_id) if proxy_status == status.RUNNING: if not job_directory.has_metadata(JOB_METADATA_RUNNING): job_directory.store_metadata(JOB_METADATA_RUNNING, True) state_change = "to_running" elif proxy_status in [status.COMPLETE, status.CANCELLED]: job_directory.store_metadata(JOB_FILE_FINAL_STATUS, proxy_status) state_change = "to_complete" return proxy_status, state_change
[ "def", "__proxy_status", "(", "self", ",", "job_directory", ",", "job_id", ")", ":", "state_change", "=", "None", "if", "job_directory", ".", "has_metadata", "(", "JOB_FILE_PREPROCESSING_FAILED", ")", ":", "proxy_status", "=", "status", ".", "FAILED", "job_directo...
Determine state with proxied job manager and if this job needs to be marked as deactivated (this occurs when job first returns a complete status from proxy.
[ "Determine", "state", "with", "proxied", "job", "manager", "and", "if", "this", "job", "needs", "to", "be", "marked", "as", "deactivated", "(", "this", "occurs", "when", "job", "first", "returns", "a", "complete", "status", "from", "proxy", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/stateful.py#L152-L175
6,453
galaxyproject/pulsar
pulsar/client/staging/__init__.py
PulsarOutputs.output_extras
def output_extras(self, output_file): """ Returns dict mapping local path to remote name. """ output_directory = dirname(output_file) def local_path(name): return join(output_directory, self.path_helper.local_name(name)) files_directory = "%s_files%s" % (basename(output_file)[0:-len(".dat")], self.path_helper.separator) names = filter(lambda o: o.startswith(files_directory), self.output_directory_contents) return dict(map(lambda name: (local_path(name), name), names))
python
def output_extras(self, output_file): output_directory = dirname(output_file) def local_path(name): return join(output_directory, self.path_helper.local_name(name)) files_directory = "%s_files%s" % (basename(output_file)[0:-len(".dat")], self.path_helper.separator) names = filter(lambda o: o.startswith(files_directory), self.output_directory_contents) return dict(map(lambda name: (local_path(name), name), names))
[ "def", "output_extras", "(", "self", ",", "output_file", ")", ":", "output_directory", "=", "dirname", "(", "output_file", ")", "def", "local_path", "(", "name", ")", ":", "return", "join", "(", "output_directory", ",", "self", ".", "path_helper", ".", "loca...
Returns dict mapping local path to remote name.
[ "Returns", "dict", "mapping", "local", "path", "to", "remote", "name", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/client/staging/__init__.py#L185-L196
6,454
galaxyproject/pulsar
pulsar/managers/util/sudo.py
sudo_popen
def sudo_popen(*args, **kwargs): """ Helper method for building and executing Popen command. This is potentially sensetive code so should probably be centralized. """ user = kwargs.get("user", None) full_command = [SUDO_PATH, SUDO_PRESERVE_ENVIRONMENT_ARG] if user: full_command.extend([SUDO_USER_ARG, user]) full_command.extend(args) log.info("About to execute the following sudo command - [%s]" % ' '.join(full_command)) p = Popen(full_command, shell=False, stdout=PIPE, stderr=PIPE) return p
python
def sudo_popen(*args, **kwargs): user = kwargs.get("user", None) full_command = [SUDO_PATH, SUDO_PRESERVE_ENVIRONMENT_ARG] if user: full_command.extend([SUDO_USER_ARG, user]) full_command.extend(args) log.info("About to execute the following sudo command - [%s]" % ' '.join(full_command)) p = Popen(full_command, shell=False, stdout=PIPE, stderr=PIPE) return p
[ "def", "sudo_popen", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "user", "=", "kwargs", ".", "get", "(", "\"user\"", ",", "None", ")", "full_command", "=", "[", "SUDO_PATH", ",", "SUDO_PRESERVE_ENVIRONMENT_ARG", "]", "if", "user", ":", "full_com...
Helper method for building and executing Popen command. This is potentially sensetive code so should probably be centralized.
[ "Helper", "method", "for", "building", "and", "executing", "Popen", "command", ".", "This", "is", "potentially", "sensetive", "code", "so", "should", "probably", "be", "centralized", "." ]
9ab6683802884324652da0a9f0808c7eb59d3ab4
https://github.com/galaxyproject/pulsar/blob/9ab6683802884324652da0a9f0808c7eb59d3ab4/pulsar/managers/util/sudo.py#L14-L26
6,455
bitprophet/releases
releases/line_manager.py
LineManager.add_family
def add_family(self, major_number): """ Expand to a new release line with given ``major_number``. This will flesh out mandatory buckets like ``unreleased_bugfix`` and do other necessary bookkeeping. """ # Normally, we have separate buckets for bugfixes vs features keys = ['unreleased_bugfix', 'unreleased_feature'] # But unstable prehistorical releases roll all up into just # 'unreleased' if major_number == 0 and self.config.releases_unstable_prehistory: keys = ['unreleased'] # Either way, the buckets default to an empty list self[major_number] = {key: [] for key in keys}
python
def add_family(self, major_number): # Normally, we have separate buckets for bugfixes vs features keys = ['unreleased_bugfix', 'unreleased_feature'] # But unstable prehistorical releases roll all up into just # 'unreleased' if major_number == 0 and self.config.releases_unstable_prehistory: keys = ['unreleased'] # Either way, the buckets default to an empty list self[major_number] = {key: [] for key in keys}
[ "def", "add_family", "(", "self", ",", "major_number", ")", ":", "# Normally, we have separate buckets for bugfixes vs features", "keys", "=", "[", "'unreleased_bugfix'", ",", "'unreleased_feature'", "]", "# But unstable prehistorical releases roll all up into just", "# 'unreleased...
Expand to a new release line with given ``major_number``. This will flesh out mandatory buckets like ``unreleased_bugfix`` and do other necessary bookkeeping.
[ "Expand", "to", "a", "new", "release", "line", "with", "given", "major_number", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/line_manager.py#L23-L37
6,456
bitprophet/releases
releases/util.py
parse_changelog
def parse_changelog(path, **kwargs): """ Load and parse changelog file from ``path``, returning data structures. This function does not alter any files on disk; it is solely for introspecting a Releases ``changelog.rst`` and programmatically answering questions like "are there any unreleased bugfixes for the 2.3 line?" or "what was included in release 1.2.1?". For example, answering the above questions is as simple as:: changelog = parse_changelog("/path/to/changelog") print("Unreleased issues for 2.3.x: {}".format(changelog['2.3'])) print("Contents of v1.2.1: {}".format(changelog['1.2.1'])) Aside from the documented arguments, any additional keyword arguments are passed unmodified into an internal `get_doctree` call (which then passes them to `make_app`). :param str path: A relative or absolute file path string. :returns: A dict whose keys map to lists of ``releases.models.Issue`` objects, as follows: - Actual releases are full version number keys, such as ``"1.2.1"`` or ``"2.0.0"``. - Unreleased bugs (or bug-like issues; see the Releases docs) are stored in minor-release buckets, e.g. ``"1.2"`` or ``"2.0"``. - Unreleased features (or feature-like issues) are found in ``"unreleased_N_feature"``, where ``N`` is one of the major release families (so, a changelog spanning only 1.x will only have ``unreleased_1_feature``, whereas one with 1.x and 2.x releases will have ``unreleased_1_feature`` and ``unreleased_2_feature``, etc). .. versionchanged:: 1.6 Added support for passing kwargs to `get_doctree`/`make_app`. """ app, doctree = get_doctree(path, **kwargs) # Have to semi-reproduce the 'find first bullet list' bit from main code, # which is unfortunately side-effect-heavy (thanks to Sphinx plugin # design). 
first_list = None for node in doctree[0]: if isinstance(node, bullet_list): first_list = node break # Initial parse into the structures Releases finds useful internally releases, manager = construct_releases(first_list.children, app) ret = changelog2dict(releases) # Stitch them together into something an end-user would find better: # - nuke unreleased_N.N_Y as their contents will be represented in the # per-line buckets for key in ret.copy(): if key.startswith('unreleased'): del ret[key] for family in manager: # - remove unreleased_bugfix, as they are accounted for in the per-line # buckets too. No need to store anywhere. manager[family].pop('unreleased_bugfix', None) # - bring over each major family's unreleased_feature as # unreleased_N_feature unreleased = manager[family].pop('unreleased_feature', None) if unreleased is not None: ret['unreleased_{}_feature'.format(family)] = unreleased # - bring over all per-line buckets from manager (flattening) # Here, all that's left in the per-family bucket should be lines, not # unreleased_* ret.update(manager[family]) return ret
python
def parse_changelog(path, **kwargs): app, doctree = get_doctree(path, **kwargs) # Have to semi-reproduce the 'find first bullet list' bit from main code, # which is unfortunately side-effect-heavy (thanks to Sphinx plugin # design). first_list = None for node in doctree[0]: if isinstance(node, bullet_list): first_list = node break # Initial parse into the structures Releases finds useful internally releases, manager = construct_releases(first_list.children, app) ret = changelog2dict(releases) # Stitch them together into something an end-user would find better: # - nuke unreleased_N.N_Y as their contents will be represented in the # per-line buckets for key in ret.copy(): if key.startswith('unreleased'): del ret[key] for family in manager: # - remove unreleased_bugfix, as they are accounted for in the per-line # buckets too. No need to store anywhere. manager[family].pop('unreleased_bugfix', None) # - bring over each major family's unreleased_feature as # unreleased_N_feature unreleased = manager[family].pop('unreleased_feature', None) if unreleased is not None: ret['unreleased_{}_feature'.format(family)] = unreleased # - bring over all per-line buckets from manager (flattening) # Here, all that's left in the per-family bucket should be lines, not # unreleased_* ret.update(manager[family]) return ret
[ "def", "parse_changelog", "(", "path", ",", "*", "*", "kwargs", ")", ":", "app", ",", "doctree", "=", "get_doctree", "(", "path", ",", "*", "*", "kwargs", ")", "# Have to semi-reproduce the 'find first bullet list' bit from main code,", "# which is unfortunately side-ef...
Load and parse changelog file from ``path``, returning data structures. This function does not alter any files on disk; it is solely for introspecting a Releases ``changelog.rst`` and programmatically answering questions like "are there any unreleased bugfixes for the 2.3 line?" or "what was included in release 1.2.1?". For example, answering the above questions is as simple as:: changelog = parse_changelog("/path/to/changelog") print("Unreleased issues for 2.3.x: {}".format(changelog['2.3'])) print("Contents of v1.2.1: {}".format(changelog['1.2.1'])) Aside from the documented arguments, any additional keyword arguments are passed unmodified into an internal `get_doctree` call (which then passes them to `make_app`). :param str path: A relative or absolute file path string. :returns: A dict whose keys map to lists of ``releases.models.Issue`` objects, as follows: - Actual releases are full version number keys, such as ``"1.2.1"`` or ``"2.0.0"``. - Unreleased bugs (or bug-like issues; see the Releases docs) are stored in minor-release buckets, e.g. ``"1.2"`` or ``"2.0"``. - Unreleased features (or feature-like issues) are found in ``"unreleased_N_feature"``, where ``N`` is one of the major release families (so, a changelog spanning only 1.x will only have ``unreleased_1_feature``, whereas one with 1.x and 2.x releases will have ``unreleased_1_feature`` and ``unreleased_2_feature``, etc). .. versionchanged:: 1.6 Added support for passing kwargs to `get_doctree`/`make_app`.
[ "Load", "and", "parse", "changelog", "file", "from", "path", "returning", "data", "structures", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/util.py#L37-L106
6,457
bitprophet/releases
releases/util.py
get_doctree
def get_doctree(path, **kwargs): """ Obtain a Sphinx doctree from the RST file at ``path``. Performs no Releases-specific processing; this code would, ideally, be in Sphinx itself, but things there are pretty tightly coupled. So we wrote this. Any additional kwargs are passed unmodified into an internal `make_app` call. :param str path: A relative or absolute file path string. :returns: A two-tuple of the generated ``sphinx.application.Sphinx`` app and the doctree (a ``docutils.document`` object). .. versionchanged:: 1.6 Added support for passing kwargs to `make_app`. """ root, filename = os.path.split(path) docname, _ = os.path.splitext(filename) # TODO: this only works for top level changelog files (i.e. ones where # their dirname is the project/doc root) app = make_app(srcdir=root, **kwargs) # Create & init a BuildEnvironment. Mm, tasty side effects. app._init_env(freshenv=True) env = app.env # More arity/API changes: Sphinx 1.3/1.4-ish require one to pass in the app # obj in BuildEnvironment.update(); modern Sphinx performs that inside # Application._init_env() (which we just called above) and so that kwarg is # removed from update(). EAFP. kwargs = dict( config=app.config, srcdir=root, doctreedir=app.doctreedir, app=app, ) try: env.update(**kwargs) except TypeError: # Assume newer Sphinx w/o an app= kwarg del kwargs['app'] env.update(**kwargs) # Code taken from sphinx.environment.read_doc; easier to manually call # it with a working Environment object, instead of doing more random crap # to trick the higher up build system into thinking our single changelog # document was "updated". env.temp_data['docname'] = docname env.app = app # NOTE: SphinxStandaloneReader API changed in 1.4 :( reader_kwargs = { 'app': app, 'parsers': env.config.source_parsers, } if sphinx.version_info[:2] < (1, 4): del reader_kwargs['app'] # This monkeypatches (!!!) docutils to 'inject' all registered Sphinx # domains' roles & so forth. 
Without this, rendering the doctree lacks # almost all Sphinx magic, including things like :ref: and :doc:! with sphinx_domains(env): try: reader = SphinxStandaloneReader(**reader_kwargs) except TypeError: # If we import from io, this happens automagically, not in API del reader_kwargs['parsers'] reader = SphinxStandaloneReader(**reader_kwargs) pub = Publisher(reader=reader, writer=SphinxDummyWriter(), destination_class=NullOutput) pub.set_components(None, 'restructuredtext', None) pub.process_programmatic_settings(None, env.settings, None) # NOTE: docname derived higher up, from our given path src_path = env.doc2path(docname) source = SphinxFileInput( app, env, source=None, source_path=src_path, encoding=env.config.source_encoding, ) pub.source = source pub.settings._source = src_path pub.set_destination(None, None) pub.publish() return app, pub.document
python
def get_doctree(path, **kwargs): root, filename = os.path.split(path) docname, _ = os.path.splitext(filename) # TODO: this only works for top level changelog files (i.e. ones where # their dirname is the project/doc root) app = make_app(srcdir=root, **kwargs) # Create & init a BuildEnvironment. Mm, tasty side effects. app._init_env(freshenv=True) env = app.env # More arity/API changes: Sphinx 1.3/1.4-ish require one to pass in the app # obj in BuildEnvironment.update(); modern Sphinx performs that inside # Application._init_env() (which we just called above) and so that kwarg is # removed from update(). EAFP. kwargs = dict( config=app.config, srcdir=root, doctreedir=app.doctreedir, app=app, ) try: env.update(**kwargs) except TypeError: # Assume newer Sphinx w/o an app= kwarg del kwargs['app'] env.update(**kwargs) # Code taken from sphinx.environment.read_doc; easier to manually call # it with a working Environment object, instead of doing more random crap # to trick the higher up build system into thinking our single changelog # document was "updated". env.temp_data['docname'] = docname env.app = app # NOTE: SphinxStandaloneReader API changed in 1.4 :( reader_kwargs = { 'app': app, 'parsers': env.config.source_parsers, } if sphinx.version_info[:2] < (1, 4): del reader_kwargs['app'] # This monkeypatches (!!!) docutils to 'inject' all registered Sphinx # domains' roles & so forth. Without this, rendering the doctree lacks # almost all Sphinx magic, including things like :ref: and :doc:! 
with sphinx_domains(env): try: reader = SphinxStandaloneReader(**reader_kwargs) except TypeError: # If we import from io, this happens automagically, not in API del reader_kwargs['parsers'] reader = SphinxStandaloneReader(**reader_kwargs) pub = Publisher(reader=reader, writer=SphinxDummyWriter(), destination_class=NullOutput) pub.set_components(None, 'restructuredtext', None) pub.process_programmatic_settings(None, env.settings, None) # NOTE: docname derived higher up, from our given path src_path = env.doc2path(docname) source = SphinxFileInput( app, env, source=None, source_path=src_path, encoding=env.config.source_encoding, ) pub.source = source pub.settings._source = src_path pub.set_destination(None, None) pub.publish() return app, pub.document
[ "def", "get_doctree", "(", "path", ",", "*", "*", "kwargs", ")", ":", "root", ",", "filename", "=", "os", ".", "path", ".", "split", "(", "path", ")", "docname", ",", "_", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "# TODO: this...
Obtain a Sphinx doctree from the RST file at ``path``. Performs no Releases-specific processing; this code would, ideally, be in Sphinx itself, but things there are pretty tightly coupled. So we wrote this. Any additional kwargs are passed unmodified into an internal `make_app` call. :param str path: A relative or absolute file path string. :returns: A two-tuple of the generated ``sphinx.application.Sphinx`` app and the doctree (a ``docutils.document`` object). .. versionchanged:: 1.6 Added support for passing kwargs to `make_app`.
[ "Obtain", "a", "Sphinx", "doctree", "from", "the", "RST", "file", "at", "path", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/util.py#L109-L194
6,458
bitprophet/releases
releases/util.py
load_conf
def load_conf(srcdir): """ Load ``conf.py`` from given ``srcdir``. :returns: Dictionary derived from the conf module. """ path = os.path.join(srcdir, 'conf.py') mylocals = {'__file__': path} with open(path) as fd: exec(fd.read(), mylocals) return mylocals
python
def load_conf(srcdir): path = os.path.join(srcdir, 'conf.py') mylocals = {'__file__': path} with open(path) as fd: exec(fd.read(), mylocals) return mylocals
[ "def", "load_conf", "(", "srcdir", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "srcdir", ",", "'conf.py'", ")", "mylocals", "=", "{", "'__file__'", ":", "path", "}", "with", "open", "(", "path", ")", "as", "fd", ":", "exec", "(", ...
Load ``conf.py`` from given ``srcdir``. :returns: Dictionary derived from the conf module.
[ "Load", "conf", ".", "py", "from", "given", "srcdir", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/util.py#L197-L207
6,459
bitprophet/releases
releases/util.py
make_app
def make_app(**kwargs): """ Create a dummy Sphinx app, filling in various hardcoded assumptions. For example, Sphinx assumes the existence of various source/dest directories, even if you're only calling internals that never generate (or sometimes, even read!) on-disk files. This function creates safe temp directories for these instances. It also neuters Sphinx's internal logging, which otherwise causes verbosity in one's own test output and/or debug logs. Finally, it does load the given srcdir's ``conf.py``, but only to read specific bits like ``extensions`` (if requested); most of it is ignored. All args are stored in a single ``**kwargs``. Aside from the params listed below (all of which are optional), all kwargs given are turned into 'releases_xxx' config settings; e.g. ``make_app(foo='bar')`` is like setting ``releases_foo = 'bar'`` in ``conf.py``. :param str docname: Override the document name used (mostly for internal testing). :param str srcdir: Sphinx source directory path. :param str dstdir: Sphinx dest directory path. :param str doctreedir: Sphinx doctree directory path. :param bool load_extensions: Whether to load the real ``conf.py`` and setup any extensions it configures. Default: ``False``. :returns: A Sphinx ``Application`` instance. .. versionchanged:: 1.6 Added the ``load_extensions`` kwarg. """ srcdir = kwargs.pop('srcdir', mkdtemp()) dstdir = kwargs.pop('dstdir', mkdtemp()) doctreedir = kwargs.pop('doctreedir', mkdtemp()) load_extensions = kwargs.pop('load_extensions', False) real_conf = None try: # Sphinx <1.6ish Sphinx._log = lambda self, message, wfile, nonl=False: None # Sphinx >=1.6ish. Technically still lets Very Bad Things through, # unlike the total muting above, but probably OK. # NOTE: used to just do 'sphinx' but that stopped working, even on # sphinx 1.6.x. Weird. Unsure why hierarchy not functioning. 
for name in ('sphinx', 'sphinx.sphinx.application'): logging.getLogger(name).setLevel(logging.ERROR) # App API seems to work on all versions so far. app = Sphinx( srcdir=srcdir, confdir=None, outdir=dstdir, doctreedir=doctreedir, buildername='html', ) # Might as well load the conf file here too. if load_extensions: real_conf = load_conf(srcdir) finally: for d in (srcdir, dstdir, doctreedir): # Only remove empty dirs; non-empty dirs are implicitly something # that existed before we ran, and should not be touched. try: os.rmdir(d) except OSError: pass setup(app) # Mock out the config within. More assumptions by Sphinx :( # TODO: just use real config and overlay what truly needs changing? is that # feasible given the rest of the weird ordering we have to do? If it is, # maybe just literally slap this over the return value of load_conf()... config = { 'releases_release_uri': 'foo_%s', 'releases_issue_uri': 'bar_%s', 'releases_debug': False, 'master_doc': 'index', } # Allow tinkering with document filename if 'docname' in kwargs: app.env.temp_data['docname'] = kwargs.pop('docname') # Allow config overrides via kwargs for name in kwargs: config['releases_{}'.format(name)] = kwargs[name] # Stitch together as the sphinx app init() usually does w/ real conf files app.config._raw_config = config # init_values() requires a 'warn' runner on Sphinx 1.3-1.6, so if we seem # to be hitting arity errors, give it a dummy such callable. Hopefully # calling twice doesn't introduce any wacko state issues :( try: app.config.init_values() except TypeError: # boy I wish Python had an ArityError or w/e app.config.init_values(lambda x: x) # Initialize extensions (the internal call to this happens at init time, # which of course had no valid config yet here...) if load_extensions: for extension in real_conf.get('extensions', []): # But don't set up ourselves again, that causes errors if extension == 'releases': continue app.setup_extension(extension) return app
python
def make_app(**kwargs): srcdir = kwargs.pop('srcdir', mkdtemp()) dstdir = kwargs.pop('dstdir', mkdtemp()) doctreedir = kwargs.pop('doctreedir', mkdtemp()) load_extensions = kwargs.pop('load_extensions', False) real_conf = None try: # Sphinx <1.6ish Sphinx._log = lambda self, message, wfile, nonl=False: None # Sphinx >=1.6ish. Technically still lets Very Bad Things through, # unlike the total muting above, but probably OK. # NOTE: used to just do 'sphinx' but that stopped working, even on # sphinx 1.6.x. Weird. Unsure why hierarchy not functioning. for name in ('sphinx', 'sphinx.sphinx.application'): logging.getLogger(name).setLevel(logging.ERROR) # App API seems to work on all versions so far. app = Sphinx( srcdir=srcdir, confdir=None, outdir=dstdir, doctreedir=doctreedir, buildername='html', ) # Might as well load the conf file here too. if load_extensions: real_conf = load_conf(srcdir) finally: for d in (srcdir, dstdir, doctreedir): # Only remove empty dirs; non-empty dirs are implicitly something # that existed before we ran, and should not be touched. try: os.rmdir(d) except OSError: pass setup(app) # Mock out the config within. More assumptions by Sphinx :( # TODO: just use real config and overlay what truly needs changing? is that # feasible given the rest of the weird ordering we have to do? If it is, # maybe just literally slap this over the return value of load_conf()... config = { 'releases_release_uri': 'foo_%s', 'releases_issue_uri': 'bar_%s', 'releases_debug': False, 'master_doc': 'index', } # Allow tinkering with document filename if 'docname' in kwargs: app.env.temp_data['docname'] = kwargs.pop('docname') # Allow config overrides via kwargs for name in kwargs: config['releases_{}'.format(name)] = kwargs[name] # Stitch together as the sphinx app init() usually does w/ real conf files app.config._raw_config = config # init_values() requires a 'warn' runner on Sphinx 1.3-1.6, so if we seem # to be hitting arity errors, give it a dummy such callable. 
Hopefully # calling twice doesn't introduce any wacko state issues :( try: app.config.init_values() except TypeError: # boy I wish Python had an ArityError or w/e app.config.init_values(lambda x: x) # Initialize extensions (the internal call to this happens at init time, # which of course had no valid config yet here...) if load_extensions: for extension in real_conf.get('extensions', []): # But don't set up ourselves again, that causes errors if extension == 'releases': continue app.setup_extension(extension) return app
[ "def", "make_app", "(", "*", "*", "kwargs", ")", ":", "srcdir", "=", "kwargs", ".", "pop", "(", "'srcdir'", ",", "mkdtemp", "(", ")", ")", "dstdir", "=", "kwargs", ".", "pop", "(", "'dstdir'", ",", "mkdtemp", "(", ")", ")", "doctreedir", "=", "kwar...
Create a dummy Sphinx app, filling in various hardcoded assumptions. For example, Sphinx assumes the existence of various source/dest directories, even if you're only calling internals that never generate (or sometimes, even read!) on-disk files. This function creates safe temp directories for these instances. It also neuters Sphinx's internal logging, which otherwise causes verbosity in one's own test output and/or debug logs. Finally, it does load the given srcdir's ``conf.py``, but only to read specific bits like ``extensions`` (if requested); most of it is ignored. All args are stored in a single ``**kwargs``. Aside from the params listed below (all of which are optional), all kwargs given are turned into 'releases_xxx' config settings; e.g. ``make_app(foo='bar')`` is like setting ``releases_foo = 'bar'`` in ``conf.py``. :param str docname: Override the document name used (mostly for internal testing). :param str srcdir: Sphinx source directory path. :param str dstdir: Sphinx dest directory path. :param str doctreedir: Sphinx doctree directory path. :param bool load_extensions: Whether to load the real ``conf.py`` and setup any extensions it configures. Default: ``False``. :returns: A Sphinx ``Application`` instance. .. versionchanged:: 1.6 Added the ``load_extensions`` kwarg.
[ "Create", "a", "dummy", "Sphinx", "app", "filling", "in", "various", "hardcoded", "assumptions", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/util.py#L210-L318
6,460
bitprophet/releases
releases/__init__.py
_log
def _log(txt, config): """ Log debug output if debug setting is on. Intended to be partial'd w/ config at top of functions. Meh. """ if config.releases_debug: sys.stderr.write(str(txt) + "\n") sys.stderr.flush()
python
def _log(txt, config): if config.releases_debug: sys.stderr.write(str(txt) + "\n") sys.stderr.flush()
[ "def", "_log", "(", "txt", ",", "config", ")", ":", "if", "config", ".", "releases_debug", ":", "sys", ".", "stderr", ".", "write", "(", "str", "(", "txt", ")", "+", "\"\\n\"", ")", "sys", ".", "stderr", ".", "flush", "(", ")" ]
Log debug output if debug setting is on. Intended to be partial'd w/ config at top of functions. Meh.
[ "Log", "debug", "output", "if", "debug", "setting", "is", "on", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/__init__.py#L15-L23
6,461
bitprophet/releases
releases/__init__.py
scan_for_spec
def scan_for_spec(keyword): """ Attempt to return some sort of Spec from given keyword value. Returns None if one could not be derived. """ # Both 'spec' formats are wrapped in parens, discard keyword = keyword.lstrip('(').rstrip(')') # First, test for intermediate '1.2+' style matches = release_line_re.findall(keyword) if matches: return Spec(">={}".format(matches[0])) # Failing that, see if Spec can make sense of it try: return Spec(keyword) # I've only ever seen Spec fail with ValueError. except ValueError: return None
python
def scan_for_spec(keyword): # Both 'spec' formats are wrapped in parens, discard keyword = keyword.lstrip('(').rstrip(')') # First, test for intermediate '1.2+' style matches = release_line_re.findall(keyword) if matches: return Spec(">={}".format(matches[0])) # Failing that, see if Spec can make sense of it try: return Spec(keyword) # I've only ever seen Spec fail with ValueError. except ValueError: return None
[ "def", "scan_for_spec", "(", "keyword", ")", ":", "# Both 'spec' formats are wrapped in parens, discard", "keyword", "=", "keyword", ".", "lstrip", "(", "'('", ")", ".", "rstrip", "(", "')'", ")", "# First, test for intermediate '1.2+' style", "matches", "=", "release_l...
Attempt to return some sort of Spec from given keyword value. Returns None if one could not be derived.
[ "Attempt", "to", "return", "some", "sort", "of", "Spec", "from", "given", "keyword", "value", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/__init__.py#L38-L55
6,462
bitprophet/releases
releases/__init__.py
append_unreleased_entries
def append_unreleased_entries(app, manager, releases): """ Generate new abstract 'releases' for unreleased issues. There's one for each combination of bug-vs-feature & major release line. When only one major release line exists, that dimension is ignored. """ for family, lines in six.iteritems(manager): for type_ in ('bugfix', 'feature'): bucket = 'unreleased_{}'.format(type_) if bucket not in lines: # Implies unstable prehistory + 0.x fam continue issues = lines[bucket] fam_prefix = "{}.x ".format(family) if len(manager) > 1 else "" header = "Next {}{} release".format(fam_prefix, type_) line = "unreleased_{}.x_{}".format(family, type_) releases.append( generate_unreleased_entry(header, line, issues, manager, app) )
python
def append_unreleased_entries(app, manager, releases): for family, lines in six.iteritems(manager): for type_ in ('bugfix', 'feature'): bucket = 'unreleased_{}'.format(type_) if bucket not in lines: # Implies unstable prehistory + 0.x fam continue issues = lines[bucket] fam_prefix = "{}.x ".format(family) if len(manager) > 1 else "" header = "Next {}{} release".format(fam_prefix, type_) line = "unreleased_{}.x_{}".format(family, type_) releases.append( generate_unreleased_entry(header, line, issues, manager, app) )
[ "def", "append_unreleased_entries", "(", "app", ",", "manager", ",", "releases", ")", ":", "for", "family", ",", "lines", "in", "six", ".", "iteritems", "(", "manager", ")", ":", "for", "type_", "in", "(", "'bugfix'", ",", "'feature'", ")", ":", "bucket"...
Generate new abstract 'releases' for unreleased issues. There's one for each combination of bug-vs-feature & major release line. When only one major release line exists, that dimension is ignored.
[ "Generate", "new", "abstract", "releases", "for", "unreleased", "issues", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/__init__.py#L202-L221
6,463
bitprophet/releases
releases/__init__.py
handle_first_release_line
def handle_first_release_line(entries, manager): """ Set up initial line-manager entry for first encountered release line. To be called at start of overall process; afterwards, subsequent major lines are generated by `handle_upcoming_major_release`. """ # It's remotely possible the changelog is totally empty... if not entries: return # Obtain (short-circuiting) first Release obj. first_release = None for obj in entries: if isinstance(obj, Release): first_release = obj break # It's also possible it's non-empty but has no releases yet. if first_release: manager.add_family(obj.family) # If God did not exist, man would be forced to invent him. else: manager.add_family(0)
python
def handle_first_release_line(entries, manager): # It's remotely possible the changelog is totally empty... if not entries: return # Obtain (short-circuiting) first Release obj. first_release = None for obj in entries: if isinstance(obj, Release): first_release = obj break # It's also possible it's non-empty but has no releases yet. if first_release: manager.add_family(obj.family) # If God did not exist, man would be forced to invent him. else: manager.add_family(0)
[ "def", "handle_first_release_line", "(", "entries", ",", "manager", ")", ":", "# It's remotely possible the changelog is totally empty...", "if", "not", "entries", ":", "return", "# Obtain (short-circuiting) first Release obj.", "first_release", "=", "None", "for", "obj", "in...
Set up initial line-manager entry for first encountered release line. To be called at start of overall process; afterwards, subsequent major lines are generated by `handle_upcoming_major_release`.
[ "Set", "up", "initial", "line", "-", "manager", "entry", "for", "first", "encountered", "release", "line", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/__init__.py#L434-L455
6,464
bitprophet/releases
releases/models.py
Issue.minor_releases
def minor_releases(self, manager): """ Return all minor release line labels found in ``manager``. """ # TODO: yea deffo need a real object for 'manager', heh. E.g. we do a # very similar test for "do you have any actual releases yet?" # elsewhere. (This may be fodder for changing how we roll up # pre-major-release features though...?) return [ key for key, value in six.iteritems(manager) if any(x for x in value if not x.startswith('unreleased')) ]
python
def minor_releases(self, manager): # TODO: yea deffo need a real object for 'manager', heh. E.g. we do a # very similar test for "do you have any actual releases yet?" # elsewhere. (This may be fodder for changing how we roll up # pre-major-release features though...?) return [ key for key, value in six.iteritems(manager) if any(x for x in value if not x.startswith('unreleased')) ]
[ "def", "minor_releases", "(", "self", ",", "manager", ")", ":", "# TODO: yea deffo need a real object for 'manager', heh. E.g. we do a", "# very similar test for \"do you have any actual releases yet?\"", "# elsewhere. (This may be fodder for changing how we roll up", "# pre-major-release feat...
Return all minor release line labels found in ``manager``.
[ "Return", "all", "minor", "release", "line", "labels", "found", "in", "manager", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/models.py#L69-L80
6,465
bitprophet/releases
releases/models.py
Issue.default_spec
def default_spec(self, manager): """ Given the current release-lines structure, return a default Spec. Specifics: * For feature-like issues, only the highest major release is used, so given a ``manager`` with top level keys of ``[1, 2]``, this would return ``Spec(">=2")``. * When ``releases_always_forwardport_features`` is ``True``, that behavior is nullified, and this function always returns the empty ``Spec`` (which matches any and all versions/lines). * For bugfix-like issues, we only consider major release families which have actual releases already. * Thus the core difference here is that features are 'consumed' by upcoming major releases, and bugfixes are not. * When the ``unstable_prehistory`` setting is ``True``, the default spec starts at the oldest non-zero release line. (Otherwise, issues posted after prehistory ends would try being added to the 0.x part of the tree, which makes no sense in unstable-prehistory mode.) """ # TODO: I feel like this + the surrounding bits in add_to_manager() # could be consolidated & simplified... specstr = "" # Make sure truly-default spec skips 0.x if prehistory was unstable. stable_families = manager.stable_families if manager.config.releases_unstable_prehistory and stable_families: specstr = ">={}".format(min(stable_families)) if self.is_featurelike: # TODO: if app->config-><releases_always_forwardport_features or # w/e if True: specstr = ">={}".format(max(manager.keys())) else: # Can only meaningfully limit to minor release buckets if they # actually exist yet. buckets = self.minor_releases(manager) if buckets: specstr = ">={}".format(max(buckets)) return Spec(specstr) if specstr else Spec()
python
def default_spec(self, manager): # TODO: I feel like this + the surrounding bits in add_to_manager() # could be consolidated & simplified... specstr = "" # Make sure truly-default spec skips 0.x if prehistory was unstable. stable_families = manager.stable_families if manager.config.releases_unstable_prehistory and stable_families: specstr = ">={}".format(min(stable_families)) if self.is_featurelike: # TODO: if app->config-><releases_always_forwardport_features or # w/e if True: specstr = ">={}".format(max(manager.keys())) else: # Can only meaningfully limit to minor release buckets if they # actually exist yet. buckets = self.minor_releases(manager) if buckets: specstr = ">={}".format(max(buckets)) return Spec(specstr) if specstr else Spec()
[ "def", "default_spec", "(", "self", ",", "manager", ")", ":", "# TODO: I feel like this + the surrounding bits in add_to_manager()", "# could be consolidated & simplified...", "specstr", "=", "\"\"", "# Make sure truly-default spec skips 0.x if prehistory was unstable.", "stable_families...
Given the current release-lines structure, return a default Spec. Specifics: * For feature-like issues, only the highest major release is used, so given a ``manager`` with top level keys of ``[1, 2]``, this would return ``Spec(">=2")``. * When ``releases_always_forwardport_features`` is ``True``, that behavior is nullified, and this function always returns the empty ``Spec`` (which matches any and all versions/lines). * For bugfix-like issues, we only consider major release families which have actual releases already. * Thus the core difference here is that features are 'consumed' by upcoming major releases, and bugfixes are not. * When the ``unstable_prehistory`` setting is ``True``, the default spec starts at the oldest non-zero release line. (Otherwise, issues posted after prehistory ends would try being added to the 0.x part of the tree, which makes no sense in unstable-prehistory mode.)
[ "Given", "the", "current", "release", "-", "lines", "structure", "return", "a", "default", "Spec", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/models.py#L82-L125
6,466
bitprophet/releases
releases/models.py
Issue.add_to_manager
def add_to_manager(self, manager): """ Given a 'manager' structure, add self to one or more of its 'buckets'. """ # Derive version spec allowing us to filter against major/minor buckets spec = self.spec or self.default_spec(manager) # Only look in appropriate major version/family; if self is an issue # declared as living in e.g. >=2, this means we don't even bother # looking in the 1.x family. families = [Version(str(x)) for x in manager] versions = list(spec.filter(families)) for version in versions: family = version.major # Within each family, we further limit which bugfix lines match up # to what self cares about (ignoring 'unreleased' until later) candidates = [ Version(x) for x in manager[family] if not x.startswith('unreleased') ] # Select matching release lines (& stringify) buckets = [] bugfix_buckets = [str(x) for x in spec.filter(candidates)] # Add back in unreleased_* as appropriate # TODO: probably leverage Issue subclasses for this eventually? if self.is_buglike: buckets.extend(bugfix_buckets) # Don't put into JUST unreleased_bugfix; it implies that this # major release/family hasn't actually seen any releases yet # and only exists for features to go into. if bugfix_buckets: buckets.append('unreleased_bugfix') # Obtain list of minor releases to check for "haven't had ANY # releases yet" corner case, in which case ALL issues get thrown in # unreleased_feature for the first release to consume. # NOTE: assumes first release is a minor or major one, # but...really? why would your first release be a bugfix one?? no_releases = not self.minor_releases(manager) if self.is_featurelike or self.backported or no_releases: buckets.append('unreleased_feature') # Now that we know which buckets are appropriate, add ourself to # all of them. TODO: or just...do it above...instead... for bucket in buckets: manager[family][bucket].append(self)
python
def add_to_manager(self, manager): # Derive version spec allowing us to filter against major/minor buckets spec = self.spec or self.default_spec(manager) # Only look in appropriate major version/family; if self is an issue # declared as living in e.g. >=2, this means we don't even bother # looking in the 1.x family. families = [Version(str(x)) for x in manager] versions = list(spec.filter(families)) for version in versions: family = version.major # Within each family, we further limit which bugfix lines match up # to what self cares about (ignoring 'unreleased' until later) candidates = [ Version(x) for x in manager[family] if not x.startswith('unreleased') ] # Select matching release lines (& stringify) buckets = [] bugfix_buckets = [str(x) for x in spec.filter(candidates)] # Add back in unreleased_* as appropriate # TODO: probably leverage Issue subclasses for this eventually? if self.is_buglike: buckets.extend(bugfix_buckets) # Don't put into JUST unreleased_bugfix; it implies that this # major release/family hasn't actually seen any releases yet # and only exists for features to go into. if bugfix_buckets: buckets.append('unreleased_bugfix') # Obtain list of minor releases to check for "haven't had ANY # releases yet" corner case, in which case ALL issues get thrown in # unreleased_feature for the first release to consume. # NOTE: assumes first release is a minor or major one, # but...really? why would your first release be a bugfix one?? no_releases = not self.minor_releases(manager) if self.is_featurelike or self.backported or no_releases: buckets.append('unreleased_feature') # Now that we know which buckets are appropriate, add ourself to # all of them. TODO: or just...do it above...instead... for bucket in buckets: manager[family][bucket].append(self)
[ "def", "add_to_manager", "(", "self", ",", "manager", ")", ":", "# Derive version spec allowing us to filter against major/minor buckets", "spec", "=", "self", ".", "spec", "or", "self", ".", "default_spec", "(", "manager", ")", "# Only look in appropriate major version/fam...
Given a 'manager' structure, add self to one or more of its 'buckets'.
[ "Given", "a", "manager", "structure", "add", "self", "to", "one", "or", "more", "of", "its", "buckets", "." ]
97a763e41bbe7374106a1c648b89346a0d935429
https://github.com/bitprophet/releases/blob/97a763e41bbe7374106a1c648b89346a0d935429/releases/models.py#L127-L170
6,467
brendonh/pyth
pyth/plugins/rtf15/reader.py
DocBuilder.cleanParagraph
def cleanParagraph(self): """ Compress text runs, remove whitespace at start and end, skip empty blocks, etc """ runs = self.block.content if not runs: self.block = None return if not self.clean_paragraphs: return joinedRuns = [] hasContent = False for run in runs: if run.content[0]: hasContent = True else: continue # For whitespace-only groups, remove any property stuff, # to avoid extra markup in output if not run.content[0].strip(): run.properties = {} # Join runs only if their properties match if joinedRuns and (run.properties == joinedRuns[-1].properties): joinedRuns[-1].content[0] += run.content[0] else: joinedRuns.append(run) if hasContent: # Strip beginning of paragraph joinedRuns[0].content[0] = joinedRuns[0].content[0].lstrip() # And then strip the end joinedRuns[-1].content[0] = joinedRuns[-1].content[0].rstrip() self.block.content = joinedRuns else: self.block = None
python
def cleanParagraph(self): runs = self.block.content if not runs: self.block = None return if not self.clean_paragraphs: return joinedRuns = [] hasContent = False for run in runs: if run.content[0]: hasContent = True else: continue # For whitespace-only groups, remove any property stuff, # to avoid extra markup in output if not run.content[0].strip(): run.properties = {} # Join runs only if their properties match if joinedRuns and (run.properties == joinedRuns[-1].properties): joinedRuns[-1].content[0] += run.content[0] else: joinedRuns.append(run) if hasContent: # Strip beginning of paragraph joinedRuns[0].content[0] = joinedRuns[0].content[0].lstrip() # And then strip the end joinedRuns[-1].content[0] = joinedRuns[-1].content[0].rstrip() self.block.content = joinedRuns else: self.block = None
[ "def", "cleanParagraph", "(", "self", ")", ":", "runs", "=", "self", ".", "block", ".", "content", "if", "not", "runs", ":", "self", ".", "block", "=", "None", "return", "if", "not", "self", ".", "clean_paragraphs", ":", "return", "joinedRuns", "=", "[...
Compress text runs, remove whitespace at start and end, skip empty blocks, etc
[ "Compress", "text", "runs", "remove", "whitespace", "at", "start", "and", "end", "skip", "empty", "blocks", "etc" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/rtf15/reader.py#L241-L284
6,468
brendonh/pyth
pyth/plugins/xhtml/css.py
CSS.parse_css
def parse_css(self, css): """ Parse a css style sheet into the CSS object. For the moment this will only work for very simple css documents. It works by using regular expression matching css syntax. This is not bullet proof. """ rulesets = self.ruleset_re.findall(css) for (selector, declarations) in rulesets: rule = Rule(self.parse_selector(selector)) rule.properties = self.parse_declarations(declarations) self.rules.append(rule)
python
def parse_css(self, css): rulesets = self.ruleset_re.findall(css) for (selector, declarations) in rulesets: rule = Rule(self.parse_selector(selector)) rule.properties = self.parse_declarations(declarations) self.rules.append(rule)
[ "def", "parse_css", "(", "self", ",", "css", ")", ":", "rulesets", "=", "self", ".", "ruleset_re", ".", "findall", "(", "css", ")", "for", "(", "selector", ",", "declarations", ")", "in", "rulesets", ":", "rule", "=", "Rule", "(", "self", ".", "parse...
Parse a css style sheet into the CSS object. For the moment this will only work for very simple css documents. It works by using regular expression matching css syntax. This is not bullet proof.
[ "Parse", "a", "css", "style", "sheet", "into", "the", "CSS", "object", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/css.py#L73-L85
6,469
brendonh/pyth
pyth/plugins/xhtml/css.py
CSS.parse_declarations
def parse_declarations(self, declarations): """ parse a css declaration list """ declarations = self.declaration_re.findall(declarations) return dict(declarations)
python
def parse_declarations(self, declarations): declarations = self.declaration_re.findall(declarations) return dict(declarations)
[ "def", "parse_declarations", "(", "self", ",", "declarations", ")", ":", "declarations", "=", "self", ".", "declaration_re", ".", "findall", "(", "declarations", ")", "return", "dict", "(", "declarations", ")" ]
parse a css declaration list
[ "parse", "a", "css", "declaration", "list" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/css.py#L87-L92
6,470
brendonh/pyth
pyth/plugins/xhtml/css.py
CSS.parse_selector
def parse_selector(self, selector): """ parse a css selector """ tag, klass = self.selector_re.match(selector).groups() return Selector(tag, klass)
python
def parse_selector(self, selector): tag, klass = self.selector_re.match(selector).groups() return Selector(tag, klass)
[ "def", "parse_selector", "(", "self", ",", "selector", ")", ":", "tag", ",", "klass", "=", "self", ".", "selector_re", ".", "match", "(", "selector", ")", ".", "groups", "(", ")", "return", "Selector", "(", "tag", ",", "klass", ")" ]
parse a css selector
[ "parse", "a", "css", "selector" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/css.py#L94-L99
6,471
brendonh/pyth
pyth/plugins/xhtml/css.py
CSS.get_properties
def get_properties(self, node): """ return a dict of all the properties of a given BeautifulSoup node found by applying the css style. """ ret = {} # Try all the rules one by one for rule in self.rules: if rule.selector(node): ret.update(rule.properties) # Also search for direct 'style' arguments in the html doc for style_node in node.findParents(attrs={'style': True}): style = style_node.get('style') properties = self.parse_declarations(style) ret.update(properties) return ret
python
def get_properties(self, node): ret = {} # Try all the rules one by one for rule in self.rules: if rule.selector(node): ret.update(rule.properties) # Also search for direct 'style' arguments in the html doc for style_node in node.findParents(attrs={'style': True}): style = style_node.get('style') properties = self.parse_declarations(style) ret.update(properties) return ret
[ "def", "get_properties", "(", "self", ",", "node", ")", ":", "ret", "=", "{", "}", "# Try all the rules one by one", "for", "rule", "in", "self", ".", "rules", ":", "if", "rule", ".", "selector", "(", "node", ")", ":", "ret", ".", "update", "(", "rule"...
return a dict of all the properties of a given BeautifulSoup node found by applying the css style.
[ "return", "a", "dict", "of", "all", "the", "properties", "of", "a", "given", "BeautifulSoup", "node", "found", "by", "applying", "the", "css", "style", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/css.py#L101-L116
6,472
brendonh/pyth
pyth/__init__.py
namedModule
def namedModule(name): """Return a module given its name.""" topLevel = __import__(name) packages = name.split(".")[1:] m = topLevel for p in packages: m = getattr(m, p) return m
python
def namedModule(name): topLevel = __import__(name) packages = name.split(".")[1:] m = topLevel for p in packages: m = getattr(m, p) return m
[ "def", "namedModule", "(", "name", ")", ":", "topLevel", "=", "__import__", "(", "name", ")", "packages", "=", "name", ".", "split", "(", "\".\"", ")", "[", "1", ":", "]", "m", "=", "topLevel", "for", "p", "in", "packages", ":", "m", "=", "getattr"...
Return a module given its name.
[ "Return", "a", "module", "given", "its", "name", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/__init__.py#L37-L44
6,473
brendonh/pyth
pyth/__init__.py
namedObject
def namedObject(name): """Get a fully named module-global object. """ classSplit = name.split('.') module = namedModule('.'.join(classSplit[:-1])) return getattr(module, classSplit[-1])
python
def namedObject(name): classSplit = name.split('.') module = namedModule('.'.join(classSplit[:-1])) return getattr(module, classSplit[-1])
[ "def", "namedObject", "(", "name", ")", ":", "classSplit", "=", "name", ".", "split", "(", "'.'", ")", "module", "=", "namedModule", "(", "'.'", ".", "join", "(", "classSplit", "[", ":", "-", "1", "]", ")", ")", "return", "getattr", "(", "module", ...
Get a fully named module-global object.
[ "Get", "a", "fully", "named", "module", "-", "global", "object", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/__init__.py#L47-L52
6,474
brendonh/pyth
pyth/plugins/rst/writer.py
RSTWriter.text
def text(self, text): """ process a pyth text and return the formatted string """ ret = u"".join(text.content) if 'url' in text.properties: return u"`%s`_" % ret if 'bold' in text.properties: return u"**%s**" % ret if 'italic' in text.properties: return u"*%s*" % ret if 'sub' in text.properties: return ur"\ :sub:`%s`\ " % ret if 'super' in text.properties: return ur"\ :sup:`%s`\ " % ret return ret
python
def text(self, text): ret = u"".join(text.content) if 'url' in text.properties: return u"`%s`_" % ret if 'bold' in text.properties: return u"**%s**" % ret if 'italic' in text.properties: return u"*%s*" % ret if 'sub' in text.properties: return ur"\ :sub:`%s`\ " % ret if 'super' in text.properties: return ur"\ :sup:`%s`\ " % ret return ret
[ "def", "text", "(", "self", ",", "text", ")", ":", "ret", "=", "u\"\"", ".", "join", "(", "text", ".", "content", ")", "if", "'url'", "in", "text", ".", "properties", ":", "return", "u\"`%s`_\"", "%", "ret", "if", "'bold'", "in", "text", ".", "prop...
process a pyth text and return the formatted string
[ "process", "a", "pyth", "text", "and", "return", "the", "formatted", "string" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/rst/writer.py#L40-L55
6,475
brendonh/pyth
pyth/plugins/rst/writer.py
RSTWriter.paragraph
def paragraph(self, paragraph, prefix=""): """ process a pyth paragraph into the target """ content = [] for text in paragraph.content: content.append(self.text(text)) content = u"".join(content).encode("utf-8") for line in content.split("\n"): self.target.write(" " * self.indent) self.target.write(prefix) self.target.write(line) self.target.write("\n") if prefix: prefix = " " # handle the links if any('url' in text.properties for text in paragraph.content): self.target.write("\n") for text in paragraph.content: if 'url' in text.properties: string = u"".join(text.content) url = text.properties['url'] self.target.write(".. _%s: %s\n" % (string, url))
python
def paragraph(self, paragraph, prefix=""): content = [] for text in paragraph.content: content.append(self.text(text)) content = u"".join(content).encode("utf-8") for line in content.split("\n"): self.target.write(" " * self.indent) self.target.write(prefix) self.target.write(line) self.target.write("\n") if prefix: prefix = " " # handle the links if any('url' in text.properties for text in paragraph.content): self.target.write("\n") for text in paragraph.content: if 'url' in text.properties: string = u"".join(text.content) url = text.properties['url'] self.target.write(".. _%s: %s\n" % (string, url))
[ "def", "paragraph", "(", "self", ",", "paragraph", ",", "prefix", "=", "\"\"", ")", ":", "content", "=", "[", "]", "for", "text", "in", "paragraph", ".", "content", ":", "content", ".", "append", "(", "self", ".", "text", "(", "text", ")", ")", "co...
process a pyth paragraph into the target
[ "process", "a", "pyth", "paragraph", "into", "the", "target" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/rst/writer.py#L57-L81
6,476
brendonh/pyth
pyth/plugins/rst/writer.py
RSTWriter.list
def list(self, list, prefix=None): """ Process a pyth list into the target """ self.indent += 1 for (i, entry) in enumerate(list.content): for (j, paragraph) in enumerate(entry.content): prefix = "- " if j == 0 else " " handler = self.paragraphDispatch[paragraph.__class__] handler(paragraph, prefix) self.target.write("\n") self.indent -= 1
python
def list(self, list, prefix=None): self.indent += 1 for (i, entry) in enumerate(list.content): for (j, paragraph) in enumerate(entry.content): prefix = "- " if j == 0 else " " handler = self.paragraphDispatch[paragraph.__class__] handler(paragraph, prefix) self.target.write("\n") self.indent -= 1
[ "def", "list", "(", "self", ",", "list", ",", "prefix", "=", "None", ")", ":", "self", ".", "indent", "+=", "1", "for", "(", "i", ",", "entry", ")", "in", "enumerate", "(", "list", ".", "content", ")", ":", "for", "(", "j", ",", "paragraph", ")...
Process a pyth list into the target
[ "Process", "a", "pyth", "list", "into", "the", "target" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/rst/writer.py#L83-L94
6,477
brendonh/pyth
pyth/plugins/xhtml/reader.py
XHTMLReader.format
def format(self, soup): """format a BeautifulSoup document This will transform the block elements content from multi-lines text into single line. This allow us to avoid having to deal with further text rendering once this step has been done. """ # Remove all the newline characters before a closing tag. for node in soup.findAll(text=True): if node.rstrip(" ").endswith("\n"): node.replaceWith(node.rstrip(" ").rstrip("\n")) # Join the block elements lines into a single long line for tag in ['p', 'li']: for node in soup.findAll(tag): text = unicode(node) lines = [x.strip() for x in text.splitlines()] text = ' '.join(lines) node.replaceWith(BeautifulSoup.BeautifulSoup(text)) soup = BeautifulSoup.BeautifulSoup(unicode(soup)) # replace all <br/> tag by newline character for node in soup.findAll('br'): node.replaceWith("\n") soup = BeautifulSoup.BeautifulSoup(unicode(soup)) return soup
python
def format(self, soup): # Remove all the newline characters before a closing tag. for node in soup.findAll(text=True): if node.rstrip(" ").endswith("\n"): node.replaceWith(node.rstrip(" ").rstrip("\n")) # Join the block elements lines into a single long line for tag in ['p', 'li']: for node in soup.findAll(tag): text = unicode(node) lines = [x.strip() for x in text.splitlines()] text = ' '.join(lines) node.replaceWith(BeautifulSoup.BeautifulSoup(text)) soup = BeautifulSoup.BeautifulSoup(unicode(soup)) # replace all <br/> tag by newline character for node in soup.findAll('br'): node.replaceWith("\n") soup = BeautifulSoup.BeautifulSoup(unicode(soup)) return soup
[ "def", "format", "(", "self", ",", "soup", ")", ":", "# Remove all the newline characters before a closing tag.", "for", "node", "in", "soup", ".", "findAll", "(", "text", "=", "True", ")", ":", "if", "node", ".", "rstrip", "(", "\" \"", ")", ".", "endswith"...
format a BeautifulSoup document This will transform the block elements content from multi-lines text into single line. This allow us to avoid having to deal with further text rendering once this step has been done.
[ "format", "a", "BeautifulSoup", "document" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/reader.py#L40-L65
6,478
brendonh/pyth
pyth/plugins/xhtml/reader.py
XHTMLReader.url
def url(self, node): """ return the url of a BeautifulSoup node or None if there is no url. """ a_node = node.findParent('a') if not a_node: return None if self.link_callback is None: return a_node.get('href') else: return self.link_callback(a_node.get('href'))
python
def url(self, node): a_node = node.findParent('a') if not a_node: return None if self.link_callback is None: return a_node.get('href') else: return self.link_callback(a_node.get('href'))
[ "def", "url", "(", "self", ",", "node", ")", ":", "a_node", "=", "node", ".", "findParent", "(", "'a'", ")", "if", "not", "a_node", ":", "return", "None", "if", "self", ".", "link_callback", "is", "None", ":", "return", "a_node", ".", "get", "(", "...
return the url of a BeautifulSoup node or None if there is no url.
[ "return", "the", "url", "of", "a", "BeautifulSoup", "node", "or", "None", "if", "there", "is", "no", "url", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/reader.py#L99-L111
6,479
brendonh/pyth
pyth/plugins/xhtml/reader.py
XHTMLReader.process_text
def process_text(self, node): """ Return a pyth Text object from a BeautifulSoup node or None if the text is empty. """ text = node.string.strip() if not text: return # Set all the properties properties=dict() if self.is_bold(node): properties['bold'] = True if self.is_italic(node): properties['italic'] = True if self.url(node): properties['url'] = self.url(node) if self.is_sub(node): properties['sub'] = True if self.is_super(node): properties['super'] = True content=[node.string] return document.Text(properties, content)
python
def process_text(self, node): text = node.string.strip() if not text: return # Set all the properties properties=dict() if self.is_bold(node): properties['bold'] = True if self.is_italic(node): properties['italic'] = True if self.url(node): properties['url'] = self.url(node) if self.is_sub(node): properties['sub'] = True if self.is_super(node): properties['super'] = True content=[node.string] return document.Text(properties, content)
[ "def", "process_text", "(", "self", ",", "node", ")", ":", "text", "=", "node", ".", "string", ".", "strip", "(", ")", "if", "not", "text", ":", "return", "# Set all the properties", "properties", "=", "dict", "(", ")", "if", "self", ".", "is_bold", "(...
Return a pyth Text object from a BeautifulSoup node or None if the text is empty.
[ "Return", "a", "pyth", "Text", "object", "from", "a", "BeautifulSoup", "node", "or", "None", "if", "the", "text", "is", "empty", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/reader.py#L113-L137
6,480
brendonh/pyth
pyth/plugins/xhtml/reader.py
XHTMLReader.process_into
def process_into(self, node, obj): """ Process a BeautifulSoup node and fill its elements into a pyth base object. """ if isinstance(node, BeautifulSoup.NavigableString): text = self.process_text(node) if text: obj.append(text) return if node.name == 'p': # add a new paragraph into the pyth object new_obj = document.Paragraph() obj.append(new_obj) obj = new_obj elif node.name == 'ul': # add a new list new_obj = document.List() obj.append(new_obj) obj = new_obj elif node.name == 'li': # add a new list entry new_obj = document.ListEntry() obj.append(new_obj) obj = new_obj for child in node: self.process_into(child, obj)
python
def process_into(self, node, obj): if isinstance(node, BeautifulSoup.NavigableString): text = self.process_text(node) if text: obj.append(text) return if node.name == 'p': # add a new paragraph into the pyth object new_obj = document.Paragraph() obj.append(new_obj) obj = new_obj elif node.name == 'ul': # add a new list new_obj = document.List() obj.append(new_obj) obj = new_obj elif node.name == 'li': # add a new list entry new_obj = document.ListEntry() obj.append(new_obj) obj = new_obj for child in node: self.process_into(child, obj)
[ "def", "process_into", "(", "self", ",", "node", ",", "obj", ")", ":", "if", "isinstance", "(", "node", ",", "BeautifulSoup", ".", "NavigableString", ")", ":", "text", "=", "self", ".", "process_text", "(", "node", ")", "if", "text", ":", "obj", ".", ...
Process a BeautifulSoup node and fill its elements into a pyth base object.
[ "Process", "a", "BeautifulSoup", "node", "and", "fill", "its", "elements", "into", "a", "pyth", "base", "object", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/xhtml/reader.py#L139-L165
6,481
brendonh/pyth
pyth/document.py
_PythBase.append
def append(self, item): """ Try to add an item to this element. If the item is of the wrong type, and if this element has a sub-type, then try to create such a sub-type and insert the item into that, instead. This happens recursively, so (in python-markup): L [ u'Foo' ] actually creates: L [ LE [ P [ T [ u'Foo' ] ] ] ] If that doesn't work, raise a TypeError. """ okay = True if not isinstance(item, self.contentType): if hasattr(self.contentType, 'contentType'): try: item = self.contentType(content=[item]) except TypeError: okay = False else: okay = False if not okay: raise TypeError("Wrong content type for %s: %s (%s)" % ( self.__class__.__name__, repr(type(item)), repr(item))) self.content.append(item)
python
def append(self, item): okay = True if not isinstance(item, self.contentType): if hasattr(self.contentType, 'contentType'): try: item = self.contentType(content=[item]) except TypeError: okay = False else: okay = False if not okay: raise TypeError("Wrong content type for %s: %s (%s)" % ( self.__class__.__name__, repr(type(item)), repr(item))) self.content.append(item)
[ "def", "append", "(", "self", ",", "item", ")", ":", "okay", "=", "True", "if", "not", "isinstance", "(", "item", ",", "self", ".", "contentType", ")", ":", "if", "hasattr", "(", "self", ".", "contentType", ",", "'contentType'", ")", ":", "try", ":",...
Try to add an item to this element. If the item is of the wrong type, and if this element has a sub-type, then try to create such a sub-type and insert the item into that, instead. This happens recursively, so (in python-markup): L [ u'Foo' ] actually creates: L [ LE [ P [ T [ u'Foo' ] ] ] ] If that doesn't work, raise a TypeError.
[ "Try", "to", "add", "an", "item", "to", "this", "element", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/document.py#L30-L59
6,482
brendonh/pyth
pyth/plugins/latex/writer.py
LatexWriter.write
def write(klass, document, target=None, stylesheet=""): """ convert a pyth document to a latex document we can specify a stylesheet as a latex document fragment that will be inserted after the headers. This way we can override the default style. """ writer = LatexWriter(document, target, stylesheet) return writer.go()
python
def write(klass, document, target=None, stylesheet=""): writer = LatexWriter(document, target, stylesheet) return writer.go()
[ "def", "write", "(", "klass", ",", "document", ",", "target", "=", "None", ",", "stylesheet", "=", "\"\"", ")", ":", "writer", "=", "LatexWriter", "(", "document", ",", "target", ",", "stylesheet", ")", "return", "writer", ".", "go", "(", ")" ]
convert a pyth document to a latex document we can specify a stylesheet as a latex document fragment that will be inserted after the headers. This way we can override the default style.
[ "convert", "a", "pyth", "document", "to", "a", "latex", "document" ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/latex/writer.py#L19-L28
6,483
brendonh/pyth
pyth/plugins/latex/writer.py
LatexWriter.full_stylesheet
def full_stylesheet(self): """ Return the style sheet that will ultimately be inserted into the latex document. This is the user given style sheet plus some additional parts to add the meta data. """ latex_fragment = r""" \usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref} \hypersetup{ pdftitle={%s}, pdfauthor={%s}, pdfsubject={%s} } """ % (self.document.properties.get("title"), self.document.properties.get("author"), self.document.properties.get("subject")) return latex_fragment + self.stylesheet
python
def full_stylesheet(self): latex_fragment = r""" \usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref} \hypersetup{ pdftitle={%s}, pdfauthor={%s}, pdfsubject={%s} } """ % (self.document.properties.get("title"), self.document.properties.get("author"), self.document.properties.get("subject")) return latex_fragment + self.stylesheet
[ "def", "full_stylesheet", "(", "self", ")", ":", "latex_fragment", "=", "r\"\"\"\n \\usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref}\n \\hypersetup{\n pdftitle={%s},\n pdfauthor={%s},\n pdfsubject={%s}\n }\n \"\"\"", "%",...
Return the style sheet that will ultimately be inserted into the latex document. This is the user given style sheet plus some additional parts to add the meta data.
[ "Return", "the", "style", "sheet", "that", "will", "ultimately", "be", "inserted", "into", "the", "latex", "document", "." ]
f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b
https://github.com/brendonh/pyth/blob/f2a06fc8dc9b1cfc439ea14252d39b9845a7fa4b/pyth/plugins/latex/writer.py#L42-L60
6,484
opentok/Opentok-Python-SDK
opentok/endpoints.py
Endpoints.get_stream_url
def get_stream_url(self, session_id, stream_id=None): """ this method returns the url to get streams information """ url = self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/stream' if stream_id: url = url + '/' + stream_id return url
python
def get_stream_url(self, session_id, stream_id=None): url = self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/stream' if stream_id: url = url + '/' + stream_id return url
[ "def", "get_stream_url", "(", "self", ",", "session_id", ",", "stream_id", "=", "None", ")", ":", "url", "=", "self", ".", "api_url", "+", "'/v2/project/'", "+", "self", ".", "api_key", "+", "'/session/'", "+", "session_id", "+", "'/stream'", "if", "stream...
this method returns the url to get streams information
[ "this", "method", "returns", "the", "url", "to", "get", "streams", "information" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/endpoints.py#L29-L34
6,485
opentok/Opentok-Python-SDK
opentok/endpoints.py
Endpoints.force_disconnect_url
def force_disconnect_url(self, session_id, connection_id): """ this method returns the force disconnect url endpoint """ url = ( self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/connection/' + connection_id ) return url
python
def force_disconnect_url(self, session_id, connection_id): url = ( self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/connection/' + connection_id ) return url
[ "def", "force_disconnect_url", "(", "self", ",", "session_id", ",", "connection_id", ")", ":", "url", "=", "(", "self", ".", "api_url", "+", "'/v2/project/'", "+", "self", ".", "api_key", "+", "'/session/'", "+", "session_id", "+", "'/connection/'", "+", "co...
this method returns the force disconnect url endpoint
[ "this", "method", "returns", "the", "force", "disconnect", "url", "endpoint" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/endpoints.py#L36-L42
6,486
opentok/Opentok-Python-SDK
opentok/endpoints.py
Endpoints.set_archive_layout_url
def set_archive_layout_url(self, archive_id): """ this method returns the url to set the archive layout """ url = self.api_url + '/v2/project/' + self.api_key + '/archive/' + archive_id + '/layout' return url
python
def set_archive_layout_url(self, archive_id): url = self.api_url + '/v2/project/' + self.api_key + '/archive/' + archive_id + '/layout' return url
[ "def", "set_archive_layout_url", "(", "self", ",", "archive_id", ")", ":", "url", "=", "self", ".", "api_url", "+", "'/v2/project/'", "+", "self", ".", "api_key", "+", "'/archive/'", "+", "archive_id", "+", "'/layout'", "return", "url" ]
this method returns the url to set the archive layout
[ "this", "method", "returns", "the", "url", "to", "set", "the", "archive", "layout" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/endpoints.py#L44-L47
6,487
opentok/Opentok-Python-SDK
opentok/endpoints.py
Endpoints.set_stream_class_lists_url
def set_stream_class_lists_url(self, session_id): """ this method returns the url to set the stream class list """ url = self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/stream' return url
python
def set_stream_class_lists_url(self, session_id): url = self.api_url + '/v2/project/' + self.api_key + '/session/' + session_id + '/stream' return url
[ "def", "set_stream_class_lists_url", "(", "self", ",", "session_id", ")", ":", "url", "=", "self", ".", "api_url", "+", "'/v2/project/'", "+", "self", ".", "api_key", "+", "'/session/'", "+", "session_id", "+", "'/stream'", "return", "url" ]
this method returns the url to set the stream class list
[ "this", "method", "returns", "the", "url", "to", "set", "the", "stream", "class", "list" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/endpoints.py#L54-L57
6,488
opentok/Opentok-Python-SDK
opentok/endpoints.py
Endpoints.broadcast_url
def broadcast_url(self, broadcast_id=None, stop=False, layout=False): """ this method returns urls for working with broadcast """ url = self.api_url + '/v2/project/' + self.api_key + '/broadcast' if broadcast_id: url = url + '/' + broadcast_id if stop: url = url + '/stop' if layout: url = url + '/layout' return url
python
def broadcast_url(self, broadcast_id=None, stop=False, layout=False): url = self.api_url + '/v2/project/' + self.api_key + '/broadcast' if broadcast_id: url = url + '/' + broadcast_id if stop: url = url + '/stop' if layout: url = url + '/layout' return url
[ "def", "broadcast_url", "(", "self", ",", "broadcast_id", "=", "None", ",", "stop", "=", "False", ",", "layout", "=", "False", ")", ":", "url", "=", "self", ".", "api_url", "+", "'/v2/project/'", "+", "self", ".", "api_key", "+", "'/broadcast'", "if", ...
this method returns urls for working with broadcast
[ "this", "method", "returns", "urls", "for", "working", "with", "broadcast" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/endpoints.py#L59-L69
6,489
opentok/Opentok-Python-SDK
opentok/archives.py
Archive.attrs
def attrs(self): """ Returns a dictionary of the archive's attributes. """ return dict((k, v) for k, v in iteritems(self.__dict__) if k is not "sdk")
python
def attrs(self): return dict((k, v) for k, v in iteritems(self.__dict__) if k is not "sdk")
[ "def", "attrs", "(", "self", ")", ":", "return", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "iteritems", "(", "self", ".", "__dict__", ")", "if", "k", "is", "not", "\"sdk\"", ")" ]
Returns a dictionary of the archive's attributes.
[ "Returns", "a", "dictionary", "of", "the", "archive", "s", "attributes", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/archives.py#L129-L133
6,490
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.create_session
def create_session(self, location=None, media_mode=MediaModes.relayed, archive_mode=ArchiveModes.manual): """ Creates a new OpenTok session and returns the session ID, which uniquely identifies the session. For example, when using the OpenTok JavaScript library, use the session ID when calling the OT.initSession() method (to initialize an OpenTok session). OpenTok sessions do not expire. However, authentication tokens do expire (see the generateToken() method). Also note that sessions cannot explicitly be destroyed. A session ID string can be up to 255 characters long. Calling this method results in an OpenTokException in the event of an error. Check the error message for details. You can also create a session using the OpenTok `REST API <https://tokbox.com/opentok/api/#session_id_production>`_ or `the OpenTok dashboard <https://dashboard.tokbox.com/projects>`_. :param String media_mode: Determines whether the session will transmit streams using the OpenTok Media Router (MediaMode.routed) or not (MediaMode.relayed). By default, the setting is MediaMode.relayed. With the media_mode property set to MediaMode.relayed, the session will attempt to transmit streams directly between clients. If clients cannot connect due to firewall restrictions, the session uses the OpenTok TURN server to relay audio-video streams. The `OpenTok Media Router <https://tokbox.com/opentok/tutorials/create-session/#media-mode>`_ provides the following benefits: * The OpenTok Media Router can decrease bandwidth usage in multiparty sessions. (When the mediaMode property is set to MediaMode.relayed, each client must send a separate audio-video stream to each client subscribing to it.) * The OpenTok Media Router can improve the quality of the user experience through audio fallback and video recovery (see https://tokbox.com/platform/fallback). 
With these features, if a client's connectivity degrades to a degree that it does not support video for a stream it's subscribing to, the video is dropped on that client (without affecting other clients), and the client receives audio only. If the client's connectivity improves, the video returns. * The OpenTok Media Router supports the archiving feature, which lets you record, save, and retrieve OpenTok sessions (see http://tokbox.com/platform/archiving). :param String archive_mode: Whether the session is automatically archived (ArchiveModes.always) or not (ArchiveModes.manual). By default, the setting is ArchiveModes.manual, and you must call the start_archive() method of the OpenTok object to start archiving. To archive the session (either automatically or not), you must set the media_mode parameter to MediaModes.routed. :param String location: An IP address that the OpenTok servers will use to situate the session in its global network. If you do not set a location hint, the OpenTok servers will be based on the first client connecting to the session. :rtype: The Session object. The session_id property of the object is the session ID. """ # build options options = {} if not isinstance(media_mode, MediaModes): raise OpenTokException(u('Cannot create session, {0} is not a valid media mode').format(media_mode)) if not isinstance(archive_mode, ArchiveModes): raise OpenTokException(u('Cannot create session, {0} is not a valid archive mode').format(archive_mode)) if archive_mode == ArchiveModes.always and media_mode != MediaModes.routed: raise OpenTokException(u('A session with always archive mode must also have the routed media mode.')) options[u('p2p.preference')] = media_mode.value options[u('archiveMode')] = archive_mode.value if location: # validate IP address try: inet_aton(location) except: raise OpenTokException(u('Cannot create session. 
Location must be either None or a valid IPv4 address {0}').format(location)) options[u('location')] = location try: response = requests.post(self.endpoints.session_url(), data=options, headers=self.headers(), proxies=self.proxies, timeout=self.timeout) response.encoding = 'utf-8' if response.status_code == 403: raise AuthError('Failed to create session, invalid credentials') if not response.content: raise RequestError() dom = xmldom.parseString(response.content) except Exception as e: raise RequestError('Failed to create session: %s' % str(e)) try: error = dom.getElementsByTagName('error') if error: error = error[0] raise AuthError('Failed to create session (code=%s): %s' % (error.attributes['code'].value, error.firstChild.attributes['message'].value)) session_id = dom.getElementsByTagName('session_id')[0].childNodes[0].nodeValue return Session(self, session_id, location=location, media_mode=media_mode, archive_mode=archive_mode) except Exception as e: raise OpenTokException('Failed to generate session: %s' % str(e))
python
def create_session(self, location=None, media_mode=MediaModes.relayed, archive_mode=ArchiveModes.manual): # build options options = {} if not isinstance(media_mode, MediaModes): raise OpenTokException(u('Cannot create session, {0} is not a valid media mode').format(media_mode)) if not isinstance(archive_mode, ArchiveModes): raise OpenTokException(u('Cannot create session, {0} is not a valid archive mode').format(archive_mode)) if archive_mode == ArchiveModes.always and media_mode != MediaModes.routed: raise OpenTokException(u('A session with always archive mode must also have the routed media mode.')) options[u('p2p.preference')] = media_mode.value options[u('archiveMode')] = archive_mode.value if location: # validate IP address try: inet_aton(location) except: raise OpenTokException(u('Cannot create session. Location must be either None or a valid IPv4 address {0}').format(location)) options[u('location')] = location try: response = requests.post(self.endpoints.session_url(), data=options, headers=self.headers(), proxies=self.proxies, timeout=self.timeout) response.encoding = 'utf-8' if response.status_code == 403: raise AuthError('Failed to create session, invalid credentials') if not response.content: raise RequestError() dom = xmldom.parseString(response.content) except Exception as e: raise RequestError('Failed to create session: %s' % str(e)) try: error = dom.getElementsByTagName('error') if error: error = error[0] raise AuthError('Failed to create session (code=%s): %s' % (error.attributes['code'].value, error.firstChild.attributes['message'].value)) session_id = dom.getElementsByTagName('session_id')[0].childNodes[0].nodeValue return Session(self, session_id, location=location, media_mode=media_mode, archive_mode=archive_mode) except Exception as e: raise OpenTokException('Failed to generate session: %s' % str(e))
[ "def", "create_session", "(", "self", ",", "location", "=", "None", ",", "media_mode", "=", "MediaModes", ".", "relayed", ",", "archive_mode", "=", "ArchiveModes", ".", "manual", ")", ":", "# build options", "options", "=", "{", "}", "if", "not", "isinstance...
Creates a new OpenTok session and returns the session ID, which uniquely identifies the session. For example, when using the OpenTok JavaScript library, use the session ID when calling the OT.initSession() method (to initialize an OpenTok session). OpenTok sessions do not expire. However, authentication tokens do expire (see the generateToken() method). Also note that sessions cannot explicitly be destroyed. A session ID string can be up to 255 characters long. Calling this method results in an OpenTokException in the event of an error. Check the error message for details. You can also create a session using the OpenTok `REST API <https://tokbox.com/opentok/api/#session_id_production>`_ or `the OpenTok dashboard <https://dashboard.tokbox.com/projects>`_. :param String media_mode: Determines whether the session will transmit streams using the OpenTok Media Router (MediaMode.routed) or not (MediaMode.relayed). By default, the setting is MediaMode.relayed. With the media_mode property set to MediaMode.relayed, the session will attempt to transmit streams directly between clients. If clients cannot connect due to firewall restrictions, the session uses the OpenTok TURN server to relay audio-video streams. The `OpenTok Media Router <https://tokbox.com/opentok/tutorials/create-session/#media-mode>`_ provides the following benefits: * The OpenTok Media Router can decrease bandwidth usage in multiparty sessions. (When the mediaMode property is set to MediaMode.relayed, each client must send a separate audio-video stream to each client subscribing to it.) * The OpenTok Media Router can improve the quality of the user experience through audio fallback and video recovery (see https://tokbox.com/platform/fallback). With these features, if a client's connectivity degrades to a degree that it does not support video for a stream it's subscribing to, the video is dropped on that client (without affecting other clients), and the client receives audio only. 
If the client's connectivity improves, the video returns. * The OpenTok Media Router supports the archiving feature, which lets you record, save, and retrieve OpenTok sessions (see http://tokbox.com/platform/archiving). :param String archive_mode: Whether the session is automatically archived (ArchiveModes.always) or not (ArchiveModes.manual). By default, the setting is ArchiveModes.manual, and you must call the start_archive() method of the OpenTok object to start archiving. To archive the session (either automatically or not), you must set the media_mode parameter to MediaModes.routed. :param String location: An IP address that the OpenTok servers will use to situate the session in its global network. If you do not set a location hint, the OpenTok servers will be based on the first client connecting to the session. :rtype: The Session object. The session_id property of the object is the session ID.
[ "Creates", "a", "new", "OpenTok", "session", "and", "returns", "the", "session", "ID", "which", "uniquely", "identifies", "the", "session", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L204-L304
6,491
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.start_archive
def start_archive(self, session_id, has_audio=True, has_video=True, name=None, output_mode=OutputModes.composed, resolution=None): """ Starts archiving an OpenTok session. Clients must be actively connected to the OpenTok session for you to successfully start recording an archive. You can only record one archive at a time for a given session. You can only record archives of sessions that use the OpenTok Media Router (sessions with the media mode set to routed); you cannot archive sessions with the media mode set to relayed. For more information on archiving, see the `OpenTok archiving <https://tokbox.com/opentok/tutorials/archiving/>`_ programming guide. :param String session_id: The session ID of the OpenTok session to archive. :param String name: This is the name of the archive. You can use this name to identify the archive. It is a property of the Archive object, and it is a property of archive-related events in the OpenTok.js library. :param Boolean has_audio: if set to True, an audio track will be inserted to the archive. has_audio is an optional parameter that is set to True by default. If you set both has_audio and has_video to False, the call to the start_archive() method results in an error. :param Boolean has_video: if set to True, a video track will be inserted to the archive. has_video is an optional parameter that is set to True by default. :param OutputModes output_mode: Whether all streams in the archive are recorded to a single file (OutputModes.composed, the default) or to individual files (OutputModes.individual). :param String resolution (Optional): The resolution of the archive, either "640x480" (the default) or "1280x720". This parameter only applies to composed archives. If you set this parameter and set the output_mode parameter to OutputModes.individual, the call to the start_archive() method results in an error. :rtype: The Archive object, which includes properties defining the archive, including the archive ID. 
""" if not isinstance(output_mode, OutputModes): raise OpenTokException(u('Cannot start archive, {0} is not a valid output mode').format(output_mode)) if resolution and output_mode == OutputModes.individual: raise OpenTokException(u('Invalid parameters: Resolution cannot be supplied for individual output mode.')) payload = {'name': name, 'sessionId': session_id, 'hasAudio': has_audio, 'hasVideo': has_video, 'outputMode': output_mode.value, 'resolution': resolution, } response = requests.post(self.endpoints.archive_url(), data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code < 300: return Archive(self, response.json()) elif response.status_code == 403: raise AuthError() elif response.status_code == 400: """ The HTTP response has a 400 status code in the following cases: You do not pass in a session ID or you pass in an invalid session ID. No clients are actively connected to the OpenTok session. You specify an invalid resolution value. The outputMode property is set to "individual" and you set the resolution property and (which is not supported in individual stream archives). """ raise RequestError(response.json().get("message")) elif response.status_code == 404: raise NotFoundError("Session not found") elif response.status_code == 409: raise ArchiveError(response.json().get("message")) else: raise RequestError("An unexpected error occurred", response.status_code)
python
def start_archive(self, session_id, has_audio=True, has_video=True, name=None, output_mode=OutputModes.composed, resolution=None): if not isinstance(output_mode, OutputModes): raise OpenTokException(u('Cannot start archive, {0} is not a valid output mode').format(output_mode)) if resolution and output_mode == OutputModes.individual: raise OpenTokException(u('Invalid parameters: Resolution cannot be supplied for individual output mode.')) payload = {'name': name, 'sessionId': session_id, 'hasAudio': has_audio, 'hasVideo': has_video, 'outputMode': output_mode.value, 'resolution': resolution, } response = requests.post(self.endpoints.archive_url(), data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code < 300: return Archive(self, response.json()) elif response.status_code == 403: raise AuthError() elif response.status_code == 400: """ The HTTP response has a 400 status code in the following cases: You do not pass in a session ID or you pass in an invalid session ID. No clients are actively connected to the OpenTok session. You specify an invalid resolution value. The outputMode property is set to "individual" and you set the resolution property and (which is not supported in individual stream archives). """ raise RequestError(response.json().get("message")) elif response.status_code == 404: raise NotFoundError("Session not found") elif response.status_code == 409: raise ArchiveError(response.json().get("message")) else: raise RequestError("An unexpected error occurred", response.status_code)
[ "def", "start_archive", "(", "self", ",", "session_id", ",", "has_audio", "=", "True", ",", "has_video", "=", "True", ",", "name", "=", "None", ",", "output_mode", "=", "OutputModes", ".", "composed", ",", "resolution", "=", "None", ")", ":", "if", "not"...
Starts archiving an OpenTok session. Clients must be actively connected to the OpenTok session for you to successfully start recording an archive. You can only record one archive at a time for a given session. You can only record archives of sessions that use the OpenTok Media Router (sessions with the media mode set to routed); you cannot archive sessions with the media mode set to relayed. For more information on archiving, see the `OpenTok archiving <https://tokbox.com/opentok/tutorials/archiving/>`_ programming guide. :param String session_id: The session ID of the OpenTok session to archive. :param String name: This is the name of the archive. You can use this name to identify the archive. It is a property of the Archive object, and it is a property of archive-related events in the OpenTok.js library. :param Boolean has_audio: if set to True, an audio track will be inserted to the archive. has_audio is an optional parameter that is set to True by default. If you set both has_audio and has_video to False, the call to the start_archive() method results in an error. :param Boolean has_video: if set to True, a video track will be inserted to the archive. has_video is an optional parameter that is set to True by default. :param OutputModes output_mode: Whether all streams in the archive are recorded to a single file (OutputModes.composed, the default) or to individual files (OutputModes.individual). :param String resolution (Optional): The resolution of the archive, either "640x480" (the default) or "1280x720". This parameter only applies to composed archives. If you set this parameter and set the output_mode parameter to OutputModes.individual, the call to the start_archive() method results in an error. :rtype: The Archive object, which includes properties defining the archive, including the archive ID.
[ "Starts", "archiving", "an", "OpenTok", "session", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L319-L389
6,492
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.delete_archive
def delete_archive(self, archive_id): """ Deletes an OpenTok archive. You can only delete an archive which has a status of "available" or "uploaded". Deleting an archive removes its record from the list of archives. For an "available" archive, it also removes the archive file, making it unavailable for download. :param String archive_id: The archive ID of the archive to be deleted. """ response = requests.delete(self.endpoints.archive_url(archive_id), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code < 300: pass elif response.status_code == 403: raise AuthError() elif response.status_code == 404: raise NotFoundError("Archive not found") else: raise RequestError("An unexpected error occurred", response.status_code)
python
def delete_archive(self, archive_id): response = requests.delete(self.endpoints.archive_url(archive_id), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code < 300: pass elif response.status_code == 403: raise AuthError() elif response.status_code == 404: raise NotFoundError("Archive not found") else: raise RequestError("An unexpected error occurred", response.status_code)
[ "def", "delete_archive", "(", "self", ",", "archive_id", ")", ":", "response", "=", "requests", ".", "delete", "(", "self", ".", "endpoints", ".", "archive_url", "(", "archive_id", ")", ",", "headers", "=", "self", ".", "json_headers", "(", ")", ",", "pr...
Deletes an OpenTok archive. You can only delete an archive which has a status of "available" or "uploaded". Deleting an archive removes its record from the list of archives. For an "available" archive, it also removes the archive file, making it unavailable for download. :param String archive_id: The archive ID of the archive to be deleted.
[ "Deletes", "an", "OpenTok", "archive", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L415-L434
6,493
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.get_archive
def get_archive(self, archive_id): """Gets an Archive object for the given archive ID. :param String archive_id: The archive ID. :rtype: The Archive object. """ response = requests.get(self.endpoints.archive_url(archive_id), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code < 300: return Archive(self, response.json()) elif response.status_code == 403: raise AuthError() elif response.status_code == 404: raise NotFoundError("Archive not found") else: raise RequestError("An unexpected error occurred", response.status_code)
python
def get_archive(self, archive_id): response = requests.get(self.endpoints.archive_url(archive_id), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout) if response.status_code < 300: return Archive(self, response.json()) elif response.status_code == 403: raise AuthError() elif response.status_code == 404: raise NotFoundError("Archive not found") else: raise RequestError("An unexpected error occurred", response.status_code)
[ "def", "get_archive", "(", "self", ",", "archive_id", ")", ":", "response", "=", "requests", ".", "get", "(", "self", ".", "endpoints", ".", "archive_url", "(", "archive_id", ")", ",", "headers", "=", "self", ".", "json_headers", "(", ")", ",", "proxies"...
Gets an Archive object for the given archive ID. :param String archive_id: The archive ID. :rtype: The Archive object.
[ "Gets", "an", "Archive", "object", "for", "the", "given", "archive", "ID", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L436-L452
6,494
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.get_archives
def get_archives(self, offset=None, count=None, session_id=None): """Returns an ArchiveList, which is an array of archives that are completed and in-progress, for your API key. :param int: offset Optional. The index offset of the first archive. 0 is offset of the most recently started archive. 1 is the offset of the archive that started prior to the most recent archive. If you do not specify an offset, 0 is used. :param int: count Optional. The number of archives to be returned. The maximum number of archives returned is 1000. :param string: session_id Optional. Used to list archives for a specific session ID. :rtype: An ArchiveList object, which is an array of Archive objects. """ params = {} if offset is not None: params['offset'] = offset if count is not None: params['count'] = count if session_id is not None: params['sessionId'] = session_id endpoint = self.endpoints.archive_url() + "?" + urlencode(params) response = requests.get( endpoint, headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code < 300: return ArchiveList(self, response.json()) elif response.status_code == 403: raise AuthError() elif response.status_code == 404: raise NotFoundError("Archive not found") else: raise RequestError("An unexpected error occurred", response.status_code)
python
def get_archives(self, offset=None, count=None, session_id=None): params = {} if offset is not None: params['offset'] = offset if count is not None: params['count'] = count if session_id is not None: params['sessionId'] = session_id endpoint = self.endpoints.archive_url() + "?" + urlencode(params) response = requests.get( endpoint, headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code < 300: return ArchiveList(self, response.json()) elif response.status_code == 403: raise AuthError() elif response.status_code == 404: raise NotFoundError("Archive not found") else: raise RequestError("An unexpected error occurred", response.status_code)
[ "def", "get_archives", "(", "self", ",", "offset", "=", "None", ",", "count", "=", "None", ",", "session_id", "=", "None", ")", ":", "params", "=", "{", "}", "if", "offset", "is", "not", "None", ":", "params", "[", "'offset'", "]", "=", "offset", "...
Returns an ArchiveList, which is an array of archives that are completed and in-progress, for your API key. :param int: offset Optional. The index offset of the first archive. 0 is offset of the most recently started archive. 1 is the offset of the archive that started prior to the most recent archive. If you do not specify an offset, 0 is used. :param int: count Optional. The number of archives to be returned. The maximum number of archives returned is 1000. :param string: session_id Optional. Used to list archives for a specific session ID. :rtype: An ArchiveList object, which is an array of Archive objects.
[ "Returns", "an", "ArchiveList", "which", "is", "an", "array", "of", "archives", "that", "are", "completed", "and", "in", "-", "progress", "for", "your", "API", "key", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L454-L488
6,495
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.signal
def signal(self, session_id, payload, connection_id=None): """ Send signals to all participants in an active OpenTok session or to a specific client connected to that session. :param String session_id: The session ID of the OpenTok session that receives the signal :param Dictionary payload: Structure that contains both the type and data fields. These correspond to the type and data parameters passed in the client signal received handlers :param String connection_id: The connection_id parameter is an optional string used to specify the connection ID of a client connected to the session. If you specify this value, the signal is sent to the specified client. Otherwise, the signal is sent to all clients connected to the session """ response = requests.post( self.endpoints.signaling_url(session_id, connection_id), data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 204: pass elif response.status_code == 400: raise SignalingError('One of the signal properties - data, type, sessionId or connectionId - is invalid.') elif response.status_code == 403: raise AuthError('You are not authorized to send the signal. Check your authentication credentials.') elif response.status_code == 404: raise SignalingError('The client specified by the connectionId property is not connected to the session.') elif response.status_code == 413: raise SignalingError('The type string exceeds the maximum length (128 bytes), or the data string exceeds the maximum size (8 kB).') else: raise RequestError('An unexpected error occurred', response.status_code)
python
def signal(self, session_id, payload, connection_id=None): response = requests.post( self.endpoints.signaling_url(session_id, connection_id), data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 204: pass elif response.status_code == 400: raise SignalingError('One of the signal properties - data, type, sessionId or connectionId - is invalid.') elif response.status_code == 403: raise AuthError('You are not authorized to send the signal. Check your authentication credentials.') elif response.status_code == 404: raise SignalingError('The client specified by the connectionId property is not connected to the session.') elif response.status_code == 413: raise SignalingError('The type string exceeds the maximum length (128 bytes), or the data string exceeds the maximum size (8 kB).') else: raise RequestError('An unexpected error occurred', response.status_code)
[ "def", "signal", "(", "self", ",", "session_id", ",", "payload", ",", "connection_id", "=", "None", ")", ":", "response", "=", "requests", ".", "post", "(", "self", ".", "endpoints", ".", "signaling_url", "(", "session_id", ",", "connection_id", ")", ",", ...
Send signals to all participants in an active OpenTok session or to a specific client connected to that session. :param String session_id: The session ID of the OpenTok session that receives the signal :param Dictionary payload: Structure that contains both the type and data fields. These correspond to the type and data parameters passed in the client signal received handlers :param String connection_id: The connection_id parameter is an optional string used to specify the connection ID of a client connected to the session. If you specify this value, the signal is sent to the specified client. Otherwise, the signal is sent to all clients connected to the session
[ "Send", "signals", "to", "all", "participants", "in", "an", "active", "OpenTok", "session", "or", "to", "a", "specific", "client", "connected", "to", "that", "session", "." ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L497-L531
6,496
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.force_disconnect
def force_disconnect(self, session_id, connection_id): """ Sends a request to disconnect a client from an OpenTok session :param String session_id: The session ID of the OpenTok session from which the client will be disconnected :param String connection_id: The connection ID of the client that will be disconnected """ endpoint = self.endpoints.force_disconnect_url(session_id, connection_id) response = requests.delete( endpoint, headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 204: pass elif response.status_code == 400: raise ForceDisconnectError('One of the arguments - sessionId or connectionId - is invalid.') elif response.status_code == 403: raise AuthError('You are not authorized to forceDisconnect, check your authentication credentials.') elif response.status_code == 404: raise ForceDisconnectError('The client specified by the connectionId property is not connected to the session.') else: raise RequestError('An unexpected error occurred', response.status_code)
python
def force_disconnect(self, session_id, connection_id): endpoint = self.endpoints.force_disconnect_url(session_id, connection_id) response = requests.delete( endpoint, headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 204: pass elif response.status_code == 400: raise ForceDisconnectError('One of the arguments - sessionId or connectionId - is invalid.') elif response.status_code == 403: raise AuthError('You are not authorized to forceDisconnect, check your authentication credentials.') elif response.status_code == 404: raise ForceDisconnectError('The client specified by the connectionId property is not connected to the session.') else: raise RequestError('An unexpected error occurred', response.status_code)
[ "def", "force_disconnect", "(", "self", ",", "session_id", ",", "connection_id", ")", ":", "endpoint", "=", "self", ".", "endpoints", ".", "force_disconnect_url", "(", "session_id", ",", "connection_id", ")", "response", "=", "requests", ".", "delete", "(", "e...
Sends a request to disconnect a client from an OpenTok session :param String session_id: The session ID of the OpenTok session from which the client will be disconnected :param String connection_id: The connection ID of the client that will be disconnected
[ "Sends", "a", "request", "to", "disconnect", "a", "client", "from", "an", "OpenTok", "session" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L581-L604
6,497
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.set_archive_layout
def set_archive_layout(self, archive_id, layout_type, stylesheet=None): """ Use this method to change the layout of videos in an OpenTok archive :param String archive_id: The ID of the archive that will be updated :param String layout_type: The layout type for the archive. Valid values are: 'bestFit', 'custom', 'horizontalPresentation', 'pip' and 'verticalPresentation' :param String stylesheet optional: CSS used to style the custom layout. Specify this only if you set the type property to 'custom' """ payload = { 'type': layout_type, } if layout_type == 'custom': if stylesheet is not None: payload['stylesheet'] = stylesheet endpoint = self.endpoints.set_archive_layout_url(archive_id) response = requests.put( endpoint, data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 200: pass elif response.status_code == 400: raise ArchiveError('Invalid request. This response may indicate that data in your request data is invalid JSON. It may also indicate that you passed in invalid layout options.') elif response.status_code == 403: raise AuthError('Authentication error.') else: raise RequestError('OpenTok server error.', response.status_code)
python
def set_archive_layout(self, archive_id, layout_type, stylesheet=None): payload = { 'type': layout_type, } if layout_type == 'custom': if stylesheet is not None: payload['stylesheet'] = stylesheet endpoint = self.endpoints.set_archive_layout_url(archive_id) response = requests.put( endpoint, data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 200: pass elif response.status_code == 400: raise ArchiveError('Invalid request. This response may indicate that data in your request data is invalid JSON. It may also indicate that you passed in invalid layout options.') elif response.status_code == 403: raise AuthError('Authentication error.') else: raise RequestError('OpenTok server error.', response.status_code)
[ "def", "set_archive_layout", "(", "self", ",", "archive_id", ",", "layout_type", ",", "stylesheet", "=", "None", ")", ":", "payload", "=", "{", "'type'", ":", "layout_type", ",", "}", "if", "layout_type", "==", "'custom'", ":", "if", "stylesheet", "is", "n...
Use this method to change the layout of videos in an OpenTok archive :param String archive_id: The ID of the archive that will be updated :param String layout_type: The layout type for the archive. Valid values are: 'bestFit', 'custom', 'horizontalPresentation', 'pip' and 'verticalPresentation' :param String stylesheet optional: CSS used to style the custom layout. Specify this only if you set the type property to 'custom'
[ "Use", "this", "method", "to", "change", "the", "layout", "of", "videos", "in", "an", "OpenTok", "archive" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L606-L642
6,498
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.dial
def dial(self, session_id, token, sip_uri, options=[]): """ Use this method to connect a SIP platform to an OpenTok session. The audio from the end of the SIP call is added to the OpenTok session as an audio-only stream. The OpenTok Media Router mixes audio from other streams in the session and sends the mixed audio to the SIP endpoint :param String session_id: The OpenTok session ID for the SIP call to join :param String token: The OpenTok token to be used for the participant being called :param String sip_uri: The SIP URI to be used as destination of the SIP call initiated from OpenTok to the SIP platform :param Dictionary options optional: Aditional options with the following properties: String 'from': The number or string that will be sent to the final SIP number as the caller Dictionary 'headers': Defines custom headers to be added to the SIP INVITE request initiated from OpenTok to the SIP platform. Each of the custom headers must start with the "X-" prefix, or the call will result in a Bad Request (400) response Dictionary 'auth': Contains the username and password to be used in the the SIP INVITE request for HTTP digest authentication, if it is required by the SIP platform For example: 'auth': { 'username': 'username', 'password': 'password' } Boolean 'secure': A Boolean flag that indicates whether the media must be transmitted encrypted (true) or not (false, the default) :rtype: A SipCall object, which contains data of the SIP call: id, connectionId and streamId """ payload = { 'sessionId': session_id, 'token': token, 'sip': { 'uri': sip_uri } } if 'from' in options: payload['sip']['from'] = options['from'] if 'headers' in options: payload['sip']['headers'] = options['headers'] if 'auth' in options: payload['sip']['auth'] = options['auth'] if 'secure' in options: payload['sip']['secure'] = options['secure'] endpoint = self.endpoints.dial_url() response = requests.post( endpoint, data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, 
timeout=self.timeout ) if response.status_code == 200: return SipCall(response.json()) elif response.status_code == 400: raise SipDialError('Invalid request. Invalid session ID.') elif response.status_code == 403: raise AuthError('Authentication error.') elif response.status_code == 404: raise SipDialError('The session does not exist.') elif response.status_code == 409: raise SipDialError( 'You attempted to start a SIP call for a session that ' 'does not use the OpenTok Media Router.') else: raise RequestError('OpenTok server error.', response.status_code)
python
def dial(self, session_id, token, sip_uri, options=[]): payload = { 'sessionId': session_id, 'token': token, 'sip': { 'uri': sip_uri } } if 'from' in options: payload['sip']['from'] = options['from'] if 'headers' in options: payload['sip']['headers'] = options['headers'] if 'auth' in options: payload['sip']['auth'] = options['auth'] if 'secure' in options: payload['sip']['secure'] = options['secure'] endpoint = self.endpoints.dial_url() response = requests.post( endpoint, data=json.dumps(payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 200: return SipCall(response.json()) elif response.status_code == 400: raise SipDialError('Invalid request. Invalid session ID.') elif response.status_code == 403: raise AuthError('Authentication error.') elif response.status_code == 404: raise SipDialError('The session does not exist.') elif response.status_code == 409: raise SipDialError( 'You attempted to start a SIP call for a session that ' 'does not use the OpenTok Media Router.') else: raise RequestError('OpenTok server error.', response.status_code)
[ "def", "dial", "(", "self", ",", "session_id", ",", "token", ",", "sip_uri", ",", "options", "=", "[", "]", ")", ":", "payload", "=", "{", "'sessionId'", ":", "session_id", ",", "'token'", ":", "token", ",", "'sip'", ":", "{", "'uri'", ":", "sip_uri"...
Use this method to connect a SIP platform to an OpenTok session. The audio from the end of the SIP call is added to the OpenTok session as an audio-only stream. The OpenTok Media Router mixes audio from other streams in the session and sends the mixed audio to the SIP endpoint :param String session_id: The OpenTok session ID for the SIP call to join :param String token: The OpenTok token to be used for the participant being called :param String sip_uri: The SIP URI to be used as destination of the SIP call initiated from OpenTok to the SIP platform :param Dictionary options optional: Aditional options with the following properties: String 'from': The number or string that will be sent to the final SIP number as the caller Dictionary 'headers': Defines custom headers to be added to the SIP INVITE request initiated from OpenTok to the SIP platform. Each of the custom headers must start with the "X-" prefix, or the call will result in a Bad Request (400) response Dictionary 'auth': Contains the username and password to be used in the the SIP INVITE request for HTTP digest authentication, if it is required by the SIP platform For example: 'auth': { 'username': 'username', 'password': 'password' } Boolean 'secure': A Boolean flag that indicates whether the media must be transmitted encrypted (true) or not (false, the default) :rtype: A SipCall object, which contains data of the SIP call: id, connectionId and streamId
[ "Use", "this", "method", "to", "connect", "a", "SIP", "platform", "to", "an", "OpenTok", "session", ".", "The", "audio", "from", "the", "end", "of", "the", "SIP", "call", "is", "added", "to", "the", "OpenTok", "session", "as", "an", "audio", "-", "only...
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L644-L723
6,499
opentok/Opentok-Python-SDK
opentok/opentok.py
OpenTok.set_stream_class_lists
def set_stream_class_lists(self, session_id, payload): """ Use this method to change layout classes for OpenTok streams. The layout classes define how the streams are displayed in the layout of a composed OpenTok archive :param String session_id: The ID of the session of the streams that will be updated :param List payload: A list defining the class lists to apply to the streams. Each element in the list is a dictionary with two properties: 'id' and 'layoutClassList'. The 'id' property is the stream ID (a String), and the 'layoutClassList' is an array of class names (Strings) to apply to the stream. For example: payload = [ {'id': '7b09ec3c-26f9-43d7-8197-f608f13d4fb6', 'layoutClassList': ['focus']}, {'id': '567bc941-6ea0-4c69-97fc-70a740b68976', 'layoutClassList': ['top']}, {'id': '307dc941-0450-4c09-975c-705740d08970', 'layoutClassList': ['bottom']} ] """ items_payload = {'items': payload} endpoint = self.endpoints.set_stream_class_lists_url(session_id) response = requests.put( endpoint, data=json.dumps(items_payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 200: pass elif response.status_code == 400: raise SetStreamClassError( 'Invalid request. This response may indicate that data in your request data ' 'is invalid JSON. It may also indicate that you passed in invalid layout options.' ) elif response.status_code == 403: raise AuthError('Authentication error.') else: raise RequestError('OpenTok server error.', response.status_code)
python
def set_stream_class_lists(self, session_id, payload): items_payload = {'items': payload} endpoint = self.endpoints.set_stream_class_lists_url(session_id) response = requests.put( endpoint, data=json.dumps(items_payload), headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout ) if response.status_code == 200: pass elif response.status_code == 400: raise SetStreamClassError( 'Invalid request. This response may indicate that data in your request data ' 'is invalid JSON. It may also indicate that you passed in invalid layout options.' ) elif response.status_code == 403: raise AuthError('Authentication error.') else: raise RequestError('OpenTok server error.', response.status_code)
[ "def", "set_stream_class_lists", "(", "self", ",", "session_id", ",", "payload", ")", ":", "items_payload", "=", "{", "'items'", ":", "payload", "}", "endpoint", "=", "self", ".", "endpoints", ".", "set_stream_class_lists_url", "(", "session_id", ")", "response"...
Use this method to change layout classes for OpenTok streams. The layout classes define how the streams are displayed in the layout of a composed OpenTok archive :param String session_id: The ID of the session of the streams that will be updated :param List payload: A list defining the class lists to apply to the streams. Each element in the list is a dictionary with two properties: 'id' and 'layoutClassList'. The 'id' property is the stream ID (a String), and the 'layoutClassList' is an array of class names (Strings) to apply to the stream. For example: payload = [ {'id': '7b09ec3c-26f9-43d7-8197-f608f13d4fb6', 'layoutClassList': ['focus']}, {'id': '567bc941-6ea0-4c69-97fc-70a740b68976', 'layoutClassList': ['top']}, {'id': '307dc941-0450-4c09-975c-705740d08970', 'layoutClassList': ['bottom']} ]
[ "Use", "this", "method", "to", "change", "layout", "classes", "for", "OpenTok", "streams", ".", "The", "layout", "classes", "define", "how", "the", "streams", "are", "displayed", "in", "the", "layout", "of", "a", "composed", "OpenTok", "archive" ]
ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c
https://github.com/opentok/Opentok-Python-SDK/blob/ffc6714e76be0d29e6b56aff8cbf7509b71a8b2c/opentok/opentok.py#L725-L764