repo
stringlengths
7
55
path
stringlengths
4
223
func_name
stringlengths
1
134
original_string
stringlengths
75
104k
language
stringclasses
1 value
code
stringlengths
75
104k
code_tokens
listlengths
19
28.4k
docstring
stringlengths
1
46.9k
docstring_tokens
listlengths
1
1.97k
sha
stringlengths
40
40
url
stringlengths
87
315
partition
stringclasses
1 value
bd808/python-iptools
iptools/__init__.py
IpRange.index
def index(self, item):
    """
    Return the 0-based position of `item` in this IpRange.

    >>> r = IpRange('127.0.0.1', '127.255.255.255')
    >>> r.index('127.0.0.1')
    0
    >>> r.index('127.255.255.255')
    16777214
    >>> r.index('10.0.0.1')
    Traceback (most recent call last):
        ...
    ValueError: 10.0.0.1 is not in range

    :param item: Dotted-quad ip address.
    :type item: str
    :returns: Index of ip address in range
    :raises ValueError: If `item` does not fall inside this range.
    """
    item = self._cast(item)
    offset = item - self.startIp
    # Chained comparison is the idiomatic bounds check.
    if 0 <= offset < self._len:
        return offset
    raise ValueError('%s is not in range' % self._ipver.long2ip(item))
python
def index(self, item):
    """
    Return the 0-based position of `item` in this IpRange.

    >>> r = IpRange('127.0.0.1', '127.255.255.255')
    >>> r.index('127.0.0.1')
    0
    >>> r.index('127.255.255.255')
    16777214
    >>> r.index('10.0.0.1')
    Traceback (most recent call last):
        ...
    ValueError: 10.0.0.1 is not in range

    :param item: Dotted-quad ip address.
    :type item: str
    :returns: Index of ip address in range
    :raises ValueError: If `item` does not fall inside this range.
    """
    item = self._cast(item)
    offset = item - self.startIp
    # Chained comparison is the idiomatic bounds check.
    if 0 <= offset < self._len:
        return offset
    raise ValueError('%s is not in range' % self._ipver.long2ip(item))
[ "def", "index", "(", "self", ",", "item", ")", ":", "item", "=", "self", ".", "_cast", "(", "item", ")", "offset", "=", "item", "-", "self", ".", "startIp", "if", "offset", ">=", "0", "and", "offset", "<", "self", ".", "_len", ":", "return", "offset", "raise", "ValueError", "(", "'%s is not in range'", "%", "self", ".", "_ipver", ".", "long2ip", "(", "item", ")", ")" ]
Return the 0-based position of `item` in this IpRange. >>> r = IpRange('127.0.0.1', '127.255.255.255') >>> r.index('127.0.0.1') 0 >>> r.index('127.255.255.255') 16777214 >>> r.index('10.0.0.1') Traceback (most recent call last): ... ValueError: 10.0.0.1 is not in range :param item: Dotted-quad ip address. :type item: str :returns: Index of ip address in range
[ "Return", "the", "0", "-", "based", "position", "of", "item", "in", "this", "IpRange", "." ]
5d3fae0056297540355bb7c6c112703cfaa4b6ce
https://github.com/bd808/python-iptools/blob/5d3fae0056297540355bb7c6c112703cfaa4b6ce/iptools/__init__.py#L259-L283
train
torfsen/service
src/service/__init__.py
_detach_process
def _detach_process(): """ Detach daemon process. Forks the current process into a parent and a detached child. The child process resides in its own process group, has no controlling terminal attached and is cleaned up by the init process. Returns ``True`` for the parent and ``False`` for the child. """ # To detach from our process group we need to call ``setsid``. We # can only do that if we aren't a process group leader. Therefore # we fork once, which makes sure that the new child process is not # a process group leader. pid = os.fork() if pid > 0: # Parent process # Use waitpid to "collect" the child process and avoid Zombies os.waitpid(pid, 0) return True os.setsid() # We now fork a second time and let the second's fork parent exit. # This makes the second fork's child process an orphan. Orphans are # cleaned up by the init process, so we won't end up with a zombie. # In addition, the second fork's child is no longer a session # leader and can therefore never acquire a controlling terminal. pid = os.fork() if pid > 0: os._exit(os.EX_OK) return False
python
def _detach_process(): """ Detach daemon process. Forks the current process into a parent and a detached child. The child process resides in its own process group, has no controlling terminal attached and is cleaned up by the init process. Returns ``True`` for the parent and ``False`` for the child. """ # To detach from our process group we need to call ``setsid``. We # can only do that if we aren't a process group leader. Therefore # we fork once, which makes sure that the new child process is not # a process group leader. pid = os.fork() if pid > 0: # Parent process # Use waitpid to "collect" the child process and avoid Zombies os.waitpid(pid, 0) return True os.setsid() # We now fork a second time and let the second's fork parent exit. # This makes the second fork's child process an orphan. Orphans are # cleaned up by the init process, so we won't end up with a zombie. # In addition, the second fork's child is no longer a session # leader and can therefore never acquire a controlling terminal. pid = os.fork() if pid > 0: os._exit(os.EX_OK) return False
[ "def", "_detach_process", "(", ")", ":", "# To detach from our process group we need to call ``setsid``. We", "# can only do that if we aren't a process group leader. Therefore", "# we fork once, which makes sure that the new child process is not", "# a process group leader.", "pid", "=", "os", ".", "fork", "(", ")", "if", "pid", ">", "0", ":", "# Parent process", "# Use waitpid to \"collect\" the child process and avoid Zombies", "os", ".", "waitpid", "(", "pid", ",", "0", ")", "return", "True", "os", ".", "setsid", "(", ")", "# We now fork a second time and let the second's fork parent exit.", "# This makes the second fork's child process an orphan. Orphans are", "# cleaned up by the init process, so we won't end up with a zombie.", "# In addition, the second fork's child is no longer a session", "# leader and can therefore never acquire a controlling terminal.", "pid", "=", "os", ".", "fork", "(", ")", "if", "pid", ">", "0", ":", "os", ".", "_exit", "(", "os", ".", "EX_OK", ")", "return", "False" ]
Detach daemon process. Forks the current process into a parent and a detached child. The child process resides in its own process group, has no controlling terminal attached and is cleaned up by the init process. Returns ``True`` for the parent and ``False`` for the child.
[ "Detach", "daemon", "process", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L49-L78
train
torfsen/service
src/service/__init__.py
_block
def _block(predicate, timeout): """ Block until a predicate becomes true. ``predicate`` is a function taking no arguments. The call to ``_block`` blocks until ``predicate`` returns a true value. This is done by polling ``predicate``. ``timeout`` is either ``True`` (block indefinitely) or a timeout in seconds. The return value is the value of the predicate after the timeout. """ if timeout: if timeout is True: timeout = float('Inf') timeout = time.time() + timeout while not predicate() and time.time() < timeout: time.sleep(0.1) return predicate()
python
def _block(predicate, timeout): """ Block until a predicate becomes true. ``predicate`` is a function taking no arguments. The call to ``_block`` blocks until ``predicate`` returns a true value. This is done by polling ``predicate``. ``timeout`` is either ``True`` (block indefinitely) or a timeout in seconds. The return value is the value of the predicate after the timeout. """ if timeout: if timeout is True: timeout = float('Inf') timeout = time.time() + timeout while not predicate() and time.time() < timeout: time.sleep(0.1) return predicate()
[ "def", "_block", "(", "predicate", ",", "timeout", ")", ":", "if", "timeout", ":", "if", "timeout", "is", "True", ":", "timeout", "=", "float", "(", "'Inf'", ")", "timeout", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "not", "predicate", "(", ")", "and", "time", ".", "time", "(", ")", "<", "timeout", ":", "time", ".", "sleep", "(", "0.1", ")", "return", "predicate", "(", ")" ]
Block until a predicate becomes true. ``predicate`` is a function taking no arguments. The call to ``_block`` blocks until ``predicate`` returns a true value. This is done by polling ``predicate``. ``timeout`` is either ``True`` (block indefinitely) or a timeout in seconds. The return value is the value of the predicate after the timeout.
[ "Block", "until", "a", "predicate", "becomes", "true", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L143-L163
train
torfsen/service
src/service/__init__.py
_PIDFile.read_pid
def read_pid(self):
    """
    Return the PID of the process owning the lock.

    Returns ``None`` if no lock is present.
    """
    try:
        with open(self._path, 'r') as pid_file:
            content = pid_file.read().strip()
    except IOError as e:
        # A missing PID file simply means nobody holds the lock.
        if e.errno == errno.ENOENT:
            return None
        raise
    # An empty file is treated like a missing one.
    return int(content) if content else None
python
def read_pid(self):
    """
    Return the PID of the process owning the lock.

    Returns ``None`` if no lock is present.
    """
    try:
        with open(self._path, 'r') as pid_file:
            content = pid_file.read().strip()
    except IOError as e:
        # A missing PID file simply means nobody holds the lock.
        if e.errno == errno.ENOENT:
            return None
        raise
    # An empty file is treated like a missing one.
    return int(content) if content else None
[ "def", "read_pid", "(", "self", ")", ":", "try", ":", "with", "open", "(", "self", ".", "_path", ",", "'r'", ")", "as", "f", ":", "s", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "not", "s", ":", "return", "None", "return", "int", "(", "s", ")", "except", "IOError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "return", "None", "raise" ]
Return the PID of the process owning the lock. Returns ``None`` if no lock is present.
[ "Return", "the", "PID", "of", "the", "process", "owning", "the", "lock", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L109-L124
train
torfsen/service
src/service/__init__.py
Service._get_logger_file_handles
def _get_logger_file_handles(self): """ Find the file handles used by our logger's handlers. """ handles = [] for handler in self.logger.handlers: # The following code works for logging's SysLogHandler, # StreamHandler, SocketHandler, and their subclasses. for attr in ['sock', 'socket', 'stream']: try: handle = getattr(handler, attr) if handle: handles.append(handle) break except AttributeError: continue return handles
python
def _get_logger_file_handles(self): """ Find the file handles used by our logger's handlers. """ handles = [] for handler in self.logger.handlers: # The following code works for logging's SysLogHandler, # StreamHandler, SocketHandler, and their subclasses. for attr in ['sock', 'socket', 'stream']: try: handle = getattr(handler, attr) if handle: handles.append(handle) break except AttributeError: continue return handles
[ "def", "_get_logger_file_handles", "(", "self", ")", ":", "handles", "=", "[", "]", "for", "handler", "in", "self", ".", "logger", ".", "handlers", ":", "# The following code works for logging's SysLogHandler,", "# StreamHandler, SocketHandler, and their subclasses.", "for", "attr", "in", "[", "'sock'", ",", "'socket'", ",", "'stream'", "]", ":", "try", ":", "handle", "=", "getattr", "(", "handler", ",", "attr", ")", "if", "handle", ":", "handles", ".", "append", "(", "handle", ")", "break", "except", "AttributeError", ":", "continue", "return", "handles" ]
Find the file handles used by our logger's handlers.
[ "Find", "the", "file", "handles", "used", "by", "our", "logger", "s", "handlers", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L221-L237
train
torfsen/service
src/service/__init__.py
Service.is_running
def is_running(self):
    """
    Check if the daemon is running.
    """
    pid = self.get_pid()
    if pid is None:
        return False
    # The PID file may still exist even if the daemon isn't running,
    # for example if it has crashed.
    try:
        os.kill(pid, 0)  # signal 0 only probes for process existence
    except OSError as e:
        if e.errno != errno.ESRCH:
            # We may get an error because we're not allowed to signal
            # the process, but that means the process does exist,
            # which is all we care about here.
            return True
        # No such process: the PID file shouldn't have existed in the
        # first place, so we remove it.
        self.pid_file.release()
        return False
    return True
python
def is_running(self):
    """
    Check if the daemon is running.
    """
    pid = self.get_pid()
    if pid is None:
        return False
    # The PID file may still exist even if the daemon isn't running,
    # for example if it has crashed.
    try:
        os.kill(pid, 0)  # signal 0 only probes for process existence
    except OSError as e:
        if e.errno != errno.ESRCH:
            # We may get an error because we're not allowed to signal
            # the process, but that means the process does exist,
            # which is all we care about here.
            return True
        # No such process: the PID file shouldn't have existed in the
        # first place, so we remove it.
        self.pid_file.release()
        return False
    return True
[ "def", "is_running", "(", "self", ")", ":", "pid", "=", "self", ".", "get_pid", "(", ")", "if", "pid", "is", "None", ":", "return", "False", "# The PID file may still exist even if the daemon isn't running,", "# for example if it has crashed.", "try", ":", "os", ".", "kill", "(", "pid", ",", "0", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ESRCH", ":", "# In this case the PID file shouldn't have existed in", "# the first place, so we remove it", "self", ".", "pid_file", ".", "release", "(", ")", "return", "False", "# We may also get an exception if we're not allowed to use", "# kill on the process, but that means that the process does", "# exist, which is all we care about here.", "return", "True" ]
Check if the daemon is running.
[ "Check", "if", "the", "daemon", "is", "running", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L239-L259
train
torfsen/service
src/service/__init__.py
Service._get_signal_event
def _get_signal_event(self, s): ''' Get the event for a signal. Checks if the signal has been enabled and raises a ``ValueError`` if not. ''' try: return self._signal_events[int(s)] except KeyError: raise ValueError('Signal {} has not been enabled'.format(s))
python
def _get_signal_event(self, s): ''' Get the event for a signal. Checks if the signal has been enabled and raises a ``ValueError`` if not. ''' try: return self._signal_events[int(s)] except KeyError: raise ValueError('Signal {} has not been enabled'.format(s))
[ "def", "_get_signal_event", "(", "self", ",", "s", ")", ":", "try", ":", "return", "self", ".", "_signal_events", "[", "int", "(", "s", ")", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "'Signal {} has not been enabled'", ".", "format", "(", "s", ")", ")" ]
Get the event for a signal. Checks if the signal has been enabled and raises a ``ValueError`` if not.
[ "Get", "the", "event", "for", "a", "signal", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L267-L277
train
torfsen/service
src/service/__init__.py
Service.send_signal
def send_signal(self, s):
    """
    Send a signal to the daemon process.

    The signal must have been enabled using the ``signals``
    parameter of :py:meth:`Service.__init__`. Otherwise, a
    ``ValueError`` is raised.
    """
    self._get_signal_event(s)  # raises ValueError if ``s`` not enabled
    pid = self.get_pid()
    if pid:
        os.kill(pid, s)
    else:
        raise ValueError('Daemon is not running.')
python
def send_signal(self, s):
    """
    Send a signal to the daemon process.

    The signal must have been enabled using the ``signals``
    parameter of :py:meth:`Service.__init__`. Otherwise, a
    ``ValueError`` is raised.
    """
    self._get_signal_event(s)  # raises ValueError if ``s`` not enabled
    pid = self.get_pid()
    if pid:
        os.kill(pid, s)
    else:
        raise ValueError('Daemon is not running.')
[ "def", "send_signal", "(", "self", ",", "s", ")", ":", "self", ".", "_get_signal_event", "(", "s", ")", "# Check if signal has been enabled", "pid", "=", "self", ".", "get_pid", "(", ")", "if", "not", "pid", ":", "raise", "ValueError", "(", "'Daemon is not running.'", ")", "os", ".", "kill", "(", "pid", ",", "s", ")" ]
Send a signal to the daemon process. The signal must have been enabled using the ``signals`` parameter of :py:meth:`Service.__init__`. Otherwise, a ``ValueError`` is raised.
[ "Send", "a", "signal", "to", "the", "daemon", "process", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L279-L291
train
torfsen/service
src/service/__init__.py
Service.stop
def stop(self, block=False):
    """
    Tell the daemon process to stop.

    Sends the SIGTERM signal to the daemon process, requesting it
    to terminate.

    If ``block`` is true then the call blocks until the daemon
    process has exited. This may take some time since the daemon
    process will complete its on-going backup activities before
    shutting down. ``block`` can either be ``True`` (in which case
    it blocks indefinitely) or a timeout in seconds.

    The return value is ``True`` if the daemon process has been
    stopped and ``False`` otherwise.

    .. versionadded:: 0.3
        The ``block`` parameter
    """
    self.send_signal(signal.SIGTERM)

    # Poll until the daemon is gone (or the timeout expires).
    def has_stopped():
        return not self.is_running()
    return _block(has_stopped, block)
python
def stop(self, block=False):
    """
    Tell the daemon process to stop.

    Sends the SIGTERM signal to the daemon process, requesting it
    to terminate.

    If ``block`` is true then the call blocks until the daemon
    process has exited. This may take some time since the daemon
    process will complete its on-going backup activities before
    shutting down. ``block`` can either be ``True`` (in which case
    it blocks indefinitely) or a timeout in seconds.

    The return value is ``True`` if the daemon process has been
    stopped and ``False`` otherwise.

    .. versionadded:: 0.3
        The ``block`` parameter
    """
    self.send_signal(signal.SIGTERM)

    # Poll until the daemon is gone (or the timeout expires).
    def has_stopped():
        return not self.is_running()
    return _block(has_stopped, block)
[ "def", "stop", "(", "self", ",", "block", "=", "False", ")", ":", "self", ".", "send_signal", "(", "signal", ".", "SIGTERM", ")", "return", "_block", "(", "lambda", ":", "not", "self", ".", "is_running", "(", ")", ",", "block", ")" ]
Tell the daemon process to stop. Sends the SIGTERM signal to the daemon process, requesting it to terminate. If ``block`` is true then the call blocks until the daemon process has exited. This may take some time since the daemon process will complete its on-going backup activities before shutting down. ``block`` can either be ``True`` (in which case it blocks indefinitely) or a timeout in seconds. The return value is ``True`` if the daemon process has been stopped and ``False`` otherwise. .. versionadded:: 0.3 The ``block`` parameter
[ "Tell", "the", "daemon", "process", "to", "stop", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L381-L401
train
torfsen/service
src/service/__init__.py
Service.kill
def kill(self, block=False):
    """
    Kill the daemon process.

    Sends the SIGKILL signal to the daemon process, killing it. You
    probably want to try :py:meth:`stop` first.

    If ``block`` is true then the call blocks until the daemon
    process has exited. ``block`` can either be ``True`` (in which
    case it blocks indefinitely) or a timeout in seconds.

    Returns ``True`` if the daemon process has (already) exited and
    ``False`` otherwise.

    The PID file is always removed, whether the process has already
    exited or not. Note that this means that subsequent calls to
    :py:meth:`is_running` and :py:meth:`get_pid` will behave as if
    the process has exited. If you need to be sure that the process
    has already exited, set ``block`` to ``True``.

    .. versionadded:: 0.5.1
        The ``block`` parameter

    :raises ValueError: If the daemon is not running.
    """
    pid = self.get_pid()
    if not pid:
        raise ValueError('Daemon is not running.')
    try:
        os.kill(pid, signal.SIGKILL)
        # Wait for the process to disappear (or until the timeout).
        return _block(lambda: not self.is_running(), block)
    except OSError as e:
        if e.errno == errno.ESRCH:
            # The process died between ``get_pid`` and ``os.kill``.
            raise ValueError('Daemon is not running.')
        raise
    finally:
        # Remove the PID file unconditionally (see docstring).
        self.pid_file.release()
python
def kill(self, block=False):
    """
    Kill the daemon process.

    Sends the SIGKILL signal to the daemon process, killing it. You
    probably want to try :py:meth:`stop` first.

    If ``block`` is true then the call blocks until the daemon
    process has exited. ``block`` can either be ``True`` (in which
    case it blocks indefinitely) or a timeout in seconds.

    Returns ``True`` if the daemon process has (already) exited and
    ``False`` otherwise.

    The PID file is always removed, whether the process has already
    exited or not. Note that this means that subsequent calls to
    :py:meth:`is_running` and :py:meth:`get_pid` will behave as if
    the process has exited. If you need to be sure that the process
    has already exited, set ``block`` to ``True``.

    .. versionadded:: 0.5.1
        The ``block`` parameter

    :raises ValueError: If the daemon is not running.
    """
    pid = self.get_pid()
    if not pid:
        raise ValueError('Daemon is not running.')
    try:
        os.kill(pid, signal.SIGKILL)
        # Wait for the process to disappear (or until the timeout).
        return _block(lambda: not self.is_running(), block)
    except OSError as e:
        if e.errno == errno.ESRCH:
            # The process died between ``get_pid`` and ``os.kill``.
            raise ValueError('Daemon is not running.')
        raise
    finally:
        # Remove the PID file unconditionally (see docstring).
        self.pid_file.release()
[ "def", "kill", "(", "self", ",", "block", "=", "False", ")", ":", "pid", "=", "self", ".", "get_pid", "(", ")", "if", "not", "pid", ":", "raise", "ValueError", "(", "'Daemon is not running.'", ")", "try", ":", "os", ".", "kill", "(", "pid", ",", "signal", ".", "SIGKILL", ")", "return", "_block", "(", "lambda", ":", "not", "self", ".", "is_running", "(", ")", ",", "block", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ESRCH", ":", "raise", "ValueError", "(", "'Daemon is not running.'", ")", "raise", "finally", ":", "self", ".", "pid_file", ".", "release", "(", ")" ]
Kill the daemon process. Sends the SIGKILL signal to the daemon process, killing it. You probably want to try :py:meth:`stop` first. If ``block`` is true then the call blocks until the daemon process has exited. ``block`` can either be ``True`` (in which case it blocks indefinitely) or a timeout in seconds. Returns ``True`` if the daemon process has (already) exited and ``False`` otherwise. The PID file is always removed, whether the process has already exited or not. Note that this means that subsequent calls to :py:meth:`is_running` and :py:meth:`get_pid` will behave as if the process has exited. If you need to be sure that the process has already exited, set ``block`` to ``True``. .. versionadded:: 0.5.1 The ``block`` parameter
[ "Kill", "the", "daemon", "process", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L403-L437
train
torfsen/service
src/service/__init__.py
Service.start
def start(self, block=False):
    """
    Start the daemon process.

    The daemon process is started in the background and the calling
    process returns.

    Once the daemon process is initialized it calls the
    :py:meth:`run` method.

    If ``block`` is true then the call blocks until the daemon
    process has started. ``block`` can either be ``True`` (in which
    case it blocks indefinitely) or a timeout in seconds.

    The return value is ``True`` if the daemon process has been
    started and ``False`` otherwise.

    .. versionadded:: 0.3
        The ``block`` parameter

    :raises ValueError: If the daemon is already running.
    """
    pid = self.get_pid()
    if pid:
        raise ValueError('Daemon is already running at PID %d.' % pid)

    # The default is to place the PID file into ``/var/run``. This
    # requires root privileges. Since not having these is a common
    # problem we check a priori whether we can create the lock file.
    try:
        self.pid_file.acquire()
    finally:
        self.pid_file.release()

    # Clear previously received SIGTERMs. This must be done before
    # the calling process returns so that the calling process can
    # call ``stop`` directly after ``start`` returns without the
    # signal being lost.
    self.clear_signal(signal.SIGTERM)

    if _detach_process():
        # Calling process returns
        return _block(lambda: self.is_running(), block)
    # Daemon process continues here
    self._debug('Daemon has detached')

    def on_signal(s, frame):
        # Installed for every enabled signal; wakes up waiters.
        self._debug('Received signal {}'.format(s))
        self._signal_events[int(s)].set()

    def runner():
        try:
            # We acquire the PID as late as possible, since its
            # existence is used to verify whether the service
            # is running.
            self.pid_file.acquire()
            self._debug('PID file has been acquired')
            self._debug('Calling `run`')
            self.run()
            self._debug('`run` returned without exception')
        except Exception as e:
            self.logger.exception(e)
        except SystemExit:
            self._debug('`run` called `sys.exit`')
        try:
            self.pid_file.release()
            self._debug('PID file has been released')
        except Exception as e:
            self.logger.exception(e)
        os._exit(os.EX_OK)  # FIXME: This seems redundant

    try:
        setproctitle.setproctitle(self.name)
        self._debug('Process title has been set')
        files_preserve = (self.files_preserve +
                          self._get_logger_file_handles())
        signal_map = {s: on_signal for s in self._signal_events}
        # Keep the default (ignore) behavior for job-control signals.
        signal_map.update({
            signal.SIGTTIN: None,
            signal.SIGTTOU: None,
            signal.SIGTSTP: None,
        })
        with DaemonContext(
                detach_process=False,
                signal_map=signal_map,
                files_preserve=files_preserve):
            self._debug('Daemon context has been established')
            # Python's signal handling mechanism only forwards signals to
            # the main thread and only when that thread is doing something
            # (e.g. not when it's waiting for a lock, etc.). If we use the
            # main thread for the ``run`` method this means that we cannot
            # use the synchronization devices from ``threading`` for
            # communicating the reception of SIGTERM to ``run``. Hence we
            # use a separate thread for ``run`` and make sure that the
            # main loop receives signals. See
            # https://bugs.python.org/issue1167930
            thread = threading.Thread(target=runner)
            thread.start()
            while thread.is_alive():
                time.sleep(1)
    except Exception as e:
        self.logger.exception(e)

    # We need to shutdown the daemon process at this point, because
    # otherwise it will continue executing from after the original
    # call to ``start``.
    os._exit(os.EX_OK)
python
def start(self, block=False): """ Start the daemon process. The daemon process is started in the background and the calling process returns. Once the daemon process is initialized it calls the :py:meth:`run` method. If ``block`` is true then the call blocks until the daemon process has started. ``block`` can either be ``True`` (in which case it blocks indefinitely) or a timeout in seconds. The return value is ``True`` if the daemon process has been started and ``False`` otherwise. .. versionadded:: 0.3 The ``block`` parameter """ pid = self.get_pid() if pid: raise ValueError('Daemon is already running at PID %d.' % pid) # The default is to place the PID file into ``/var/run``. This # requires root privileges. Since not having these is a common # problem we check a priori whether we can create the lock file. try: self.pid_file.acquire() finally: self.pid_file.release() # Clear previously received SIGTERMs. This must be done before # the calling process returns so that the calling process can # call ``stop`` directly after ``start`` returns without the # signal being lost. self.clear_signal(signal.SIGTERM) if _detach_process(): # Calling process returns return _block(lambda: self.is_running(), block) # Daemon process continues here self._debug('Daemon has detached') def on_signal(s, frame): self._debug('Received signal {}'.format(s)) self._signal_events[int(s)].set() def runner(): try: # We acquire the PID as late as possible, since its # existence is used to verify whether the service # is running. 
self.pid_file.acquire() self._debug('PID file has been acquired') self._debug('Calling `run`') self.run() self._debug('`run` returned without exception') except Exception as e: self.logger.exception(e) except SystemExit: self._debug('`run` called `sys.exit`') try: self.pid_file.release() self._debug('PID file has been released') except Exception as e: self.logger.exception(e) os._exit(os.EX_OK) # FIXME: This seems redundant try: setproctitle.setproctitle(self.name) self._debug('Process title has been set') files_preserve = (self.files_preserve + self._get_logger_file_handles()) signal_map = {s: on_signal for s in self._signal_events} signal_map.update({ signal.SIGTTIN: None, signal.SIGTTOU: None, signal.SIGTSTP: None, }) with DaemonContext( detach_process=False, signal_map=signal_map, files_preserve=files_preserve): self._debug('Daemon context has been established') # Python's signal handling mechanism only forwards signals to # the main thread and only when that thread is doing something # (e.g. not when it's waiting for a lock, etc.). If we use the # main thread for the ``run`` method this means that we cannot # use the synchronization devices from ``threading`` for # communicating the reception of SIGTERM to ``run``. Hence we # use a separate thread for ``run`` and make sure that the # main loop receives signals. See # https://bugs.python.org/issue1167930 thread = threading.Thread(target=runner) thread.start() while thread.is_alive(): time.sleep(1) except Exception as e: self.logger.exception(e) # We need to shutdown the daemon process at this point, because # otherwise it will continue executing from after the original # call to ``start``. os._exit(os.EX_OK)
[ "def", "start", "(", "self", ",", "block", "=", "False", ")", ":", "pid", "=", "self", ".", "get_pid", "(", ")", "if", "pid", ":", "raise", "ValueError", "(", "'Daemon is already running at PID %d.'", "%", "pid", ")", "# The default is to place the PID file into ``/var/run``. This", "# requires root privileges. Since not having these is a common", "# problem we check a priori whether we can create the lock file.", "try", ":", "self", ".", "pid_file", ".", "acquire", "(", ")", "finally", ":", "self", ".", "pid_file", ".", "release", "(", ")", "# Clear previously received SIGTERMs. This must be done before", "# the calling process returns so that the calling process can", "# call ``stop`` directly after ``start`` returns without the", "# signal being lost.", "self", ".", "clear_signal", "(", "signal", ".", "SIGTERM", ")", "if", "_detach_process", "(", ")", ":", "# Calling process returns", "return", "_block", "(", "lambda", ":", "self", ".", "is_running", "(", ")", ",", "block", ")", "# Daemon process continues here", "self", ".", "_debug", "(", "'Daemon has detached'", ")", "def", "on_signal", "(", "s", ",", "frame", ")", ":", "self", ".", "_debug", "(", "'Received signal {}'", ".", "format", "(", "s", ")", ")", "self", ".", "_signal_events", "[", "int", "(", "s", ")", "]", ".", "set", "(", ")", "def", "runner", "(", ")", ":", "try", ":", "# We acquire the PID as late as possible, since its", "# existence is used to verify whether the service", "# is running.", "self", ".", "pid_file", ".", "acquire", "(", ")", "self", ".", "_debug", "(", "'PID file has been acquired'", ")", "self", ".", "_debug", "(", "'Calling `run`'", ")", "self", ".", "run", "(", ")", "self", ".", "_debug", "(", "'`run` returned without exception'", ")", "except", "Exception", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "except", "SystemExit", ":", "self", ".", "_debug", "(", "'`run` called `sys.exit`'", ")", "try", ":", "self", ".", "pid_file", ".", "release", "(", 
")", "self", ".", "_debug", "(", "'PID file has been released'", ")", "except", "Exception", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "os", ".", "_exit", "(", "os", ".", "EX_OK", ")", "# FIXME: This seems redundant", "try", ":", "setproctitle", ".", "setproctitle", "(", "self", ".", "name", ")", "self", ".", "_debug", "(", "'Process title has been set'", ")", "files_preserve", "=", "(", "self", ".", "files_preserve", "+", "self", ".", "_get_logger_file_handles", "(", ")", ")", "signal_map", "=", "{", "s", ":", "on_signal", "for", "s", "in", "self", ".", "_signal_events", "}", "signal_map", ".", "update", "(", "{", "signal", ".", "SIGTTIN", ":", "None", ",", "signal", ".", "SIGTTOU", ":", "None", ",", "signal", ".", "SIGTSTP", ":", "None", ",", "}", ")", "with", "DaemonContext", "(", "detach_process", "=", "False", ",", "signal_map", "=", "signal_map", ",", "files_preserve", "=", "files_preserve", ")", ":", "self", ".", "_debug", "(", "'Daemon context has been established'", ")", "# Python's signal handling mechanism only forwards signals to", "# the main thread and only when that thread is doing something", "# (e.g. not when it's waiting for a lock, etc.). If we use the", "# main thread for the ``run`` method this means that we cannot", "# use the synchronization devices from ``threading`` for", "# communicating the reception of SIGTERM to ``run``. Hence we", "# use a separate thread for ``run`` and make sure that the", "# main loop receives signals. 
See", "# https://bugs.python.org/issue1167930", "thread", "=", "threading", ".", "Thread", "(", "target", "=", "runner", ")", "thread", ".", "start", "(", ")", "while", "thread", ".", "is_alive", "(", ")", ":", "time", ".", "sleep", "(", "1", ")", "except", "Exception", "as", "e", ":", "self", ".", "logger", ".", "exception", "(", "e", ")", "# We need to shutdown the daemon process at this point, because", "# otherwise it will continue executing from after the original", "# call to ``start``.", "os", ".", "_exit", "(", "os", ".", "EX_OK", ")" ]
Start the daemon process. The daemon process is started in the background and the calling process returns. Once the daemon process is initialized it calls the :py:meth:`run` method. If ``block`` is true then the call blocks until the daemon process has started. ``block`` can either be ``True`` (in which case it blocks indefinitely) or a timeout in seconds. The return value is ``True`` if the daemon process has been started and ``False`` otherwise. .. versionadded:: 0.3 The ``block`` parameter
[ "Start", "the", "daemon", "process", "." ]
d0dd824fce9237825c1943b30cd14f7b0f5957a6
https://github.com/torfsen/service/blob/d0dd824fce9237825c1943b30cd14f7b0f5957a6/src/service/__init__.py#L439-L544
train
datacats/datacats
datacats/cli/create.py
create
def create(opts): """Create a new environment Usage: datacats create [-bin] [--interactive] [-s NAME] [--address=IP] [--syslog] [--ckan=CKAN_VERSION] [--no-datapusher] [--site-url SITE_URL] [--no-init-db] ENVIRONMENT_DIR [PORT] Options: --address=IP Address to listen on (Linux-only) --ckan=CKAN_VERSION Use CKAN version CKAN_VERSION [default: 2.4] -b --bare Bare CKAN site with no example extension -i --image-only Create the environment but don't start containers --interactive Doesn't detach from the web container --no-datapusher Don't install/enable ckanext-datapusher --no-init-db Don't initialize the database. Useful for importing CKANs. -n --no-sysadmin Don't prompt for an initial sysadmin user account -s --site=NAME Pick a site to create [default: primary] --site-url SITE_URL The site_url to use in API responses (e.g. http://example.org:{port}/) --syslog Log to the syslog ENVIRONMENT_DIR is a path for the new environment directory. The last part of this path will be used as the environment name. """ if opts['--address'] and is_boot2docker(): raise DatacatsError('Cannot specify address on boot2docker.') return create_environment( environment_dir=opts['ENVIRONMENT_DIR'], port=opts['PORT'], create_skin=not opts['--bare'], start_web=not opts['--image-only'], create_sysadmin=not opts['--no-sysadmin'], site_name=opts['--site'], ckan_version=opts['--ckan'], address=opts['--address'], log_syslog=opts['--syslog'], datapusher=not opts['--no-datapusher'], site_url=opts['--site-url'], interactive=opts['--interactive'], init_db=not opts['--no-init-db'], )
python
def create(opts): """Create a new environment Usage: datacats create [-bin] [--interactive] [-s NAME] [--address=IP] [--syslog] [--ckan=CKAN_VERSION] [--no-datapusher] [--site-url SITE_URL] [--no-init-db] ENVIRONMENT_DIR [PORT] Options: --address=IP Address to listen on (Linux-only) --ckan=CKAN_VERSION Use CKAN version CKAN_VERSION [default: 2.4] -b --bare Bare CKAN site with no example extension -i --image-only Create the environment but don't start containers --interactive Doesn't detach from the web container --no-datapusher Don't install/enable ckanext-datapusher --no-init-db Don't initialize the database. Useful for importing CKANs. -n --no-sysadmin Don't prompt for an initial sysadmin user account -s --site=NAME Pick a site to create [default: primary] --site-url SITE_URL The site_url to use in API responses (e.g. http://example.org:{port}/) --syslog Log to the syslog ENVIRONMENT_DIR is a path for the new environment directory. The last part of this path will be used as the environment name. """ if opts['--address'] and is_boot2docker(): raise DatacatsError('Cannot specify address on boot2docker.') return create_environment( environment_dir=opts['ENVIRONMENT_DIR'], port=opts['PORT'], create_skin=not opts['--bare'], start_web=not opts['--image-only'], create_sysadmin=not opts['--no-sysadmin'], site_name=opts['--site'], ckan_version=opts['--ckan'], address=opts['--address'], log_syslog=opts['--syslog'], datapusher=not opts['--no-datapusher'], site_url=opts['--site-url'], interactive=opts['--interactive'], init_db=not opts['--no-init-db'], )
[ "def", "create", "(", "opts", ")", ":", "if", "opts", "[", "'--address'", "]", "and", "is_boot2docker", "(", ")", ":", "raise", "DatacatsError", "(", "'Cannot specify address on boot2docker.'", ")", "return", "create_environment", "(", "environment_dir", "=", "opts", "[", "'ENVIRONMENT_DIR'", "]", ",", "port", "=", "opts", "[", "'PORT'", "]", ",", "create_skin", "=", "not", "opts", "[", "'--bare'", "]", ",", "start_web", "=", "not", "opts", "[", "'--image-only'", "]", ",", "create_sysadmin", "=", "not", "opts", "[", "'--no-sysadmin'", "]", ",", "site_name", "=", "opts", "[", "'--site'", "]", ",", "ckan_version", "=", "opts", "[", "'--ckan'", "]", ",", "address", "=", "opts", "[", "'--address'", "]", ",", "log_syslog", "=", "opts", "[", "'--syslog'", "]", ",", "datapusher", "=", "not", "opts", "[", "'--no-datapusher'", "]", ",", "site_url", "=", "opts", "[", "'--site-url'", "]", ",", "interactive", "=", "opts", "[", "'--interactive'", "]", ",", "init_db", "=", "not", "opts", "[", "'--no-init-db'", "]", ",", ")" ]
Create a new environment Usage: datacats create [-bin] [--interactive] [-s NAME] [--address=IP] [--syslog] [--ckan=CKAN_VERSION] [--no-datapusher] [--site-url SITE_URL] [--no-init-db] ENVIRONMENT_DIR [PORT] Options: --address=IP Address to listen on (Linux-only) --ckan=CKAN_VERSION Use CKAN version CKAN_VERSION [default: 2.4] -b --bare Bare CKAN site with no example extension -i --image-only Create the environment but don't start containers --interactive Doesn't detach from the web container --no-datapusher Don't install/enable ckanext-datapusher --no-init-db Don't initialize the database. Useful for importing CKANs. -n --no-sysadmin Don't prompt for an initial sysadmin user account -s --site=NAME Pick a site to create [default: primary] --site-url SITE_URL The site_url to use in API responses (e.g. http://example.org:{port}/) --syslog Log to the syslog ENVIRONMENT_DIR is a path for the new environment directory. The last part of this path will be used as the environment name.
[ "Create", "a", "new", "environment" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/create.py#L23-L63
train
datacats/datacats
datacats/cli/create.py
reset
def reset(environment, opts): """Resets a site to the default state. This will re-initialize the database and recreate the administrator account. Usage: datacats reset [-iyn] [-s NAME] [ENVIRONMENT] Options: -i --interactive Don't detach from the web container -s --site=NAME The site to reset [default: primary] -y --yes Respond yes to all questions -n --no-sysadmin Don't prompt for a sysadmin password""" # pylint: disable=unused-argument if not opts['--yes']: y_or_n_prompt('Reset will remove all data related to the ' 'site {} and recreate the database'.format(opts['--site'])) print 'Resetting...' environment.stop_supporting_containers() environment.stop_ckan() clean_pyc(environment) # Save the port. saved_port = environment.port environment.purge_data([opts['--site']], never_delete=True) init({ 'ENVIRONMENT_DIR': opts['ENVIRONMENT'], '--site': opts['--site'], 'PORT': saved_port, '--syslog': None, '--address': None, '--image-only': False, '--interactive': opts['--interactive'], '--no-init-db': False, '--no-sysadmin': opts['--no-sysadmin'], '--site-url': None }, no_install=True)
python
def reset(environment, opts): """Resets a site to the default state. This will re-initialize the database and recreate the administrator account. Usage: datacats reset [-iyn] [-s NAME] [ENVIRONMENT] Options: -i --interactive Don't detach from the web container -s --site=NAME The site to reset [default: primary] -y --yes Respond yes to all questions -n --no-sysadmin Don't prompt for a sysadmin password""" # pylint: disable=unused-argument if not opts['--yes']: y_or_n_prompt('Reset will remove all data related to the ' 'site {} and recreate the database'.format(opts['--site'])) print 'Resetting...' environment.stop_supporting_containers() environment.stop_ckan() clean_pyc(environment) # Save the port. saved_port = environment.port environment.purge_data([opts['--site']], never_delete=True) init({ 'ENVIRONMENT_DIR': opts['ENVIRONMENT'], '--site': opts['--site'], 'PORT': saved_port, '--syslog': None, '--address': None, '--image-only': False, '--interactive': opts['--interactive'], '--no-init-db': False, '--no-sysadmin': opts['--no-sysadmin'], '--site-url': None }, no_install=True)
[ "def", "reset", "(", "environment", ",", "opts", ")", ":", "# pylint: disable=unused-argument", "if", "not", "opts", "[", "'--yes'", "]", ":", "y_or_n_prompt", "(", "'Reset will remove all data related to the '", "'site {} and recreate the database'", ".", "format", "(", "opts", "[", "'--site'", "]", ")", ")", "print", "'Resetting...'", "environment", ".", "stop_supporting_containers", "(", ")", "environment", ".", "stop_ckan", "(", ")", "clean_pyc", "(", "environment", ")", "# Save the port.", "saved_port", "=", "environment", ".", "port", "environment", ".", "purge_data", "(", "[", "opts", "[", "'--site'", "]", "]", ",", "never_delete", "=", "True", ")", "init", "(", "{", "'ENVIRONMENT_DIR'", ":", "opts", "[", "'ENVIRONMENT'", "]", ",", "'--site'", ":", "opts", "[", "'--site'", "]", ",", "'PORT'", ":", "saved_port", ",", "'--syslog'", ":", "None", ",", "'--address'", ":", "None", ",", "'--image-only'", ":", "False", ",", "'--interactive'", ":", "opts", "[", "'--interactive'", "]", ",", "'--no-init-db'", ":", "False", ",", "'--no-sysadmin'", ":", "opts", "[", "'--no-sysadmin'", "]", ",", "'--site-url'", ":", "None", "}", ",", "no_install", "=", "True", ")" ]
Resets a site to the default state. This will re-initialize the database and recreate the administrator account. Usage: datacats reset [-iyn] [-s NAME] [ENVIRONMENT] Options: -i --interactive Don't detach from the web container -s --site=NAME The site to reset [default: primary] -y --yes Respond yes to all questions -n --no-sysadmin Don't prompt for a sysadmin password
[ "Resets", "a", "site", "to", "the", "default", "state", ".", "This", "will", "re", "-", "initialize", "the", "database", "and", "recreate", "the", "administrator", "account", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/create.py#L122-L157
train
datacats/datacats
datacats/cli/create.py
init
def init(opts, no_install=False, quiet=False): """Initialize a purged environment or copied environment directory Usage: datacats init [-in] [--syslog] [-s NAME] [--address=IP] [--interactive] [--site-url SITE_URL] [ENVIRONMENT_DIR [PORT]] [--no-init-db] Options: --address=IP Address to listen on (Linux-only) --interactive Don't detach from the web container -i --image-only Create the environment but don't start containers --no-init-db Don't initialize the database. Useful for importing other CKANs -n --no-sysadmin Don't prompt for an initial sysadmin user account -s --site=NAME Pick a site to initialize [default: primary] --site-url SITE_URL The site_url to use in API responses (e.g. http://example.org:{port}/) --syslog Log to the syslog ENVIRONMENT_DIR is an existing datacats environment directory. Defaults to '.' """ if opts['--address'] and is_boot2docker(): raise DatacatsError('Cannot specify address on boot2docker.') environment_dir = opts['ENVIRONMENT_DIR'] port = opts['PORT'] address = opts['--address'] start_web = not opts['--image-only'] create_sysadmin = not opts['--no-sysadmin'] site_name = opts['--site'] site_url = opts['--site-url'] interactive = opts['--interactive'] init_db = not opts['--no-init-db'] environment_dir = abspath(environment_dir or '.') log_syslog = opts['--syslog'] environment = Environment.load(environment_dir, site_name) if address: environment.address = address if port: environment.port = int(port) if site_url: environment.site_url = site_url try: if environment.sites and site_name in environment.sites: raise DatacatsError('Site named {0} already exists.' 
.format(site_name)) # There are a couple of steps we can/must skip if we're making a sub-site only making_full_environment = not environment.data_exists() if not quiet: write('Creating environment {0}/{1} ' 'from existing environment directory "{0}"' .format(environment.name, environment.site_name)) steps = [ lambda: environment.create_directories(create_project_dir=False)] + ([ environment.save, environment.create_virtualenv ] if making_full_environment else []) + [ environment.save_site, environment.start_supporting_containers, environment.fix_storage_permissions, ] for fn in steps: fn() if not quiet: write('.') if not quiet: write('\n') except: if not quiet: print raise return finish_init(environment, start_web, create_sysadmin, log_syslog=log_syslog, do_install=not no_install, quiet=quiet, site_url=site_url, interactive=interactive, init_db=init_db)
python
def init(opts, no_install=False, quiet=False): """Initialize a purged environment or copied environment directory Usage: datacats init [-in] [--syslog] [-s NAME] [--address=IP] [--interactive] [--site-url SITE_URL] [ENVIRONMENT_DIR [PORT]] [--no-init-db] Options: --address=IP Address to listen on (Linux-only) --interactive Don't detach from the web container -i --image-only Create the environment but don't start containers --no-init-db Don't initialize the database. Useful for importing other CKANs -n --no-sysadmin Don't prompt for an initial sysadmin user account -s --site=NAME Pick a site to initialize [default: primary] --site-url SITE_URL The site_url to use in API responses (e.g. http://example.org:{port}/) --syslog Log to the syslog ENVIRONMENT_DIR is an existing datacats environment directory. Defaults to '.' """ if opts['--address'] and is_boot2docker(): raise DatacatsError('Cannot specify address on boot2docker.') environment_dir = opts['ENVIRONMENT_DIR'] port = opts['PORT'] address = opts['--address'] start_web = not opts['--image-only'] create_sysadmin = not opts['--no-sysadmin'] site_name = opts['--site'] site_url = opts['--site-url'] interactive = opts['--interactive'] init_db = not opts['--no-init-db'] environment_dir = abspath(environment_dir or '.') log_syslog = opts['--syslog'] environment = Environment.load(environment_dir, site_name) if address: environment.address = address if port: environment.port = int(port) if site_url: environment.site_url = site_url try: if environment.sites and site_name in environment.sites: raise DatacatsError('Site named {0} already exists.' 
.format(site_name)) # There are a couple of steps we can/must skip if we're making a sub-site only making_full_environment = not environment.data_exists() if not quiet: write('Creating environment {0}/{1} ' 'from existing environment directory "{0}"' .format(environment.name, environment.site_name)) steps = [ lambda: environment.create_directories(create_project_dir=False)] + ([ environment.save, environment.create_virtualenv ] if making_full_environment else []) + [ environment.save_site, environment.start_supporting_containers, environment.fix_storage_permissions, ] for fn in steps: fn() if not quiet: write('.') if not quiet: write('\n') except: if not quiet: print raise return finish_init(environment, start_web, create_sysadmin, log_syslog=log_syslog, do_install=not no_install, quiet=quiet, site_url=site_url, interactive=interactive, init_db=init_db)
[ "def", "init", "(", "opts", ",", "no_install", "=", "False", ",", "quiet", "=", "False", ")", ":", "if", "opts", "[", "'--address'", "]", "and", "is_boot2docker", "(", ")", ":", "raise", "DatacatsError", "(", "'Cannot specify address on boot2docker.'", ")", "environment_dir", "=", "opts", "[", "'ENVIRONMENT_DIR'", "]", "port", "=", "opts", "[", "'PORT'", "]", "address", "=", "opts", "[", "'--address'", "]", "start_web", "=", "not", "opts", "[", "'--image-only'", "]", "create_sysadmin", "=", "not", "opts", "[", "'--no-sysadmin'", "]", "site_name", "=", "opts", "[", "'--site'", "]", "site_url", "=", "opts", "[", "'--site-url'", "]", "interactive", "=", "opts", "[", "'--interactive'", "]", "init_db", "=", "not", "opts", "[", "'--no-init-db'", "]", "environment_dir", "=", "abspath", "(", "environment_dir", "or", "'.'", ")", "log_syslog", "=", "opts", "[", "'--syslog'", "]", "environment", "=", "Environment", ".", "load", "(", "environment_dir", ",", "site_name", ")", "if", "address", ":", "environment", ".", "address", "=", "address", "if", "port", ":", "environment", ".", "port", "=", "int", "(", "port", ")", "if", "site_url", ":", "environment", ".", "site_url", "=", "site_url", "try", ":", "if", "environment", ".", "sites", "and", "site_name", "in", "environment", ".", "sites", ":", "raise", "DatacatsError", "(", "'Site named {0} already exists.'", ".", "format", "(", "site_name", ")", ")", "# There are a couple of steps we can/must skip if we're making a sub-site only", "making_full_environment", "=", "not", "environment", ".", "data_exists", "(", ")", "if", "not", "quiet", ":", "write", "(", "'Creating environment {0}/{1} '", "'from existing environment directory \"{0}\"'", ".", "format", "(", "environment", ".", "name", ",", "environment", ".", "site_name", ")", ")", "steps", "=", "[", "lambda", ":", "environment", ".", "create_directories", "(", "create_project_dir", "=", "False", ")", "]", "+", "(", "[", "environment", ".", "save", ",", "environment", ".", 
"create_virtualenv", "]", "if", "making_full_environment", "else", "[", "]", ")", "+", "[", "environment", ".", "save_site", ",", "environment", ".", "start_supporting_containers", ",", "environment", ".", "fix_storage_permissions", ",", "]", "for", "fn", "in", "steps", ":", "fn", "(", ")", "if", "not", "quiet", ":", "write", "(", "'.'", ")", "if", "not", "quiet", ":", "write", "(", "'\\n'", ")", "except", ":", "if", "not", "quiet", ":", "print", "raise", "return", "finish_init", "(", "environment", ",", "start_web", ",", "create_sysadmin", ",", "log_syslog", "=", "log_syslog", ",", "do_install", "=", "not", "no_install", ",", "quiet", "=", "quiet", ",", "site_url", "=", "site_url", ",", "interactive", "=", "interactive", ",", "init_db", "=", "init_db", ")" ]
Initialize a purged environment or copied environment directory Usage: datacats init [-in] [--syslog] [-s NAME] [--address=IP] [--interactive] [--site-url SITE_URL] [ENVIRONMENT_DIR [PORT]] [--no-init-db] Options: --address=IP Address to listen on (Linux-only) --interactive Don't detach from the web container -i --image-only Create the environment but don't start containers --no-init-db Don't initialize the database. Useful for importing other CKANs -n --no-sysadmin Don't prompt for an initial sysadmin user account -s --site=NAME Pick a site to initialize [default: primary] --site-url SITE_URL The site_url to use in API responses (e.g. http://example.org:{port}/) --syslog Log to the syslog ENVIRONMENT_DIR is an existing datacats environment directory. Defaults to '.'
[ "Initialize", "a", "purged", "environment", "or", "copied", "environment", "directory" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/create.py#L160-L237
train
datacats/datacats
datacats/cli/create.py
finish_init
def finish_init(environment, start_web, create_sysadmin, log_syslog=False, do_install=True, quiet=False, site_url=None, interactive=False, init_db=True): """ Common parts of create and init: Install, init db, start site, sysadmin """ if not init_db: start_web = False create_sysadmin = False if do_install: install_all(environment, False, verbose=False, quiet=quiet) if init_db: if not quiet: write('Initializing database') environment.install_postgis_sql() environment.ckan_db_init() if not quiet: write('\n') if site_url: try: site_url = site_url.format(address=environment.address, port=environment.port) environment.site_url = site_url environment.save_site(False) except (KeyError, IndexError, ValueError) as e: raise DatacatsError('Could not parse site_url: {}'.format(e)) if start_web: environment.start_ckan(log_syslog=log_syslog) if not quiet and not interactive: write('Starting web server at {0} ...\n'.format( environment.web_address())) if create_sysadmin: try: adminpw = confirm_password() environment.create_admin_set_password(adminpw) except KeyboardInterrupt: print if not start_web: environment.stop_supporting_containers()
python
def finish_init(environment, start_web, create_sysadmin, log_syslog=False, do_install=True, quiet=False, site_url=None, interactive=False, init_db=True): """ Common parts of create and init: Install, init db, start site, sysadmin """ if not init_db: start_web = False create_sysadmin = False if do_install: install_all(environment, False, verbose=False, quiet=quiet) if init_db: if not quiet: write('Initializing database') environment.install_postgis_sql() environment.ckan_db_init() if not quiet: write('\n') if site_url: try: site_url = site_url.format(address=environment.address, port=environment.port) environment.site_url = site_url environment.save_site(False) except (KeyError, IndexError, ValueError) as e: raise DatacatsError('Could not parse site_url: {}'.format(e)) if start_web: environment.start_ckan(log_syslog=log_syslog) if not quiet and not interactive: write('Starting web server at {0} ...\n'.format( environment.web_address())) if create_sysadmin: try: adminpw = confirm_password() environment.create_admin_set_password(adminpw) except KeyboardInterrupt: print if not start_web: environment.stop_supporting_containers()
[ "def", "finish_init", "(", "environment", ",", "start_web", ",", "create_sysadmin", ",", "log_syslog", "=", "False", ",", "do_install", "=", "True", ",", "quiet", "=", "False", ",", "site_url", "=", "None", ",", "interactive", "=", "False", ",", "init_db", "=", "True", ")", ":", "if", "not", "init_db", ":", "start_web", "=", "False", "create_sysadmin", "=", "False", "if", "do_install", ":", "install_all", "(", "environment", ",", "False", ",", "verbose", "=", "False", ",", "quiet", "=", "quiet", ")", "if", "init_db", ":", "if", "not", "quiet", ":", "write", "(", "'Initializing database'", ")", "environment", ".", "install_postgis_sql", "(", ")", "environment", ".", "ckan_db_init", "(", ")", "if", "not", "quiet", ":", "write", "(", "'\\n'", ")", "if", "site_url", ":", "try", ":", "site_url", "=", "site_url", ".", "format", "(", "address", "=", "environment", ".", "address", ",", "port", "=", "environment", ".", "port", ")", "environment", ".", "site_url", "=", "site_url", "environment", ".", "save_site", "(", "False", ")", "except", "(", "KeyError", ",", "IndexError", ",", "ValueError", ")", "as", "e", ":", "raise", "DatacatsError", "(", "'Could not parse site_url: {}'", ".", "format", "(", "e", ")", ")", "if", "start_web", ":", "environment", ".", "start_ckan", "(", "log_syslog", "=", "log_syslog", ")", "if", "not", "quiet", "and", "not", "interactive", ":", "write", "(", "'Starting web server at {0} ...\\n'", ".", "format", "(", "environment", ".", "web_address", "(", ")", ")", ")", "if", "create_sysadmin", ":", "try", ":", "adminpw", "=", "confirm_password", "(", ")", "environment", ".", "create_admin_set_password", "(", "adminpw", ")", "except", "KeyboardInterrupt", ":", "print", "if", "not", "start_web", ":", "environment", ".", "stop_supporting_containers", "(", ")" ]
Common parts of create and init: Install, init db, start site, sysadmin
[ "Common", "parts", "of", "create", "and", "init", ":", "Install", "init", "db", "start", "site", "sysadmin" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/create.py#L240-L283
train
datacats/datacats
datacats/userprofile.py
UserProfile.save
def save(self): """ Save profile settings into user profile directory """ config = self.profiledir + '/config' if not isdir(self.profiledir): makedirs(self.profiledir) cp = SafeConfigParser() cp.add_section('ssh') cp.set('ssh', 'private_key', self.ssh_private_key) cp.set('ssh', 'public_key', self.ssh_public_key) with open(config, 'w') as cfile: cp.write(cfile)
python
def save(self): """ Save profile settings into user profile directory """ config = self.profiledir + '/config' if not isdir(self.profiledir): makedirs(self.profiledir) cp = SafeConfigParser() cp.add_section('ssh') cp.set('ssh', 'private_key', self.ssh_private_key) cp.set('ssh', 'public_key', self.ssh_public_key) with open(config, 'w') as cfile: cp.write(cfile)
[ "def", "save", "(", "self", ")", ":", "config", "=", "self", ".", "profiledir", "+", "'/config'", "if", "not", "isdir", "(", "self", ".", "profiledir", ")", ":", "makedirs", "(", "self", ".", "profiledir", ")", "cp", "=", "SafeConfigParser", "(", ")", "cp", ".", "add_section", "(", "'ssh'", ")", "cp", ".", "set", "(", "'ssh'", ",", "'private_key'", ",", "self", ".", "ssh_private_key", ")", "cp", ".", "set", "(", "'ssh'", ",", "'public_key'", ",", "self", ".", "ssh_public_key", ")", "with", "open", "(", "config", ",", "'w'", ")", "as", "cfile", ":", "cp", ".", "write", "(", "cfile", ")" ]
Save profile settings into user profile directory
[ "Save", "profile", "settings", "into", "user", "profile", "directory" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/userprofile.py#L60-L75
train
datacats/datacats
datacats/userprofile.py
UserProfile.generate_ssh_key
def generate_ssh_key(self): """ Generate a new ssh private and public key """ web_command( command=["ssh-keygen", "-q", "-t", "rsa", "-N", "", "-C", "datacats generated {0}@{1}".format( getuser(), gethostname()), "-f", "/output/id_rsa"], rw={self.profiledir: '/output'}, )
python
def generate_ssh_key(self): """ Generate a new ssh private and public key """ web_command( command=["ssh-keygen", "-q", "-t", "rsa", "-N", "", "-C", "datacats generated {0}@{1}".format( getuser(), gethostname()), "-f", "/output/id_rsa"], rw={self.profiledir: '/output'}, )
[ "def", "generate_ssh_key", "(", "self", ")", ":", "web_command", "(", "command", "=", "[", "\"ssh-keygen\"", ",", "\"-q\"", ",", "\"-t\"", ",", "\"rsa\"", ",", "\"-N\"", ",", "\"\"", ",", "\"-C\"", ",", "\"datacats generated {0}@{1}\"", ".", "format", "(", "getuser", "(", ")", ",", "gethostname", "(", ")", ")", ",", "\"-f\"", ",", "\"/output/id_rsa\"", "]", ",", "rw", "=", "{", "self", ".", "profiledir", ":", "'/output'", "}", ",", ")" ]
Generate a new ssh private and public key
[ "Generate", "a", "new", "ssh", "private", "and", "public", "key" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/userprofile.py#L77-L87
train
datacats/datacats
datacats/userprofile.py
UserProfile.create
def create(self, environment, target_name): """ Sends "create project" command to the remote server """ remote_server_command( ["ssh", environment.deploy_target, "create", target_name], environment, self, clean_up=True, )
python
def create(self, environment, target_name): """ Sends "create project" command to the remote server """ remote_server_command( ["ssh", environment.deploy_target, "create", target_name], environment, self, clean_up=True, )
[ "def", "create", "(", "self", ",", "environment", ",", "target_name", ")", ":", "remote_server_command", "(", "[", "\"ssh\"", ",", "environment", ".", "deploy_target", ",", "\"create\"", ",", "target_name", "]", ",", "environment", ",", "self", ",", "clean_up", "=", "True", ",", ")" ]
Sends "create project" command to the remote server
[ "Sends", "create", "project", "command", "to", "the", "remote", "server" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/userprofile.py#L127-L135
train
datacats/datacats
datacats/userprofile.py
UserProfile.admin_password
def admin_password(self, environment, target_name, password): """ Return True if password was set successfully """ try: remote_server_command( ["ssh", environment.deploy_target, "admin_password", target_name, password], environment, self, clean_up=True ) return True except WebCommandError: return False
python
def admin_password(self, environment, target_name, password): """ Return True if password was set successfully """ try: remote_server_command( ["ssh", environment.deploy_target, "admin_password", target_name, password], environment, self, clean_up=True ) return True except WebCommandError: return False
[ "def", "admin_password", "(", "self", ",", "environment", ",", "target_name", ",", "password", ")", ":", "try", ":", "remote_server_command", "(", "[", "\"ssh\"", ",", "environment", ".", "deploy_target", ",", "\"admin_password\"", ",", "target_name", ",", "password", "]", ",", "environment", ",", "self", ",", "clean_up", "=", "True", ")", "return", "True", "except", "WebCommandError", ":", "return", "False" ]
Return True if password was set successfully
[ "Return", "True", "if", "password", "was", "set", "successfully" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/userprofile.py#L137-L150
train
datacats/datacats
datacats/userprofile.py
UserProfile.deploy
def deploy(self, environment, target_name, stream_output=None): """ Return True if deployment was successful """ try: remote_server_command( [ "rsync", "-lrv", "--safe-links", "--munge-links", "--delete", "--inplace", "--chmod=ugo=rwX", "--exclude=.datacats-environment", "--exclude=.git", "/project/.", environment.deploy_target + ':' + target_name ], environment, self, include_project_dir=True, stream_output=stream_output, clean_up=True, ) except WebCommandError as e: raise DatacatsError( "Unable to deploy `{0}` to remote server for some reason:\n" " datacats was not able to copy data to the remote server" .format((target_name,)), parent_exception=e ) try: remote_server_command( [ "ssh", environment.deploy_target, "install", target_name, ], environment, self, clean_up=True, ) return True except WebCommandError as e: raise DatacatsError( "Unable to deploy `{0}` to remote server for some reason:\n" "datacats copied data to the server but failed to register\n" "(or `install`) the new catalog" .format((target_name,)), parent_exception=e )
python
def deploy(self, environment, target_name, stream_output=None): """ Return True if deployment was successful """ try: remote_server_command( [ "rsync", "-lrv", "--safe-links", "--munge-links", "--delete", "--inplace", "--chmod=ugo=rwX", "--exclude=.datacats-environment", "--exclude=.git", "/project/.", environment.deploy_target + ':' + target_name ], environment, self, include_project_dir=True, stream_output=stream_output, clean_up=True, ) except WebCommandError as e: raise DatacatsError( "Unable to deploy `{0}` to remote server for some reason:\n" " datacats was not able to copy data to the remote server" .format((target_name,)), parent_exception=e ) try: remote_server_command( [ "ssh", environment.deploy_target, "install", target_name, ], environment, self, clean_up=True, ) return True except WebCommandError as e: raise DatacatsError( "Unable to deploy `{0}` to remote server for some reason:\n" "datacats copied data to the server but failed to register\n" "(or `install`) the new catalog" .format((target_name,)), parent_exception=e )
[ "def", "deploy", "(", "self", ",", "environment", ",", "target_name", ",", "stream_output", "=", "None", ")", ":", "try", ":", "remote_server_command", "(", "[", "\"rsync\"", ",", "\"-lrv\"", ",", "\"--safe-links\"", ",", "\"--munge-links\"", ",", "\"--delete\"", ",", "\"--inplace\"", ",", "\"--chmod=ugo=rwX\"", ",", "\"--exclude=.datacats-environment\"", ",", "\"--exclude=.git\"", ",", "\"/project/.\"", ",", "environment", ".", "deploy_target", "+", "':'", "+", "target_name", "]", ",", "environment", ",", "self", ",", "include_project_dir", "=", "True", ",", "stream_output", "=", "stream_output", ",", "clean_up", "=", "True", ",", ")", "except", "WebCommandError", "as", "e", ":", "raise", "DatacatsError", "(", "\"Unable to deploy `{0}` to remote server for some reason:\\n\"", "\" datacats was not able to copy data to the remote server\"", ".", "format", "(", "(", "target_name", ",", ")", ")", ",", "parent_exception", "=", "e", ")", "try", ":", "remote_server_command", "(", "[", "\"ssh\"", ",", "environment", ".", "deploy_target", ",", "\"install\"", ",", "target_name", ",", "]", ",", "environment", ",", "self", ",", "clean_up", "=", "True", ",", ")", "return", "True", "except", "WebCommandError", "as", "e", ":", "raise", "DatacatsError", "(", "\"Unable to deploy `{0}` to remote server for some reason:\\n\"", "\"datacats copied data to the server but failed to register\\n\"", "\"(or `install`) the new catalog\"", ".", "format", "(", "(", "target_name", ",", ")", ")", ",", "parent_exception", "=", "e", ")" ]
Return True if deployment was successful
[ "Return", "True", "if", "deployment", "was", "successful" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/userprofile.py#L152-L195
train
Britefury/batchup
batchup/sampling.py
num_batches
def num_batches(n, batch_size): """Compute the number of mini-batches required to cover a data set of size `n` using batches of size `batch_size`. Parameters ---------- n: int the number of samples in the data set batch_size: int the mini-batch size Returns ------- int: the number of batches required """ b = n // batch_size if n % batch_size > 0: b += 1 return b
python
def num_batches(n, batch_size): """Compute the number of mini-batches required to cover a data set of size `n` using batches of size `batch_size`. Parameters ---------- n: int the number of samples in the data set batch_size: int the mini-batch size Returns ------- int: the number of batches required """ b = n // batch_size if n % batch_size > 0: b += 1 return b
[ "def", "num_batches", "(", "n", ",", "batch_size", ")", ":", "b", "=", "n", "//", "batch_size", "if", "n", "%", "batch_size", ">", "0", ":", "b", "+=", "1", "return", "b" ]
Compute the number of mini-batches required to cover a data set of size `n` using batches of size `batch_size`. Parameters ---------- n: int the number of samples in the data set batch_size: int the mini-batch size Returns ------- int: the number of batches required
[ "Compute", "the", "number", "of", "mini", "-", "batches", "required", "to", "cover", "a", "data", "set", "of", "size", "n", "using", "batches", "of", "size", "batch_size", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L10-L28
train
Britefury/batchup
batchup/sampling.py
StandardSampler.num_indices_generated
def num_indices_generated(self): """ Get the number of indices that would be generated by this sampler. Returns ------- int, `np.inf` or `None`. An int if the number of samples is known, `np.inf` if it is infinite or `None` if the number of samples is unknown. """ if self.repeats == -1: return np.inf else: return self.length * self.repeats
python
def num_indices_generated(self): """ Get the number of indices that would be generated by this sampler. Returns ------- int, `np.inf` or `None`. An int if the number of samples is known, `np.inf` if it is infinite or `None` if the number of samples is unknown. """ if self.repeats == -1: return np.inf else: return self.length * self.repeats
[ "def", "num_indices_generated", "(", "self", ")", ":", "if", "self", ".", "repeats", "==", "-", "1", ":", "return", "np", ".", "inf", "else", ":", "return", "self", ".", "length", "*", "self", ".", "repeats" ]
Get the number of indices that would be generated by this sampler. Returns ------- int, `np.inf` or `None`. An int if the number of samples is known, `np.inf` if it is infinite or `None` if the number of samples is unknown.
[ "Get", "the", "number", "of", "indices", "that", "would", "be", "generated", "by", "this", "sampler", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L137-L151
train
Britefury/batchup
batchup/sampling.py
StandardSampler.in_order_indices_batch_iterator
def in_order_indices_batch_iterator(self, batch_size): """ Create an iterator that generates in-order mini-batches of sample indices. The batches will have `batch_size` elements, with the exception of the final batch which will have less if there are not enough samples left to fill it. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ if self.repeats == 1: for i in range(0, self.length, batch_size): yield np.arange(i, min(i + batch_size, self.length)) else: repeats = self.repeats i = 0 while True: j = i + batch_size if j <= self.length: # Within size of data yield np.arange(i, j) i = j elif j <= self.length * 2: # One restart is required # Reduce the number of remaining repeats if repeats != -1: repeats -= 1 if repeats == 0: # Finished; emit remaining elements if i < self.length: yield np.arange(i, self.length) break # Wrap over # Compute number of elements required to make up # the batch k = batch_size - (self.length - i) yield np.append(np.arange(i, self.length), np.arange(0, k), axis=0) i = k else: # Multiple restarts required to fill the batch batch_ndx = np.arange(0) # i = 0 while len(batch_ndx) < batch_size: # Wrap over k = min(batch_size - len(batch_ndx), self.length - i) batch_ndx = np.append( batch_ndx, np.arange(i, i + k), axis=0) i += k if i >= self.length: i -= self.length # Reduce the number of remaining repeats if repeats != -1: repeats -= 1 if repeats == 0: break if len(batch_ndx) > 0: yield batch_ndx if repeats == 0: break
python
def in_order_indices_batch_iterator(self, batch_size): """ Create an iterator that generates in-order mini-batches of sample indices. The batches will have `batch_size` elements, with the exception of the final batch which will have less if there are not enough samples left to fill it. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ if self.repeats == 1: for i in range(0, self.length, batch_size): yield np.arange(i, min(i + batch_size, self.length)) else: repeats = self.repeats i = 0 while True: j = i + batch_size if j <= self.length: # Within size of data yield np.arange(i, j) i = j elif j <= self.length * 2: # One restart is required # Reduce the number of remaining repeats if repeats != -1: repeats -= 1 if repeats == 0: # Finished; emit remaining elements if i < self.length: yield np.arange(i, self.length) break # Wrap over # Compute number of elements required to make up # the batch k = batch_size - (self.length - i) yield np.append(np.arange(i, self.length), np.arange(0, k), axis=0) i = k else: # Multiple restarts required to fill the batch batch_ndx = np.arange(0) # i = 0 while len(batch_ndx) < batch_size: # Wrap over k = min(batch_size - len(batch_ndx), self.length - i) batch_ndx = np.append( batch_ndx, np.arange(i, i + k), axis=0) i += k if i >= self.length: i -= self.length # Reduce the number of remaining repeats if repeats != -1: repeats -= 1 if repeats == 0: break if len(batch_ndx) > 0: yield batch_ndx if repeats == 0: break
[ "def", "in_order_indices_batch_iterator", "(", "self", ",", "batch_size", ")", ":", "if", "self", ".", "repeats", "==", "1", ":", "for", "i", "in", "range", "(", "0", ",", "self", ".", "length", ",", "batch_size", ")", ":", "yield", "np", ".", "arange", "(", "i", ",", "min", "(", "i", "+", "batch_size", ",", "self", ".", "length", ")", ")", "else", ":", "repeats", "=", "self", ".", "repeats", "i", "=", "0", "while", "True", ":", "j", "=", "i", "+", "batch_size", "if", "j", "<=", "self", ".", "length", ":", "# Within size of data", "yield", "np", ".", "arange", "(", "i", ",", "j", ")", "i", "=", "j", "elif", "j", "<=", "self", ".", "length", "*", "2", ":", "# One restart is required", "# Reduce the number of remaining repeats", "if", "repeats", "!=", "-", "1", ":", "repeats", "-=", "1", "if", "repeats", "==", "0", ":", "# Finished; emit remaining elements", "if", "i", "<", "self", ".", "length", ":", "yield", "np", ".", "arange", "(", "i", ",", "self", ".", "length", ")", "break", "# Wrap over", "# Compute number of elements required to make up", "# the batch", "k", "=", "batch_size", "-", "(", "self", ".", "length", "-", "i", ")", "yield", "np", ".", "append", "(", "np", ".", "arange", "(", "i", ",", "self", ".", "length", ")", ",", "np", ".", "arange", "(", "0", ",", "k", ")", ",", "axis", "=", "0", ")", "i", "=", "k", "else", ":", "# Multiple restarts required to fill the batch", "batch_ndx", "=", "np", ".", "arange", "(", "0", ")", "# i = 0", "while", "len", "(", "batch_ndx", ")", "<", "batch_size", ":", "# Wrap over", "k", "=", "min", "(", "batch_size", "-", "len", "(", "batch_ndx", ")", ",", "self", ".", "length", "-", "i", ")", "batch_ndx", "=", "np", ".", "append", "(", "batch_ndx", ",", "np", ".", "arange", "(", "i", ",", "i", "+", "k", ")", ",", "axis", "=", "0", ")", "i", "+=", "k", "if", "i", ">=", "self", ".", "length", ":", "i", "-=", "self", ".", "length", "# Reduce the number of remaining repeats", "if", "repeats", "!=", "-", "1", ":", "repeats", 
"-=", "1", "if", "repeats", "==", "0", ":", "break", "if", "len", "(", "batch_ndx", ")", ">", "0", ":", "yield", "batch_ndx", "if", "repeats", "==", "0", ":", "break" ]
Create an iterator that generates in-order mini-batches of sample indices. The batches will have `batch_size` elements, with the exception of the final batch which will have less if there are not enough samples left to fill it. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays.
[ "Create", "an", "iterator", "that", "generates", "in", "-", "order", "mini", "-", "batches", "of", "sample", "indices", ".", "The", "batches", "will", "have", "batch_size", "elements", "with", "the", "exception", "of", "the", "final", "batch", "which", "will", "have", "less", "if", "there", "are", "not", "enough", "samples", "left", "to", "fill", "it", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L153-L225
train
Britefury/batchup
batchup/sampling.py
StandardSampler.shuffled_indices_batch_iterator
def shuffled_indices_batch_iterator(self, batch_size, shuffle_rng): """ Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements, with the exception of the final batch which will have less if there are not enough samples left to fill it. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ if self.repeats == 1: indices = shuffle_rng.permutation(self.length) for i in range(0, self.length, batch_size): yield indices[i:i + batch_size] else: repeats = self.repeats indices = shuffle_rng.permutation(self.length) i = 0 while True: j = i + batch_size if j <= self.length: # Within size of data yield indices[i:j] i = j else: # Multiple restarts required to fill the batch batch_ndx = np.arange(0) while len(batch_ndx) < batch_size: # Wrap over k = min(batch_size - len(batch_ndx), self.length - i) batch_ndx = np.append( batch_ndx, indices[i:i + k], axis=0) i += k if i >= self.length: # Loop over; new permutation indices = shuffle_rng.permutation(self.length) i -= self.length # Reduce the number of remaining repeats if repeats != -1: repeats -= 1 if repeats == 0: break if len(batch_ndx) > 0: yield batch_ndx if repeats == 0: break
python
def shuffled_indices_batch_iterator(self, batch_size, shuffle_rng): """ Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements, with the exception of the final batch which will have less if there are not enough samples left to fill it. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ if self.repeats == 1: indices = shuffle_rng.permutation(self.length) for i in range(0, self.length, batch_size): yield indices[i:i + batch_size] else: repeats = self.repeats indices = shuffle_rng.permutation(self.length) i = 0 while True: j = i + batch_size if j <= self.length: # Within size of data yield indices[i:j] i = j else: # Multiple restarts required to fill the batch batch_ndx = np.arange(0) while len(batch_ndx) < batch_size: # Wrap over k = min(batch_size - len(batch_ndx), self.length - i) batch_ndx = np.append( batch_ndx, indices[i:i + k], axis=0) i += k if i >= self.length: # Loop over; new permutation indices = shuffle_rng.permutation(self.length) i -= self.length # Reduce the number of remaining repeats if repeats != -1: repeats -= 1 if repeats == 0: break if len(batch_ndx) > 0: yield batch_ndx if repeats == 0: break
[ "def", "shuffled_indices_batch_iterator", "(", "self", ",", "batch_size", ",", "shuffle_rng", ")", ":", "if", "self", ".", "repeats", "==", "1", ":", "indices", "=", "shuffle_rng", ".", "permutation", "(", "self", ".", "length", ")", "for", "i", "in", "range", "(", "0", ",", "self", ".", "length", ",", "batch_size", ")", ":", "yield", "indices", "[", "i", ":", "i", "+", "batch_size", "]", "else", ":", "repeats", "=", "self", ".", "repeats", "indices", "=", "shuffle_rng", ".", "permutation", "(", "self", ".", "length", ")", "i", "=", "0", "while", "True", ":", "j", "=", "i", "+", "batch_size", "if", "j", "<=", "self", ".", "length", ":", "# Within size of data", "yield", "indices", "[", "i", ":", "j", "]", "i", "=", "j", "else", ":", "# Multiple restarts required to fill the batch", "batch_ndx", "=", "np", ".", "arange", "(", "0", ")", "while", "len", "(", "batch_ndx", ")", "<", "batch_size", ":", "# Wrap over", "k", "=", "min", "(", "batch_size", "-", "len", "(", "batch_ndx", ")", ",", "self", ".", "length", "-", "i", ")", "batch_ndx", "=", "np", ".", "append", "(", "batch_ndx", ",", "indices", "[", "i", ":", "i", "+", "k", "]", ",", "axis", "=", "0", ")", "i", "+=", "k", "if", "i", ">=", "self", ".", "length", ":", "# Loop over; new permutation", "indices", "=", "shuffle_rng", ".", "permutation", "(", "self", ".", "length", ")", "i", "-=", "self", ".", "length", "# Reduce the number of remaining repeats", "if", "repeats", "!=", "-", "1", ":", "repeats", "-=", "1", "if", "repeats", "==", "0", ":", "break", "if", "len", "(", "batch_ndx", ")", ">", "0", ":", "yield", "batch_ndx", "if", "repeats", "==", "0", ":", "break" ]
Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements, with the exception of the final batch which will have less if there are not enough samples left to fill it. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays.
[ "Create", "an", "iterator", "that", "generates", "randomly", "shuffled", "mini", "-", "batches", "of", "sample", "indices", ".", "The", "batches", "will", "have", "batch_size", "elements", "with", "the", "exception", "of", "the", "final", "batch", "which", "will", "have", "less", "if", "there", "are", "not", "enough", "samples", "left", "to", "fill", "it", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L227-L287
train
Britefury/batchup
batchup/sampling.py
WeightedSampler.shuffled_indices_batch_iterator
def shuffled_indices_batch_iterator(self, batch_size, shuffle_rng): """ Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ while True: yield shuffle_rng.choice(len(self.weights), size=(batch_size,), p=self.weights)
python
def shuffled_indices_batch_iterator(self, batch_size, shuffle_rng): """ Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ while True: yield shuffle_rng.choice(len(self.weights), size=(batch_size,), p=self.weights)
[ "def", "shuffled_indices_batch_iterator", "(", "self", ",", "batch_size", ",", "shuffle_rng", ")", ":", "while", "True", ":", "yield", "shuffle_rng", ".", "choice", "(", "len", "(", "self", ".", "weights", ")", ",", "size", "=", "(", "batch_size", ",", ")", ",", "p", "=", "self", ".", "weights", ")" ]
Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays.
[ "Create", "an", "iterator", "that", "generates", "randomly", "shuffled", "mini", "-", "batches", "of", "sample", "indices", ".", "The", "batches", "will", "have", "batch_size", "elements", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L533-L556
train
Britefury/batchup
batchup/sampling.py
WeightedSampler.class_balancing_sample_weights
def class_balancing_sample_weights(y): """ Compute sample weight given an array of sample classes. The weights are assigned on a per-class basis and the per-class weights are inversely proportional to their frequency. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or positive Returns ------- NumPy array, 1D dtype=float per sample weight array """ h = np.bincount(y) cls_weight = 1.0 / (h.astype(float) * len(np.nonzero(h)[0])) cls_weight[np.isnan(cls_weight)] = 0.0 sample_weight = cls_weight[y] return sample_weight
python
def class_balancing_sample_weights(y): """ Compute sample weight given an array of sample classes. The weights are assigned on a per-class basis and the per-class weights are inversely proportional to their frequency. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or positive Returns ------- NumPy array, 1D dtype=float per sample weight array """ h = np.bincount(y) cls_weight = 1.0 / (h.astype(float) * len(np.nonzero(h)[0])) cls_weight[np.isnan(cls_weight)] = 0.0 sample_weight = cls_weight[y] return sample_weight
[ "def", "class_balancing_sample_weights", "(", "y", ")", ":", "h", "=", "np", ".", "bincount", "(", "y", ")", "cls_weight", "=", "1.0", "/", "(", "h", ".", "astype", "(", "float", ")", "*", "len", "(", "np", ".", "nonzero", "(", "h", ")", "[", "0", "]", ")", ")", "cls_weight", "[", "np", ".", "isnan", "(", "cls_weight", ")", "]", "=", "0.0", "sample_weight", "=", "cls_weight", "[", "y", "]", "return", "sample_weight" ]
Compute sample weight given an array of sample classes. The weights are assigned on a per-class basis and the per-class weights are inversely proportional to their frequency. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or positive Returns ------- NumPy array, 1D dtype=float per sample weight array
[ "Compute", "sample", "weight", "given", "an", "array", "of", "sample", "classes", ".", "The", "weights", "are", "assigned", "on", "a", "per", "-", "class", "basis", "and", "the", "per", "-", "class", "weights", "are", "inversely", "proportional", "to", "their", "frequency", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L559-L579
train
Britefury/batchup
batchup/sampling.py
WeightedSubsetSampler.shuffled_indices_batch_iterator
def shuffled_indices_batch_iterator(self, batch_size, shuffle_rng): """ Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ while True: yield shuffle_rng.choice(self.indices, size=(batch_size,), p=self.sub_weights)
python
def shuffled_indices_batch_iterator(self, batch_size, shuffle_rng): """ Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays. """ while True: yield shuffle_rng.choice(self.indices, size=(batch_size,), p=self.sub_weights)
[ "def", "shuffled_indices_batch_iterator", "(", "self", ",", "batch_size", ",", "shuffle_rng", ")", ":", "while", "True", ":", "yield", "shuffle_rng", ".", "choice", "(", "self", ".", "indices", ",", "size", "=", "(", "batch_size", ",", ")", ",", "p", "=", "self", ".", "sub_weights", ")" ]
Create an iterator that generates randomly shuffled mini-batches of sample indices. The batches will have `batch_size` elements. The generated mini-batches indices take the form of 1D NumPy integer arrays. Parameters ---------- batch_size: int Mini-batch size shuffle_rng: a `numpy.random.RandomState` that will be used to randomise element order. Returns ------- iterator An iterator that generates mini-batches in the form of 1D NumPy integer arrays.
[ "Create", "an", "iterator", "that", "generates", "randomly", "shuffled", "mini", "-", "batches", "of", "sample", "indices", ".", "The", "batches", "will", "have", "batch_size", "elements", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L688-L711
train
Britefury/batchup
batchup/sampling.py
WeightedSubsetSampler.class_balancing_sampler
def class_balancing_sampler(y, indices): """ Construct a `WeightedSubsetSampler` that compensates for class imbalance. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or positive indices: NumPy array, 1D dtype=int An array of indices that identify the subset of samples drawn from data that are to be used Returns ------- WeightedSubsetSampler instance Sampler """ weights = WeightedSampler.class_balancing_sample_weights(y[indices]) return WeightedSubsetSampler(weights, indices=indices)
python
def class_balancing_sampler(y, indices): """ Construct a `WeightedSubsetSampler` that compensates for class imbalance. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or positive indices: NumPy array, 1D dtype=int An array of indices that identify the subset of samples drawn from data that are to be used Returns ------- WeightedSubsetSampler instance Sampler """ weights = WeightedSampler.class_balancing_sample_weights(y[indices]) return WeightedSubsetSampler(weights, indices=indices)
[ "def", "class_balancing_sampler", "(", "y", ",", "indices", ")", ":", "weights", "=", "WeightedSampler", ".", "class_balancing_sample_weights", "(", "y", "[", "indices", "]", ")", "return", "WeightedSubsetSampler", "(", "weights", ",", "indices", "=", "indices", ")" ]
Construct a `WeightedSubsetSampler` that compensates for class imbalance. Parameters ---------- y: NumPy array, 1D dtype=int sample classes, values must be 0 or positive indices: NumPy array, 1D dtype=int An array of indices that identify the subset of samples drawn from data that are to be used Returns ------- WeightedSubsetSampler instance Sampler
[ "Construct", "a", "WeightedSubsetSampler", "that", "compensates", "for", "class", "imbalance", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/sampling.py#L714-L733
train
datacats/datacats
datacats/network.py
wait_for_service_available
def wait_for_service_available(container, url, timeout): """ Wait up to timeout seconds for service at host:port to start. Returns True if service becomes available, False if the container stops or raises ServiceTimeout if timeout is reached. """ start = time.time() remaining = timeout while True: remaining = start + timeout - time.time() if remaining < 0: raise ServiceTimeout try: response = get(url, timeout=min(remaining, REQUEST_TIMEOUT_SECONDS)) if 500 <= response.status_code < 600: return False return True except (ConnectionError, Timeout): pass if not inspect_container(container)['State']['Running']: return False remaining = start + timeout - time.time() delay = max(0, min(RETRY_DELAY_SECONDS, remaining)) time.sleep(delay) raise ServiceTimeout
python
def wait_for_service_available(container, url, timeout): """ Wait up to timeout seconds for service at host:port to start. Returns True if service becomes available, False if the container stops or raises ServiceTimeout if timeout is reached. """ start = time.time() remaining = timeout while True: remaining = start + timeout - time.time() if remaining < 0: raise ServiceTimeout try: response = get(url, timeout=min(remaining, REQUEST_TIMEOUT_SECONDS)) if 500 <= response.status_code < 600: return False return True except (ConnectionError, Timeout): pass if not inspect_container(container)['State']['Running']: return False remaining = start + timeout - time.time() delay = max(0, min(RETRY_DELAY_SECONDS, remaining)) time.sleep(delay) raise ServiceTimeout
[ "def", "wait_for_service_available", "(", "container", ",", "url", ",", "timeout", ")", ":", "start", "=", "time", ".", "time", "(", ")", "remaining", "=", "timeout", "while", "True", ":", "remaining", "=", "start", "+", "timeout", "-", "time", ".", "time", "(", ")", "if", "remaining", "<", "0", ":", "raise", "ServiceTimeout", "try", ":", "response", "=", "get", "(", "url", ",", "timeout", "=", "min", "(", "remaining", ",", "REQUEST_TIMEOUT_SECONDS", ")", ")", "if", "500", "<=", "response", ".", "status_code", "<", "600", ":", "return", "False", "return", "True", "except", "(", "ConnectionError", ",", "Timeout", ")", ":", "pass", "if", "not", "inspect_container", "(", "container", ")", "[", "'State'", "]", "[", "'Running'", "]", ":", "return", "False", "remaining", "=", "start", "+", "timeout", "-", "time", ".", "time", "(", ")", "delay", "=", "max", "(", "0", ",", "min", "(", "RETRY_DELAY_SECONDS", ",", "remaining", ")", ")", "time", ".", "sleep", "(", "delay", ")", "raise", "ServiceTimeout" ]
Wait up to timeout seconds for service at host:port to start. Returns True if service becomes available, False if the container stops or raises ServiceTimeout if timeout is reached.
[ "Wait", "up", "to", "timeout", "seconds", "for", "service", "at", "host", ":", "port", "to", "start", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/network.py#L21-L51
train
Britefury/batchup
batchup/config.py
get_data_path
def get_data_path(filename): """ Get the path of the given file within the batchup data directory Parameters ---------- filename: str The filename to locate within the batchup data directory Returns ------- str The full path of the file """ if os.path.isabs(filename): return filename else: return os.path.join(get_data_dir(), filename)
python
def get_data_path(filename): """ Get the path of the given file within the batchup data directory Parameters ---------- filename: str The filename to locate within the batchup data directory Returns ------- str The full path of the file """ if os.path.isabs(filename): return filename else: return os.path.join(get_data_dir(), filename)
[ "def", "get_data_path", "(", "filename", ")", ":", "if", "os", ".", "path", ".", "isabs", "(", "filename", ")", ":", "return", "filename", "else", ":", "return", "os", ".", "path", ".", "join", "(", "get_data_dir", "(", ")", ",", "filename", ")" ]
Get the path of the given file within the batchup data directory Parameters ---------- filename: str The filename to locate within the batchup data directory Returns ------- str The full path of the file
[ "Get", "the", "path", "of", "the", "given", "file", "within", "the", "batchup", "data", "directory" ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/config.py#L68-L85
train
Britefury/batchup
batchup/config.py
download
def download(path, source_url): """ Download a file to a given path from a given URL, if it does not exist. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_url: str The URL from which to download the file Returns ------- str The path of the file """ dir_path = os.path.dirname(path) if not os.path.exists(dir_path): os.makedirs(dir_path) if not os.path.exists(path): print('Downloading {} to {}'.format(source_url, path)) filename = source_url.split('/')[-1] def _progress(count, block_size, total_size): sys.stdout.write('\rDownloading {} {:.2%}'.format( filename, float(count * block_size) / float(total_size))) sys.stdout.flush() try: urlretrieve(source_url, path, reporthook=_progress) except: sys.stdout.write('\r') # Exception; remove any partially downloaded file and re-raise if os.path.exists(path): os.remove(path) raise sys.stdout.write('\r') return path
python
def download(path, source_url): """ Download a file to a given path from a given URL, if it does not exist. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_url: str The URL from which to download the file Returns ------- str The path of the file """ dir_path = os.path.dirname(path) if not os.path.exists(dir_path): os.makedirs(dir_path) if not os.path.exists(path): print('Downloading {} to {}'.format(source_url, path)) filename = source_url.split('/')[-1] def _progress(count, block_size, total_size): sys.stdout.write('\rDownloading {} {:.2%}'.format( filename, float(count * block_size) / float(total_size))) sys.stdout.flush() try: urlretrieve(source_url, path, reporthook=_progress) except: sys.stdout.write('\r') # Exception; remove any partially downloaded file and re-raise if os.path.exists(path): os.remove(path) raise sys.stdout.write('\r') return path
[ "def", "download", "(", "path", ",", "source_url", ")", ":", "dir_path", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dir_path", ")", ":", "os", ".", "makedirs", "(", "dir_path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "print", "(", "'Downloading {} to {}'", ".", "format", "(", "source_url", ",", "path", ")", ")", "filename", "=", "source_url", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "def", "_progress", "(", "count", ",", "block_size", ",", "total_size", ")", ":", "sys", ".", "stdout", ".", "write", "(", "'\\rDownloading {} {:.2%}'", ".", "format", "(", "filename", ",", "float", "(", "count", "*", "block_size", ")", "/", "float", "(", "total_size", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "try", ":", "urlretrieve", "(", "source_url", ",", "path", ",", "reporthook", "=", "_progress", ")", "except", ":", "sys", ".", "stdout", ".", "write", "(", "'\\r'", ")", "# Exception; remove any partially downloaded file and re-raise", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "os", ".", "remove", "(", "path", ")", "raise", "sys", ".", "stdout", ".", "write", "(", "'\\r'", ")", "return", "path" ]
Download a file to a given path from a given URL, if it does not exist. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_url: str The URL from which to download the file Returns ------- str The path of the file
[ "Download", "a", "file", "to", "a", "given", "path", "from", "a", "given", "URL", "if", "it", "does", "not", "exist", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/config.py#L88-L124
train
Britefury/batchup
batchup/config.py
compute_sha256
def compute_sha256(path): """ Compute the SHA-256 hash of the file at the given path Parameters ---------- path: str The path of the file Returns ------- str The SHA-256 HEX digest """ hasher = hashlib.sha256() with open(path, 'rb') as f: # 10MB chunks for chunk in iter(lambda: f.read(10 * 1024 * 1024), b''): hasher.update(chunk) return hasher.hexdigest()
python
def compute_sha256(path): """ Compute the SHA-256 hash of the file at the given path Parameters ---------- path: str The path of the file Returns ------- str The SHA-256 HEX digest """ hasher = hashlib.sha256() with open(path, 'rb') as f: # 10MB chunks for chunk in iter(lambda: f.read(10 * 1024 * 1024), b''): hasher.update(chunk) return hasher.hexdigest()
[ "def", "compute_sha256", "(", "path", ")", ":", "hasher", "=", "hashlib", ".", "sha256", "(", ")", "with", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "# 10MB chunks", "for", "chunk", "in", "iter", "(", "lambda", ":", "f", ".", "read", "(", "10", "*", "1024", "*", "1024", ")", ",", "b''", ")", ":", "hasher", ".", "update", "(", "chunk", ")", "return", "hasher", ".", "hexdigest", "(", ")" ]
Compute the SHA-256 hash of the file at the given path Parameters ---------- path: str The path of the file Returns ------- str The SHA-256 HEX digest
[ "Compute", "the", "SHA", "-", "256", "hash", "of", "the", "file", "at", "the", "given", "path" ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/config.py#L127-L146
train
Britefury/batchup
batchup/config.py
verify_file
def verify_file(path, sha256): """ Verify the integrity of a file by checking its SHA-256 hash. If no digest is supplied, the digest is printed to the console. Closely follows the code in `torchvision.datasets.utils.check_integrity` Parameters ---------- path: str The path of the file to check sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- bool Indicates if the file passes the integrity check or not """ if not os.path.isfile(path): return False digest = compute_sha256(path) if sha256 is None: # No digest supplied; report it to the console so a develop can fill # it in print('SHA-256 of {}:'.format(path)) print(' "{}"'.format(digest)) else: if digest != sha256: return False return True
python
def verify_file(path, sha256): """ Verify the integrity of a file by checking its SHA-256 hash. If no digest is supplied, the digest is printed to the console. Closely follows the code in `torchvision.datasets.utils.check_integrity` Parameters ---------- path: str The path of the file to check sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- bool Indicates if the file passes the integrity check or not """ if not os.path.isfile(path): return False digest = compute_sha256(path) if sha256 is None: # No digest supplied; report it to the console so a develop can fill # it in print('SHA-256 of {}:'.format(path)) print(' "{}"'.format(digest)) else: if digest != sha256: return False return True
[ "def", "verify_file", "(", "path", ",", "sha256", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "return", "False", "digest", "=", "compute_sha256", "(", "path", ")", "if", "sha256", "is", "None", ":", "# No digest supplied; report it to the console so a develop can fill", "# it in", "print", "(", "'SHA-256 of {}:'", ".", "format", "(", "path", ")", ")", "print", "(", "' \"{}\"'", ".", "format", "(", "digest", ")", ")", "else", ":", "if", "digest", "!=", "sha256", ":", "return", "False", "return", "True" ]
Verify the integrity of a file by checking its SHA-256 hash. If no digest is supplied, the digest is printed to the console. Closely follows the code in `torchvision.datasets.utils.check_integrity` Parameters ---------- path: str The path of the file to check sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- bool Indicates if the file passes the integrity check or not
[ "Verify", "the", "integrity", "of", "a", "file", "by", "checking", "its", "SHA", "-", "256", "hash", ".", "If", "no", "digest", "is", "supplied", "the", "digest", "is", "printed", "to", "the", "console", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/config.py#L149-L180
train
Britefury/batchup
batchup/config.py
download_and_verify
def download_and_verify(path, source_url, sha256): """ Download a file to a given path from a given URL, if it does not exist. After downloading it, verify it integrity by checking the SHA-256 hash. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_url: str The URL from which to download the file sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- str or None The path of the file if successfully downloaded otherwise `None` """ if os.path.exists(path): # Already exists? # Nothing to do, except print the SHA-256 if necessary if sha256 is None: print('The SHA-256 of {} is "{}"'.format( path, compute_sha256(path))) return path # Compute the path of the unverified file unverified_path = path + '.unverified' for i in range(_MAX_DOWNLOAD_TRIES): # Download it try: unverified_path = download(unverified_path, source_url) except Exception as e: # Report failure print( 'Download of {} unsuccessful; error {}; ' 'deleting and re-trying...'.format(source_url, e)) # Delete so that we can retry if os.path.exists(unverified_path): os.remove(unverified_path) else: if os.path.exists(unverified_path): # Got something... if verify_file(unverified_path, sha256): # Success: rename the unverified file to the destination # filename os.rename(unverified_path, path) return path else: # Report failure print( 'Download of {} unsuccessful; verification failed; ' 'deleting and re-trying...'.format(source_url)) # Delete so that we can retry os.remove(unverified_path) print('Did not succeed in downloading {} (tried {} times)'.format( source_url, _MAX_DOWNLOAD_TRIES )) return None
python
def download_and_verify(path, source_url, sha256): """ Download a file to a given path from a given URL, if it does not exist. After downloading it, verify it integrity by checking the SHA-256 hash. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_url: str The URL from which to download the file sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- str or None The path of the file if successfully downloaded otherwise `None` """ if os.path.exists(path): # Already exists? # Nothing to do, except print the SHA-256 if necessary if sha256 is None: print('The SHA-256 of {} is "{}"'.format( path, compute_sha256(path))) return path # Compute the path of the unverified file unverified_path = path + '.unverified' for i in range(_MAX_DOWNLOAD_TRIES): # Download it try: unverified_path = download(unverified_path, source_url) except Exception as e: # Report failure print( 'Download of {} unsuccessful; error {}; ' 'deleting and re-trying...'.format(source_url, e)) # Delete so that we can retry if os.path.exists(unverified_path): os.remove(unverified_path) else: if os.path.exists(unverified_path): # Got something... if verify_file(unverified_path, sha256): # Success: rename the unverified file to the destination # filename os.rename(unverified_path, path) return path else: # Report failure print( 'Download of {} unsuccessful; verification failed; ' 'deleting and re-trying...'.format(source_url)) # Delete so that we can retry os.remove(unverified_path) print('Did not succeed in downloading {} (tried {} times)'.format( source_url, _MAX_DOWNLOAD_TRIES )) return None
[ "def", "download_and_verify", "(", "path", ",", "source_url", ",", "sha256", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "# Already exists?", "# Nothing to do, except print the SHA-256 if necessary", "if", "sha256", "is", "None", ":", "print", "(", "'The SHA-256 of {} is \"{}\"'", ".", "format", "(", "path", ",", "compute_sha256", "(", "path", ")", ")", ")", "return", "path", "# Compute the path of the unverified file", "unverified_path", "=", "path", "+", "'.unverified'", "for", "i", "in", "range", "(", "_MAX_DOWNLOAD_TRIES", ")", ":", "# Download it", "try", ":", "unverified_path", "=", "download", "(", "unverified_path", ",", "source_url", ")", "except", "Exception", "as", "e", ":", "# Report failure", "print", "(", "'Download of {} unsuccessful; error {}; '", "'deleting and re-trying...'", ".", "format", "(", "source_url", ",", "e", ")", ")", "# Delete so that we can retry", "if", "os", ".", "path", ".", "exists", "(", "unverified_path", ")", ":", "os", ".", "remove", "(", "unverified_path", ")", "else", ":", "if", "os", ".", "path", ".", "exists", "(", "unverified_path", ")", ":", "# Got something...", "if", "verify_file", "(", "unverified_path", ",", "sha256", ")", ":", "# Success: rename the unverified file to the destination", "# filename", "os", ".", "rename", "(", "unverified_path", ",", "path", ")", "return", "path", "else", ":", "# Report failure", "print", "(", "'Download of {} unsuccessful; verification failed; '", "'deleting and re-trying...'", ".", "format", "(", "source_url", ")", ")", "# Delete so that we can retry", "os", ".", "remove", "(", "unverified_path", ")", "print", "(", "'Did not succeed in downloading {} (tried {} times)'", ".", "format", "(", "source_url", ",", "_MAX_DOWNLOAD_TRIES", ")", ")", "return", "None" ]
Download a file to a given path from a given URL, if it does not exist. After downloading it, verify it integrity by checking the SHA-256 hash. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_url: str The URL from which to download the file sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- str or None The path of the file if successfully downloaded otherwise `None`
[ "Download", "a", "file", "to", "a", "given", "path", "from", "a", "given", "URL", "if", "it", "does", "not", "exist", ".", "After", "downloading", "it", "verify", "it", "integrity", "by", "checking", "the", "SHA", "-", "256", "hash", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/config.py#L183-L244
train
Britefury/batchup
batchup/config.py
copy_and_verify
def copy_and_verify(path, source_path, sha256): """ Copy a file to a given path from a given path, if it does not exist. After copying it, verify it integrity by checking the SHA-256 hash. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_path: str The path from which to copy the file sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- str or None The path of the file if successfully downloaded otherwise `None` """ if os.path.exists(path): # Already exists? # Nothing to do, except print the SHA-256 if necessary if sha256 is None: print('The SHA-256 of {} is "{}"'.format( path, compute_sha256(path))) return path if not os.path.exists(source_path): return None # Compute the path of the unverified file unverified_path = path + '.unverified' # Copy it dir_path = os.path.dirname(path) if not os.path.exists(dir_path): os.makedirs(dir_path) shutil.copy(source_path, unverified_path) if os.path.exists(unverified_path): # Got something... if verify_file(unverified_path, sha256): # Success: rename the unverified file to the destination # filename os.rename(unverified_path, path) return path else: # Report failure print('SHA verification of file {} failed'.format(source_path)) # Delete os.remove(unverified_path) return None
python
def copy_and_verify(path, source_path, sha256): """ Copy a file to a given path from a given path, if it does not exist. After copying it, verify it integrity by checking the SHA-256 hash. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_path: str The path from which to copy the file sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- str or None The path of the file if successfully downloaded otherwise `None` """ if os.path.exists(path): # Already exists? # Nothing to do, except print the SHA-256 if necessary if sha256 is None: print('The SHA-256 of {} is "{}"'.format( path, compute_sha256(path))) return path if not os.path.exists(source_path): return None # Compute the path of the unverified file unverified_path = path + '.unverified' # Copy it dir_path = os.path.dirname(path) if not os.path.exists(dir_path): os.makedirs(dir_path) shutil.copy(source_path, unverified_path) if os.path.exists(unverified_path): # Got something... if verify_file(unverified_path, sha256): # Success: rename the unverified file to the destination # filename os.rename(unverified_path, path) return path else: # Report failure print('SHA verification of file {} failed'.format(source_path)) # Delete os.remove(unverified_path) return None
[ "def", "copy_and_verify", "(", "path", ",", "source_path", ",", "sha256", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "# Already exists?", "# Nothing to do, except print the SHA-256 if necessary", "if", "sha256", "is", "None", ":", "print", "(", "'The SHA-256 of {} is \"{}\"'", ".", "format", "(", "path", ",", "compute_sha256", "(", "path", ")", ")", ")", "return", "path", "if", "not", "os", ".", "path", ".", "exists", "(", "source_path", ")", ":", "return", "None", "# Compute the path of the unverified file", "unverified_path", "=", "path", "+", "'.unverified'", "# Copy it", "dir_path", "=", "os", ".", "path", ".", "dirname", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dir_path", ")", ":", "os", ".", "makedirs", "(", "dir_path", ")", "shutil", ".", "copy", "(", "source_path", ",", "unverified_path", ")", "if", "os", ".", "path", ".", "exists", "(", "unverified_path", ")", ":", "# Got something...", "if", "verify_file", "(", "unverified_path", ",", "sha256", ")", ":", "# Success: rename the unverified file to the destination", "# filename", "os", ".", "rename", "(", "unverified_path", ",", "path", ")", "return", "path", "else", ":", "# Report failure", "print", "(", "'SHA verification of file {} failed'", ".", "format", "(", "source_path", ")", ")", "# Delete", "os", ".", "remove", "(", "unverified_path", ")", "return", "None" ]
Copy a file to a given path from a given path, if it does not exist. After copying it, verify it integrity by checking the SHA-256 hash. Parameters ---------- path: str The (destination) path of the file on the local filesystem source_path: str The path from which to copy the file sha256: str The expected SHA-256 hex digest of the file, or `None` to print the digest of the file to the console Returns ------- str or None The path of the file if successfully downloaded otherwise `None`
[ "Copy", "a", "file", "to", "a", "given", "path", "from", "a", "given", "path", "if", "it", "does", "not", "exist", ".", "After", "copying", "it", "verify", "it", "integrity", "by", "checking", "the", "SHA", "-", "256", "hash", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/config.py#L247-L298
train
datacats/datacats
datacats/template.py
ckan_extension_template
def ckan_extension_template(name, target): """ Create ckanext-(name) in target directory. """ setupdir = '{0}/ckanext-{1}theme'.format(target, name) extdir = setupdir + '/ckanext/{0}theme'.format(name) templatedir = extdir + '/templates/' staticdir = extdir + '/static/datacats' makedirs(templatedir + '/home/snippets') makedirs(staticdir) here = dirname(__file__) copyfile(here + '/images/chart.png', staticdir + '/chart.png') copyfile(here + '/images/datacats-footer.png', staticdir + '/datacats-footer.png') filecontents = [ (setupdir + '/setup.py', SETUP_PY), (setupdir + '/.gitignore', DOT_GITIGNORE), (setupdir + '/ckanext/__init__.py', NAMESPACE_PACKAGE), (extdir + '/__init__.py', ''), (extdir + '/plugins.py', PLUGINS_PY), (templatedir + '/home/snippets/promoted.html', PROMOTED_SNIPPET), (templatedir + '/footer.html', FOOTER_HTML), ] for filename, content in filecontents: with open(filename, 'w') as f: f.write(content.replace('##name##', name))
python
def ckan_extension_template(name, target): """ Create ckanext-(name) in target directory. """ setupdir = '{0}/ckanext-{1}theme'.format(target, name) extdir = setupdir + '/ckanext/{0}theme'.format(name) templatedir = extdir + '/templates/' staticdir = extdir + '/static/datacats' makedirs(templatedir + '/home/snippets') makedirs(staticdir) here = dirname(__file__) copyfile(here + '/images/chart.png', staticdir + '/chart.png') copyfile(here + '/images/datacats-footer.png', staticdir + '/datacats-footer.png') filecontents = [ (setupdir + '/setup.py', SETUP_PY), (setupdir + '/.gitignore', DOT_GITIGNORE), (setupdir + '/ckanext/__init__.py', NAMESPACE_PACKAGE), (extdir + '/__init__.py', ''), (extdir + '/plugins.py', PLUGINS_PY), (templatedir + '/home/snippets/promoted.html', PROMOTED_SNIPPET), (templatedir + '/footer.html', FOOTER_HTML), ] for filename, content in filecontents: with open(filename, 'w') as f: f.write(content.replace('##name##', name))
[ "def", "ckan_extension_template", "(", "name", ",", "target", ")", ":", "setupdir", "=", "'{0}/ckanext-{1}theme'", ".", "format", "(", "target", ",", "name", ")", "extdir", "=", "setupdir", "+", "'/ckanext/{0}theme'", ".", "format", "(", "name", ")", "templatedir", "=", "extdir", "+", "'/templates/'", "staticdir", "=", "extdir", "+", "'/static/datacats'", "makedirs", "(", "templatedir", "+", "'/home/snippets'", ")", "makedirs", "(", "staticdir", ")", "here", "=", "dirname", "(", "__file__", ")", "copyfile", "(", "here", "+", "'/images/chart.png'", ",", "staticdir", "+", "'/chart.png'", ")", "copyfile", "(", "here", "+", "'/images/datacats-footer.png'", ",", "staticdir", "+", "'/datacats-footer.png'", ")", "filecontents", "=", "[", "(", "setupdir", "+", "'/setup.py'", ",", "SETUP_PY", ")", ",", "(", "setupdir", "+", "'/.gitignore'", ",", "DOT_GITIGNORE", ")", ",", "(", "setupdir", "+", "'/ckanext/__init__.py'", ",", "NAMESPACE_PACKAGE", ")", ",", "(", "extdir", "+", "'/__init__.py'", ",", "''", ")", ",", "(", "extdir", "+", "'/plugins.py'", ",", "PLUGINS_PY", ")", ",", "(", "templatedir", "+", "'/home/snippets/promoted.html'", ",", "PROMOTED_SNIPPET", ")", ",", "(", "templatedir", "+", "'/footer.html'", ",", "FOOTER_HTML", ")", ",", "]", "for", "filename", ",", "content", "in", "filecontents", ":", "with", "open", "(", "filename", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "content", ".", "replace", "(", "'##name##'", ",", "name", ")", ")" ]
Create ckanext-(name) in target directory.
[ "Create", "ckanext", "-", "(", "name", ")", "in", "target", "directory", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/template.py#L12-L41
train
datacats/datacats
datacats/cli/shell.py
shell
def shell(environment, opts): """Run a command or interactive shell within this environment Usage: datacats [-d] [-s NAME] shell [ENVIRONMENT [COMMAND...]] Options: -d --detach Run the resulting container in the background -s --site=NAME Specify a site to run the shell on [default: primary] ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.' """ environment.require_data() environment.start_supporting_containers() return environment.interactive_shell( opts['COMMAND'], detach=opts['--detach'] )
python
def shell(environment, opts): """Run a command or interactive shell within this environment Usage: datacats [-d] [-s NAME] shell [ENVIRONMENT [COMMAND...]] Options: -d --detach Run the resulting container in the background -s --site=NAME Specify a site to run the shell on [default: primary] ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.' """ environment.require_data() environment.start_supporting_containers() return environment.interactive_shell( opts['COMMAND'], detach=opts['--detach'] )
[ "def", "shell", "(", "environment", ",", "opts", ")", ":", "environment", ".", "require_data", "(", ")", "environment", ".", "start_supporting_containers", "(", ")", "return", "environment", ".", "interactive_shell", "(", "opts", "[", "'COMMAND'", "]", ",", "detach", "=", "opts", "[", "'--detach'", "]", ")" ]
Run a command or interactive shell within this environment Usage: datacats [-d] [-s NAME] shell [ENVIRONMENT [COMMAND...]] Options: -d --detach Run the resulting container in the background -s --site=NAME Specify a site to run the shell on [default: primary] ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.'
[ "Run", "a", "command", "or", "interactive", "shell", "within", "this", "environment" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/shell.py#L10-L28
train
datacats/datacats
datacats/cli/shell.py
paster
def paster(opts): """Run a paster command from the current directory Usage: datacats paster [-d] [-s NAME] [COMMAND...] Options: -s --site=NAME Specify a site to run this paster command on [default: primary] -d --detach Run the resulting container in the background You must be inside a datacats environment to run this. The paster command will run within your current directory inside the environment. You don't need to specify the --plugin option. The --config option also need not be specified. """ environment = Environment.load('.') environment.require_data() environment.start_supporting_containers() if not opts['COMMAND']: opts['COMMAND'] = ['--', 'help'] assert opts['COMMAND'][0] == '--' return environment.interactive_shell( opts['COMMAND'][1:], paster=True, detach=opts['--detach'] )
python
def paster(opts): """Run a paster command from the current directory Usage: datacats paster [-d] [-s NAME] [COMMAND...] Options: -s --site=NAME Specify a site to run this paster command on [default: primary] -d --detach Run the resulting container in the background You must be inside a datacats environment to run this. The paster command will run within your current directory inside the environment. You don't need to specify the --plugin option. The --config option also need not be specified. """ environment = Environment.load('.') environment.require_data() environment.start_supporting_containers() if not opts['COMMAND']: opts['COMMAND'] = ['--', 'help'] assert opts['COMMAND'][0] == '--' return environment.interactive_shell( opts['COMMAND'][1:], paster=True, detach=opts['--detach'] )
[ "def", "paster", "(", "opts", ")", ":", "environment", "=", "Environment", ".", "load", "(", "'.'", ")", "environment", ".", "require_data", "(", ")", "environment", ".", "start_supporting_containers", "(", ")", "if", "not", "opts", "[", "'COMMAND'", "]", ":", "opts", "[", "'COMMAND'", "]", "=", "[", "'--'", ",", "'help'", "]", "assert", "opts", "[", "'COMMAND'", "]", "[", "0", "]", "==", "'--'", "return", "environment", ".", "interactive_shell", "(", "opts", "[", "'COMMAND'", "]", "[", "1", ":", "]", ",", "paster", "=", "True", ",", "detach", "=", "opts", "[", "'--detach'", "]", ")" ]
Run a paster command from the current directory Usage: datacats paster [-d] [-s NAME] [COMMAND...] Options: -s --site=NAME Specify a site to run this paster command on [default: primary] -d --detach Run the resulting container in the background You must be inside a datacats environment to run this. The paster command will run within your current directory inside the environment. You don't need to specify the --plugin option. The --config option also need not be specified.
[ "Run", "a", "paster", "command", "from", "the", "current", "directory" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/shell.py#L31-L57
train
datacats/datacats
datacats/task.py
save_new_site
def save_new_site(site_name, sitedir, srcdir, port, address, site_url, passwords): """ Add a site's configuration to the source dir and site dir """ cp = ConfigParser.SafeConfigParser() cp.read([srcdir + '/.datacats-environment']) section_name = 'site_' + site_name if not cp.has_section(section_name): cp.add_section(section_name) cp.set(section_name, 'port', str(port)) if address: cp.set(section_name, 'address', address) if site_url: cp.set(section_name, 'site_url', site_url) with open(srcdir + '/.datacats-environment', 'w') as config: cp.write(config) # save passwords to datadir cp = ConfigParser.SafeConfigParser() cp.add_section('passwords') for n in sorted(passwords): cp.set('passwords', n.lower(), passwords[n]) # Write to the sitedir so we maintain separate passwords. with open(sitedir + '/passwords.ini', 'w') as config: cp.write(config)
python
def save_new_site(site_name, sitedir, srcdir, port, address, site_url, passwords): """ Add a site's configuration to the source dir and site dir """ cp = ConfigParser.SafeConfigParser() cp.read([srcdir + '/.datacats-environment']) section_name = 'site_' + site_name if not cp.has_section(section_name): cp.add_section(section_name) cp.set(section_name, 'port', str(port)) if address: cp.set(section_name, 'address', address) if site_url: cp.set(section_name, 'site_url', site_url) with open(srcdir + '/.datacats-environment', 'w') as config: cp.write(config) # save passwords to datadir cp = ConfigParser.SafeConfigParser() cp.add_section('passwords') for n in sorted(passwords): cp.set('passwords', n.lower(), passwords[n]) # Write to the sitedir so we maintain separate passwords. with open(sitedir + '/passwords.ini', 'w') as config: cp.write(config)
[ "def", "save_new_site", "(", "site_name", ",", "sitedir", ",", "srcdir", ",", "port", ",", "address", ",", "site_url", ",", "passwords", ")", ":", "cp", "=", "ConfigParser", ".", "SafeConfigParser", "(", ")", "cp", ".", "read", "(", "[", "srcdir", "+", "'/.datacats-environment'", "]", ")", "section_name", "=", "'site_'", "+", "site_name", "if", "not", "cp", ".", "has_section", "(", "section_name", ")", ":", "cp", ".", "add_section", "(", "section_name", ")", "cp", ".", "set", "(", "section_name", ",", "'port'", ",", "str", "(", "port", ")", ")", "if", "address", ":", "cp", ".", "set", "(", "section_name", ",", "'address'", ",", "address", ")", "if", "site_url", ":", "cp", ".", "set", "(", "section_name", ",", "'site_url'", ",", "site_url", ")", "with", "open", "(", "srcdir", "+", "'/.datacats-environment'", ",", "'w'", ")", "as", "config", ":", "cp", ".", "write", "(", "config", ")", "# save passwords to datadir", "cp", "=", "ConfigParser", ".", "SafeConfigParser", "(", ")", "cp", ".", "add_section", "(", "'passwords'", ")", "for", "n", "in", "sorted", "(", "passwords", ")", ":", "cp", ".", "set", "(", "'passwords'", ",", "n", ".", "lower", "(", ")", ",", "passwords", "[", "n", "]", ")", "# Write to the sitedir so we maintain separate passwords.", "with", "open", "(", "sitedir", "+", "'/passwords.ini'", ",", "'w'", ")", "as", "config", ":", "cp", ".", "write", "(", "config", ")" ]
Add a site's configuration to the source dir and site dir
[ "Add", "a", "site", "s", "configuration", "to", "the", "source", "dir", "and", "site", "dir" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L44-L75
train
datacats/datacats
datacats/task.py
save_new_environment
def save_new_environment(name, datadir, srcdir, ckan_version, deploy_target=None, always_prod=False): """ Save an environment's configuration to the source dir and data dir """ with open(datadir + '/.version', 'w') as f: f.write('2') cp = ConfigParser.SafeConfigParser() cp.read(srcdir + '/.datacats-environment') if not cp.has_section('datacats'): cp.add_section('datacats') cp.set('datacats', 'name', name) cp.set('datacats', 'ckan_version', ckan_version) if deploy_target: if not cp.has_section('deploy'): cp.add_section('deploy') cp.set('deploy', 'target', deploy_target) if always_prod: cp.set('datacats', 'always_prod', 'true') with open(srcdir + '/.datacats-environment', 'w') as config: cp.write(config) save_srcdir_location(datadir, srcdir)
python
def save_new_environment(name, datadir, srcdir, ckan_version, deploy_target=None, always_prod=False): """ Save an environment's configuration to the source dir and data dir """ with open(datadir + '/.version', 'w') as f: f.write('2') cp = ConfigParser.SafeConfigParser() cp.read(srcdir + '/.datacats-environment') if not cp.has_section('datacats'): cp.add_section('datacats') cp.set('datacats', 'name', name) cp.set('datacats', 'ckan_version', ckan_version) if deploy_target: if not cp.has_section('deploy'): cp.add_section('deploy') cp.set('deploy', 'target', deploy_target) if always_prod: cp.set('datacats', 'always_prod', 'true') with open(srcdir + '/.datacats-environment', 'w') as config: cp.write(config) save_srcdir_location(datadir, srcdir)
[ "def", "save_new_environment", "(", "name", ",", "datadir", ",", "srcdir", ",", "ckan_version", ",", "deploy_target", "=", "None", ",", "always_prod", "=", "False", ")", ":", "with", "open", "(", "datadir", "+", "'/.version'", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "'2'", ")", "cp", "=", "ConfigParser", ".", "SafeConfigParser", "(", ")", "cp", ".", "read", "(", "srcdir", "+", "'/.datacats-environment'", ")", "if", "not", "cp", ".", "has_section", "(", "'datacats'", ")", ":", "cp", ".", "add_section", "(", "'datacats'", ")", "cp", ".", "set", "(", "'datacats'", ",", "'name'", ",", "name", ")", "cp", ".", "set", "(", "'datacats'", ",", "'ckan_version'", ",", "ckan_version", ")", "if", "deploy_target", ":", "if", "not", "cp", ".", "has_section", "(", "'deploy'", ")", ":", "cp", ".", "add_section", "(", "'deploy'", ")", "cp", ".", "set", "(", "'deploy'", ",", "'target'", ",", "deploy_target", ")", "if", "always_prod", ":", "cp", ".", "set", "(", "'datacats'", ",", "'always_prod'", ",", "'true'", ")", "with", "open", "(", "srcdir", "+", "'/.datacats-environment'", ",", "'w'", ")", "as", "config", ":", "cp", ".", "write", "(", "config", ")", "save_srcdir_location", "(", "datadir", ",", "srcdir", ")" ]
Save an environment's configuration to the source dir and data dir
[ "Save", "an", "environment", "s", "configuration", "to", "the", "source", "dir", "and", "data", "dir" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L78-L106
train
datacats/datacats
datacats/task.py
find_environment_dirs
def find_environment_dirs(environment_name=None, data_only=False): """ :param environment_name: exising environment name, path or None to look in current or parent directories for project returns (srcdir, extension_dir, datadir) extension_dir is the name of extension directory user was in/referenced, default: 'ckan'. This value is used by the paster cli command. datadir will be None if environment_name was a path or None (not a name) """ docker.require_images() if environment_name is None: environment_name = '.' extension_dir = 'ckan' if validate.valid_name(environment_name) and path.isdir( path.expanduser('~/.datacats/' + environment_name)): # loading from a name datadir = path.expanduser('~/.datacats/' + environment_name) with open(datadir + '/project-dir') as pd: srcdir = pd.read() if not data_only and not path.exists(srcdir + '/.datacats-environment'): raise DatacatsError( 'Environment data found but environment directory is' ' missing. Try again from the new environment directory' ' location or remove the environment data with' ' "datacats purge"') return srcdir, extension_dir, datadir # loading from a path srcdir = path.abspath(environment_name) if not path.isdir(srcdir): raise DatacatsError('No environment found with that name') wd = srcdir oldwd = None while not path.exists(wd + '/.datacats-environment'): oldwd = wd wd, _ = path.split(wd) if wd == oldwd: raise DatacatsError( 'Environment not found in {0} or above'.format(srcdir)) srcdir = wd if oldwd: _, extension_dir = path.split(oldwd) return srcdir, extension_dir, None
python
def find_environment_dirs(environment_name=None, data_only=False): """ :param environment_name: exising environment name, path or None to look in current or parent directories for project returns (srcdir, extension_dir, datadir) extension_dir is the name of extension directory user was in/referenced, default: 'ckan'. This value is used by the paster cli command. datadir will be None if environment_name was a path or None (not a name) """ docker.require_images() if environment_name is None: environment_name = '.' extension_dir = 'ckan' if validate.valid_name(environment_name) and path.isdir( path.expanduser('~/.datacats/' + environment_name)): # loading from a name datadir = path.expanduser('~/.datacats/' + environment_name) with open(datadir + '/project-dir') as pd: srcdir = pd.read() if not data_only and not path.exists(srcdir + '/.datacats-environment'): raise DatacatsError( 'Environment data found but environment directory is' ' missing. Try again from the new environment directory' ' location or remove the environment data with' ' "datacats purge"') return srcdir, extension_dir, datadir # loading from a path srcdir = path.abspath(environment_name) if not path.isdir(srcdir): raise DatacatsError('No environment found with that name') wd = srcdir oldwd = None while not path.exists(wd + '/.datacats-environment'): oldwd = wd wd, _ = path.split(wd) if wd == oldwd: raise DatacatsError( 'Environment not found in {0} or above'.format(srcdir)) srcdir = wd if oldwd: _, extension_dir = path.split(oldwd) return srcdir, extension_dir, None
[ "def", "find_environment_dirs", "(", "environment_name", "=", "None", ",", "data_only", "=", "False", ")", ":", "docker", ".", "require_images", "(", ")", "if", "environment_name", "is", "None", ":", "environment_name", "=", "'.'", "extension_dir", "=", "'ckan'", "if", "validate", ".", "valid_name", "(", "environment_name", ")", "and", "path", ".", "isdir", "(", "path", ".", "expanduser", "(", "'~/.datacats/'", "+", "environment_name", ")", ")", ":", "# loading from a name", "datadir", "=", "path", ".", "expanduser", "(", "'~/.datacats/'", "+", "environment_name", ")", "with", "open", "(", "datadir", "+", "'/project-dir'", ")", "as", "pd", ":", "srcdir", "=", "pd", ".", "read", "(", ")", "if", "not", "data_only", "and", "not", "path", ".", "exists", "(", "srcdir", "+", "'/.datacats-environment'", ")", ":", "raise", "DatacatsError", "(", "'Environment data found but environment directory is'", "' missing. Try again from the new environment directory'", "' location or remove the environment data with'", "' \"datacats purge\"'", ")", "return", "srcdir", ",", "extension_dir", ",", "datadir", "# loading from a path", "srcdir", "=", "path", ".", "abspath", "(", "environment_name", ")", "if", "not", "path", ".", "isdir", "(", "srcdir", ")", ":", "raise", "DatacatsError", "(", "'No environment found with that name'", ")", "wd", "=", "srcdir", "oldwd", "=", "None", "while", "not", "path", ".", "exists", "(", "wd", "+", "'/.datacats-environment'", ")", ":", "oldwd", "=", "wd", "wd", ",", "_", "=", "path", ".", "split", "(", "wd", ")", "if", "wd", "==", "oldwd", ":", "raise", "DatacatsError", "(", "'Environment not found in {0} or above'", ".", "format", "(", "srcdir", ")", ")", "srcdir", "=", "wd", "if", "oldwd", ":", "_", ",", "extension_dir", "=", "path", ".", "split", "(", "oldwd", ")", "return", "srcdir", ",", "extension_dir", ",", "None" ]
:param environment_name: exising environment name, path or None to look in current or parent directories for project returns (srcdir, extension_dir, datadir) extension_dir is the name of extension directory user was in/referenced, default: 'ckan'. This value is used by the paster cli command. datadir will be None if environment_name was a path or None (not a name)
[ ":", "param", "environment_name", ":", "exising", "environment", "name", "path", "or", "None", "to", "look", "in", "current", "or", "parent", "directories", "for", "project" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L118-L170
train
datacats/datacats
datacats/task.py
load_environment
def load_environment(srcdir, datadir=None, allow_old=False): """ Load configuration values for an environment :param srcdir: environment source directory :param datadir: environment data direcory, if None will be discovered from srcdir :param allow_old: Don't throw an exception if this is an old site This is only valid for sites that you are purging. if datadir is None it will be discovered from srcdir Returns (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key) """ cp = ConfigParser.SafeConfigParser() try: cp.read([srcdir + '/.datacats-environment']) except ConfigParser.Error: raise DatacatsError('Error reading environment information') name = cp.get('datacats', 'name') if datadir: # update the link in case user moved their srcdir save_srcdir_location(datadir, srcdir) else: datadir = path.expanduser('~/.datacats/' + name) # FIXME: check if datadir is sane, project-dir points back to srcdir if migrate.needs_format_conversion(datadir) and not allow_old: raise DatacatsError('This environment uses an old format. You must' ' migrate to the new format. 
To do so, use the' ' "datacats migrate" command.') if migrate.is_locked(datadir): raise DatacatsError('Migration in progress, cannot continue.\n' 'If you interrupted a migration, you should' ' attempt manual recovery or contact us by' ' filing an issue at http://github.com/datacats/' 'datacats.\nAs a last resort, you could delete' ' all your stored data and create a new environment' ' by running "datacats purge" followed by' ' "datacats init".') # FIXME: consider doing data_complete check here ckan_version = cp.get('datacats', 'ckan_version') try: always_prod = cp.getboolean('datacats', 'always_prod') except ConfigParser.NoOptionError: always_prod = False try: extra_containers = cp.get('datacats', 'extra_containers').split(' ') except ConfigParser.NoOptionError: extra_containers = () # if remote_server's custom ssh connection # address is defined, # we overwrite the default datacats.com one try: deploy_target = cp.get('deploy', 'remote_server_user') \ + "@" + cp.get('deploy', 'remote_server') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): deploy_target = DEFAULT_REMOTE_SERVER_TARGET # if remote_server's ssh public key is given, # we overwrite the default datacats.com one try: remote_server_key = cp.get('deploy', 'remote_server_key') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): remote_server_key = None return (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key, extra_containers)
python
def load_environment(srcdir, datadir=None, allow_old=False): """ Load configuration values for an environment :param srcdir: environment source directory :param datadir: environment data direcory, if None will be discovered from srcdir :param allow_old: Don't throw an exception if this is an old site This is only valid for sites that you are purging. if datadir is None it will be discovered from srcdir Returns (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key) """ cp = ConfigParser.SafeConfigParser() try: cp.read([srcdir + '/.datacats-environment']) except ConfigParser.Error: raise DatacatsError('Error reading environment information') name = cp.get('datacats', 'name') if datadir: # update the link in case user moved their srcdir save_srcdir_location(datadir, srcdir) else: datadir = path.expanduser('~/.datacats/' + name) # FIXME: check if datadir is sane, project-dir points back to srcdir if migrate.needs_format_conversion(datadir) and not allow_old: raise DatacatsError('This environment uses an old format. You must' ' migrate to the new format. 
To do so, use the' ' "datacats migrate" command.') if migrate.is_locked(datadir): raise DatacatsError('Migration in progress, cannot continue.\n' 'If you interrupted a migration, you should' ' attempt manual recovery or contact us by' ' filing an issue at http://github.com/datacats/' 'datacats.\nAs a last resort, you could delete' ' all your stored data and create a new environment' ' by running "datacats purge" followed by' ' "datacats init".') # FIXME: consider doing data_complete check here ckan_version = cp.get('datacats', 'ckan_version') try: always_prod = cp.getboolean('datacats', 'always_prod') except ConfigParser.NoOptionError: always_prod = False try: extra_containers = cp.get('datacats', 'extra_containers').split(' ') except ConfigParser.NoOptionError: extra_containers = () # if remote_server's custom ssh connection # address is defined, # we overwrite the default datacats.com one try: deploy_target = cp.get('deploy', 'remote_server_user') \ + "@" + cp.get('deploy', 'remote_server') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): deploy_target = DEFAULT_REMOTE_SERVER_TARGET # if remote_server's ssh public key is given, # we overwrite the default datacats.com one try: remote_server_key = cp.get('deploy', 'remote_server_key') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): remote_server_key = None return (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key, extra_containers)
[ "def", "load_environment", "(", "srcdir", ",", "datadir", "=", "None", ",", "allow_old", "=", "False", ")", ":", "cp", "=", "ConfigParser", ".", "SafeConfigParser", "(", ")", "try", ":", "cp", ".", "read", "(", "[", "srcdir", "+", "'/.datacats-environment'", "]", ")", "except", "ConfigParser", ".", "Error", ":", "raise", "DatacatsError", "(", "'Error reading environment information'", ")", "name", "=", "cp", ".", "get", "(", "'datacats'", ",", "'name'", ")", "if", "datadir", ":", "# update the link in case user moved their srcdir", "save_srcdir_location", "(", "datadir", ",", "srcdir", ")", "else", ":", "datadir", "=", "path", ".", "expanduser", "(", "'~/.datacats/'", "+", "name", ")", "# FIXME: check if datadir is sane, project-dir points back to srcdir", "if", "migrate", ".", "needs_format_conversion", "(", "datadir", ")", "and", "not", "allow_old", ":", "raise", "DatacatsError", "(", "'This environment uses an old format. You must'", "' migrate to the new format. To do so, use the'", "' \"datacats migrate\" command.'", ")", "if", "migrate", ".", "is_locked", "(", "datadir", ")", ":", "raise", "DatacatsError", "(", "'Migration in progress, cannot continue.\\n'", "'If you interrupted a migration, you should'", "' attempt manual recovery or contact us by'", "' filing an issue at http://github.com/datacats/'", "'datacats.\\nAs a last resort, you could delete'", "' all your stored data and create a new environment'", "' by running \"datacats purge\" followed by'", "' \"datacats init\".'", ")", "# FIXME: consider doing data_complete check here", "ckan_version", "=", "cp", ".", "get", "(", "'datacats'", ",", "'ckan_version'", ")", "try", ":", "always_prod", "=", "cp", ".", "getboolean", "(", "'datacats'", ",", "'always_prod'", ")", "except", "ConfigParser", ".", "NoOptionError", ":", "always_prod", "=", "False", "try", ":", "extra_containers", "=", "cp", ".", "get", "(", "'datacats'", ",", "'extra_containers'", ")", ".", "split", "(", "' '", ")", "except", 
"ConfigParser", ".", "NoOptionError", ":", "extra_containers", "=", "(", ")", "# if remote_server's custom ssh connection", "# address is defined,", "# we overwrite the default datacats.com one", "try", ":", "deploy_target", "=", "cp", ".", "get", "(", "'deploy'", ",", "'remote_server_user'", ")", "+", "\"@\"", "+", "cp", ".", "get", "(", "'deploy'", ",", "'remote_server'", ")", "except", "(", "ConfigParser", ".", "NoOptionError", ",", "ConfigParser", ".", "NoSectionError", ")", ":", "deploy_target", "=", "DEFAULT_REMOTE_SERVER_TARGET", "# if remote_server's ssh public key is given,", "# we overwrite the default datacats.com one", "try", ":", "remote_server_key", "=", "cp", ".", "get", "(", "'deploy'", ",", "'remote_server_key'", ")", "except", "(", "ConfigParser", ".", "NoOptionError", ",", "ConfigParser", ".", "NoSectionError", ")", ":", "remote_server_key", "=", "None", "return", "(", "datadir", ",", "name", ",", "ckan_version", ",", "always_prod", ",", "deploy_target", ",", "remote_server_key", ",", "extra_containers", ")" ]
Load configuration values for an environment :param srcdir: environment source directory :param datadir: environment data direcory, if None will be discovered from srcdir :param allow_old: Don't throw an exception if this is an old site This is only valid for sites that you are purging. if datadir is None it will be discovered from srcdir Returns (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key)
[ "Load", "configuration", "values", "for", "an", "environment" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L173-L247
train
datacats/datacats
datacats/task.py
load_site
def load_site(srcdir, datadir, site_name=None): """ Load configuration values for a site. Returns (port, address, site_url, passwords) """ if site_name is None: site_name = 'primary' if not validate.valid_name(site_name): raise DatacatsError('{} is not a valid site name.'.format(site_name)) cp = ConfigParser.SafeConfigParser() try: cp.read([srcdir + '/.datacats-environment']) except ConfigParser.Error: raise DatacatsError('Error reading environment information') site_section = 'site_' + site_name try: port = cp.getint(site_section, 'port') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): port = None try: address = cp.get(site_section, 'address') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): address = None try: site_url = cp.get(site_section, 'site_url') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): site_url = None passwords = {} cp = ConfigParser.SafeConfigParser() cp.read(datadir + '/sites/' + site_name + '/passwords.ini') try: pw_options = cp.options('passwords') except ConfigParser.NoSectionError: pw_options = [] for n in pw_options: passwords[n.upper()] = cp.get('passwords', n) return port, address, site_url, passwords
python
def load_site(srcdir, datadir, site_name=None): """ Load configuration values for a site. Returns (port, address, site_url, passwords) """ if site_name is None: site_name = 'primary' if not validate.valid_name(site_name): raise DatacatsError('{} is not a valid site name.'.format(site_name)) cp = ConfigParser.SafeConfigParser() try: cp.read([srcdir + '/.datacats-environment']) except ConfigParser.Error: raise DatacatsError('Error reading environment information') site_section = 'site_' + site_name try: port = cp.getint(site_section, 'port') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): port = None try: address = cp.get(site_section, 'address') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): address = None try: site_url = cp.get(site_section, 'site_url') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): site_url = None passwords = {} cp = ConfigParser.SafeConfigParser() cp.read(datadir + '/sites/' + site_name + '/passwords.ini') try: pw_options = cp.options('passwords') except ConfigParser.NoSectionError: pw_options = [] for n in pw_options: passwords[n.upper()] = cp.get('passwords', n) return port, address, site_url, passwords
[ "def", "load_site", "(", "srcdir", ",", "datadir", ",", "site_name", "=", "None", ")", ":", "if", "site_name", "is", "None", ":", "site_name", "=", "'primary'", "if", "not", "validate", ".", "valid_name", "(", "site_name", ")", ":", "raise", "DatacatsError", "(", "'{} is not a valid site name.'", ".", "format", "(", "site_name", ")", ")", "cp", "=", "ConfigParser", ".", "SafeConfigParser", "(", ")", "try", ":", "cp", ".", "read", "(", "[", "srcdir", "+", "'/.datacats-environment'", "]", ")", "except", "ConfigParser", ".", "Error", ":", "raise", "DatacatsError", "(", "'Error reading environment information'", ")", "site_section", "=", "'site_'", "+", "site_name", "try", ":", "port", "=", "cp", ".", "getint", "(", "site_section", ",", "'port'", ")", "except", "(", "ConfigParser", ".", "NoOptionError", ",", "ConfigParser", ".", "NoSectionError", ")", ":", "port", "=", "None", "try", ":", "address", "=", "cp", ".", "get", "(", "site_section", ",", "'address'", ")", "except", "(", "ConfigParser", ".", "NoOptionError", ",", "ConfigParser", ".", "NoSectionError", ")", ":", "address", "=", "None", "try", ":", "site_url", "=", "cp", ".", "get", "(", "site_section", ",", "'site_url'", ")", "except", "(", "ConfigParser", ".", "NoOptionError", ",", "ConfigParser", ".", "NoSectionError", ")", ":", "site_url", "=", "None", "passwords", "=", "{", "}", "cp", "=", "ConfigParser", ".", "SafeConfigParser", "(", ")", "cp", ".", "read", "(", "datadir", "+", "'/sites/'", "+", "site_name", "+", "'/passwords.ini'", ")", "try", ":", "pw_options", "=", "cp", ".", "options", "(", "'passwords'", ")", "except", "ConfigParser", ".", "NoSectionError", ":", "pw_options", "=", "[", "]", "for", "n", "in", "pw_options", ":", "passwords", "[", "n", ".", "upper", "(", ")", "]", "=", "cp", ".", "get", "(", "'passwords'", ",", "n", ")", "return", "port", ",", "address", ",", "site_url", ",", "passwords" ]
Load configuration values for a site. Returns (port, address, site_url, passwords)
[ "Load", "configuration", "values", "for", "a", "site", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L250-L292
train
datacats/datacats
datacats/task.py
new_environment_check
def new_environment_check(srcpath, site_name, ckan_version): """ Check if a new environment or site can be created at the given path. Returns (name, datadir, sitedir, srcdir) or raises DatacatsError """ docker.require_images() workdir, name = path.split(path.abspath(path.expanduser(srcpath))) if not validate.valid_name(name): raise DatacatsError('Please choose an environment name starting' ' with a letter and including only lowercase letters' ' and digits') if not path.isdir(workdir): raise DatacatsError('Parent directory for environment' ' does not exist') datadir = path.expanduser('~/.datacats/' + name) sitedir = datadir + '/sites/' + site_name # We track through the datadir to the target if we are just making a # site if path.isdir(datadir): with open(datadir + '/project-dir') as pd: srcdir = pd.read() else: srcdir = workdir + '/' + name if ckan_version not in SUPPORTED_PRELOADS: raise DatacatsError('''Datacats does not currently support CKAN version {}. Versions that are currently supported are: {}'''.format(ckan_version, ', '.join(SUPPORTED_PRELOADS))) preload_name = str(ckan_version) # Get all the versions from the tags downloaded_versions = [tag for tag in docker.get_tags('datacats/ckan')] if ckan_version not in downloaded_versions: retrying_pull_image('datacats/ckan:{}'.format(preload_name)) if path.isdir(sitedir): raise DatacatsError('Site data directory {0} already exists'.format( sitedir)) # This is the case where the data dir has been removed, if path.isdir(srcdir) and not path.isdir(datadir): raise DatacatsError('Environment directory exists, but data directory does not.\n' 'If you simply want to recreate the data directory, run ' '"datacats init" in the environment directory.') return name, datadir, srcdir
python
def new_environment_check(srcpath, site_name, ckan_version): """ Check if a new environment or site can be created at the given path. Returns (name, datadir, sitedir, srcdir) or raises DatacatsError """ docker.require_images() workdir, name = path.split(path.abspath(path.expanduser(srcpath))) if not validate.valid_name(name): raise DatacatsError('Please choose an environment name starting' ' with a letter and including only lowercase letters' ' and digits') if not path.isdir(workdir): raise DatacatsError('Parent directory for environment' ' does not exist') datadir = path.expanduser('~/.datacats/' + name) sitedir = datadir + '/sites/' + site_name # We track through the datadir to the target if we are just making a # site if path.isdir(datadir): with open(datadir + '/project-dir') as pd: srcdir = pd.read() else: srcdir = workdir + '/' + name if ckan_version not in SUPPORTED_PRELOADS: raise DatacatsError('''Datacats does not currently support CKAN version {}. Versions that are currently supported are: {}'''.format(ckan_version, ', '.join(SUPPORTED_PRELOADS))) preload_name = str(ckan_version) # Get all the versions from the tags downloaded_versions = [tag for tag in docker.get_tags('datacats/ckan')] if ckan_version not in downloaded_versions: retrying_pull_image('datacats/ckan:{}'.format(preload_name)) if path.isdir(sitedir): raise DatacatsError('Site data directory {0} already exists'.format( sitedir)) # This is the case where the data dir has been removed, if path.isdir(srcdir) and not path.isdir(datadir): raise DatacatsError('Environment directory exists, but data directory does not.\n' 'If you simply want to recreate the data directory, run ' '"datacats init" in the environment directory.') return name, datadir, srcdir
[ "def", "new_environment_check", "(", "srcpath", ",", "site_name", ",", "ckan_version", ")", ":", "docker", ".", "require_images", "(", ")", "workdir", ",", "name", "=", "path", ".", "split", "(", "path", ".", "abspath", "(", "path", ".", "expanduser", "(", "srcpath", ")", ")", ")", "if", "not", "validate", ".", "valid_name", "(", "name", ")", ":", "raise", "DatacatsError", "(", "'Please choose an environment name starting'", "' with a letter and including only lowercase letters'", "' and digits'", ")", "if", "not", "path", ".", "isdir", "(", "workdir", ")", ":", "raise", "DatacatsError", "(", "'Parent directory for environment'", "' does not exist'", ")", "datadir", "=", "path", ".", "expanduser", "(", "'~/.datacats/'", "+", "name", ")", "sitedir", "=", "datadir", "+", "'/sites/'", "+", "site_name", "# We track through the datadir to the target if we are just making a", "# site", "if", "path", ".", "isdir", "(", "datadir", ")", ":", "with", "open", "(", "datadir", "+", "'/project-dir'", ")", "as", "pd", ":", "srcdir", "=", "pd", ".", "read", "(", ")", "else", ":", "srcdir", "=", "workdir", "+", "'/'", "+", "name", "if", "ckan_version", "not", "in", "SUPPORTED_PRELOADS", ":", "raise", "DatacatsError", "(", "'''Datacats does not currently support CKAN version {}.\nVersions that are currently supported are: {}'''", ".", "format", "(", "ckan_version", ",", "', '", ".", "join", "(", "SUPPORTED_PRELOADS", ")", ")", ")", "preload_name", "=", "str", "(", "ckan_version", ")", "# Get all the versions from the tags", "downloaded_versions", "=", "[", "tag", "for", "tag", "in", "docker", ".", "get_tags", "(", "'datacats/ckan'", ")", "]", "if", "ckan_version", "not", "in", "downloaded_versions", ":", "retrying_pull_image", "(", "'datacats/ckan:{}'", ".", "format", "(", "preload_name", ")", ")", "if", "path", ".", "isdir", "(", "sitedir", ")", ":", "raise", "DatacatsError", "(", "'Site data directory {0} already exists'", ".", "format", "(", "sitedir", ")", ")", "# This is the 
case where the data dir has been removed,", "if", "path", ".", "isdir", "(", "srcdir", ")", "and", "not", "path", ".", "isdir", "(", "datadir", ")", ":", "raise", "DatacatsError", "(", "'Environment directory exists, but data directory does not.\\n'", "'If you simply want to recreate the data directory, run '", "'\"datacats init\" in the environment directory.'", ")", "return", "name", ",", "datadir", ",", "srcdir" ]
Check if a new environment or site can be created at the given path. Returns (name, datadir, sitedir, srcdir) or raises DatacatsError
[ "Check", "if", "a", "new", "environment", "or", "site", "can", "be", "created", "at", "the", "given", "path", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L298-L348
train
datacats/datacats
datacats/task.py
data_complete
def data_complete(datadir, sitedir, get_container_name): """ Return True if the directories and containers we're expecting are present in datadir, sitedir and containers """ if any(not path.isdir(sitedir + x) for x in ('/files', '/run', '/solr')): return False if docker.is_boot2docker(): # Inspect returns None if the container doesn't exist. return all(docker.inspect_container(get_container_name(x)) for x in ('pgdata', 'venv')) return path.isdir(datadir + '/venv') and path.isdir(sitedir + '/postgres')
python
def data_complete(datadir, sitedir, get_container_name): """ Return True if the directories and containers we're expecting are present in datadir, sitedir and containers """ if any(not path.isdir(sitedir + x) for x in ('/files', '/run', '/solr')): return False if docker.is_boot2docker(): # Inspect returns None if the container doesn't exist. return all(docker.inspect_container(get_container_name(x)) for x in ('pgdata', 'venv')) return path.isdir(datadir + '/venv') and path.isdir(sitedir + '/postgres')
[ "def", "data_complete", "(", "datadir", ",", "sitedir", ",", "get_container_name", ")", ":", "if", "any", "(", "not", "path", ".", "isdir", "(", "sitedir", "+", "x", ")", "for", "x", "in", "(", "'/files'", ",", "'/run'", ",", "'/solr'", ")", ")", ":", "return", "False", "if", "docker", ".", "is_boot2docker", "(", ")", ":", "# Inspect returns None if the container doesn't exist.", "return", "all", "(", "docker", ".", "inspect_container", "(", "get_container_name", "(", "x", ")", ")", "for", "x", "in", "(", "'pgdata'", ",", "'venv'", ")", ")", "return", "path", ".", "isdir", "(", "datadir", "+", "'/venv'", ")", "and", "path", ".", "isdir", "(", "sitedir", "+", "'/postgres'", ")" ]
Return True if the directories and containers we're expecting are present in datadir, sitedir and containers
[ "Return", "True", "if", "the", "directories", "and", "containers", "we", "re", "expecting", "are", "present", "in", "datadir", "sitedir", "and", "containers" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L351-L365
train
datacats/datacats
datacats/task.py
create_directories
def create_directories(datadir, sitedir, srcdir=None): """ Create expected directories in datadir, sitedir and optionally srcdir """ # It's possible that the datadir already exists # (we're making a secondary site) if not path.isdir(datadir): os.makedirs(datadir, mode=0o700) try: # This should take care if the 'site' subdir if needed os.makedirs(sitedir, mode=0o700) except OSError: raise DatacatsError("Site already exists.") # venv isn't site-specific, the rest are. if not docker.is_boot2docker(): if not path.isdir(datadir + '/venv'): os.makedirs(datadir + '/venv') os.makedirs(sitedir + '/postgres') os.makedirs(sitedir + '/solr') os.makedirs(sitedir + '/files') os.makedirs(sitedir + '/run') if srcdir: os.makedirs(srcdir)
python
def create_directories(datadir, sitedir, srcdir=None): """ Create expected directories in datadir, sitedir and optionally srcdir """ # It's possible that the datadir already exists # (we're making a secondary site) if not path.isdir(datadir): os.makedirs(datadir, mode=0o700) try: # This should take care if the 'site' subdir if needed os.makedirs(sitedir, mode=0o700) except OSError: raise DatacatsError("Site already exists.") # venv isn't site-specific, the rest are. if not docker.is_boot2docker(): if not path.isdir(datadir + '/venv'): os.makedirs(datadir + '/venv') os.makedirs(sitedir + '/postgres') os.makedirs(sitedir + '/solr') os.makedirs(sitedir + '/files') os.makedirs(sitedir + '/run') if srcdir: os.makedirs(srcdir)
[ "def", "create_directories", "(", "datadir", ",", "sitedir", ",", "srcdir", "=", "None", ")", ":", "# It's possible that the datadir already exists", "# (we're making a secondary site)", "if", "not", "path", ".", "isdir", "(", "datadir", ")", ":", "os", ".", "makedirs", "(", "datadir", ",", "mode", "=", "0o700", ")", "try", ":", "# This should take care if the 'site' subdir if needed", "os", ".", "makedirs", "(", "sitedir", ",", "mode", "=", "0o700", ")", "except", "OSError", ":", "raise", "DatacatsError", "(", "\"Site already exists.\"", ")", "# venv isn't site-specific, the rest are.", "if", "not", "docker", ".", "is_boot2docker", "(", ")", ":", "if", "not", "path", ".", "isdir", "(", "datadir", "+", "'/venv'", ")", ":", "os", ".", "makedirs", "(", "datadir", "+", "'/venv'", ")", "os", ".", "makedirs", "(", "sitedir", "+", "'/postgres'", ")", "os", ".", "makedirs", "(", "sitedir", "+", "'/solr'", ")", "os", ".", "makedirs", "(", "sitedir", "+", "'/files'", ")", "os", ".", "makedirs", "(", "sitedir", "+", "'/run'", ")", "if", "srcdir", ":", "os", ".", "makedirs", "(", "srcdir", ")" ]
Create expected directories in datadir, sitedir and optionally srcdir
[ "Create", "expected", "directories", "in", "datadir", "sitedir", "and", "optionally", "srcdir" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L377-L402
train
datacats/datacats
datacats/task.py
create_virtualenv
def create_virtualenv(srcdir, datadir, preload_image, get_container_name): """ Populate venv from preloaded image """ try: if docker.is_boot2docker(): docker.data_only_container( get_container_name('venv'), ['/usr/lib/ckan'], ) img_id = docker.web_command( '/bin/mv /usr/lib/ckan/ /usr/lib/ckan_original', image=preload_image, commit=True, ) docker.web_command( command='/bin/cp -a /usr/lib/ckan_original/. /usr/lib/ckan/.', volumes_from=get_container_name('venv'), image=img_id, ) docker.remove_image(img_id) return docker.web_command( command='/bin/cp -a /usr/lib/ckan/. /usr/lib/ckan_target/.', rw={datadir + '/venv': '/usr/lib/ckan_target'}, image=preload_image, ) finally: rw = {datadir + '/venv': '/usr/lib/ckan'} if not docker.is_boot2docker() else {} volumes_from = get_container_name('venv') if docker.is_boot2docker() else None # fix venv permissions docker.web_command( command='/bin/chown -R --reference=/project /usr/lib/ckan', rw=rw, volumes_from=volumes_from, ro={srcdir: '/project'}, )
python
def create_virtualenv(srcdir, datadir, preload_image, get_container_name): """ Populate venv from preloaded image """ try: if docker.is_boot2docker(): docker.data_only_container( get_container_name('venv'), ['/usr/lib/ckan'], ) img_id = docker.web_command( '/bin/mv /usr/lib/ckan/ /usr/lib/ckan_original', image=preload_image, commit=True, ) docker.web_command( command='/bin/cp -a /usr/lib/ckan_original/. /usr/lib/ckan/.', volumes_from=get_container_name('venv'), image=img_id, ) docker.remove_image(img_id) return docker.web_command( command='/bin/cp -a /usr/lib/ckan/. /usr/lib/ckan_target/.', rw={datadir + '/venv': '/usr/lib/ckan_target'}, image=preload_image, ) finally: rw = {datadir + '/venv': '/usr/lib/ckan'} if not docker.is_boot2docker() else {} volumes_from = get_container_name('venv') if docker.is_boot2docker() else None # fix venv permissions docker.web_command( command='/bin/chown -R --reference=/project /usr/lib/ckan', rw=rw, volumes_from=volumes_from, ro={srcdir: '/project'}, )
[ "def", "create_virtualenv", "(", "srcdir", ",", "datadir", ",", "preload_image", ",", "get_container_name", ")", ":", "try", ":", "if", "docker", ".", "is_boot2docker", "(", ")", ":", "docker", ".", "data_only_container", "(", "get_container_name", "(", "'venv'", ")", ",", "[", "'/usr/lib/ckan'", "]", ",", ")", "img_id", "=", "docker", ".", "web_command", "(", "'/bin/mv /usr/lib/ckan/ /usr/lib/ckan_original'", ",", "image", "=", "preload_image", ",", "commit", "=", "True", ",", ")", "docker", ".", "web_command", "(", "command", "=", "'/bin/cp -a /usr/lib/ckan_original/. /usr/lib/ckan/.'", ",", "volumes_from", "=", "get_container_name", "(", "'venv'", ")", ",", "image", "=", "img_id", ",", ")", "docker", ".", "remove_image", "(", "img_id", ")", "return", "docker", ".", "web_command", "(", "command", "=", "'/bin/cp -a /usr/lib/ckan/. /usr/lib/ckan_target/.'", ",", "rw", "=", "{", "datadir", "+", "'/venv'", ":", "'/usr/lib/ckan_target'", "}", ",", "image", "=", "preload_image", ",", ")", "finally", ":", "rw", "=", "{", "datadir", "+", "'/venv'", ":", "'/usr/lib/ckan'", "}", "if", "not", "docker", ".", "is_boot2docker", "(", ")", "else", "{", "}", "volumes_from", "=", "get_container_name", "(", "'venv'", ")", "if", "docker", ".", "is_boot2docker", "(", ")", "else", "None", "# fix venv permissions", "docker", ".", "web_command", "(", "command", "=", "'/bin/chown -R --reference=/project /usr/lib/ckan'", ",", "rw", "=", "rw", ",", "volumes_from", "=", "volumes_from", ",", "ro", "=", "{", "srcdir", ":", "'/project'", "}", ",", ")" ]
Populate venv from preloaded image
[ "Populate", "venv", "from", "preloaded", "image" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L405-L442
train
datacats/datacats
datacats/task.py
create_source
def create_source(srcdir, preload_image, datapusher=False): """ Copy ckan source, datapusher source (optional), who.ini and schema.xml from preload image into srcdir """ try: docker.web_command( command='/bin/cp -a /project/ckan /project_target/ckan', rw={srcdir: '/project_target'}, image=preload_image) if datapusher: docker.web_command( command='/bin/cp -a /project/datapusher /project_target/datapusher', rw={srcdir: '/project_target'}, image=preload_image) shutil.copy( srcdir + '/ckan/ckan/config/who.ini', srcdir) shutil.copy( srcdir + '/ckan/ckan/config/solr/schema.xml', srcdir) finally: # fix srcdir permissions docker.web_command( command='/bin/chown -R --reference=/project /project', rw={srcdir: '/project'}, )
python
def create_source(srcdir, preload_image, datapusher=False): """ Copy ckan source, datapusher source (optional), who.ini and schema.xml from preload image into srcdir """ try: docker.web_command( command='/bin/cp -a /project/ckan /project_target/ckan', rw={srcdir: '/project_target'}, image=preload_image) if datapusher: docker.web_command( command='/bin/cp -a /project/datapusher /project_target/datapusher', rw={srcdir: '/project_target'}, image=preload_image) shutil.copy( srcdir + '/ckan/ckan/config/who.ini', srcdir) shutil.copy( srcdir + '/ckan/ckan/config/solr/schema.xml', srcdir) finally: # fix srcdir permissions docker.web_command( command='/bin/chown -R --reference=/project /project', rw={srcdir: '/project'}, )
[ "def", "create_source", "(", "srcdir", ",", "preload_image", ",", "datapusher", "=", "False", ")", ":", "try", ":", "docker", ".", "web_command", "(", "command", "=", "'/bin/cp -a /project/ckan /project_target/ckan'", ",", "rw", "=", "{", "srcdir", ":", "'/project_target'", "}", ",", "image", "=", "preload_image", ")", "if", "datapusher", ":", "docker", ".", "web_command", "(", "command", "=", "'/bin/cp -a /project/datapusher /project_target/datapusher'", ",", "rw", "=", "{", "srcdir", ":", "'/project_target'", "}", ",", "image", "=", "preload_image", ")", "shutil", ".", "copy", "(", "srcdir", "+", "'/ckan/ckan/config/who.ini'", ",", "srcdir", ")", "shutil", ".", "copy", "(", "srcdir", "+", "'/ckan/ckan/config/solr/schema.xml'", ",", "srcdir", ")", "finally", ":", "# fix srcdir permissions", "docker", ".", "web_command", "(", "command", "=", "'/bin/chown -R --reference=/project /project'", ",", "rw", "=", "{", "srcdir", ":", "'/project'", "}", ",", ")" ]
Copy ckan source, datapusher source (optional), who.ini and schema.xml from preload image into srcdir
[ "Copy", "ckan", "source", "datapusher", "source", "(", "optional", ")", "who", ".", "ini", "and", "schema", ".", "xml", "from", "preload", "image", "into", "srcdir" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L445-L471
train
datacats/datacats
datacats/task.py
start_supporting_containers
def start_supporting_containers(sitedir, srcdir, passwords, get_container_name, extra_containers, log_syslog=False): """ Start all supporting containers (containers required for CKAN to operate) if they aren't already running, along with some extra containers specified by the user """ if docker.is_boot2docker(): docker.data_only_container(get_container_name('pgdata'), ['/var/lib/postgresql/data']) rw = {} volumes_from = get_container_name('pgdata') else: rw = {sitedir + '/postgres': '/var/lib/postgresql/data'} volumes_from = None running = set(containers_running(get_container_name)) needed = set(extra_containers).union({'postgres', 'solr'}) if not needed.issubset(running): stop_supporting_containers(get_container_name, extra_containers) # users are created when data dir is blank so we must pass # all the user passwords as environment vars # XXX: postgres entrypoint magic docker.run_container( name=get_container_name('postgres'), image='datacats/postgres', environment=passwords, rw=rw, volumes_from=volumes_from, log_syslog=log_syslog) docker.run_container( name=get_container_name('solr'), image='datacats/solr', rw={sitedir + '/solr': '/var/lib/solr'}, ro={srcdir + '/schema.xml': '/etc/solr/conf/schema.xml'}, log_syslog=log_syslog) for container in extra_containers: # We don't know a whole lot about the extra containers so we're just gonna have to # mount /project and /datadir r/o even if they're not needed for ease of # implementation. docker.run_container( name=get_container_name(container), image=EXTRA_IMAGE_MAPPING[container], ro={ sitedir: '/datadir', srcdir: '/project' }, log_syslog=log_syslog )
python
def start_supporting_containers(sitedir, srcdir, passwords, get_container_name, extra_containers, log_syslog=False): """ Start all supporting containers (containers required for CKAN to operate) if they aren't already running, along with some extra containers specified by the user """ if docker.is_boot2docker(): docker.data_only_container(get_container_name('pgdata'), ['/var/lib/postgresql/data']) rw = {} volumes_from = get_container_name('pgdata') else: rw = {sitedir + '/postgres': '/var/lib/postgresql/data'} volumes_from = None running = set(containers_running(get_container_name)) needed = set(extra_containers).union({'postgres', 'solr'}) if not needed.issubset(running): stop_supporting_containers(get_container_name, extra_containers) # users are created when data dir is blank so we must pass # all the user passwords as environment vars # XXX: postgres entrypoint magic docker.run_container( name=get_container_name('postgres'), image='datacats/postgres', environment=passwords, rw=rw, volumes_from=volumes_from, log_syslog=log_syslog) docker.run_container( name=get_container_name('solr'), image='datacats/solr', rw={sitedir + '/solr': '/var/lib/solr'}, ro={srcdir + '/schema.xml': '/etc/solr/conf/schema.xml'}, log_syslog=log_syslog) for container in extra_containers: # We don't know a whole lot about the extra containers so we're just gonna have to # mount /project and /datadir r/o even if they're not needed for ease of # implementation. docker.run_container( name=get_container_name(container), image=EXTRA_IMAGE_MAPPING[container], ro={ sitedir: '/datadir', srcdir: '/project' }, log_syslog=log_syslog )
[ "def", "start_supporting_containers", "(", "sitedir", ",", "srcdir", ",", "passwords", ",", "get_container_name", ",", "extra_containers", ",", "log_syslog", "=", "False", ")", ":", "if", "docker", ".", "is_boot2docker", "(", ")", ":", "docker", ".", "data_only_container", "(", "get_container_name", "(", "'pgdata'", ")", ",", "[", "'/var/lib/postgresql/data'", "]", ")", "rw", "=", "{", "}", "volumes_from", "=", "get_container_name", "(", "'pgdata'", ")", "else", ":", "rw", "=", "{", "sitedir", "+", "'/postgres'", ":", "'/var/lib/postgresql/data'", "}", "volumes_from", "=", "None", "running", "=", "set", "(", "containers_running", "(", "get_container_name", ")", ")", "needed", "=", "set", "(", "extra_containers", ")", ".", "union", "(", "{", "'postgres'", ",", "'solr'", "}", ")", "if", "not", "needed", ".", "issubset", "(", "running", ")", ":", "stop_supporting_containers", "(", "get_container_name", ",", "extra_containers", ")", "# users are created when data dir is blank so we must pass", "# all the user passwords as environment vars", "# XXX: postgres entrypoint magic", "docker", ".", "run_container", "(", "name", "=", "get_container_name", "(", "'postgres'", ")", ",", "image", "=", "'datacats/postgres'", ",", "environment", "=", "passwords", ",", "rw", "=", "rw", ",", "volumes_from", "=", "volumes_from", ",", "log_syslog", "=", "log_syslog", ")", "docker", ".", "run_container", "(", "name", "=", "get_container_name", "(", "'solr'", ")", ",", "image", "=", "'datacats/solr'", ",", "rw", "=", "{", "sitedir", "+", "'/solr'", ":", "'/var/lib/solr'", "}", ",", "ro", "=", "{", "srcdir", "+", "'/schema.xml'", ":", "'/etc/solr/conf/schema.xml'", "}", ",", "log_syslog", "=", "log_syslog", ")", "for", "container", "in", "extra_containers", ":", "# We don't know a whole lot about the extra containers so we're just gonna have to", "# mount /project and /datadir r/o even if they're not needed for ease of", "# implementation.", "docker", ".", "run_container", "(", "name", 
"=", "get_container_name", "(", "container", ")", ",", "image", "=", "EXTRA_IMAGE_MAPPING", "[", "container", "]", ",", "ro", "=", "{", "sitedir", ":", "'/datadir'", ",", "srcdir", ":", "'/project'", "}", ",", "log_syslog", "=", "log_syslog", ")" ]
Start all supporting containers (containers required for CKAN to operate) if they aren't already running, along with some extra containers specified by the user
[ "Start", "all", "supporting", "containers", "(", "containers", "required", "for", "CKAN", "to", "operate", ")", "if", "they", "aren", "t", "already", "running", "along", "with", "some", "extra", "containers", "specified", "by", "the", "user" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L478-L531
train
datacats/datacats
datacats/task.py
stop_supporting_containers
def stop_supporting_containers(get_container_name, extra_containers): """ Stop postgres and solr containers, along with any specified extra containers """ docker.remove_container(get_container_name('postgres')) docker.remove_container(get_container_name('solr')) for container in extra_containers: docker.remove_container(get_container_name(container))
python
def stop_supporting_containers(get_container_name, extra_containers): """ Stop postgres and solr containers, along with any specified extra containers """ docker.remove_container(get_container_name('postgres')) docker.remove_container(get_container_name('solr')) for container in extra_containers: docker.remove_container(get_container_name(container))
[ "def", "stop_supporting_containers", "(", "get_container_name", ",", "extra_containers", ")", ":", "docker", ".", "remove_container", "(", "get_container_name", "(", "'postgres'", ")", ")", "docker", ".", "remove_container", "(", "get_container_name", "(", "'solr'", ")", ")", "for", "container", "in", "extra_containers", ":", "docker", ".", "remove_container", "(", "get_container_name", "(", "container", ")", ")" ]
Stop postgres and solr containers, along with any specified extra containers
[ "Stop", "postgres", "and", "solr", "containers", "along", "with", "any", "specified", "extra", "containers" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L534-L541
train
datacats/datacats
datacats/task.py
containers_running
def containers_running(get_container_name): """ Return a list of containers tracked by this environment that are running """ running = [] for n in ['web', 'postgres', 'solr', 'datapusher', 'redis']: info = docker.inspect_container(get_container_name(n)) if info and not info['State']['Running']: running.append(n + '(halted)') elif info: running.append(n) return running
python
def containers_running(get_container_name): """ Return a list of containers tracked by this environment that are running """ running = [] for n in ['web', 'postgres', 'solr', 'datapusher', 'redis']: info = docker.inspect_container(get_container_name(n)) if info and not info['State']['Running']: running.append(n + '(halted)') elif info: running.append(n) return running
[ "def", "containers_running", "(", "get_container_name", ")", ":", "running", "=", "[", "]", "for", "n", "in", "[", "'web'", ",", "'postgres'", ",", "'solr'", ",", "'datapusher'", ",", "'redis'", "]", ":", "info", "=", "docker", ".", "inspect_container", "(", "get_container_name", "(", "n", ")", ")", "if", "info", "and", "not", "info", "[", "'State'", "]", "[", "'Running'", "]", ":", "running", ".", "append", "(", "n", "+", "'(halted)'", ")", "elif", "info", ":", "running", ".", "append", "(", "n", ")", "return", "running" ]
Return a list of containers tracked by this environment that are running
[ "Return", "a", "list", "of", "containers", "tracked", "by", "this", "environment", "that", "are", "running" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/task.py#L544-L555
train
datacats/datacats
datacats/environment.py
Environment._load_sites
def _load_sites(self): """ Gets the names of all of the sites from the datadir and stores them in self.sites. Also returns this list. """ if not self.sites: self.sites = task.list_sites(self.datadir) return self.sites
python
def _load_sites(self): """ Gets the names of all of the sites from the datadir and stores them in self.sites. Also returns this list. """ if not self.sites: self.sites = task.list_sites(self.datadir) return self.sites
[ "def", "_load_sites", "(", "self", ")", ":", "if", "not", "self", ".", "sites", ":", "self", ".", "sites", "=", "task", ".", "list_sites", "(", "self", ".", "datadir", ")", "return", "self", ".", "sites" ]
Gets the names of all of the sites from the datadir and stores them in self.sites. Also returns this list.
[ "Gets", "the", "names", "of", "all", "of", "the", "sites", "from", "the", "datadir", "and", "stores", "them", "in", "self", ".", "sites", ".", "Also", "returns", "this", "list", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L73-L80
train
datacats/datacats
datacats/environment.py
Environment.save_site
def save_site(self, create=True): """ Save environment settings in the directory that need to be saved even when creating only a new sub-site env. """ self._load_sites() if create: self.sites.append(self.site_name) task.save_new_site(self.site_name, self.sitedir, self.target, self.port, self.address, self.site_url, self.passwords)
python
def save_site(self, create=True): """ Save environment settings in the directory that need to be saved even when creating only a new sub-site env. """ self._load_sites() if create: self.sites.append(self.site_name) task.save_new_site(self.site_name, self.sitedir, self.target, self.port, self.address, self.site_url, self.passwords)
[ "def", "save_site", "(", "self", ",", "create", "=", "True", ")", ":", "self", ".", "_load_sites", "(", ")", "if", "create", ":", "self", ".", "sites", ".", "append", "(", "self", ".", "site_name", ")", "task", ".", "save_new_site", "(", "self", ".", "site_name", ",", "self", ".", "sitedir", ",", "self", ".", "target", ",", "self", ".", "port", ",", "self", ".", "address", ",", "self", ".", "site_url", ",", "self", ".", "passwords", ")" ]
Save environment settings in the directory that need to be saved even when creating only a new sub-site env.
[ "Save", "environment", "settings", "in", "the", "directory", "that", "need", "to", "be", "saved", "even", "when", "creating", "only", "a", "new", "sub", "-", "site", "env", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L82-L92
train
datacats/datacats
datacats/environment.py
Environment.save
def save(self): """ Save environment settings into environment directory, overwriting any existing configuration and discarding site config """ task.save_new_environment(self.name, self.datadir, self.target, self.ckan_version, self.deploy_target, self.always_prod)
python
def save(self): """ Save environment settings into environment directory, overwriting any existing configuration and discarding site config """ task.save_new_environment(self.name, self.datadir, self.target, self.ckan_version, self.deploy_target, self.always_prod)
[ "def", "save", "(", "self", ")", ":", "task", ".", "save_new_environment", "(", "self", ".", "name", ",", "self", ".", "datadir", ",", "self", ".", "target", ",", "self", ".", "ckan_version", ",", "self", ".", "deploy_target", ",", "self", ".", "always_prod", ")" ]
Save environment settings into environment directory, overwriting any existing configuration and discarding site config
[ "Save", "environment", "settings", "into", "environment", "directory", "overwriting", "any", "existing", "configuration", "and", "discarding", "site", "config" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L94-L100
train
datacats/datacats
datacats/environment.py
Environment.new
def new(cls, path, ckan_version, site_name, **kwargs): """ Return a Environment object with settings for a new project. No directories or containers are created by this call. :params path: location for new project directory, may be relative :params ckan_version: release of CKAN to install :params site_name: The name of the site to install database and solr \ eventually. For additional keyword arguments see the __init__ method. Raises DatcatsError if directories or project with same name already exits. """ if ckan_version == 'master': ckan_version = 'latest' name, datadir, srcdir = task.new_environment_check(path, site_name, ckan_version) environment = cls(name, srcdir, datadir, site_name, ckan_version, **kwargs) environment._generate_passwords() return environment
python
def new(cls, path, ckan_version, site_name, **kwargs): """ Return a Environment object with settings for a new project. No directories or containers are created by this call. :params path: location for new project directory, may be relative :params ckan_version: release of CKAN to install :params site_name: The name of the site to install database and solr \ eventually. For additional keyword arguments see the __init__ method. Raises DatcatsError if directories or project with same name already exits. """ if ckan_version == 'master': ckan_version = 'latest' name, datadir, srcdir = task.new_environment_check(path, site_name, ckan_version) environment = cls(name, srcdir, datadir, site_name, ckan_version, **kwargs) environment._generate_passwords() return environment
[ "def", "new", "(", "cls", ",", "path", ",", "ckan_version", ",", "site_name", ",", "*", "*", "kwargs", ")", ":", "if", "ckan_version", "==", "'master'", ":", "ckan_version", "=", "'latest'", "name", ",", "datadir", ",", "srcdir", "=", "task", ".", "new_environment_check", "(", "path", ",", "site_name", ",", "ckan_version", ")", "environment", "=", "cls", "(", "name", ",", "srcdir", ",", "datadir", ",", "site_name", ",", "ckan_version", ",", "*", "*", "kwargs", ")", "environment", ".", "_generate_passwords", "(", ")", "return", "environment" ]
Return a Environment object with settings for a new project. No directories or containers are created by this call. :params path: location for new project directory, may be relative :params ckan_version: release of CKAN to install :params site_name: The name of the site to install database and solr \ eventually. For additional keyword arguments see the __init__ method. Raises DatcatsError if directories or project with same name already exits.
[ "Return", "a", "Environment", "object", "with", "settings", "for", "a", "new", "project", ".", "No", "directories", "or", "containers", "are", "created", "by", "this", "call", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L103-L123
train
datacats/datacats
datacats/environment.py
Environment.load
def load(cls, environment_name=None, site_name='primary', data_only=False, allow_old=False): """ Return an Environment object based on an existing environnment+site. :param environment_name: exising environment name, path or None to look in current or parent directories for project :param data_only: set to True to only load from data dir, not the project dir; Used for purging environment data. :param allow_old: load a very minimal subset of what we usually load. This will only work for purging environment data on an old site. Raises DatacatsError if environment can't be found or if there is an error parsing the environment information. """ srcdir, extension_dir, datadir = task.find_environment_dirs( environment_name, data_only) if datadir and data_only: return cls(environment_name, None, datadir, site_name) (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key, extra_containers) = task.load_environment(srcdir, datadir, allow_old) if not allow_old: (port, address, site_url, passwords) = task.load_site(srcdir, datadir, site_name) else: (port, address, site_url, passwords) = (None, None, None, None) environment = cls(name, srcdir, datadir, site_name, ckan_version=ckan_version, port=port, deploy_target=deploy_target, site_url=site_url, always_prod=always_prod, address=address, extension_dir=extension_dir, remote_server_key=remote_server_key, extra_containers=extra_containers) if passwords: environment.passwords = passwords else: environment._generate_passwords() if not allow_old: environment._load_sites() return environment
python
def load(cls, environment_name=None, site_name='primary', data_only=False, allow_old=False): """ Return an Environment object based on an existing environnment+site. :param environment_name: exising environment name, path or None to look in current or parent directories for project :param data_only: set to True to only load from data dir, not the project dir; Used for purging environment data. :param allow_old: load a very minimal subset of what we usually load. This will only work for purging environment data on an old site. Raises DatacatsError if environment can't be found or if there is an error parsing the environment information. """ srcdir, extension_dir, datadir = task.find_environment_dirs( environment_name, data_only) if datadir and data_only: return cls(environment_name, None, datadir, site_name) (datadir, name, ckan_version, always_prod, deploy_target, remote_server_key, extra_containers) = task.load_environment(srcdir, datadir, allow_old) if not allow_old: (port, address, site_url, passwords) = task.load_site(srcdir, datadir, site_name) else: (port, address, site_url, passwords) = (None, None, None, None) environment = cls(name, srcdir, datadir, site_name, ckan_version=ckan_version, port=port, deploy_target=deploy_target, site_url=site_url, always_prod=always_prod, address=address, extension_dir=extension_dir, remote_server_key=remote_server_key, extra_containers=extra_containers) if passwords: environment.passwords = passwords else: environment._generate_passwords() if not allow_old: environment._load_sites() return environment
[ "def", "load", "(", "cls", ",", "environment_name", "=", "None", ",", "site_name", "=", "'primary'", ",", "data_only", "=", "False", ",", "allow_old", "=", "False", ")", ":", "srcdir", ",", "extension_dir", ",", "datadir", "=", "task", ".", "find_environment_dirs", "(", "environment_name", ",", "data_only", ")", "if", "datadir", "and", "data_only", ":", "return", "cls", "(", "environment_name", ",", "None", ",", "datadir", ",", "site_name", ")", "(", "datadir", ",", "name", ",", "ckan_version", ",", "always_prod", ",", "deploy_target", ",", "remote_server_key", ",", "extra_containers", ")", "=", "task", ".", "load_environment", "(", "srcdir", ",", "datadir", ",", "allow_old", ")", "if", "not", "allow_old", ":", "(", "port", ",", "address", ",", "site_url", ",", "passwords", ")", "=", "task", ".", "load_site", "(", "srcdir", ",", "datadir", ",", "site_name", ")", "else", ":", "(", "port", ",", "address", ",", "site_url", ",", "passwords", ")", "=", "(", "None", ",", "None", ",", "None", ",", "None", ")", "environment", "=", "cls", "(", "name", ",", "srcdir", ",", "datadir", ",", "site_name", ",", "ckan_version", "=", "ckan_version", ",", "port", "=", "port", ",", "deploy_target", "=", "deploy_target", ",", "site_url", "=", "site_url", ",", "always_prod", "=", "always_prod", ",", "address", "=", "address", ",", "extension_dir", "=", "extension_dir", ",", "remote_server_key", "=", "remote_server_key", ",", "extra_containers", "=", "extra_containers", ")", "if", "passwords", ":", "environment", ".", "passwords", "=", "passwords", "else", ":", "environment", ".", "_generate_passwords", "(", ")", "if", "not", "allow_old", ":", "environment", ".", "_load_sites", "(", ")", "return", "environment" ]
Return an Environment object based on an existing environnment+site. :param environment_name: exising environment name, path or None to look in current or parent directories for project :param data_only: set to True to only load from data dir, not the project dir; Used for purging environment data. :param allow_old: load a very minimal subset of what we usually load. This will only work for purging environment data on an old site. Raises DatacatsError if environment can't be found or if there is an error parsing the environment information.
[ "Return", "an", "Environment", "object", "based", "on", "an", "existing", "environnment", "+", "site", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L126-L168
train
datacats/datacats
datacats/environment.py
Environment.data_complete
def data_complete(self): """ Return True if all the expected datadir files are present """ return task.data_complete(self.datadir, self.sitedir, self._get_container_name)
python
def data_complete(self): """ Return True if all the expected datadir files are present """ return task.data_complete(self.datadir, self.sitedir, self._get_container_name)
[ "def", "data_complete", "(", "self", ")", ":", "return", "task", ".", "data_complete", "(", "self", ".", "datadir", ",", "self", ".", "sitedir", ",", "self", ".", "_get_container_name", ")" ]
Return True if all the expected datadir files are present
[ "Return", "True", "if", "all", "the", "expected", "datadir", "files", "are", "present" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L182-L187
train
datacats/datacats
datacats/environment.py
Environment.require_data
def require_data(self): """ raise a DatacatsError if the datadir or volumes are missing or damaged """ files = task.source_missing(self.target) if files: raise DatacatsError('Missing files in source directory:\n' + '\n'.join(files)) if not self.data_exists(): raise DatacatsError('Environment datadir missing. ' 'Try "datacats init".') if not self.data_complete(): raise DatacatsError('Environment datadir damaged or volumes ' 'missing. ' 'To reset and discard all data use ' '"datacats reset"')
python
def require_data(self): """ raise a DatacatsError if the datadir or volumes are missing or damaged """ files = task.source_missing(self.target) if files: raise DatacatsError('Missing files in source directory:\n' + '\n'.join(files)) if not self.data_exists(): raise DatacatsError('Environment datadir missing. ' 'Try "datacats init".') if not self.data_complete(): raise DatacatsError('Environment datadir damaged or volumes ' 'missing. ' 'To reset and discard all data use ' '"datacats reset"')
[ "def", "require_data", "(", "self", ")", ":", "files", "=", "task", ".", "source_missing", "(", "self", ".", "target", ")", "if", "files", ":", "raise", "DatacatsError", "(", "'Missing files in source directory:\\n'", "+", "'\\n'", ".", "join", "(", "files", ")", ")", "if", "not", "self", ".", "data_exists", "(", ")", ":", "raise", "DatacatsError", "(", "'Environment datadir missing. '", "'Try \"datacats init\".'", ")", "if", "not", "self", ".", "data_complete", "(", ")", ":", "raise", "DatacatsError", "(", "'Environment datadir damaged or volumes '", "'missing. '", "'To reset and discard all data use '", "'\"datacats reset\"'", ")" ]
raise a DatacatsError if the datadir or volumes are missing or damaged
[ "raise", "a", "DatacatsError", "if", "the", "datadir", "or", "volumes", "are", "missing", "or", "damaged" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L189-L204
train
datacats/datacats
datacats/environment.py
Environment.create_directories
def create_directories(self, create_project_dir=True): """ Call once for new projects to create the initial project directories. """ return task.create_directories(self.datadir, self.sitedir, self.target if create_project_dir else None)
python
def create_directories(self, create_project_dir=True): """ Call once for new projects to create the initial project directories. """ return task.create_directories(self.datadir, self.sitedir, self.target if create_project_dir else None)
[ "def", "create_directories", "(", "self", ",", "create_project_dir", "=", "True", ")", ":", "return", "task", ".", "create_directories", "(", "self", ".", "datadir", ",", "self", ".", "sitedir", ",", "self", ".", "target", "if", "create_project_dir", "else", "None", ")" ]
Call once for new projects to create the initial project directories.
[ "Call", "once", "for", "new", "projects", "to", "create", "the", "initial", "project", "directories", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L206-L211
train
datacats/datacats
datacats/environment.py
Environment.create_virtualenv
def create_virtualenv(self): """ Populate venv from preloaded image """ return task.create_virtualenv(self.target, self.datadir, self._preload_image(), self._get_container_name)
python
def create_virtualenv(self): """ Populate venv from preloaded image """ return task.create_virtualenv(self.target, self.datadir, self._preload_image(), self._get_container_name)
[ "def", "create_virtualenv", "(", "self", ")", ":", "return", "task", ".", "create_virtualenv", "(", "self", ".", "target", ",", "self", ".", "datadir", ",", "self", ".", "_preload_image", "(", ")", ",", "self", ".", "_get_container_name", ")" ]
Populate venv from preloaded image
[ "Populate", "venv", "from", "preloaded", "image" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L228-L233
train
datacats/datacats
datacats/environment.py
Environment.clean_virtualenv
def clean_virtualenv(self): """ Empty our virtualenv so that new (or older) dependencies may be installed """ self.user_run_script( script=scripts.get_script_path('clean_virtualenv.sh'), args=[], rw_venv=True, )
python
def clean_virtualenv(self): """ Empty our virtualenv so that new (or older) dependencies may be installed """ self.user_run_script( script=scripts.get_script_path('clean_virtualenv.sh'), args=[], rw_venv=True, )
[ "def", "clean_virtualenv", "(", "self", ")", ":", "self", ".", "user_run_script", "(", "script", "=", "scripts", ".", "get_script_path", "(", "'clean_virtualenv.sh'", ")", ",", "args", "=", "[", "]", ",", "rw_venv", "=", "True", ",", ")" ]
Empty our virtualenv so that new (or older) dependencies may be installed
[ "Empty", "our", "virtualenv", "so", "that", "new", "(", "or", "older", ")", "dependencies", "may", "be", "installed" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L235-L244
train
datacats/datacats
datacats/environment.py
Environment.create_source
def create_source(self, datapusher=True): """ Populate ckan directory from preloaded image and copy who.ini and schema.xml info conf directory """ task.create_source(self.target, self._preload_image(), datapusher)
python
def create_source(self, datapusher=True): """ Populate ckan directory from preloaded image and copy who.ini and schema.xml info conf directory """ task.create_source(self.target, self._preload_image(), datapusher)
[ "def", "create_source", "(", "self", ",", "datapusher", "=", "True", ")", ":", "task", ".", "create_source", "(", "self", ".", "target", ",", "self", ".", "_preload_image", "(", ")", ",", "datapusher", ")" ]
Populate ckan directory from preloaded image and copy who.ini and schema.xml info conf directory
[ "Populate", "ckan", "directory", "from", "preloaded", "image", "and", "copy", "who", ".", "ini", "and", "schema", ".", "xml", "info", "conf", "directory" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L253-L258
train
datacats/datacats
datacats/environment.py
Environment.start_supporting_containers
def start_supporting_containers(self, log_syslog=False): """ Start all supporting containers (containers required for CKAN to operate) if they aren't already running. :param log_syslog: A flag to redirect all container logs to host's syslog """ log_syslog = True if self.always_prod else log_syslog # in production we always use log_syslog driver (to aggregate all the logs) task.start_supporting_containers( self.sitedir, self.target, self.passwords, self._get_container_name, self.extra_containers, log_syslog=log_syslog )
python
def start_supporting_containers(self, log_syslog=False): """ Start all supporting containers (containers required for CKAN to operate) if they aren't already running. :param log_syslog: A flag to redirect all container logs to host's syslog """ log_syslog = True if self.always_prod else log_syslog # in production we always use log_syslog driver (to aggregate all the logs) task.start_supporting_containers( self.sitedir, self.target, self.passwords, self._get_container_name, self.extra_containers, log_syslog=log_syslog )
[ "def", "start_supporting_containers", "(", "self", ",", "log_syslog", "=", "False", ")", ":", "log_syslog", "=", "True", "if", "self", ".", "always_prod", "else", "log_syslog", "# in production we always use log_syslog driver (to aggregate all the logs)", "task", ".", "start_supporting_containers", "(", "self", ".", "sitedir", ",", "self", ".", "target", ",", "self", ".", "passwords", ",", "self", ".", "_get_container_name", ",", "self", ".", "extra_containers", ",", "log_syslog", "=", "log_syslog", ")" ]
Start all supporting containers (containers required for CKAN to operate) if they aren't already running. :param log_syslog: A flag to redirect all container logs to host's syslog
[ "Start", "all", "supporting", "containers", "(", "containers", "required", "for", "CKAN", "to", "operate", ")", "if", "they", "aren", "t", "already", "running", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L260-L277
train
datacats/datacats
datacats/environment.py
Environment.create_ckan_ini
def create_ckan_ini(self): """ Use make-config to generate an initial development.ini file """ self.run_command( command='/scripts/run_as_user.sh /usr/lib/ckan/bin/paster make-config' ' ckan /project/development.ini', rw_project=True, ro={scripts.get_script_path('run_as_user.sh'): '/scripts/run_as_user.sh'}, )
python
def create_ckan_ini(self): """ Use make-config to generate an initial development.ini file """ self.run_command( command='/scripts/run_as_user.sh /usr/lib/ckan/bin/paster make-config' ' ckan /project/development.ini', rw_project=True, ro={scripts.get_script_path('run_as_user.sh'): '/scripts/run_as_user.sh'}, )
[ "def", "create_ckan_ini", "(", "self", ")", ":", "self", ".", "run_command", "(", "command", "=", "'/scripts/run_as_user.sh /usr/lib/ckan/bin/paster make-config'", "' ckan /project/development.ini'", ",", "rw_project", "=", "True", ",", "ro", "=", "{", "scripts", ".", "get_script_path", "(", "'run_as_user.sh'", ")", ":", "'/scripts/run_as_user.sh'", "}", ",", ")" ]
Use make-config to generate an initial development.ini file
[ "Use", "make", "-", "config", "to", "generate", "an", "initial", "development", ".", "ini", "file" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L295-L304
train
datacats/datacats
datacats/environment.py
Environment.update_ckan_ini
def update_ckan_ini(self, skin=True): """ Use config-tool to update development.ini with our environment settings :param skin: use environment template skin plugin True/False """ command = [ '/usr/lib/ckan/bin/paster', '--plugin=ckan', 'config-tool', '/project/development.ini', '-e', 'sqlalchemy.url = postgresql://<hidden>', 'ckan.datastore.read_url = postgresql://<hidden>', 'ckan.datastore.write_url = postgresql://<hidden>', 'ckan.datapusher.url = http://datapusher:8800', 'solr_url = http://solr:8080/solr', 'ckan.storage_path = /var/www/storage', 'ckan.plugins = datastore resource_proxy text_view ' + ('datapusher ' if exists(self.target + '/datapusher') else '') + 'recline_grid_view recline_graph_view' + (' {0}_theme'.format(self.name) if skin else ''), 'ckan.site_title = ' + self.name, 'ckan.site_logo =', 'ckan.auth.create_user_via_web = false', ] self.run_command(command=command, rw_project=True)
python
def update_ckan_ini(self, skin=True): """ Use config-tool to update development.ini with our environment settings :param skin: use environment template skin plugin True/False """ command = [ '/usr/lib/ckan/bin/paster', '--plugin=ckan', 'config-tool', '/project/development.ini', '-e', 'sqlalchemy.url = postgresql://<hidden>', 'ckan.datastore.read_url = postgresql://<hidden>', 'ckan.datastore.write_url = postgresql://<hidden>', 'ckan.datapusher.url = http://datapusher:8800', 'solr_url = http://solr:8080/solr', 'ckan.storage_path = /var/www/storage', 'ckan.plugins = datastore resource_proxy text_view ' + ('datapusher ' if exists(self.target + '/datapusher') else '') + 'recline_grid_view recline_graph_view' + (' {0}_theme'.format(self.name) if skin else ''), 'ckan.site_title = ' + self.name, 'ckan.site_logo =', 'ckan.auth.create_user_via_web = false', ] self.run_command(command=command, rw_project=True)
[ "def", "update_ckan_ini", "(", "self", ",", "skin", "=", "True", ")", ":", "command", "=", "[", "'/usr/lib/ckan/bin/paster'", ",", "'--plugin=ckan'", ",", "'config-tool'", ",", "'/project/development.ini'", ",", "'-e'", ",", "'sqlalchemy.url = postgresql://<hidden>'", ",", "'ckan.datastore.read_url = postgresql://<hidden>'", ",", "'ckan.datastore.write_url = postgresql://<hidden>'", ",", "'ckan.datapusher.url = http://datapusher:8800'", ",", "'solr_url = http://solr:8080/solr'", ",", "'ckan.storage_path = /var/www/storage'", ",", "'ckan.plugins = datastore resource_proxy text_view '", "+", "(", "'datapusher '", "if", "exists", "(", "self", ".", "target", "+", "'/datapusher'", ")", "else", "''", ")", "+", "'recline_grid_view recline_graph_view'", "+", "(", "' {0}_theme'", ".", "format", "(", "self", ".", "name", ")", "if", "skin", "else", "''", ")", ",", "'ckan.site_title = '", "+", "self", ".", "name", ",", "'ckan.site_logo ='", ",", "'ckan.auth.create_user_via_web = false'", ",", "]", "self", ".", "run_command", "(", "command", "=", "command", ",", "rw_project", "=", "True", ")" ]
Use config-tool to update development.ini with our environment settings :param skin: use environment template skin plugin True/False
[ "Use", "config", "-", "tool", "to", "update", "development", ".", "ini", "with", "our", "environment", "settings" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L306-L329
train
datacats/datacats
datacats/environment.py
Environment.create_install_template_skin
def create_install_template_skin(self): """ Create an example ckan extension for this environment and install it """ ckan_extension_template(self.name, self.target) self.install_package_develop('ckanext-' + self.name + 'theme')
python
def create_install_template_skin(self): """ Create an example ckan extension for this environment and install it """ ckan_extension_template(self.name, self.target) self.install_package_develop('ckanext-' + self.name + 'theme')
[ "def", "create_install_template_skin", "(", "self", ")", ":", "ckan_extension_template", "(", "self", ".", "name", ",", "self", ".", "target", ")", "self", ".", "install_package_develop", "(", "'ckanext-'", "+", "self", ".", "name", "+", "'theme'", ")" ]
Create an example ckan extension for this environment and install it
[ "Create", "an", "example", "ckan", "extension", "for", "this", "environment", "and", "install", "it" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L331-L336
train
datacats/datacats
datacats/environment.py
Environment.ckan_db_init
def ckan_db_init(self, retry_seconds=DB_INIT_RETRY_SECONDS): """ Run db init to create all ckan tables :param retry_seconds: how long to retry waiting for db to start """ # XXX workaround for not knowing how long we need to wait # for postgres to be ready. fix this by changing the postgres # entrypoint, or possibly running once with command=/bin/true started = time.time() while True: try: self.run_command( '/usr/lib/ckan/bin/paster --plugin=ckan db init ' '-c /project/development.ini', db_links=True, clean_up=True, ) break except WebCommandError: if started + retry_seconds > time.time(): raise time.sleep(DB_INIT_RETRY_DELAY)
python
def ckan_db_init(self, retry_seconds=DB_INIT_RETRY_SECONDS): """ Run db init to create all ckan tables :param retry_seconds: how long to retry waiting for db to start """ # XXX workaround for not knowing how long we need to wait # for postgres to be ready. fix this by changing the postgres # entrypoint, or possibly running once with command=/bin/true started = time.time() while True: try: self.run_command( '/usr/lib/ckan/bin/paster --plugin=ckan db init ' '-c /project/development.ini', db_links=True, clean_up=True, ) break except WebCommandError: if started + retry_seconds > time.time(): raise time.sleep(DB_INIT_RETRY_DELAY)
[ "def", "ckan_db_init", "(", "self", ",", "retry_seconds", "=", "DB_INIT_RETRY_SECONDS", ")", ":", "# XXX workaround for not knowing how long we need to wait", "# for postgres to be ready. fix this by changing the postgres", "# entrypoint, or possibly running once with command=/bin/true", "started", "=", "time", ".", "time", "(", ")", "while", "True", ":", "try", ":", "self", ".", "run_command", "(", "'/usr/lib/ckan/bin/paster --plugin=ckan db init '", "'-c /project/development.ini'", ",", "db_links", "=", "True", ",", "clean_up", "=", "True", ",", ")", "break", "except", "WebCommandError", ":", "if", "started", "+", "retry_seconds", ">", "time", ".", "time", "(", ")", ":", "raise", "time", ".", "sleep", "(", "DB_INIT_RETRY_DELAY", ")" ]
Run db init to create all ckan tables :param retry_seconds: how long to retry waiting for db to start
[ "Run", "db", "init", "to", "create", "all", "ckan", "tables" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L338-L360
train
datacats/datacats
datacats/environment.py
Environment.start_ckan
def start_ckan(self, production=False, log_syslog=False, paster_reload=True, interactive=False): """ Start the apache server or paster serve :param log_syslog: A flag to redirect all container logs to host's syslog :param production: True for apache, False for paster serve + debug on :param paster_reload: Instruct paster to watch for file changes """ self.stop_ckan() address = self.address or '127.0.0.1' port = self.port # in prod we always use log_syslog driver log_syslog = True if self.always_prod else log_syslog production = production or self.always_prod # We only override the site URL with the docker URL on three conditions override_site_url = (self.address is None and not is_boot2docker() and not self.site_url) command = ['/scripts/web.sh', str(production), str(override_site_url), str(paster_reload)] # XXX nasty hack, remove this once we have a lessc command # for users (not just for building our preload image) if not production: css = self.target + '/ckan/ckan/public/base/css' if not exists(css + '/main.debug.css'): from shutil import copyfile copyfile(css + '/main.css', css + '/main.debug.css') ro = { self.target: '/project', scripts.get_script_path('datapusher.sh'): '/scripts/datapusher.sh' } if not is_boot2docker(): ro[self.datadir + '/venv'] = '/usr/lib/ckan' datapusher = self.needs_datapusher() if datapusher: run_container( self._get_container_name('datapusher'), 'datacats/web', '/scripts/datapusher.sh', ro=ro, volumes_from=(self._get_container_name('venv') if is_boot2docker() else None), log_syslog=log_syslog) while True: self._create_run_ini(port, production) try: self._run_web_container(port, command, address, log_syslog=log_syslog, datapusher=datapusher, interactive=interactive) if not is_boot2docker(): self.address = address except PortAllocatedError: port = self._next_port(port) continue break
python
def start_ckan(self, production=False, log_syslog=False, paster_reload=True, interactive=False): """ Start the apache server or paster serve :param log_syslog: A flag to redirect all container logs to host's syslog :param production: True for apache, False for paster serve + debug on :param paster_reload: Instruct paster to watch for file changes """ self.stop_ckan() address = self.address or '127.0.0.1' port = self.port # in prod we always use log_syslog driver log_syslog = True if self.always_prod else log_syslog production = production or self.always_prod # We only override the site URL with the docker URL on three conditions override_site_url = (self.address is None and not is_boot2docker() and not self.site_url) command = ['/scripts/web.sh', str(production), str(override_site_url), str(paster_reload)] # XXX nasty hack, remove this once we have a lessc command # for users (not just for building our preload image) if not production: css = self.target + '/ckan/ckan/public/base/css' if not exists(css + '/main.debug.css'): from shutil import copyfile copyfile(css + '/main.css', css + '/main.debug.css') ro = { self.target: '/project', scripts.get_script_path('datapusher.sh'): '/scripts/datapusher.sh' } if not is_boot2docker(): ro[self.datadir + '/venv'] = '/usr/lib/ckan' datapusher = self.needs_datapusher() if datapusher: run_container( self._get_container_name('datapusher'), 'datacats/web', '/scripts/datapusher.sh', ro=ro, volumes_from=(self._get_container_name('venv') if is_boot2docker() else None), log_syslog=log_syslog) while True: self._create_run_ini(port, production) try: self._run_web_container(port, command, address, log_syslog=log_syslog, datapusher=datapusher, interactive=interactive) if not is_boot2docker(): self.address = address except PortAllocatedError: port = self._next_port(port) continue break
[ "def", "start_ckan", "(", "self", ",", "production", "=", "False", ",", "log_syslog", "=", "False", ",", "paster_reload", "=", "True", ",", "interactive", "=", "False", ")", ":", "self", ".", "stop_ckan", "(", ")", "address", "=", "self", ".", "address", "or", "'127.0.0.1'", "port", "=", "self", ".", "port", "# in prod we always use log_syslog driver", "log_syslog", "=", "True", "if", "self", ".", "always_prod", "else", "log_syslog", "production", "=", "production", "or", "self", ".", "always_prod", "# We only override the site URL with the docker URL on three conditions", "override_site_url", "=", "(", "self", ".", "address", "is", "None", "and", "not", "is_boot2docker", "(", ")", "and", "not", "self", ".", "site_url", ")", "command", "=", "[", "'/scripts/web.sh'", ",", "str", "(", "production", ")", ",", "str", "(", "override_site_url", ")", ",", "str", "(", "paster_reload", ")", "]", "# XXX nasty hack, remove this once we have a lessc command", "# for users (not just for building our preload image)", "if", "not", "production", ":", "css", "=", "self", ".", "target", "+", "'/ckan/ckan/public/base/css'", "if", "not", "exists", "(", "css", "+", "'/main.debug.css'", ")", ":", "from", "shutil", "import", "copyfile", "copyfile", "(", "css", "+", "'/main.css'", ",", "css", "+", "'/main.debug.css'", ")", "ro", "=", "{", "self", ".", "target", ":", "'/project'", ",", "scripts", ".", "get_script_path", "(", "'datapusher.sh'", ")", ":", "'/scripts/datapusher.sh'", "}", "if", "not", "is_boot2docker", "(", ")", ":", "ro", "[", "self", ".", "datadir", "+", "'/venv'", "]", "=", "'/usr/lib/ckan'", "datapusher", "=", "self", ".", "needs_datapusher", "(", ")", "if", "datapusher", ":", "run_container", "(", "self", ".", "_get_container_name", "(", "'datapusher'", ")", ",", "'datacats/web'", ",", "'/scripts/datapusher.sh'", ",", "ro", "=", "ro", ",", "volumes_from", "=", "(", "self", ".", "_get_container_name", "(", "'venv'", ")", "if", "is_boot2docker", "(", ")", "else", "None", 
")", ",", "log_syslog", "=", "log_syslog", ")", "while", "True", ":", "self", ".", "_create_run_ini", "(", "port", ",", "production", ")", "try", ":", "self", ".", "_run_web_container", "(", "port", ",", "command", ",", "address", ",", "log_syslog", "=", "log_syslog", ",", "datapusher", "=", "datapusher", ",", "interactive", "=", "interactive", ")", "if", "not", "is_boot2docker", "(", ")", ":", "self", ".", "address", "=", "address", "except", "PortAllocatedError", ":", "port", "=", "self", ".", "_next_port", "(", "port", ")", "continue", "break" ]
Start the apache server or paster serve :param log_syslog: A flag to redirect all container logs to host's syslog :param production: True for apache, False for paster serve + debug on :param paster_reload: Instruct paster to watch for file changes
[ "Start", "the", "apache", "server", "or", "paster", "serve" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L391-L450
train
datacats/datacats
datacats/environment.py
Environment._create_run_ini
def _create_run_ini(self, port, production, output='development.ini', source='development.ini', override_site_url=True): """ Create run/development.ini in datadir with debug and site_url overridden and with correct db passwords inserted """ cp = SafeConfigParser() try: cp.read([self.target + '/' + source]) except ConfigParserError: raise DatacatsError('Error reading development.ini') cp.set('DEFAULT', 'debug', 'false' if production else 'true') if self.site_url: site_url = self.site_url else: if is_boot2docker(): web_address = socket.gethostbyname(docker_host()) else: web_address = self.address site_url = 'http://{}:{}'.format(web_address, port) if override_site_url: cp.set('app:main', 'ckan.site_url', site_url) cp.set('app:main', 'sqlalchemy.url', 'postgresql://ckan:{0}@db:5432/ckan' .format(self.passwords['CKAN_PASSWORD'])) cp.set('app:main', 'ckan.datastore.read_url', 'postgresql://ckan_datastore_readonly:{0}@db:5432/ckan_datastore' .format(self.passwords['DATASTORE_RO_PASSWORD'])) cp.set('app:main', 'ckan.datastore.write_url', 'postgresql://ckan_datastore_readwrite:{0}@db:5432/ckan_datastore' .format(self.passwords['DATASTORE_RW_PASSWORD'])) cp.set('app:main', 'solr_url', 'http://solr:8080/solr') cp.set('app:main', 'ckan.redis.url', 'http://redis:6379') cp.set('app:main', 'beaker.session.secret', self.passwords['BEAKER_SESSION_SECRET']) if not isdir(self.sitedir + '/run'): makedirs(self.sitedir + '/run') # upgrade old datadir with open(self.sitedir + '/run/' + output, 'w') as runini: cp.write(runini)
python
def _create_run_ini(self, port, production, output='development.ini', source='development.ini', override_site_url=True): """ Create run/development.ini in datadir with debug and site_url overridden and with correct db passwords inserted """ cp = SafeConfigParser() try: cp.read([self.target + '/' + source]) except ConfigParserError: raise DatacatsError('Error reading development.ini') cp.set('DEFAULT', 'debug', 'false' if production else 'true') if self.site_url: site_url = self.site_url else: if is_boot2docker(): web_address = socket.gethostbyname(docker_host()) else: web_address = self.address site_url = 'http://{}:{}'.format(web_address, port) if override_site_url: cp.set('app:main', 'ckan.site_url', site_url) cp.set('app:main', 'sqlalchemy.url', 'postgresql://ckan:{0}@db:5432/ckan' .format(self.passwords['CKAN_PASSWORD'])) cp.set('app:main', 'ckan.datastore.read_url', 'postgresql://ckan_datastore_readonly:{0}@db:5432/ckan_datastore' .format(self.passwords['DATASTORE_RO_PASSWORD'])) cp.set('app:main', 'ckan.datastore.write_url', 'postgresql://ckan_datastore_readwrite:{0}@db:5432/ckan_datastore' .format(self.passwords['DATASTORE_RW_PASSWORD'])) cp.set('app:main', 'solr_url', 'http://solr:8080/solr') cp.set('app:main', 'ckan.redis.url', 'http://redis:6379') cp.set('app:main', 'beaker.session.secret', self.passwords['BEAKER_SESSION_SECRET']) if not isdir(self.sitedir + '/run'): makedirs(self.sitedir + '/run') # upgrade old datadir with open(self.sitedir + '/run/' + output, 'w') as runini: cp.write(runini)
[ "def", "_create_run_ini", "(", "self", ",", "port", ",", "production", ",", "output", "=", "'development.ini'", ",", "source", "=", "'development.ini'", ",", "override_site_url", "=", "True", ")", ":", "cp", "=", "SafeConfigParser", "(", ")", "try", ":", "cp", ".", "read", "(", "[", "self", ".", "target", "+", "'/'", "+", "source", "]", ")", "except", "ConfigParserError", ":", "raise", "DatacatsError", "(", "'Error reading development.ini'", ")", "cp", ".", "set", "(", "'DEFAULT'", ",", "'debug'", ",", "'false'", "if", "production", "else", "'true'", ")", "if", "self", ".", "site_url", ":", "site_url", "=", "self", ".", "site_url", "else", ":", "if", "is_boot2docker", "(", ")", ":", "web_address", "=", "socket", ".", "gethostbyname", "(", "docker_host", "(", ")", ")", "else", ":", "web_address", "=", "self", ".", "address", "site_url", "=", "'http://{}:{}'", ".", "format", "(", "web_address", ",", "port", ")", "if", "override_site_url", ":", "cp", ".", "set", "(", "'app:main'", ",", "'ckan.site_url'", ",", "site_url", ")", "cp", ".", "set", "(", "'app:main'", ",", "'sqlalchemy.url'", ",", "'postgresql://ckan:{0}@db:5432/ckan'", ".", "format", "(", "self", ".", "passwords", "[", "'CKAN_PASSWORD'", "]", ")", ")", "cp", ".", "set", "(", "'app:main'", ",", "'ckan.datastore.read_url'", ",", "'postgresql://ckan_datastore_readonly:{0}@db:5432/ckan_datastore'", ".", "format", "(", "self", ".", "passwords", "[", "'DATASTORE_RO_PASSWORD'", "]", ")", ")", "cp", ".", "set", "(", "'app:main'", ",", "'ckan.datastore.write_url'", ",", "'postgresql://ckan_datastore_readwrite:{0}@db:5432/ckan_datastore'", ".", "format", "(", "self", ".", "passwords", "[", "'DATASTORE_RW_PASSWORD'", "]", ")", ")", "cp", ".", "set", "(", "'app:main'", ",", "'solr_url'", ",", "'http://solr:8080/solr'", ")", "cp", ".", "set", "(", "'app:main'", ",", "'ckan.redis.url'", ",", "'http://redis:6379'", ")", "cp", ".", "set", "(", "'app:main'", ",", "'beaker.session.secret'", ",", "self", ".", "passwords", 
"[", "'BEAKER_SESSION_SECRET'", "]", ")", "if", "not", "isdir", "(", "self", ".", "sitedir", "+", "'/run'", ")", ":", "makedirs", "(", "self", ".", "sitedir", "+", "'/run'", ")", "# upgrade old datadir", "with", "open", "(", "self", ".", "sitedir", "+", "'/run/'", "+", "output", ",", "'w'", ")", "as", "runini", ":", "cp", ".", "write", "(", "runini", ")" ]
Create run/development.ini in datadir with debug and site_url overridden and with correct db passwords inserted
[ "Create", "run", "/", "development", ".", "ini", "in", "datadir", "with", "debug", "and", "site_url", "overridden", "and", "with", "correct", "db", "passwords", "inserted" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L452-L494
train
datacats/datacats
datacats/environment.py
Environment._run_web_container
def _run_web_container(self, port, command, address, log_syslog=False, datapusher=True, interactive=False): """ Start web container on port with command """ if is_boot2docker(): ro = {} volumes_from = self._get_container_name('venv') else: ro = {self.datadir + '/venv': '/usr/lib/ckan'} volumes_from = None links = { self._get_container_name('solr'): 'solr', self._get_container_name('postgres'): 'db' } links.update({self._get_container_name(container): container for container in self.extra_containers}) if datapusher: if 'datapusher' not in self.containers_running(): raise DatacatsError(container_logs(self._get_container_name('datapusher'), "all", False, False)) links[self._get_container_name('datapusher')] = 'datapusher' ro = dict({ self.target: '/project/', scripts.get_script_path('web.sh'): '/scripts/web.sh', scripts.get_script_path('adjust_devini.py'): '/scripts/adjust_devini.py'}, **ro) rw = { self.sitedir + '/files': '/var/www/storage', self.sitedir + '/run/development.ini': '/project/development.ini' } try: if not interactive: run_container( name=self._get_container_name('web'), image='datacats/web', rw=rw, ro=ro, links=links, volumes_from=volumes_from, command=command, port_bindings={ 5000: port if is_boot2docker() else (address, port)}, log_syslog=log_syslog ) else: # FIXME: share more code with interactive_shell if is_boot2docker(): switches = ['--volumes-from', self._get_container_name('pgdata'), '--volumes-from', self._get_container_name('venv')] else: switches = [] switches += ['--volume={}:{}:ro'.format(vol, ro[vol]) for vol in ro] switches += ['--volume={}:{}'.format(vol, rw[vol]) for vol in rw] links = ['--link={}:{}'.format(link, links[link]) for link in links] args = ['docker', 'run', '-it', '--name', self._get_container_name('web'), '-p', '{}:5000'.format(port) if is_boot2docker() else '{}:{}:5000'.format(address, port)] + \ switches + links + ['datacats/web', ] + command subprocess.call(args) except APIError as e: if '409' in str(e): raise 
DatacatsError('Web container already running. ' 'Please stop_web before running.') else: raise
python
def _run_web_container(self, port, command, address, log_syslog=False, datapusher=True, interactive=False): """ Start web container on port with command """ if is_boot2docker(): ro = {} volumes_from = self._get_container_name('venv') else: ro = {self.datadir + '/venv': '/usr/lib/ckan'} volumes_from = None links = { self._get_container_name('solr'): 'solr', self._get_container_name('postgres'): 'db' } links.update({self._get_container_name(container): container for container in self.extra_containers}) if datapusher: if 'datapusher' not in self.containers_running(): raise DatacatsError(container_logs(self._get_container_name('datapusher'), "all", False, False)) links[self._get_container_name('datapusher')] = 'datapusher' ro = dict({ self.target: '/project/', scripts.get_script_path('web.sh'): '/scripts/web.sh', scripts.get_script_path('adjust_devini.py'): '/scripts/adjust_devini.py'}, **ro) rw = { self.sitedir + '/files': '/var/www/storage', self.sitedir + '/run/development.ini': '/project/development.ini' } try: if not interactive: run_container( name=self._get_container_name('web'), image='datacats/web', rw=rw, ro=ro, links=links, volumes_from=volumes_from, command=command, port_bindings={ 5000: port if is_boot2docker() else (address, port)}, log_syslog=log_syslog ) else: # FIXME: share more code with interactive_shell if is_boot2docker(): switches = ['--volumes-from', self._get_container_name('pgdata'), '--volumes-from', self._get_container_name('venv')] else: switches = [] switches += ['--volume={}:{}:ro'.format(vol, ro[vol]) for vol in ro] switches += ['--volume={}:{}'.format(vol, rw[vol]) for vol in rw] links = ['--link={}:{}'.format(link, links[link]) for link in links] args = ['docker', 'run', '-it', '--name', self._get_container_name('web'), '-p', '{}:5000'.format(port) if is_boot2docker() else '{}:{}:5000'.format(address, port)] + \ switches + links + ['datacats/web', ] + command subprocess.call(args) except APIError as e: if '409' in str(e): raise 
DatacatsError('Web container already running. ' 'Please stop_web before running.') else: raise
[ "def", "_run_web_container", "(", "self", ",", "port", ",", "command", ",", "address", ",", "log_syslog", "=", "False", ",", "datapusher", "=", "True", ",", "interactive", "=", "False", ")", ":", "if", "is_boot2docker", "(", ")", ":", "ro", "=", "{", "}", "volumes_from", "=", "self", ".", "_get_container_name", "(", "'venv'", ")", "else", ":", "ro", "=", "{", "self", ".", "datadir", "+", "'/venv'", ":", "'/usr/lib/ckan'", "}", "volumes_from", "=", "None", "links", "=", "{", "self", ".", "_get_container_name", "(", "'solr'", ")", ":", "'solr'", ",", "self", ".", "_get_container_name", "(", "'postgres'", ")", ":", "'db'", "}", "links", ".", "update", "(", "{", "self", ".", "_get_container_name", "(", "container", ")", ":", "container", "for", "container", "in", "self", ".", "extra_containers", "}", ")", "if", "datapusher", ":", "if", "'datapusher'", "not", "in", "self", ".", "containers_running", "(", ")", ":", "raise", "DatacatsError", "(", "container_logs", "(", "self", ".", "_get_container_name", "(", "'datapusher'", ")", ",", "\"all\"", ",", "False", ",", "False", ")", ")", "links", "[", "self", ".", "_get_container_name", "(", "'datapusher'", ")", "]", "=", "'datapusher'", "ro", "=", "dict", "(", "{", "self", ".", "target", ":", "'/project/'", ",", "scripts", ".", "get_script_path", "(", "'web.sh'", ")", ":", "'/scripts/web.sh'", ",", "scripts", ".", "get_script_path", "(", "'adjust_devini.py'", ")", ":", "'/scripts/adjust_devini.py'", "}", ",", "*", "*", "ro", ")", "rw", "=", "{", "self", ".", "sitedir", "+", "'/files'", ":", "'/var/www/storage'", ",", "self", ".", "sitedir", "+", "'/run/development.ini'", ":", "'/project/development.ini'", "}", "try", ":", "if", "not", "interactive", ":", "run_container", "(", "name", "=", "self", ".", "_get_container_name", "(", "'web'", ")", ",", "image", "=", "'datacats/web'", ",", "rw", "=", "rw", ",", "ro", "=", "ro", ",", "links", "=", "links", ",", "volumes_from", "=", "volumes_from", ",", "command", "=", "command", 
",", "port_bindings", "=", "{", "5000", ":", "port", "if", "is_boot2docker", "(", ")", "else", "(", "address", ",", "port", ")", "}", ",", "log_syslog", "=", "log_syslog", ")", "else", ":", "# FIXME: share more code with interactive_shell", "if", "is_boot2docker", "(", ")", ":", "switches", "=", "[", "'--volumes-from'", ",", "self", ".", "_get_container_name", "(", "'pgdata'", ")", ",", "'--volumes-from'", ",", "self", ".", "_get_container_name", "(", "'venv'", ")", "]", "else", ":", "switches", "=", "[", "]", "switches", "+=", "[", "'--volume={}:{}:ro'", ".", "format", "(", "vol", ",", "ro", "[", "vol", "]", ")", "for", "vol", "in", "ro", "]", "switches", "+=", "[", "'--volume={}:{}'", ".", "format", "(", "vol", ",", "rw", "[", "vol", "]", ")", "for", "vol", "in", "rw", "]", "links", "=", "[", "'--link={}:{}'", ".", "format", "(", "link", ",", "links", "[", "link", "]", ")", "for", "link", "in", "links", "]", "args", "=", "[", "'docker'", ",", "'run'", ",", "'-it'", ",", "'--name'", ",", "self", ".", "_get_container_name", "(", "'web'", ")", ",", "'-p'", ",", "'{}:5000'", ".", "format", "(", "port", ")", "if", "is_boot2docker", "(", ")", "else", "'{}:{}:5000'", ".", "format", "(", "address", ",", "port", ")", "]", "+", "switches", "+", "links", "+", "[", "'datacats/web'", ",", "]", "+", "command", "subprocess", ".", "call", "(", "args", ")", "except", "APIError", "as", "e", ":", "if", "'409'", "in", "str", "(", "e", ")", ":", "raise", "DatacatsError", "(", "'Web container already running. '", "'Please stop_web before running.'", ")", "else", ":", "raise" ]
Start web container on port with command
[ "Start", "web", "container", "on", "port", "with", "command" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L496-L566
train
datacats/datacats
datacats/environment.py
Environment.wait_for_web_available
def wait_for_web_available(self): """ Wait for the web server to become available or raise DatacatsError if it fails to start. """ try: if not wait_for_service_available( self._get_container_name('web'), self.web_address(), WEB_START_TIMEOUT_SECONDS): raise DatacatsError('Error while starting web container:\n' + container_logs(self._get_container_name('web'), "all", False, None)) except ServiceTimeout: raise DatacatsError('Timeout while starting web container. Logs:' + container_logs(self._get_container_name('web'), "all", False, None))
python
def wait_for_web_available(self): """ Wait for the web server to become available or raise DatacatsError if it fails to start. """ try: if not wait_for_service_available( self._get_container_name('web'), self.web_address(), WEB_START_TIMEOUT_SECONDS): raise DatacatsError('Error while starting web container:\n' + container_logs(self._get_container_name('web'), "all", False, None)) except ServiceTimeout: raise DatacatsError('Timeout while starting web container. Logs:' + container_logs(self._get_container_name('web'), "all", False, None))
[ "def", "wait_for_web_available", "(", "self", ")", ":", "try", ":", "if", "not", "wait_for_service_available", "(", "self", ".", "_get_container_name", "(", "'web'", ")", ",", "self", ".", "web_address", "(", ")", ",", "WEB_START_TIMEOUT_SECONDS", ")", ":", "raise", "DatacatsError", "(", "'Error while starting web container:\\n'", "+", "container_logs", "(", "self", ".", "_get_container_name", "(", "'web'", ")", ",", "\"all\"", ",", "False", ",", "None", ")", ")", "except", "ServiceTimeout", ":", "raise", "DatacatsError", "(", "'Timeout while starting web container. Logs:'", "+", "container_logs", "(", "self", ".", "_get_container_name", "(", "'web'", ")", ",", "\"all\"", ",", "False", ",", "None", ")", ")" ]
Wait for the web server to become available or raise DatacatsError if it fails to start.
[ "Wait", "for", "the", "web", "server", "to", "become", "available", "or", "raise", "DatacatsError", "if", "it", "fails", "to", "start", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L568-L583
train
datacats/datacats
datacats/environment.py
Environment._choose_port
def _choose_port(self): """ Return a port number from 5000-5999 based on the environment name to be used as a default when the user hasn't selected one. """ # instead of random let's base it on the name chosen (and the site name) return 5000 + unpack('Q', sha((self.name + self.site_name) .decode('ascii')).digest()[:8])[0] % 1000
python
def _choose_port(self): """ Return a port number from 5000-5999 based on the environment name to be used as a default when the user hasn't selected one. """ # instead of random let's base it on the name chosen (and the site name) return 5000 + unpack('Q', sha((self.name + self.site_name) .decode('ascii')).digest()[:8])[0] % 1000
[ "def", "_choose_port", "(", "self", ")", ":", "# instead of random let's base it on the name chosen (and the site name)", "return", "5000", "+", "unpack", "(", "'Q'", ",", "sha", "(", "(", "self", ".", "name", "+", "self", ".", "site_name", ")", ".", "decode", "(", "'ascii'", ")", ")", ".", "digest", "(", ")", "[", ":", "8", "]", ")", "[", "0", "]", "%", "1000" ]
Return a port number from 5000-5999 based on the environment name to be used as a default when the user hasn't selected one.
[ "Return", "a", "port", "number", "from", "5000", "-", "5999", "based", "on", "the", "environment", "name", "to", "be", "used", "as", "a", "default", "when", "the", "user", "hasn", "t", "selected", "one", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L585-L593
train
datacats/datacats
datacats/environment.py
Environment._next_port
def _next_port(self, port): """ Return another port from the 5000-5999 range """ port = 5000 + (port + 1) % 1000 if port == self.port: raise DatacatsError('Too many instances running') return port
python
def _next_port(self, port): """ Return another port from the 5000-5999 range """ port = 5000 + (port + 1) % 1000 if port == self.port: raise DatacatsError('Too many instances running') return port
[ "def", "_next_port", "(", "self", ",", "port", ")", ":", "port", "=", "5000", "+", "(", "port", "+", "1", ")", "%", "1000", "if", "port", "==", "self", ".", "port", ":", "raise", "DatacatsError", "(", "'Too many instances running'", ")", "return", "port" ]
Return another port from the 5000-5999 range
[ "Return", "another", "port", "from", "the", "5000", "-", "5999", "range" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L595-L602
train
datacats/datacats
datacats/environment.py
Environment.stop_ckan
def stop_ckan(self): """ Stop and remove the web container """ remove_container(self._get_container_name('web'), force=True) remove_container(self._get_container_name('datapusher'), force=True)
python
def stop_ckan(self): """ Stop and remove the web container """ remove_container(self._get_container_name('web'), force=True) remove_container(self._get_container_name('datapusher'), force=True)
[ "def", "stop_ckan", "(", "self", ")", ":", "remove_container", "(", "self", ".", "_get_container_name", "(", "'web'", ")", ",", "force", "=", "True", ")", "remove_container", "(", "self", ".", "_get_container_name", "(", "'datapusher'", ")", ",", "force", "=", "True", ")" ]
Stop and remove the web container
[ "Stop", "and", "remove", "the", "web", "container" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L604-L609
train
datacats/datacats
datacats/environment.py
Environment._current_web_port
def _current_web_port(self): """ return just the port number for the web container, or None if not running """ info = inspect_container(self._get_container_name('web')) if info is None: return None try: if not info['State']['Running']: return None return info['NetworkSettings']['Ports']['5000/tcp'][0]['HostPort'] except TypeError: return None
python
def _current_web_port(self): """ return just the port number for the web container, or None if not running """ info = inspect_container(self._get_container_name('web')) if info is None: return None try: if not info['State']['Running']: return None return info['NetworkSettings']['Ports']['5000/tcp'][0]['HostPort'] except TypeError: return None
[ "def", "_current_web_port", "(", "self", ")", ":", "info", "=", "inspect_container", "(", "self", ".", "_get_container_name", "(", "'web'", ")", ")", "if", "info", "is", "None", ":", "return", "None", "try", ":", "if", "not", "info", "[", "'State'", "]", "[", "'Running'", "]", ":", "return", "None", "return", "info", "[", "'NetworkSettings'", "]", "[", "'Ports'", "]", "[", "'5000/tcp'", "]", "[", "0", "]", "[", "'HostPort'", "]", "except", "TypeError", ":", "return", "None" ]
return just the port number for the web container, or None if not running
[ "return", "just", "the", "port", "number", "for", "the", "web", "container", "or", "None", "if", "not", "running" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L611-L624
train
datacats/datacats
datacats/environment.py
Environment.add_extra_container
def add_extra_container(self, container, error_on_exists=False): """ Add a container as a 'extra'. These are running containers which are not necessary for running default CKAN but are useful for certain extensions :param container: The container name to add :param error_on_exists: Raise a DatacatsError if the extra container already exists. """ if container in self.extra_containers: if error_on_exists: raise DatacatsError('{} is already added as an extra container.'.format(container)) else: return self.extra_containers.append(container) cp = SafeConfigParser() cp.read(self.target + '/.datacats-environment') cp.set('datacats', 'extra_containers', ' '.join(self.extra_containers)) with open(self.target + '/.datacats-environment', 'w') as f: cp.write(f)
python
def add_extra_container(self, container, error_on_exists=False): """ Add a container as a 'extra'. These are running containers which are not necessary for running default CKAN but are useful for certain extensions :param container: The container name to add :param error_on_exists: Raise a DatacatsError if the extra container already exists. """ if container in self.extra_containers: if error_on_exists: raise DatacatsError('{} is already added as an extra container.'.format(container)) else: return self.extra_containers.append(container) cp = SafeConfigParser() cp.read(self.target + '/.datacats-environment') cp.set('datacats', 'extra_containers', ' '.join(self.extra_containers)) with open(self.target + '/.datacats-environment', 'w') as f: cp.write(f)
[ "def", "add_extra_container", "(", "self", ",", "container", ",", "error_on_exists", "=", "False", ")", ":", "if", "container", "in", "self", ".", "extra_containers", ":", "if", "error_on_exists", ":", "raise", "DatacatsError", "(", "'{} is already added as an extra container.'", ".", "format", "(", "container", ")", ")", "else", ":", "return", "self", ".", "extra_containers", ".", "append", "(", "container", ")", "cp", "=", "SafeConfigParser", "(", ")", "cp", ".", "read", "(", "self", ".", "target", "+", "'/.datacats-environment'", ")", "cp", ".", "set", "(", "'datacats'", ",", "'extra_containers'", ",", "' '", ".", "join", "(", "self", ".", "extra_containers", ")", ")", "with", "open", "(", "self", ".", "target", "+", "'/.datacats-environment'", ",", "'w'", ")", "as", "f", ":", "cp", ".", "write", "(", "f", ")" ]
Add a container as a 'extra'. These are running containers which are not necessary for running default CKAN but are useful for certain extensions :param container: The container name to add :param error_on_exists: Raise a DatacatsError if the extra container already exists.
[ "Add", "a", "container", "as", "a", "extra", ".", "These", "are", "running", "containers", "which", "are", "not", "necessary", "for", "running", "default", "CKAN", "but", "are", "useful", "for", "certain", "extensions", ":", "param", "container", ":", "The", "container", "name", "to", "add", ":", "param", "error_on_exists", ":", "Raise", "a", "DatacatsError", "if", "the", "extra", "container", "already", "exists", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L636-L657
train
datacats/datacats
datacats/environment.py
Environment.web_address
def web_address(self): """ Return the url of the web server or None if not running """ port = self._current_web_port() address = self.address or '127.0.0.1' if port is None: return None return 'http://{0}:{1}/'.format( address if address and not is_boot2docker() else docker_host(), port)
python
def web_address(self): """ Return the url of the web server or None if not running """ port = self._current_web_port() address = self.address or '127.0.0.1' if port is None: return None return 'http://{0}:{1}/'.format( address if address and not is_boot2docker() else docker_host(), port)
[ "def", "web_address", "(", "self", ")", ":", "port", "=", "self", ".", "_current_web_port", "(", ")", "address", "=", "self", ".", "address", "or", "'127.0.0.1'", "if", "port", "is", "None", ":", "return", "None", "return", "'http://{0}:{1}/'", ".", "format", "(", "address", "if", "address", "and", "not", "is_boot2docker", "(", ")", "else", "docker_host", "(", ")", ",", "port", ")" ]
Return the url of the web server or None if not running
[ "Return", "the", "url", "of", "the", "web", "server", "or", "None", "if", "not", "running" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L665-L675
train
datacats/datacats
datacats/environment.py
Environment.create_admin_set_password
def create_admin_set_password(self, password): """ create 'admin' account with given password """ with open(self.sitedir + '/run/admin.json', 'w') as out: json.dump({ 'name': 'admin', 'email': 'none', 'password': password, 'sysadmin': True}, out) self.user_run_script( script=scripts.get_script_path('update_add_admin.sh'), args=[], db_links=True, ro={ self.sitedir + '/run/admin.json': '/input/admin.json' }, ) remove(self.sitedir + '/run/admin.json')
python
def create_admin_set_password(self, password): """ create 'admin' account with given password """ with open(self.sitedir + '/run/admin.json', 'w') as out: json.dump({ 'name': 'admin', 'email': 'none', 'password': password, 'sysadmin': True}, out) self.user_run_script( script=scripts.get_script_path('update_add_admin.sh'), args=[], db_links=True, ro={ self.sitedir + '/run/admin.json': '/input/admin.json' }, ) remove(self.sitedir + '/run/admin.json')
[ "def", "create_admin_set_password", "(", "self", ",", "password", ")", ":", "with", "open", "(", "self", ".", "sitedir", "+", "'/run/admin.json'", ",", "'w'", ")", "as", "out", ":", "json", ".", "dump", "(", "{", "'name'", ":", "'admin'", ",", "'email'", ":", "'none'", ",", "'password'", ":", "password", ",", "'sysadmin'", ":", "True", "}", ",", "out", ")", "self", ".", "user_run_script", "(", "script", "=", "scripts", ".", "get_script_path", "(", "'update_add_admin.sh'", ")", ",", "args", "=", "[", "]", ",", "db_links", "=", "True", ",", "ro", "=", "{", "self", ".", "sitedir", "+", "'/run/admin.json'", ":", "'/input/admin.json'", "}", ",", ")", "remove", "(", "self", ".", "sitedir", "+", "'/run/admin.json'", ")" ]
create 'admin' account with given password
[ "create", "admin", "account", "with", "given", "password" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L677-L696
train
datacats/datacats
datacats/environment.py
Environment.interactive_shell
def interactive_shell(self, command=None, paster=False, detach=False): """ launch interactive shell session with all writable volumes :param: list of strings to execute instead of bash """ if not exists(self.target + '/.bash_profile'): # this file is required for activating the virtualenv self.create_bash_profile() if not command: command = [] use_tty = sys.stdin.isatty() and sys.stdout.isatty() background = environ.get('CIRCLECI', False) or detach if is_boot2docker(): venv_volumes = ['--volumes-from', self._get_container_name('venv')] else: venv_volumes = ['-v', self.datadir + '/venv:/usr/lib/ckan:rw'] self._create_run_ini(self.port, production=False, output='run.ini') self._create_run_ini(self.port, production=True, output='test.ini', source='ckan/test-core.ini', override_site_url=False) script = scripts.get_script_path('shell.sh') if paster: script = scripts.get_script_path('paster.sh') if command and command != ['help'] and command != ['--help']: command += ['--config=/project/development.ini'] command = [self.extension_dir] + command proxy_settings = self._proxy_settings() if proxy_settings: venv_volumes += ['-v', self.sitedir + '/run/proxy-environment:/etc/environment:ro'] links = {self._get_container_name('solr'): 'solr', self._get_container_name('postgres'): 'db'} links.update({self._get_container_name(container): container for container in self.extra_containers}) link_params = [] for link in links: link_params.append('--link') link_params.append(link + ':' + links[link]) if 'datapusher' in self.containers_running(): link_params.append('--link') link_params.append(self._get_container_name('datapusher') + ':datapusher') # FIXME: consider switching this to dockerpty # using subprocess for docker client's interactive session return subprocess.call([ DOCKER_EXE, 'run', ] + (['--rm'] if not background else []) + [ '-t' if use_tty else '', '-d' if detach else '-i', ] + venv_volumes + [ '-v', self.target + ':/project:rw', '-v', self.sitedir + 
'/files:/var/www/storage:rw', '-v', script + ':/scripts/shell.sh:ro', '-v', scripts.get_script_path('paster_cd.sh') + ':/scripts/paster_cd.sh:ro', '-v', self.sitedir + '/run/run.ini:/project/development.ini:ro', '-v', self.sitedir + '/run/test.ini:/project/ckan/test-core.ini:ro'] + link_params + ['--hostname', self.name, 'datacats/web', '/scripts/shell.sh'] + command)
python
def interactive_shell(self, command=None, paster=False, detach=False): """ launch interactive shell session with all writable volumes :param: list of strings to execute instead of bash """ if not exists(self.target + '/.bash_profile'): # this file is required for activating the virtualenv self.create_bash_profile() if not command: command = [] use_tty = sys.stdin.isatty() and sys.stdout.isatty() background = environ.get('CIRCLECI', False) or detach if is_boot2docker(): venv_volumes = ['--volumes-from', self._get_container_name('venv')] else: venv_volumes = ['-v', self.datadir + '/venv:/usr/lib/ckan:rw'] self._create_run_ini(self.port, production=False, output='run.ini') self._create_run_ini(self.port, production=True, output='test.ini', source='ckan/test-core.ini', override_site_url=False) script = scripts.get_script_path('shell.sh') if paster: script = scripts.get_script_path('paster.sh') if command and command != ['help'] and command != ['--help']: command += ['--config=/project/development.ini'] command = [self.extension_dir] + command proxy_settings = self._proxy_settings() if proxy_settings: venv_volumes += ['-v', self.sitedir + '/run/proxy-environment:/etc/environment:ro'] links = {self._get_container_name('solr'): 'solr', self._get_container_name('postgres'): 'db'} links.update({self._get_container_name(container): container for container in self.extra_containers}) link_params = [] for link in links: link_params.append('--link') link_params.append(link + ':' + links[link]) if 'datapusher' in self.containers_running(): link_params.append('--link') link_params.append(self._get_container_name('datapusher') + ':datapusher') # FIXME: consider switching this to dockerpty # using subprocess for docker client's interactive session return subprocess.call([ DOCKER_EXE, 'run', ] + (['--rm'] if not background else []) + [ '-t' if use_tty else '', '-d' if detach else '-i', ] + venv_volumes + [ '-v', self.target + ':/project:rw', '-v', self.sitedir + 
'/files:/var/www/storage:rw', '-v', script + ':/scripts/shell.sh:ro', '-v', scripts.get_script_path('paster_cd.sh') + ':/scripts/paster_cd.sh:ro', '-v', self.sitedir + '/run/run.ini:/project/development.ini:ro', '-v', self.sitedir + '/run/test.ini:/project/ckan/test-core.ini:ro'] + link_params + ['--hostname', self.name, 'datacats/web', '/scripts/shell.sh'] + command)
[ "def", "interactive_shell", "(", "self", ",", "command", "=", "None", ",", "paster", "=", "False", ",", "detach", "=", "False", ")", ":", "if", "not", "exists", "(", "self", ".", "target", "+", "'/.bash_profile'", ")", ":", "# this file is required for activating the virtualenv", "self", ".", "create_bash_profile", "(", ")", "if", "not", "command", ":", "command", "=", "[", "]", "use_tty", "=", "sys", ".", "stdin", ".", "isatty", "(", ")", "and", "sys", ".", "stdout", ".", "isatty", "(", ")", "background", "=", "environ", ".", "get", "(", "'CIRCLECI'", ",", "False", ")", "or", "detach", "if", "is_boot2docker", "(", ")", ":", "venv_volumes", "=", "[", "'--volumes-from'", ",", "self", ".", "_get_container_name", "(", "'venv'", ")", "]", "else", ":", "venv_volumes", "=", "[", "'-v'", ",", "self", ".", "datadir", "+", "'/venv:/usr/lib/ckan:rw'", "]", "self", ".", "_create_run_ini", "(", "self", ".", "port", ",", "production", "=", "False", ",", "output", "=", "'run.ini'", ")", "self", ".", "_create_run_ini", "(", "self", ".", "port", ",", "production", "=", "True", ",", "output", "=", "'test.ini'", ",", "source", "=", "'ckan/test-core.ini'", ",", "override_site_url", "=", "False", ")", "script", "=", "scripts", ".", "get_script_path", "(", "'shell.sh'", ")", "if", "paster", ":", "script", "=", "scripts", ".", "get_script_path", "(", "'paster.sh'", ")", "if", "command", "and", "command", "!=", "[", "'help'", "]", "and", "command", "!=", "[", "'--help'", "]", ":", "command", "+=", "[", "'--config=/project/development.ini'", "]", "command", "=", "[", "self", ".", "extension_dir", "]", "+", "command", "proxy_settings", "=", "self", ".", "_proxy_settings", "(", ")", "if", "proxy_settings", ":", "venv_volumes", "+=", "[", "'-v'", ",", "self", ".", "sitedir", "+", "'/run/proxy-environment:/etc/environment:ro'", "]", "links", "=", "{", "self", ".", "_get_container_name", "(", "'solr'", ")", ":", "'solr'", ",", "self", ".", "_get_container_name", "(", "'postgres'", ")", ":", 
"'db'", "}", "links", ".", "update", "(", "{", "self", ".", "_get_container_name", "(", "container", ")", ":", "container", "for", "container", "in", "self", ".", "extra_containers", "}", ")", "link_params", "=", "[", "]", "for", "link", "in", "links", ":", "link_params", ".", "append", "(", "'--link'", ")", "link_params", ".", "append", "(", "link", "+", "':'", "+", "links", "[", "link", "]", ")", "if", "'datapusher'", "in", "self", ".", "containers_running", "(", ")", ":", "link_params", ".", "append", "(", "'--link'", ")", "link_params", ".", "append", "(", "self", ".", "_get_container_name", "(", "'datapusher'", ")", "+", "':datapusher'", ")", "# FIXME: consider switching this to dockerpty", "# using subprocess for docker client's interactive session", "return", "subprocess", ".", "call", "(", "[", "DOCKER_EXE", ",", "'run'", ",", "]", "+", "(", "[", "'--rm'", "]", "if", "not", "background", "else", "[", "]", ")", "+", "[", "'-t'", "if", "use_tty", "else", "''", ",", "'-d'", "if", "detach", "else", "'-i'", ",", "]", "+", "venv_volumes", "+", "[", "'-v'", ",", "self", ".", "target", "+", "':/project:rw'", ",", "'-v'", ",", "self", ".", "sitedir", "+", "'/files:/var/www/storage:rw'", ",", "'-v'", ",", "script", "+", "':/scripts/shell.sh:ro'", ",", "'-v'", ",", "scripts", ".", "get_script_path", "(", "'paster_cd.sh'", ")", "+", "':/scripts/paster_cd.sh:ro'", ",", "'-v'", ",", "self", ".", "sitedir", "+", "'/run/run.ini:/project/development.ini:ro'", ",", "'-v'", ",", "self", ".", "sitedir", "+", "'/run/test.ini:/project/ckan/test-core.ini:ro'", "]", "+", "link_params", "+", "[", "'--hostname'", ",", "self", ".", "name", ",", "'datacats/web'", ",", "'/scripts/shell.sh'", "]", "+", "command", ")" ]
launch interactive shell session with all writable volumes :param: list of strings to execute instead of bash
[ "launch", "interactive", "shell", "session", "with", "all", "writable", "volumes" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L698-L768
train
datacats/datacats
datacats/environment.py
Environment.install_package_requirements
def install_package_requirements(self, psrc, stream_output=None): """ Install from requirements.txt file found in psrc :param psrc: name of directory in environment directory """ package = self.target + '/' + psrc assert isdir(package), package reqname = '/requirements.txt' if not exists(package + reqname): reqname = '/pip-requirements.txt' if not exists(package + reqname): return return self.user_run_script( script=scripts.get_script_path('install_reqs.sh'), args=['/project/' + psrc + reqname], rw_venv=True, rw_project=True, stream_output=stream_output )
python
def install_package_requirements(self, psrc, stream_output=None): """ Install from requirements.txt file found in psrc :param psrc: name of directory in environment directory """ package = self.target + '/' + psrc assert isdir(package), package reqname = '/requirements.txt' if not exists(package + reqname): reqname = '/pip-requirements.txt' if not exists(package + reqname): return return self.user_run_script( script=scripts.get_script_path('install_reqs.sh'), args=['/project/' + psrc + reqname], rw_venv=True, rw_project=True, stream_output=stream_output )
[ "def", "install_package_requirements", "(", "self", ",", "psrc", ",", "stream_output", "=", "None", ")", ":", "package", "=", "self", ".", "target", "+", "'/'", "+", "psrc", "assert", "isdir", "(", "package", ")", ",", "package", "reqname", "=", "'/requirements.txt'", "if", "not", "exists", "(", "package", "+", "reqname", ")", ":", "reqname", "=", "'/pip-requirements.txt'", "if", "not", "exists", "(", "package", "+", "reqname", ")", ":", "return", "return", "self", ".", "user_run_script", "(", "script", "=", "scripts", ".", "get_script_path", "(", "'install_reqs.sh'", ")", ",", "args", "=", "[", "'/project/'", "+", "psrc", "+", "reqname", "]", ",", "rw_venv", "=", "True", ",", "rw_project", "=", "True", ",", "stream_output", "=", "stream_output", ")" ]
Install from requirements.txt file found in psrc :param psrc: name of directory in environment directory
[ "Install", "from", "requirements", ".", "txt", "file", "found", "in", "psrc" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L770-L789
train
datacats/datacats
datacats/environment.py
Environment.purge_data
def purge_data(self, which_sites=None, never_delete=False): """ Remove uploaded files, postgres db, solr index, venv """ # Default to the set of all sites if not exists(self.datadir + '/.version'): format_version = 1 else: with open(self.datadir + '/.version') as f: format_version = int(f.read().strip()) if format_version == 1: print 'WARNING: Defaulting to old purge for version 1.' datadirs = ['files', 'solr'] if is_boot2docker(): remove_container('datacats_pgdata_{}'.format(self.name)) remove_container('datacats_venv_{}'.format(self.name)) else: datadirs += ['postgres', 'venv'] web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) shutil.rmtree(self.datadir) elif format_version == 2: if not which_sites: which_sites = self.sites datadirs = [] boot2docker = is_boot2docker() if which_sites: if self.target: cp = SafeConfigParser() cp.read([self.target + '/.datacats-environment']) for site in which_sites: if boot2docker: remove_container(self._get_container_name('pgdata')) else: datadirs += [site + '/postgres'] # Always rm the site dir & solr & files datadirs += [site, site + '/files', site + '/solr'] if self.target: cp.remove_section('site_' + site) self.sites.remove(site) if self.target: with open(self.target + '/.datacats-environment', 'w') as conf: cp.write(conf) datadirs = ['sites/' + datadir for datadir in datadirs] if not self.sites and not never_delete: datadirs.append('venv') web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) if not self.sites and not never_delete: shutil.rmtree(self.datadir) else: raise DatacatsError('Unknown format version {}'.format(format_version))
python
def purge_data(self, which_sites=None, never_delete=False): """ Remove uploaded files, postgres db, solr index, venv """ # Default to the set of all sites if not exists(self.datadir + '/.version'): format_version = 1 else: with open(self.datadir + '/.version') as f: format_version = int(f.read().strip()) if format_version == 1: print 'WARNING: Defaulting to old purge for version 1.' datadirs = ['files', 'solr'] if is_boot2docker(): remove_container('datacats_pgdata_{}'.format(self.name)) remove_container('datacats_venv_{}'.format(self.name)) else: datadirs += ['postgres', 'venv'] web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) shutil.rmtree(self.datadir) elif format_version == 2: if not which_sites: which_sites = self.sites datadirs = [] boot2docker = is_boot2docker() if which_sites: if self.target: cp = SafeConfigParser() cp.read([self.target + '/.datacats-environment']) for site in which_sites: if boot2docker: remove_container(self._get_container_name('pgdata')) else: datadirs += [site + '/postgres'] # Always rm the site dir & solr & files datadirs += [site, site + '/files', site + '/solr'] if self.target: cp.remove_section('site_' + site) self.sites.remove(site) if self.target: with open(self.target + '/.datacats-environment', 'w') as conf: cp.write(conf) datadirs = ['sites/' + datadir for datadir in datadirs] if not self.sites and not never_delete: datadirs.append('venv') web_command( command=['/scripts/purge.sh'] + ['/project/data/' + d for d in datadirs], ro={scripts.get_script_path('purge.sh'): '/scripts/purge.sh'}, rw={self.datadir: '/project/data'}, ) if not self.sites and not never_delete: shutil.rmtree(self.datadir) else: raise DatacatsError('Unknown format version {}'.format(format_version))
[ "def", "purge_data", "(", "self", ",", "which_sites", "=", "None", ",", "never_delete", "=", "False", ")", ":", "# Default to the set of all sites", "if", "not", "exists", "(", "self", ".", "datadir", "+", "'/.version'", ")", ":", "format_version", "=", "1", "else", ":", "with", "open", "(", "self", ".", "datadir", "+", "'/.version'", ")", "as", "f", ":", "format_version", "=", "int", "(", "f", ".", "read", "(", ")", ".", "strip", "(", ")", ")", "if", "format_version", "==", "1", ":", "print", "'WARNING: Defaulting to old purge for version 1.'", "datadirs", "=", "[", "'files'", ",", "'solr'", "]", "if", "is_boot2docker", "(", ")", ":", "remove_container", "(", "'datacats_pgdata_{}'", ".", "format", "(", "self", ".", "name", ")", ")", "remove_container", "(", "'datacats_venv_{}'", ".", "format", "(", "self", ".", "name", ")", ")", "else", ":", "datadirs", "+=", "[", "'postgres'", ",", "'venv'", "]", "web_command", "(", "command", "=", "[", "'/scripts/purge.sh'", "]", "+", "[", "'/project/data/'", "+", "d", "for", "d", "in", "datadirs", "]", ",", "ro", "=", "{", "scripts", ".", "get_script_path", "(", "'purge.sh'", ")", ":", "'/scripts/purge.sh'", "}", ",", "rw", "=", "{", "self", ".", "datadir", ":", "'/project/data'", "}", ",", ")", "shutil", ".", "rmtree", "(", "self", ".", "datadir", ")", "elif", "format_version", "==", "2", ":", "if", "not", "which_sites", ":", "which_sites", "=", "self", ".", "sites", "datadirs", "=", "[", "]", "boot2docker", "=", "is_boot2docker", "(", ")", "if", "which_sites", ":", "if", "self", ".", "target", ":", "cp", "=", "SafeConfigParser", "(", ")", "cp", ".", "read", "(", "[", "self", ".", "target", "+", "'/.datacats-environment'", "]", ")", "for", "site", "in", "which_sites", ":", "if", "boot2docker", ":", "remove_container", "(", "self", ".", "_get_container_name", "(", "'pgdata'", ")", ")", "else", ":", "datadirs", "+=", "[", "site", "+", "'/postgres'", "]", "# Always rm the site dir & solr & files", "datadirs", "+=", "[", "site", 
",", "site", "+", "'/files'", ",", "site", "+", "'/solr'", "]", "if", "self", ".", "target", ":", "cp", ".", "remove_section", "(", "'site_'", "+", "site", ")", "self", ".", "sites", ".", "remove", "(", "site", ")", "if", "self", ".", "target", ":", "with", "open", "(", "self", ".", "target", "+", "'/.datacats-environment'", ",", "'w'", ")", "as", "conf", ":", "cp", ".", "write", "(", "conf", ")", "datadirs", "=", "[", "'sites/'", "+", "datadir", "for", "datadir", "in", "datadirs", "]", "if", "not", "self", ".", "sites", "and", "not", "never_delete", ":", "datadirs", ".", "append", "(", "'venv'", ")", "web_command", "(", "command", "=", "[", "'/scripts/purge.sh'", "]", "+", "[", "'/project/data/'", "+", "d", "for", "d", "in", "datadirs", "]", ",", "ro", "=", "{", "scripts", ".", "get_script_path", "(", "'purge.sh'", ")", ":", "'/scripts/purge.sh'", "}", ",", "rw", "=", "{", "self", ".", "datadir", ":", "'/project/data'", "}", ",", ")", "if", "not", "self", ".", "sites", "and", "not", "never_delete", ":", "shutil", ".", "rmtree", "(", "self", ".", "datadir", ")", "else", ":", "raise", "DatacatsError", "(", "'Unknown format version {}'", ".", "format", "(", "format_version", ")", ")" ]
Remove uploaded files, postgres db, solr index, venv
[ "Remove", "uploaded", "files", "postgres", "db", "solr", "index", "venv" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L856-L925
train
datacats/datacats
datacats/environment.py
Environment.logs
def logs(self, container, tail='all', follow=False, timestamps=False): """ :param container: 'web', 'solr' or 'postgres' :param tail: number of lines to show :param follow: True to return generator instead of list :param timestamps: True to include timestamps """ return container_logs( self._get_container_name(container), tail, follow, timestamps)
python
def logs(self, container, tail='all', follow=False, timestamps=False): """ :param container: 'web', 'solr' or 'postgres' :param tail: number of lines to show :param follow: True to return generator instead of list :param timestamps: True to include timestamps """ return container_logs( self._get_container_name(container), tail, follow, timestamps)
[ "def", "logs", "(", "self", ",", "container", ",", "tail", "=", "'all'", ",", "follow", "=", "False", ",", "timestamps", "=", "False", ")", ":", "return", "container_logs", "(", "self", ".", "_get_container_name", "(", "container", ")", ",", "tail", ",", "follow", ",", "timestamps", ")" ]
:param container: 'web', 'solr' or 'postgres' :param tail: number of lines to show :param follow: True to return generator instead of list :param timestamps: True to include timestamps
[ ":", "param", "container", ":", "web", "solr", "or", "postgres", ":", "param", "tail", ":", "number", "of", "lines", "to", "show", ":", "param", "follow", ":", "True", "to", "return", "generator", "instead", "of", "list", ":", "param", "timestamps", ":", "True", "to", "include", "timestamps" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L927-L938
train
datacats/datacats
datacats/environment.py
Environment._proxy_settings
def _proxy_settings(self): """ Create/replace ~/.datacats/run/proxy-environment and return entry for ro mount for containers """ if not ('https_proxy' in environ or 'HTTPS_PROXY' in environ or 'http_proxy' in environ or 'HTTP_PROXY' in environ): return {} https_proxy = environ.get('https_proxy') if https_proxy is None: https_proxy = environ.get('HTTPS_PROXY') http_proxy = environ.get('http_proxy') if http_proxy is None: http_proxy = environ.get('HTTP_PROXY') no_proxy = environ.get('no_proxy') if no_proxy is None: no_proxy = environ.get('NO_PROXY', '') no_proxy = no_proxy + ',solr,db' out = [ 'PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:' '/bin:/usr/games:/usr/local/games"\n'] if https_proxy is not None: out.append('https_proxy=' + posix_quote(https_proxy) + '\n') out.append('HTTPS_PROXY=' + posix_quote(https_proxy) + '\n') if http_proxy is not None: out.append('http_proxy=' + posix_quote(http_proxy) + '\n') out.append('HTTP_PROXY=' + posix_quote(http_proxy) + '\n') if no_proxy is not None: out.append('no_proxy=' + posix_quote(no_proxy) + '\n') out.append('NO_PROXY=' + posix_quote(no_proxy) + '\n') with open(self.sitedir + '/run/proxy-environment', 'w') as f: f.write("".join(out)) return {self.sitedir + '/run/proxy-environment': '/etc/environment'}
python
def _proxy_settings(self): """ Create/replace ~/.datacats/run/proxy-environment and return entry for ro mount for containers """ if not ('https_proxy' in environ or 'HTTPS_PROXY' in environ or 'http_proxy' in environ or 'HTTP_PROXY' in environ): return {} https_proxy = environ.get('https_proxy') if https_proxy is None: https_proxy = environ.get('HTTPS_PROXY') http_proxy = environ.get('http_proxy') if http_proxy is None: http_proxy = environ.get('HTTP_PROXY') no_proxy = environ.get('no_proxy') if no_proxy is None: no_proxy = environ.get('NO_PROXY', '') no_proxy = no_proxy + ',solr,db' out = [ 'PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:' '/bin:/usr/games:/usr/local/games"\n'] if https_proxy is not None: out.append('https_proxy=' + posix_quote(https_proxy) + '\n') out.append('HTTPS_PROXY=' + posix_quote(https_proxy) + '\n') if http_proxy is not None: out.append('http_proxy=' + posix_quote(http_proxy) + '\n') out.append('HTTP_PROXY=' + posix_quote(http_proxy) + '\n') if no_proxy is not None: out.append('no_proxy=' + posix_quote(no_proxy) + '\n') out.append('NO_PROXY=' + posix_quote(no_proxy) + '\n') with open(self.sitedir + '/run/proxy-environment', 'w') as f: f.write("".join(out)) return {self.sitedir + '/run/proxy-environment': '/etc/environment'}
[ "def", "_proxy_settings", "(", "self", ")", ":", "if", "not", "(", "'https_proxy'", "in", "environ", "or", "'HTTPS_PROXY'", "in", "environ", "or", "'http_proxy'", "in", "environ", "or", "'HTTP_PROXY'", "in", "environ", ")", ":", "return", "{", "}", "https_proxy", "=", "environ", ".", "get", "(", "'https_proxy'", ")", "if", "https_proxy", "is", "None", ":", "https_proxy", "=", "environ", ".", "get", "(", "'HTTPS_PROXY'", ")", "http_proxy", "=", "environ", ".", "get", "(", "'http_proxy'", ")", "if", "http_proxy", "is", "None", ":", "http_proxy", "=", "environ", ".", "get", "(", "'HTTP_PROXY'", ")", "no_proxy", "=", "environ", ".", "get", "(", "'no_proxy'", ")", "if", "no_proxy", "is", "None", ":", "no_proxy", "=", "environ", ".", "get", "(", "'NO_PROXY'", ",", "''", ")", "no_proxy", "=", "no_proxy", "+", "',solr,db'", "out", "=", "[", "'PATH=\"/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:'", "'/bin:/usr/games:/usr/local/games\"\\n'", "]", "if", "https_proxy", "is", "not", "None", ":", "out", ".", "append", "(", "'https_proxy='", "+", "posix_quote", "(", "https_proxy", ")", "+", "'\\n'", ")", "out", ".", "append", "(", "'HTTPS_PROXY='", "+", "posix_quote", "(", "https_proxy", ")", "+", "'\\n'", ")", "if", "http_proxy", "is", "not", "None", ":", "out", ".", "append", "(", "'http_proxy='", "+", "posix_quote", "(", "http_proxy", ")", "+", "'\\n'", ")", "out", ".", "append", "(", "'HTTP_PROXY='", "+", "posix_quote", "(", "http_proxy", ")", "+", "'\\n'", ")", "if", "no_proxy", "is", "not", "None", ":", "out", ".", "append", "(", "'no_proxy='", "+", "posix_quote", "(", "no_proxy", ")", "+", "'\\n'", ")", "out", ".", "append", "(", "'NO_PROXY='", "+", "posix_quote", "(", "no_proxy", ")", "+", "'\\n'", ")", "with", "open", "(", "self", ".", "sitedir", "+", "'/run/proxy-environment'", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "\"\"", ".", "join", "(", "out", ")", ")", "return", "{", "self", ".", "sitedir", "+", "'/run/proxy-environment'", ":", 
"'/etc/environment'", "}" ]
Create/replace ~/.datacats/run/proxy-environment and return entry for ro mount for containers
[ "Create", "/", "replace", "~", "/", ".", "datacats", "/", "run", "/", "proxy", "-", "environment", "and", "return", "entry", "for", "ro", "mount", "for", "containers" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L949-L984
train
datacats/datacats
datacats/environment.py
Environment._get_container_name
def _get_container_name(self, container_type): """ Gets the full name of a container of the type specified. Currently the supported types are: - 'venv' - 'postgres' - 'solr' - 'web' - 'pgdata' - 'lessc' - 'datapusher' - 'redis' The name will be formatted appropriately with any prefixes and postfixes needed. :param container_type: The type of container name to generate (see above). """ if container_type in ['venv']: return 'datacats_{}_{}'.format(container_type, self.name) else: return 'datacats_{}_{}_{}'.format(container_type, self.name, self.site_name)
python
def _get_container_name(self, container_type): """ Gets the full name of a container of the type specified. Currently the supported types are: - 'venv' - 'postgres' - 'solr' - 'web' - 'pgdata' - 'lessc' - 'datapusher' - 'redis' The name will be formatted appropriately with any prefixes and postfixes needed. :param container_type: The type of container name to generate (see above). """ if container_type in ['venv']: return 'datacats_{}_{}'.format(container_type, self.name) else: return 'datacats_{}_{}_{}'.format(container_type, self.name, self.site_name)
[ "def", "_get_container_name", "(", "self", ",", "container_type", ")", ":", "if", "container_type", "in", "[", "'venv'", "]", ":", "return", "'datacats_{}_{}'", ".", "format", "(", "container_type", ",", "self", ".", "name", ")", "else", ":", "return", "'datacats_{}_{}_{}'", ".", "format", "(", "container_type", ",", "self", ".", "name", ",", "self", ".", "site_name", ")" ]
Gets the full name of a container of the type specified. Currently the supported types are: - 'venv' - 'postgres' - 'solr' - 'web' - 'pgdata' - 'lessc' - 'datapusher' - 'redis' The name will be formatted appropriately with any prefixes and postfixes needed. :param container_type: The type of container name to generate (see above).
[ "Gets", "the", "full", "name", "of", "a", "container", "of", "the", "type", "specified", ".", "Currently", "the", "supported", "types", "are", ":", "-", "venv", "-", "postgres", "-", "solr", "-", "web", "-", "pgdata", "-", "lessc", "-", "datapusher", "-", "redis", "The", "name", "will", "be", "formatted", "appropriately", "with", "any", "prefixes", "and", "postfixes", "needed", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/environment.py#L986-L1006
train
datacats/datacats
datacats/cli/less.py
less
def less(environment, opts): # pylint: disable=unused-argument """Recompiles less files in an environment. Usage: datacats less [ENVIRONMENT] ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.' """ require_extra_image(LESSC_IMAGE) print 'Converting .less files to .css...' for log in environment.compile_less(): print log
python
def less(environment, opts): # pylint: disable=unused-argument """Recompiles less files in an environment. Usage: datacats less [ENVIRONMENT] ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.' """ require_extra_image(LESSC_IMAGE) print 'Converting .less files to .css...' for log in environment.compile_less(): print log
[ "def", "less", "(", "environment", ",", "opts", ")", ":", "# pylint: disable=unused-argument", "require_extra_image", "(", "LESSC_IMAGE", ")", "print", "'Converting .less files to .css...'", "for", "log", "in", "environment", ".", "compile_less", "(", ")", ":", "print", "log" ]
Recompiles less files in an environment. Usage: datacats less [ENVIRONMENT] ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.'
[ "Recompiles", "less", "files", "in", "an", "environment", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/less.py#L13-L27
train
Britefury/batchup
batchup/datasets/dataset.py
fetch_and_convert_dataset
def fetch_and_convert_dataset(source_files, target_filename): """ Decorator applied to a dataset conversion function that converts acquired source files into a dataset file that BatchUp can use. Parameters ---------- source_file: list of `AbstractSourceFile` instances A list of files to be acquired target_filename: str or callable The name of the target file in which to store the converted data either as a string or as a function of the form `fn() -> str` that returns it. The conversion function is of the form `fn(source_paths, target_path)`. It should return `target_path` if successful, `None` otherwise. After the conversion function is successfully applied, the temporary source files that were downloaded or copied into BatchUp's temporary directory are deleted, unless the conversion function moved or deleted them in which case no action is taken. Example ------- In this example, we will show how to acquire the USPS dataset from an online source. USPS is provided as an HDF5 file anyway, so the conversion function simply moves it to the target path: >>> import shutil >>> >>> _USPS_SRC_ONLINE = DownloadSourceFile( ... filename='usps.h5', ... url='https://github.com/Britefury/usps_dataset/raw/master/' ... 'usps.h5', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_ONLINE], 'usps.h5') ... def usps_data_online(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it: >>> usps_path = usps_data_online() #doctest: +SKIP In this example, the USPS dataset will be acquired from a file on the filesystem. Note that the source path is fixed; the next example shows how we can determine the source path dynamically: >>> _USPS_SRC_OFFLINE_FIXED = CopySourceFile( ... 
filename='usps.h5', ... source_path='some/path/to/usps.h5', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_OFFLINE_FIXED], 'usps.h5') ... def usps_data_offline_fixed(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it: >>> usps_path = usps_data_offline_fixed() #doctest: +SKIP The source path is provided as an argument to the decorated fetch function: >>> _USPS_SRC_OFFLINE_DYNAMIC = CopySourceFile( ... filename='usps.h5', ... arg_name='usps_path', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_OFFLINE_DYNAMIC], 'usps.h5') ... def usps_data_offline_dynamic(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it (note that the KW-arg `usps_path` is the same >>> # as the `arg_name` parameter given to `CopySourceFile` above: >>> usps_path = usps_data_offline_dynamic( ... 
usps_path=get_config('mypath')) #doctest: +SKIP """ if not isinstance(target_filename, six.string_types) and \ not callable(target_filename): raise TypeError( 'target_filename must either be a string or be callable (it is ' 'a {})'.format(type(target_filename))) for src in source_files: if not isinstance(src, AbstractSourceFile): raise TypeError('source_files should contain' '`AbstractSourceFile` instances, ' 'not {}'.format(type(src))) def decorate_fetcher(convert_function): def fetch(**kwargs): target_fn = path_string(target_filename) target_path = config.get_data_path(target_fn) # If the target file does not exist, we need to acquire the # source files and convert them if not os.path.exists(target_path): # Acquire the source files source_paths = [] for src in source_files: p = src.acquire(**kwargs) if p is not None: if p in source_paths: raise ValueError( 'Duplicate source file {}'.format(p)) source_paths.append(p) else: print('Failed to acquire {}'.format(src)) return None # Got the source files # Convert converted_path = convert_function(source_paths, target_path) # If successful, delete the source files if converted_path is not None: for src in source_files: src.clean_up() return converted_path else: # Target file already exists return target_path fetch.__name__ = convert_function.__name__ return fetch return decorate_fetcher
python
def fetch_and_convert_dataset(source_files, target_filename): """ Decorator applied to a dataset conversion function that converts acquired source files into a dataset file that BatchUp can use. Parameters ---------- source_file: list of `AbstractSourceFile` instances A list of files to be acquired target_filename: str or callable The name of the target file in which to store the converted data either as a string or as a function of the form `fn() -> str` that returns it. The conversion function is of the form `fn(source_paths, target_path)`. It should return `target_path` if successful, `None` otherwise. After the conversion function is successfully applied, the temporary source files that were downloaded or copied into BatchUp's temporary directory are deleted, unless the conversion function moved or deleted them in which case no action is taken. Example ------- In this example, we will show how to acquire the USPS dataset from an online source. USPS is provided as an HDF5 file anyway, so the conversion function simply moves it to the target path: >>> import shutil >>> >>> _USPS_SRC_ONLINE = DownloadSourceFile( ... filename='usps.h5', ... url='https://github.com/Britefury/usps_dataset/raw/master/' ... 'usps.h5', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_ONLINE], 'usps.h5') ... def usps_data_online(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it: >>> usps_path = usps_data_online() #doctest: +SKIP In this example, the USPS dataset will be acquired from a file on the filesystem. Note that the source path is fixed; the next example shows how we can determine the source path dynamically: >>> _USPS_SRC_OFFLINE_FIXED = CopySourceFile( ... 
filename='usps.h5', ... source_path='some/path/to/usps.h5', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_OFFLINE_FIXED], 'usps.h5') ... def usps_data_offline_fixed(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it: >>> usps_path = usps_data_offline_fixed() #doctest: +SKIP The source path is provided as an argument to the decorated fetch function: >>> _USPS_SRC_OFFLINE_DYNAMIC = CopySourceFile( ... filename='usps.h5', ... arg_name='usps_path', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_OFFLINE_DYNAMIC], 'usps.h5') ... def usps_data_offline_dynamic(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it (note that the KW-arg `usps_path` is the same >>> # as the `arg_name` parameter given to `CopySourceFile` above: >>> usps_path = usps_data_offline_dynamic( ... 
usps_path=get_config('mypath')) #doctest: +SKIP """ if not isinstance(target_filename, six.string_types) and \ not callable(target_filename): raise TypeError( 'target_filename must either be a string or be callable (it is ' 'a {})'.format(type(target_filename))) for src in source_files: if not isinstance(src, AbstractSourceFile): raise TypeError('source_files should contain' '`AbstractSourceFile` instances, ' 'not {}'.format(type(src))) def decorate_fetcher(convert_function): def fetch(**kwargs): target_fn = path_string(target_filename) target_path = config.get_data_path(target_fn) # If the target file does not exist, we need to acquire the # source files and convert them if not os.path.exists(target_path): # Acquire the source files source_paths = [] for src in source_files: p = src.acquire(**kwargs) if p is not None: if p in source_paths: raise ValueError( 'Duplicate source file {}'.format(p)) source_paths.append(p) else: print('Failed to acquire {}'.format(src)) return None # Got the source files # Convert converted_path = convert_function(source_paths, target_path) # If successful, delete the source files if converted_path is not None: for src in source_files: src.clean_up() return converted_path else: # Target file already exists return target_path fetch.__name__ = convert_function.__name__ return fetch return decorate_fetcher
[ "def", "fetch_and_convert_dataset", "(", "source_files", ",", "target_filename", ")", ":", "if", "not", "isinstance", "(", "target_filename", ",", "six", ".", "string_types", ")", "and", "not", "callable", "(", "target_filename", ")", ":", "raise", "TypeError", "(", "'target_filename must either be a string or be callable (it is '", "'a {})'", ".", "format", "(", "type", "(", "target_filename", ")", ")", ")", "for", "src", "in", "source_files", ":", "if", "not", "isinstance", "(", "src", ",", "AbstractSourceFile", ")", ":", "raise", "TypeError", "(", "'source_files should contain'", "'`AbstractSourceFile` instances, '", "'not {}'", ".", "format", "(", "type", "(", "src", ")", ")", ")", "def", "decorate_fetcher", "(", "convert_function", ")", ":", "def", "fetch", "(", "*", "*", "kwargs", ")", ":", "target_fn", "=", "path_string", "(", "target_filename", ")", "target_path", "=", "config", ".", "get_data_path", "(", "target_fn", ")", "# If the target file does not exist, we need to acquire the", "# source files and convert them", "if", "not", "os", ".", "path", ".", "exists", "(", "target_path", ")", ":", "# Acquire the source files", "source_paths", "=", "[", "]", "for", "src", "in", "source_files", ":", "p", "=", "src", ".", "acquire", "(", "*", "*", "kwargs", ")", "if", "p", "is", "not", "None", ":", "if", "p", "in", "source_paths", ":", "raise", "ValueError", "(", "'Duplicate source file {}'", ".", "format", "(", "p", ")", ")", "source_paths", ".", "append", "(", "p", ")", "else", ":", "print", "(", "'Failed to acquire {}'", ".", "format", "(", "src", ")", ")", "return", "None", "# Got the source files", "# Convert", "converted_path", "=", "convert_function", "(", "source_paths", ",", "target_path", ")", "# If successful, delete the source files", "if", "converted_path", "is", "not", "None", ":", "for", "src", "in", "source_files", ":", "src", ".", "clean_up", "(", ")", "return", "converted_path", "else", ":", "# Target file already exists", "return", 
"target_path", "fetch", ".", "__name__", "=", "convert_function", ".", "__name__", "return", "fetch", "return", "decorate_fetcher" ]
Decorator applied to a dataset conversion function that converts acquired source files into a dataset file that BatchUp can use. Parameters ---------- source_file: list of `AbstractSourceFile` instances A list of files to be acquired target_filename: str or callable The name of the target file in which to store the converted data either as a string or as a function of the form `fn() -> str` that returns it. The conversion function is of the form `fn(source_paths, target_path)`. It should return `target_path` if successful, `None` otherwise. After the conversion function is successfully applied, the temporary source files that were downloaded or copied into BatchUp's temporary directory are deleted, unless the conversion function moved or deleted them in which case no action is taken. Example ------- In this example, we will show how to acquire the USPS dataset from an online source. USPS is provided as an HDF5 file anyway, so the conversion function simply moves it to the target path: >>> import shutil >>> >>> _USPS_SRC_ONLINE = DownloadSourceFile( ... filename='usps.h5', ... url='https://github.com/Britefury/usps_dataset/raw/master/' ... 'usps.h5', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_ONLINE], 'usps.h5') ... def usps_data_online(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it: >>> usps_path = usps_data_online() #doctest: +SKIP In this example, the USPS dataset will be acquired from a file on the filesystem. Note that the source path is fixed; the next example shows how we can determine the source path dynamically: >>> _USPS_SRC_OFFLINE_FIXED = CopySourceFile( ... filename='usps.h5', ... source_path='some/path/to/usps.h5', ... 
sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_OFFLINE_FIXED], 'usps.h5') ... def usps_data_offline_fixed(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it: >>> usps_path = usps_data_offline_fixed() #doctest: +SKIP The source path is provided as an argument to the decorated fetch function: >>> _USPS_SRC_OFFLINE_DYNAMIC = CopySourceFile( ... filename='usps.h5', ... arg_name='usps_path', ... sha256='ba768d9a9b11e79b31c1e40130647c4fc04e6afc1fb41a0d4b9f11' ... '76065482b4' ... ) >>> >>> @fetch_and_convert_dataset([_USPS_SRC_OFFLINE_DYNAMIC], 'usps.h5') ... def usps_data_offline_dynamic(source_paths, target_path): ... usps_path = source_paths[0] ... # For other datasets, you would convert the data here ... # In this case, we move the file ... shutil.move(usps_path, target_path) ... # Return the target path indicating success ... return target_path >>> >>> # Now use it (note that the KW-arg `usps_path` is the same >>> # as the `arg_name` parameter given to `CopySourceFile` above: >>> usps_path = usps_data_offline_dynamic( ... usps_path=get_config('mypath')) #doctest: +SKIP
[ "Decorator", "applied", "to", "a", "dataset", "conversion", "function", "that", "converts", "acquired", "source", "files", "into", "a", "dataset", "file", "that", "BatchUp", "can", "use", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/datasets/dataset.py#L252-L399
train
Britefury/batchup
batchup/datasets/dataset.py
delete_dataset_cache
def delete_dataset_cache(*filenames): """ Delete the cache (converted files) for a dataset. Parameters ---------- filenames: str Filenames of files to delete """ for filename in filenames: filename = path_string(filename) path = config.get_data_path(filename) if os.path.exists(path): os.remove(path)
python
def delete_dataset_cache(*filenames): """ Delete the cache (converted files) for a dataset. Parameters ---------- filenames: str Filenames of files to delete """ for filename in filenames: filename = path_string(filename) path = config.get_data_path(filename) if os.path.exists(path): os.remove(path)
[ "def", "delete_dataset_cache", "(", "*", "filenames", ")", ":", "for", "filename", "in", "filenames", ":", "filename", "=", "path_string", "(", "filename", ")", "path", "=", "config", ".", "get_data_path", "(", "filename", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "os", ".", "remove", "(", "path", ")" ]
Delete the cache (converted files) for a dataset. Parameters ---------- filenames: str Filenames of files to delete
[ "Delete", "the", "cache", "(", "converted", "files", ")", "for", "a", "dataset", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/datasets/dataset.py#L402-L415
train
Britefury/batchup
batchup/datasets/dataset.py
DownloadSourceFile.acquire
def acquire(self, **kwargs): """ Download the file and return its path Returns ------- str or None The path of the file in BatchUp's temporary directory or None if the download failed. """ return config.download_data(self.temp_filename, self.url, self.sha256)
python
def acquire(self, **kwargs): """ Download the file and return its path Returns ------- str or None The path of the file in BatchUp's temporary directory or None if the download failed. """ return config.download_data(self.temp_filename, self.url, self.sha256)
[ "def", "acquire", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "config", ".", "download_data", "(", "self", ".", "temp_filename", ",", "self", ".", "url", ",", "self", ".", "sha256", ")" ]
Download the file and return its path Returns ------- str or None The path of the file in BatchUp's temporary directory or None if the download failed.
[ "Download", "the", "file", "and", "return", "its", "path" ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/datasets/dataset.py#L124-L135
train
Britefury/batchup
batchup/datasets/dataset.py
CopySourceFile.acquire
def acquire(self, **kwargs): """ Copy the file and return its path Returns ------- str or None The path of the file in BatchUp's temporary directory or None if the copy failed. """ if self.source_path is None: source_path = kwargs[self.arg_name] else: source_path = self.source_path return config.copy_data(self.temp_filename, source_path, self.sha256)
python
def acquire(self, **kwargs): """ Copy the file and return its path Returns ------- str or None The path of the file in BatchUp's temporary directory or None if the copy failed. """ if self.source_path is None: source_path = kwargs[self.arg_name] else: source_path = self.source_path return config.copy_data(self.temp_filename, source_path, self.sha256)
[ "def", "acquire", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "source_path", "is", "None", ":", "source_path", "=", "kwargs", "[", "self", ".", "arg_name", "]", "else", ":", "source_path", "=", "self", ".", "source_path", "return", "config", ".", "copy_data", "(", "self", ".", "temp_filename", ",", "source_path", ",", "self", ".", "sha256", ")" ]
Copy the file and return its path Returns ------- str or None The path of the file in BatchUp's temporary directory or None if the copy failed.
[ "Copy", "the", "file", "and", "return", "its", "path" ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/datasets/dataset.py#L185-L199
train
Britefury/batchup
batchup/datasets/dataset.py
ExistingSourceFile.acquire
def acquire(self, **kwargs): """ Copy the file and return its path Returns ------- str or None The path of the file or None if it does not exist or if verification failed. """ path = path_string(self.path) if os.path.exists(path): if config.verify_file(path, self.sha256): return path return None
python
def acquire(self, **kwargs): """ Copy the file and return its path Returns ------- str or None The path of the file or None if it does not exist or if verification failed. """ path = path_string(self.path) if os.path.exists(path): if config.verify_file(path, self.sha256): return path return None
[ "def", "acquire", "(", "self", ",", "*", "*", "kwargs", ")", ":", "path", "=", "path_string", "(", "self", ".", "path", ")", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "if", "config", ".", "verify_file", "(", "path", ",", "self", ".", "sha256", ")", ":", "return", "path", "return", "None" ]
Copy the file and return its path Returns ------- str or None The path of the file or None if it does not exist or if verification failed.
[ "Copy", "the", "file", "and", "return", "its", "path" ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/datasets/dataset.py#L235-L249
train
datacats/datacats
datacats/cli/pull.py
_retry_func
def _retry_func(func, param, num, retry_notif, error_msg): """ A function which retries a given function num times and calls retry_notif each time the function is retried. :param func: The function to retry num times. :param num: The number of times to try before giving up. :param retry_notif: Will be called with the same parameter as func if we have to retry the function. Will also receive the number of retries so far as a second parameter. :param: error_msg: The message Throws DatacatsError if we run out of retries. Returns otherwise. """ for retry_num in range(num): if retry_num: retry_notif(param, retry_num) try: func(param) return except DatacatsError: pass raise DatacatsError(error_msg)
python
def _retry_func(func, param, num, retry_notif, error_msg): """ A function which retries a given function num times and calls retry_notif each time the function is retried. :param func: The function to retry num times. :param num: The number of times to try before giving up. :param retry_notif: Will be called with the same parameter as func if we have to retry the function. Will also receive the number of retries so far as a second parameter. :param: error_msg: The message Throws DatacatsError if we run out of retries. Returns otherwise. """ for retry_num in range(num): if retry_num: retry_notif(param, retry_num) try: func(param) return except DatacatsError: pass raise DatacatsError(error_msg)
[ "def", "_retry_func", "(", "func", ",", "param", ",", "num", ",", "retry_notif", ",", "error_msg", ")", ":", "for", "retry_num", "in", "range", "(", "num", ")", ":", "if", "retry_num", ":", "retry_notif", "(", "param", ",", "retry_num", ")", "try", ":", "func", "(", "param", ")", "return", "except", "DatacatsError", ":", "pass", "raise", "DatacatsError", "(", "error_msg", ")" ]
A function which retries a given function num times and calls retry_notif each time the function is retried. :param func: The function to retry num times. :param num: The number of times to try before giving up. :param retry_notif: Will be called with the same parameter as func if we have to retry the function. Will also receive the number of retries so far as a second parameter. :param: error_msg: The message Throws DatacatsError if we run out of retries. Returns otherwise.
[ "A", "function", "which", "retries", "a", "given", "function", "num", "times", "and", "calls", "retry_notif", "each", "time", "the", "function", "is", "retried", ".", ":", "param", "func", ":", "The", "function", "to", "retry", "num", "times", ".", ":", "param", "num", ":", "The", "number", "of", "times", "to", "try", "before", "giving", "up", ".", ":", "param", "retry_notif", ":", "Will", "be", "called", "with", "the", "same", "parameter", "as", "func", "if", "we", "have", "to", "retry", "the", "function", ".", "Will", "also", "receive", "the", "number", "of", "retries", "so", "far", "as", "a", "second", "parameter", ".", ":", "param", ":", "error_msg", ":", "The", "message" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/pull.py#L55-L77
train
Britefury/batchup
batchup/work_pool.py
WorkStream.retrieve
def retrieve(self): """ Retrieve a result from executing a task. Note that tasks are executed in order and that if the next task has not yet completed, this call will block until the result is available. Returns ------- A result from the result buffer. """ if len(self.__result_buffer) > 0: res = self.__result_buffer.popleft() value = res.get() else: return None self.__populate_buffer() return value
python
def retrieve(self): """ Retrieve a result from executing a task. Note that tasks are executed in order and that if the next task has not yet completed, this call will block until the result is available. Returns ------- A result from the result buffer. """ if len(self.__result_buffer) > 0: res = self.__result_buffer.popleft() value = res.get() else: return None self.__populate_buffer() return value
[ "def", "retrieve", "(", "self", ")", ":", "if", "len", "(", "self", ".", "__result_buffer", ")", ">", "0", ":", "res", "=", "self", ".", "__result_buffer", ".", "popleft", "(", ")", "value", "=", "res", ".", "get", "(", ")", "else", ":", "return", "None", "self", ".", "__populate_buffer", "(", ")", "return", "value" ]
Retrieve a result from executing a task. Note that tasks are executed in order and that if the next task has not yet completed, this call will block until the result is available. Returns ------- A result from the result buffer.
[ "Retrieve", "a", "result", "from", "executing", "a", "task", ".", "Note", "that", "tasks", "are", "executed", "in", "order", "and", "that", "if", "the", "next", "task", "has", "not", "yet", "completed", "this", "call", "will", "block", "until", "the", "result", "is", "available", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/work_pool.py#L407-L425
train
datacats/datacats
datacats/cli/install.py
install
def install(environment, opts): """Install or reinstall Python packages within this environment Usage: datacats install [-q] [--address=IP] [ENVIRONMENT [PACKAGE ...]] datacats install -c [q] [--address=IP] [ENVIRONMENT] Options: --address=IP The address to bind to when reloading after install -c --clean Reinstall packages into a clean virtualenv -q --quiet Do not show output from installing packages and requirements. ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.' """ environment.require_data() install_all(environment, opts['--clean'], verbose=not opts['--quiet'], packages=opts['PACKAGE']) for site in environment.sites: environment = Environment.load(environment.name, site) if 'web' in environment.containers_running(): # FIXME: reload without changing debug setting? manage.reload_(environment, { '--address': opts['--address'], '--background': False, '--no-watch': False, '--production': False, 'PORT': None, '--syslog': False, '--site-url': None, '--interactive': False })
python
def install(environment, opts): """Install or reinstall Python packages within this environment Usage: datacats install [-q] [--address=IP] [ENVIRONMENT [PACKAGE ...]] datacats install -c [q] [--address=IP] [ENVIRONMENT] Options: --address=IP The address to bind to when reloading after install -c --clean Reinstall packages into a clean virtualenv -q --quiet Do not show output from installing packages and requirements. ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.' """ environment.require_data() install_all(environment, opts['--clean'], verbose=not opts['--quiet'], packages=opts['PACKAGE']) for site in environment.sites: environment = Environment.load(environment.name, site) if 'web' in environment.containers_running(): # FIXME: reload without changing debug setting? manage.reload_(environment, { '--address': opts['--address'], '--background': False, '--no-watch': False, '--production': False, 'PORT': None, '--syslog': False, '--site-url': None, '--interactive': False })
[ "def", "install", "(", "environment", ",", "opts", ")", ":", "environment", ".", "require_data", "(", ")", "install_all", "(", "environment", ",", "opts", "[", "'--clean'", "]", ",", "verbose", "=", "not", "opts", "[", "'--quiet'", "]", ",", "packages", "=", "opts", "[", "'PACKAGE'", "]", ")", "for", "site", "in", "environment", ".", "sites", ":", "environment", "=", "Environment", ".", "load", "(", "environment", ".", "name", ",", "site", ")", "if", "'web'", "in", "environment", ".", "containers_running", "(", ")", ":", "# FIXME: reload without changing debug setting?", "manage", ".", "reload_", "(", "environment", ",", "{", "'--address'", ":", "opts", "[", "'--address'", "]", ",", "'--background'", ":", "False", ",", "'--no-watch'", ":", "False", ",", "'--production'", ":", "False", ",", "'PORT'", ":", "None", ",", "'--syslog'", ":", "False", ",", "'--site-url'", ":", "None", ",", "'--interactive'", ":", "False", "}", ")" ]
Install or reinstall Python packages within this environment Usage: datacats install [-q] [--address=IP] [ENVIRONMENT [PACKAGE ...]] datacats install -c [q] [--address=IP] [ENVIRONMENT] Options: --address=IP The address to bind to when reloading after install -c --clean Reinstall packages into a clean virtualenv -q --quiet Do not show output from installing packages and requirements. ENVIRONMENT may be an environment name or a path to an environment directory. Default: '.'
[ "Install", "or", "reinstall", "Python", "packages", "within", "this", "environment" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/install.py#L20-L52
train
datacats/datacats
datacats/cli/migrate.py
migrate
def migrate(opts): """Migrate an environment to a given revision of the datadir format. Usage: datacats migrate [-y] [-r VERSION] [ENVIRONMENT_DIR] Options: -r --revision=VERSION The version of the datadir format you want to convert to [default: 2] -y --yes Answer yes to all questions. Defaults to '.' if ENVIRONMENT_DIR isn't specified. """ try: version = int(opts['--revision']) except: raise DatacatsError('--revision parameter must be an integer.') always_yes = opts['--yes'] if 'ENVIRONMENT_DIR' not in opts or not opts['ENVIRONMENT_DIR']: cwd = getcwd() # Get the dirname opts['ENVIRONMENT_DIR'] = split(cwd if cwd[-1] != '/' else cwd[:-1])[1] datadir = expanduser('~/.datacats/' + opts['ENVIRONMENT_DIR']) if needs_format_conversion(datadir, version): convert_environment(datadir, version, always_yes) print 'Successfully converted datadir {} to format version {}'.format(datadir, version) else: print 'datadir {} is already at version {}.'.format(datadir, version)
python
def migrate(opts): """Migrate an environment to a given revision of the datadir format. Usage: datacats migrate [-y] [-r VERSION] [ENVIRONMENT_DIR] Options: -r --revision=VERSION The version of the datadir format you want to convert to [default: 2] -y --yes Answer yes to all questions. Defaults to '.' if ENVIRONMENT_DIR isn't specified. """ try: version = int(opts['--revision']) except: raise DatacatsError('--revision parameter must be an integer.') always_yes = opts['--yes'] if 'ENVIRONMENT_DIR' not in opts or not opts['ENVIRONMENT_DIR']: cwd = getcwd() # Get the dirname opts['ENVIRONMENT_DIR'] = split(cwd if cwd[-1] != '/' else cwd[:-1])[1] datadir = expanduser('~/.datacats/' + opts['ENVIRONMENT_DIR']) if needs_format_conversion(datadir, version): convert_environment(datadir, version, always_yes) print 'Successfully converted datadir {} to format version {}'.format(datadir, version) else: print 'datadir {} is already at version {}.'.format(datadir, version)
[ "def", "migrate", "(", "opts", ")", ":", "try", ":", "version", "=", "int", "(", "opts", "[", "'--revision'", "]", ")", "except", ":", "raise", "DatacatsError", "(", "'--revision parameter must be an integer.'", ")", "always_yes", "=", "opts", "[", "'--yes'", "]", "if", "'ENVIRONMENT_DIR'", "not", "in", "opts", "or", "not", "opts", "[", "'ENVIRONMENT_DIR'", "]", ":", "cwd", "=", "getcwd", "(", ")", "# Get the dirname", "opts", "[", "'ENVIRONMENT_DIR'", "]", "=", "split", "(", "cwd", "if", "cwd", "[", "-", "1", "]", "!=", "'/'", "else", "cwd", "[", ":", "-", "1", "]", ")", "[", "1", "]", "datadir", "=", "expanduser", "(", "'~/.datacats/'", "+", "opts", "[", "'ENVIRONMENT_DIR'", "]", ")", "if", "needs_format_conversion", "(", "datadir", ",", "version", ")", ":", "convert_environment", "(", "datadir", ",", "version", ",", "always_yes", ")", "print", "'Successfully converted datadir {} to format version {}'", ".", "format", "(", "datadir", ",", "version", ")", "else", ":", "print", "'datadir {} is already at version {}.'", ".", "format", "(", "datadir", ",", "version", ")" ]
Migrate an environment to a given revision of the datadir format. Usage: datacats migrate [-y] [-r VERSION] [ENVIRONMENT_DIR] Options: -r --revision=VERSION The version of the datadir format you want to convert to [default: 2] -y --yes Answer yes to all questions. Defaults to '.' if ENVIRONMENT_DIR isn't specified.
[ "Migrate", "an", "environment", "to", "a", "given", "revision", "of", "the", "datadir", "format", "." ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/migrate.py#L8-L38
train
datacats/datacats
datacats/cli/deploy.py
deploy
def deploy(environment, opts, profile): """Deploy environment to production DataCats.com cloud service Usage: datacats deploy [--create] [ENVIRONMENT [TARGET_NAME]] Options: --create Create a new environment on DataCats.com instead of updating an existing environment ENVIRONMENT may be an environment name or a path to a environment directory. Default: '.' TARGET_NAME is the name of the environment on DataCats.com. Defaults to the environment name. """ target_name = opts['TARGET_NAME'] if target_name is None: target_name = environment.name if not valid_deploy_name(target_name): raise DatacatsError(" `{target_name}` target name for deployment can't be accepted.\n" "Can't have http://{target_name}.datacats.io for your datcat URL\n" "Please choose a target name at least 5 characters long,\n" "and containing only lowercase letters and numbers\n" .format(target_name=target_name)) if opts['--create']: profile.create(environment, target_name) profile.deploy(environment, target_name, stdout) print "Deployed source to http://{0}.datacats.io".format(target_name) if opts['--create']: try: pw = confirm_password() profile.admin_password(environment, target_name, pw) except KeyboardInterrupt: pass
python
def deploy(environment, opts, profile): """Deploy environment to production DataCats.com cloud service Usage: datacats deploy [--create] [ENVIRONMENT [TARGET_NAME]] Options: --create Create a new environment on DataCats.com instead of updating an existing environment ENVIRONMENT may be an environment name or a path to a environment directory. Default: '.' TARGET_NAME is the name of the environment on DataCats.com. Defaults to the environment name. """ target_name = opts['TARGET_NAME'] if target_name is None: target_name = environment.name if not valid_deploy_name(target_name): raise DatacatsError(" `{target_name}` target name for deployment can't be accepted.\n" "Can't have http://{target_name}.datacats.io for your datcat URL\n" "Please choose a target name at least 5 characters long,\n" "and containing only lowercase letters and numbers\n" .format(target_name=target_name)) if opts['--create']: profile.create(environment, target_name) profile.deploy(environment, target_name, stdout) print "Deployed source to http://{0}.datacats.io".format(target_name) if opts['--create']: try: pw = confirm_password() profile.admin_password(environment, target_name, pw) except KeyboardInterrupt: pass
[ "def", "deploy", "(", "environment", ",", "opts", ",", "profile", ")", ":", "target_name", "=", "opts", "[", "'TARGET_NAME'", "]", "if", "target_name", "is", "None", ":", "target_name", "=", "environment", ".", "name", "if", "not", "valid_deploy_name", "(", "target_name", ")", ":", "raise", "DatacatsError", "(", "\" `{target_name}` target name for deployment can't be accepted.\\n\"", "\"Can't have http://{target_name}.datacats.io for your datcat URL\\n\"", "\"Please choose a target name at least 5 characters long,\\n\"", "\"and containing only lowercase letters and numbers\\n\"", ".", "format", "(", "target_name", "=", "target_name", ")", ")", "if", "opts", "[", "'--create'", "]", ":", "profile", ".", "create", "(", "environment", ",", "target_name", ")", "profile", ".", "deploy", "(", "environment", ",", "target_name", ",", "stdout", ")", "print", "\"Deployed source to http://{0}.datacats.io\"", ".", "format", "(", "target_name", ")", "if", "opts", "[", "'--create'", "]", ":", "try", ":", "pw", "=", "confirm_password", "(", ")", "profile", ".", "admin_password", "(", "environment", ",", "target_name", ",", "pw", ")", "except", "KeyboardInterrupt", ":", "pass" ]
Deploy environment to production DataCats.com cloud service Usage: datacats deploy [--create] [ENVIRONMENT [TARGET_NAME]] Options: --create Create a new environment on DataCats.com instead of updating an existing environment ENVIRONMENT may be an environment name or a path to a environment directory. Default: '.' TARGET_NAME is the name of the environment on DataCats.com. Defaults to the environment name.
[ "Deploy", "environment", "to", "production", "DataCats", ".", "com", "cloud", "service" ]
e4bae503efa997660fb3f34fe166699569653157
https://github.com/datacats/datacats/blob/e4bae503efa997660fb3f34fe166699569653157/datacats/cli/deploy.py#L14-L53
train
Britefury/batchup
batchup/data_source.py
_trim_batch
def _trim_batch(batch, length): """Trim the mini-batch `batch` to the size `length`. `batch` can be: - a NumPy array, in which case it's first axis will be trimmed to size `length` - a tuple, in which case `_trim_batch` applied recursively to each element and the resulting tuple returned As a consequence, mini-batches can be structured; lists and tuples can be nested arbitrarily deep. Parameters ---------- batch: tuple or NumPy array the mini-batch to trim length: int the size to which `batch` is to be trimmed Returns ------- tuple or NumPy array of same structure as `batch` The trimmed mini-batch """ if isinstance(batch, tuple): return tuple([_trim_batch(b, length) for b in batch]) else: return batch[:length]
python
def _trim_batch(batch, length): """Trim the mini-batch `batch` to the size `length`. `batch` can be: - a NumPy array, in which case it's first axis will be trimmed to size `length` - a tuple, in which case `_trim_batch` applied recursively to each element and the resulting tuple returned As a consequence, mini-batches can be structured; lists and tuples can be nested arbitrarily deep. Parameters ---------- batch: tuple or NumPy array the mini-batch to trim length: int the size to which `batch` is to be trimmed Returns ------- tuple or NumPy array of same structure as `batch` The trimmed mini-batch """ if isinstance(batch, tuple): return tuple([_trim_batch(b, length) for b in batch]) else: return batch[:length]
[ "def", "_trim_batch", "(", "batch", ",", "length", ")", ":", "if", "isinstance", "(", "batch", ",", "tuple", ")", ":", "return", "tuple", "(", "[", "_trim_batch", "(", "b", ",", "length", ")", "for", "b", "in", "batch", "]", ")", "else", ":", "return", "batch", "[", ":", "length", "]" ]
Trim the mini-batch `batch` to the size `length`. `batch` can be: - a NumPy array, in which case it's first axis will be trimmed to size `length` - a tuple, in which case `_trim_batch` applied recursively to each element and the resulting tuple returned As a consequence, mini-batches can be structured; lists and tuples can be nested arbitrarily deep. Parameters ---------- batch: tuple or NumPy array the mini-batch to trim length: int the size to which `batch` is to be trimmed Returns ------- tuple or NumPy array of same structure as `batch` The trimmed mini-batch
[ "Trim", "the", "mini", "-", "batch", "batch", "to", "the", "size", "length", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/data_source.py#L41-L68
train
Britefury/batchup
batchup/data_source.py
batch_map_concat
def batch_map_concat(func, batch_iter, progress_iter_func=None, n_batches=None, prepend_args=None): """ Apply a function to all the samples that are accessed as mini-batches obtained from an iterator. Returns the per-sample results. The function `func` should return the result for each sample in the mini-batch as an array. To return multiple results (e.g. loss and errors) return a tuple of arrays (e.g. `(loss_array, error_array)`) `batch_iter` must be an iterator that generates mini-batches that contain samples Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_iter: data set iterator Iterator that generates mini-batches of data progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. n_batches: [optional] integer Process at most this number of batches before returning. prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The per-sample sum of the results of the function `func` e.g. `(batch_A, batch_B, ...)` Returns an empty tuple if there were 0 samples in the data set. Examples -------- In these examples we will demonstrate the use of `batch_map` to apply a function (e.g. a Theano function that runs on the GPU) to samples in a data set. We construct an iterator that generates mini-batches from the data set and pass it to `batch_map` along with the function that we wish to apply. The function will receive the batches and process them. Define a function to apply to samples: >>> def sqr_sum(x): ... # Ensure that we receive batches of the expected size: ... 
assert len(x) in {5, 2} ... return (x ** 2).sum(axis=1) Construct data to process and create a data source: >>> X = np.random.normal(size=(7, 10)) >>> ds = ArrayDataSource([X]) Apply the function defined above: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> X_sqr_sum = batch_map_concat(sqr_sum, batch_iter) >>> assert np.allclose(X_sqr_sum[0], (X ** 2).sum(axis=1)) There are also cases where we wish to limit the number of batches that will be processed: - when the iterator generates an infinite number of samples - when the data set is huge and we wish to show results as we go Use the `n_batches` argument to limit the number of batches to process: >>> X_large = np.random.normal(size=(100, 10)) >>> ds_large = ArrayDataSource([X_large]) >>> iter_large = ds_large.batch_iterator(batch_size=5) >>> for i in range(10): ... partial_result = batch_map_concat(sqr_sum, iter_large, n_batches=2) ... # Should have 10 samples per partial result ... assert len(partial_result[0]) == 10 ... j = i * 10 ... assert np.allclose(partial_result[0], ... 
(X_large[j:j + 10]**2).sum(axis=1)) """ # Accumulator for results and number of samples results = [] # If `progress_iter_func` is not `None`, apply it if progress_iter_func is not None: batch_iter = progress_iter_func(batch_iter, total=n_batches, leave=False) # Apply `func` to each batch n_processed = 0 for batch in batch_iter: # Apply on batch and check the type of the results if prepend_args is not None: batch_results = func(*(prepend_args + tuple(batch))) else: batch_results = func(*batch) if batch_results is None: pass elif isinstance(batch_results, np.ndarray): batch_results = (batch_results,) elif isinstance(batch_results, tuple): pass else: raise TypeError( 'Batch function should return a tuple of results, a ' 'single result as a NumPy array, or None, ' 'not {}'.format(type(batch_results))) # Accumulate training results if batch_results is not None: results.append(batch_results) n_processed += 1 if n_batches is not None and n_processed >= n_batches: break # Concatenate result arrays if len(results) > 0: results = zip(*results) results = tuple([np.concatenate(list(r), axis=0) for r in results]) return results else: return None
python
def batch_map_concat(func, batch_iter, progress_iter_func=None, n_batches=None, prepend_args=None): """ Apply a function to all the samples that are accessed as mini-batches obtained from an iterator. Returns the per-sample results. The function `func` should return the result for each sample in the mini-batch as an array. To return multiple results (e.g. loss and errors) return a tuple of arrays (e.g. `(loss_array, error_array)`) `batch_iter` must be an iterator that generates mini-batches that contain samples Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_iter: data set iterator Iterator that generates mini-batches of data progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. n_batches: [optional] integer Process at most this number of batches before returning. prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The per-sample sum of the results of the function `func` e.g. `(batch_A, batch_B, ...)` Returns an empty tuple if there were 0 samples in the data set. Examples -------- In these examples we will demonstrate the use of `batch_map` to apply a function (e.g. a Theano function that runs on the GPU) to samples in a data set. We construct an iterator that generates mini-batches from the data set and pass it to `batch_map` along with the function that we wish to apply. The function will receive the batches and process them. Define a function to apply to samples: >>> def sqr_sum(x): ... # Ensure that we receive batches of the expected size: ... 
assert len(x) in {5, 2} ... return (x ** 2).sum(axis=1) Construct data to process and create a data source: >>> X = np.random.normal(size=(7, 10)) >>> ds = ArrayDataSource([X]) Apply the function defined above: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> X_sqr_sum = batch_map_concat(sqr_sum, batch_iter) >>> assert np.allclose(X_sqr_sum[0], (X ** 2).sum(axis=1)) There are also cases where we wish to limit the number of batches that will be processed: - when the iterator generates an infinite number of samples - when the data set is huge and we wish to show results as we go Use the `n_batches` argument to limit the number of batches to process: >>> X_large = np.random.normal(size=(100, 10)) >>> ds_large = ArrayDataSource([X_large]) >>> iter_large = ds_large.batch_iterator(batch_size=5) >>> for i in range(10): ... partial_result = batch_map_concat(sqr_sum, iter_large, n_batches=2) ... # Should have 10 samples per partial result ... assert len(partial_result[0]) == 10 ... j = i * 10 ... assert np.allclose(partial_result[0], ... 
(X_large[j:j + 10]**2).sum(axis=1)) """ # Accumulator for results and number of samples results = [] # If `progress_iter_func` is not `None`, apply it if progress_iter_func is not None: batch_iter = progress_iter_func(batch_iter, total=n_batches, leave=False) # Apply `func` to each batch n_processed = 0 for batch in batch_iter: # Apply on batch and check the type of the results if prepend_args is not None: batch_results = func(*(prepend_args + tuple(batch))) else: batch_results = func(*batch) if batch_results is None: pass elif isinstance(batch_results, np.ndarray): batch_results = (batch_results,) elif isinstance(batch_results, tuple): pass else: raise TypeError( 'Batch function should return a tuple of results, a ' 'single result as a NumPy array, or None, ' 'not {}'.format(type(batch_results))) # Accumulate training results if batch_results is not None: results.append(batch_results) n_processed += 1 if n_batches is not None and n_processed >= n_batches: break # Concatenate result arrays if len(results) > 0: results = zip(*results) results = tuple([np.concatenate(list(r), axis=0) for r in results]) return results else: return None
[ "def", "batch_map_concat", "(", "func", ",", "batch_iter", ",", "progress_iter_func", "=", "None", ",", "n_batches", "=", "None", ",", "prepend_args", "=", "None", ")", ":", "# Accumulator for results and number of samples", "results", "=", "[", "]", "# If `progress_iter_func` is not `None`, apply it", "if", "progress_iter_func", "is", "not", "None", ":", "batch_iter", "=", "progress_iter_func", "(", "batch_iter", ",", "total", "=", "n_batches", ",", "leave", "=", "False", ")", "# Apply `func` to each batch", "n_processed", "=", "0", "for", "batch", "in", "batch_iter", ":", "# Apply on batch and check the type of the results", "if", "prepend_args", "is", "not", "None", ":", "batch_results", "=", "func", "(", "*", "(", "prepend_args", "+", "tuple", "(", "batch", ")", ")", ")", "else", ":", "batch_results", "=", "func", "(", "*", "batch", ")", "if", "batch_results", "is", "None", ":", "pass", "elif", "isinstance", "(", "batch_results", ",", "np", ".", "ndarray", ")", ":", "batch_results", "=", "(", "batch_results", ",", ")", "elif", "isinstance", "(", "batch_results", ",", "tuple", ")", ":", "pass", "else", ":", "raise", "TypeError", "(", "'Batch function should return a tuple of results, a '", "'single result as a NumPy array, or None, '", "'not {}'", ".", "format", "(", "type", "(", "batch_results", ")", ")", ")", "# Accumulate training results", "if", "batch_results", "is", "not", "None", ":", "results", ".", "append", "(", "batch_results", ")", "n_processed", "+=", "1", "if", "n_batches", "is", "not", "None", "and", "n_processed", ">=", "n_batches", ":", "break", "# Concatenate result arrays", "if", "len", "(", "results", ")", ">", "0", ":", "results", "=", "zip", "(", "*", "results", ")", "results", "=", "tuple", "(", "[", "np", ".", "concatenate", "(", "list", "(", "r", ")", ",", "axis", "=", "0", ")", "for", "r", "in", "results", "]", ")", "return", "results", "else", ":", "return", "None" ]
Apply a function to all the samples that are accessed as mini-batches obtained from an iterator. Returns the per-sample results. The function `func` should return the result for each sample in the mini-batch as an array. To return multiple results (e.g. loss and errors) return a tuple of arrays (e.g. `(loss_array, error_array)`) `batch_iter` must be an iterator that generates mini-batches that contain samples Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_iter: data set iterator Iterator that generates mini-batches of data progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. n_batches: [optional] integer Process at most this number of batches before returning. prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The per-sample sum of the results of the function `func` e.g. `(batch_A, batch_B, ...)` Returns an empty tuple if there were 0 samples in the data set. Examples -------- In these examples we will demonstrate the use of `batch_map` to apply a function (e.g. a Theano function that runs on the GPU) to samples in a data set. We construct an iterator that generates mini-batches from the data set and pass it to `batch_map` along with the function that we wish to apply. The function will receive the batches and process them. Define a function to apply to samples: >>> def sqr_sum(x): ... # Ensure that we receive batches of the expected size: ... assert len(x) in {5, 2} ... 
return (x ** 2).sum(axis=1) Construct data to process and create a data source: >>> X = np.random.normal(size=(7, 10)) >>> ds = ArrayDataSource([X]) Apply the function defined above: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> X_sqr_sum = batch_map_concat(sqr_sum, batch_iter) >>> assert np.allclose(X_sqr_sum[0], (X ** 2).sum(axis=1)) There are also cases where we wish to limit the number of batches that will be processed: - when the iterator generates an infinite number of samples - when the data set is huge and we wish to show results as we go Use the `n_batches` argument to limit the number of batches to process: >>> X_large = np.random.normal(size=(100, 10)) >>> ds_large = ArrayDataSource([X_large]) >>> iter_large = ds_large.batch_iterator(batch_size=5) >>> for i in range(10): ... partial_result = batch_map_concat(sqr_sum, iter_large, n_batches=2) ... # Should have 10 samples per partial result ... assert len(partial_result[0]) == 10 ... j = i * 10 ... assert np.allclose(partial_result[0], ... (X_large[j:j + 10]**2).sum(axis=1))
[ "Apply", "a", "function", "to", "all", "the", "samples", "that", "are", "accessed", "as", "mini", "-", "batches", "obtained", "from", "an", "iterator", ".", "Returns", "the", "per", "-", "sample", "results", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/data_source.py#L1478-L1599
train
Britefury/batchup
batchup/data_source.py
batch_map_mean
def batch_map_mean(func, batch_iter, progress_iter_func=None, sum_axis=None, n_batches=None, prepend_args=None): """ Apply a function to all the samples that are accessed as mini-batches obtained from an iterator. Returns the across-samples mean of the results returned by `func` The `sum_axis` arguments tells `mean_batch_map` how to process the results of `func` before accumulating them: - If `sum_axis` is `None`, `func` should return the across-samples SUM of the results of operating on the mini-batch the sum of the values for the samples, e.g. for loss and error it should return `(sum([loss0, loss1, ... lossN]), sum([err0, err1, ... errN]))` - Otherwise, `sum_axis` should specify the axis or axes over which the the batch results should be summed, e.g. if `func` returns a per-sample loss and error in two arrays `[[loss0, loss1, ... lossN], [err0, err1, ... errN]`, give `sum_axis` a value of `0` to sum over axis 0 to get the per-batch loss and error. These results will be accumulated and divided by the number of samples at the end to get the mean. Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_iter: data set iterator Iterator that generates mini-batches of data progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. sum_axis: (default=`None`) int, tuple of ints or None If an integer or a tuple of integers, the results returned by `func` will be summed across this axis / these axes before being accumulated; e.g. 
if `func` returns an array of per-sample losses, with axis 0 being the sample dimension, passing a value of `0` as `sum_axis` will cause these results to be summed along axis 0 to get the per-batch sum before accumulating the losses. The total summed loss will be divided by the number of samples at the end in order to compute the mean loss. n_batches: [optional] integer that specifies the number of mini-batches to process before returning prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The sum of the results of the function `fn` divided by the number of samples processed, e.g. `(sum(outA_per_batch) / n_samples, sum(outB_per_batch) / n_samples, ...)` Examples -------- The following examples will demonstrate the use of `mean_batch_map` to compute binary cross entropy loss over a data set. A few variants will be demonstrated: - the default behaviour in which the function being applied should return the sum over the batch sample axis - having the function return per sample results and maving `mean_batch_map` perform the sum operation. This is easier to understand but less efficient as a Theano function would have to move more data back from the GPU. - limiting the number of batches that will be processed in order to get partial results when dealing with a large data set Define a function to compute the per-sample binary cross entropy loss: >>> def binary_crossentropy_loss(pred, target): ... e = -target * np.log(pred) - (1 - target) * np.log(1 - pred) ... return e.mean(axis=1) Now define a function that computes the *SUM* of the binary cross entropy losses over the sample axis (axis 0), as the default behaviour of `mean_batch_map` will sum them up and divide by the number of samples at the end: >>> def binary_crossentropy_loss_sum(pred, target): ... 
return binary_crossentropy_loss(pred, target).sum() Construct prediction and target data >>> pred = np.random.uniform(0.1, 0.9, size=(7, 10)) >>> tgt = np.random.uniform(0.1, 0.9, size=(7, 10)) >>> ds = ArrayDataSource([pred, tgt]) Apply the loss sum function defined above: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> loss = batch_map_mean(binary_crossentropy_loss_sum, batch_iter) >>> assert np.allclose( ... loss, binary_crossentropy_loss(pred, tgt).mean()) Have `mean_batch_map` sum over axis 0: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> loss = batch_map_mean(binary_crossentropy_loss, batch_iter, ... sum_axis=0) >>> assert np.allclose( ... loss, binary_crossentropy_loss(pred, tgt).mean()) Construct a large data set and use `batch >>> pred_large = np.random.uniform(0.1, 0.9, size=(100, 10)) >>> tgt_large = np.random.uniform(0.1, 0.9, size=(100, 10)) >>> ds_large = ArrayDataSource([pred_large, tgt_large]) >>> iter_large = ds_large.batch_iterator(batch_size=5) >>> for i in range(10): ... partial_loss = batch_map_mean(binary_crossentropy_loss_sum, ... iter_large, n_batches=2) ... j = i * 10 ... assert np.allclose( ... partial_loss, binary_crossentropy_loss( ... 
pred_large[j:j + 10], tgt_large[j:j + 10]).mean()) """ # Accumulator for results and number of samples results_accum = None n_samples_accum = 0 # If `progress_iter_func` is not `None`, apply it if progress_iter_func is not None: batch_iter = progress_iter_func(batch_iter, total=n_batches, leave=False) # Train on each batch n_processed = 0 for batch in batch_iter: # Get number of samples in batch; can vary batch_n = _length_of_batch(batch) # Apply on batch and check the type of the results if prepend_args is not None: batch_results = func(*(prepend_args + tuple(batch))) else: batch_results = func(*batch) if batch_results is None: pass elif isinstance(batch_results, (np.ndarray, float)): batch_results = (batch_results,) elif isinstance(batch_results, tuple): pass else: raise TypeError( 'Batch function should return a tuple of results, a ' 'single result as a NumPy array or float, or None, ' 'not {}'.format(type(batch_results))) # Accumulate results and number of samples if results_accum is None: # Initialise the accumulator to the batch results if `func` # returns summed results or if it returned None; # don't attempt to iterate over None and sum each item if batch_results is None: pass elif sum_axis is None: results_accum = list(batch_results) else: results_accum = [br.sum(axis=sum_axis) for br in batch_results] else: if batch_results is not None: for i in range(len(results_accum)): br = batch_results[i] if sum_axis is not None: br = br.sum(axis=sum_axis) results_accum[i] += br n_samples_accum += batch_n n_processed += 1 if n_batches is not None and n_processed >= n_batches: break # Divide by the number of training examples used to compute mean if results_accum is not None: results_accum = tuple([np.array(r).astype(float) / n_samples_accum for r in results_accum]) return results_accum
python
def batch_map_mean(func, batch_iter, progress_iter_func=None, sum_axis=None, n_batches=None, prepend_args=None): """ Apply a function to all the samples that are accessed as mini-batches obtained from an iterator. Returns the across-samples mean of the results returned by `func` The `sum_axis` arguments tells `mean_batch_map` how to process the results of `func` before accumulating them: - If `sum_axis` is `None`, `func` should return the across-samples SUM of the results of operating on the mini-batch the sum of the values for the samples, e.g. for loss and error it should return `(sum([loss0, loss1, ... lossN]), sum([err0, err1, ... errN]))` - Otherwise, `sum_axis` should specify the axis or axes over which the the batch results should be summed, e.g. if `func` returns a per-sample loss and error in two arrays `[[loss0, loss1, ... lossN], [err0, err1, ... errN]`, give `sum_axis` a value of `0` to sum over axis 0 to get the per-batch loss and error. These results will be accumulated and divided by the number of samples at the end to get the mean. Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_iter: data set iterator Iterator that generates mini-batches of data progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. sum_axis: (default=`None`) int, tuple of ints or None If an integer or a tuple of integers, the results returned by `func` will be summed across this axis / these axes before being accumulated; e.g. 
if `func` returns an array of per-sample losses, with axis 0 being the sample dimension, passing a value of `0` as `sum_axis` will cause these results to be summed along axis 0 to get the per-batch sum before accumulating the losses. The total summed loss will be divided by the number of samples at the end in order to compute the mean loss. n_batches: [optional] integer that specifies the number of mini-batches to process before returning prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The sum of the results of the function `fn` divided by the number of samples processed, e.g. `(sum(outA_per_batch) / n_samples, sum(outB_per_batch) / n_samples, ...)` Examples -------- The following examples will demonstrate the use of `mean_batch_map` to compute binary cross entropy loss over a data set. A few variants will be demonstrated: - the default behaviour in which the function being applied should return the sum over the batch sample axis - having the function return per sample results and maving `mean_batch_map` perform the sum operation. This is easier to understand but less efficient as a Theano function would have to move more data back from the GPU. - limiting the number of batches that will be processed in order to get partial results when dealing with a large data set Define a function to compute the per-sample binary cross entropy loss: >>> def binary_crossentropy_loss(pred, target): ... e = -target * np.log(pred) - (1 - target) * np.log(1 - pred) ... return e.mean(axis=1) Now define a function that computes the *SUM* of the binary cross entropy losses over the sample axis (axis 0), as the default behaviour of `mean_batch_map` will sum them up and divide by the number of samples at the end: >>> def binary_crossentropy_loss_sum(pred, target): ... 
return binary_crossentropy_loss(pred, target).sum() Construct prediction and target data >>> pred = np.random.uniform(0.1, 0.9, size=(7, 10)) >>> tgt = np.random.uniform(0.1, 0.9, size=(7, 10)) >>> ds = ArrayDataSource([pred, tgt]) Apply the loss sum function defined above: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> loss = batch_map_mean(binary_crossentropy_loss_sum, batch_iter) >>> assert np.allclose( ... loss, binary_crossentropy_loss(pred, tgt).mean()) Have `mean_batch_map` sum over axis 0: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> loss = batch_map_mean(binary_crossentropy_loss, batch_iter, ... sum_axis=0) >>> assert np.allclose( ... loss, binary_crossentropy_loss(pred, tgt).mean()) Construct a large data set and use `batch >>> pred_large = np.random.uniform(0.1, 0.9, size=(100, 10)) >>> tgt_large = np.random.uniform(0.1, 0.9, size=(100, 10)) >>> ds_large = ArrayDataSource([pred_large, tgt_large]) >>> iter_large = ds_large.batch_iterator(batch_size=5) >>> for i in range(10): ... partial_loss = batch_map_mean(binary_crossentropy_loss_sum, ... iter_large, n_batches=2) ... j = i * 10 ... assert np.allclose( ... partial_loss, binary_crossentropy_loss( ... 
pred_large[j:j + 10], tgt_large[j:j + 10]).mean()) """ # Accumulator for results and number of samples results_accum = None n_samples_accum = 0 # If `progress_iter_func` is not `None`, apply it if progress_iter_func is not None: batch_iter = progress_iter_func(batch_iter, total=n_batches, leave=False) # Train on each batch n_processed = 0 for batch in batch_iter: # Get number of samples in batch; can vary batch_n = _length_of_batch(batch) # Apply on batch and check the type of the results if prepend_args is not None: batch_results = func(*(prepend_args + tuple(batch))) else: batch_results = func(*batch) if batch_results is None: pass elif isinstance(batch_results, (np.ndarray, float)): batch_results = (batch_results,) elif isinstance(batch_results, tuple): pass else: raise TypeError( 'Batch function should return a tuple of results, a ' 'single result as a NumPy array or float, or None, ' 'not {}'.format(type(batch_results))) # Accumulate results and number of samples if results_accum is None: # Initialise the accumulator to the batch results if `func` # returns summed results or if it returned None; # don't attempt to iterate over None and sum each item if batch_results is None: pass elif sum_axis is None: results_accum = list(batch_results) else: results_accum = [br.sum(axis=sum_axis) for br in batch_results] else: if batch_results is not None: for i in range(len(results_accum)): br = batch_results[i] if sum_axis is not None: br = br.sum(axis=sum_axis) results_accum[i] += br n_samples_accum += batch_n n_processed += 1 if n_batches is not None and n_processed >= n_batches: break # Divide by the number of training examples used to compute mean if results_accum is not None: results_accum = tuple([np.array(r).astype(float) / n_samples_accum for r in results_accum]) return results_accum
[ "def", "batch_map_mean", "(", "func", ",", "batch_iter", ",", "progress_iter_func", "=", "None", ",", "sum_axis", "=", "None", ",", "n_batches", "=", "None", ",", "prepend_args", "=", "None", ")", ":", "# Accumulator for results and number of samples", "results_accum", "=", "None", "n_samples_accum", "=", "0", "# If `progress_iter_func` is not `None`, apply it", "if", "progress_iter_func", "is", "not", "None", ":", "batch_iter", "=", "progress_iter_func", "(", "batch_iter", ",", "total", "=", "n_batches", ",", "leave", "=", "False", ")", "# Train on each batch", "n_processed", "=", "0", "for", "batch", "in", "batch_iter", ":", "# Get number of samples in batch; can vary", "batch_n", "=", "_length_of_batch", "(", "batch", ")", "# Apply on batch and check the type of the results", "if", "prepend_args", "is", "not", "None", ":", "batch_results", "=", "func", "(", "*", "(", "prepend_args", "+", "tuple", "(", "batch", ")", ")", ")", "else", ":", "batch_results", "=", "func", "(", "*", "batch", ")", "if", "batch_results", "is", "None", ":", "pass", "elif", "isinstance", "(", "batch_results", ",", "(", "np", ".", "ndarray", ",", "float", ")", ")", ":", "batch_results", "=", "(", "batch_results", ",", ")", "elif", "isinstance", "(", "batch_results", ",", "tuple", ")", ":", "pass", "else", ":", "raise", "TypeError", "(", "'Batch function should return a tuple of results, a '", "'single result as a NumPy array or float, or None, '", "'not {}'", ".", "format", "(", "type", "(", "batch_results", ")", ")", ")", "# Accumulate results and number of samples", "if", "results_accum", "is", "None", ":", "# Initialise the accumulator to the batch results if `func`", "# returns summed results or if it returned None;", "# don't attempt to iterate over None and sum each item", "if", "batch_results", "is", "None", ":", "pass", "elif", "sum_axis", "is", "None", ":", "results_accum", "=", "list", "(", "batch_results", ")", "else", ":", "results_accum", "=", "[", "br", ".", "sum", "(", 
"axis", "=", "sum_axis", ")", "for", "br", "in", "batch_results", "]", "else", ":", "if", "batch_results", "is", "not", "None", ":", "for", "i", "in", "range", "(", "len", "(", "results_accum", ")", ")", ":", "br", "=", "batch_results", "[", "i", "]", "if", "sum_axis", "is", "not", "None", ":", "br", "=", "br", ".", "sum", "(", "axis", "=", "sum_axis", ")", "results_accum", "[", "i", "]", "+=", "br", "n_samples_accum", "+=", "batch_n", "n_processed", "+=", "1", "if", "n_batches", "is", "not", "None", "and", "n_processed", ">=", "n_batches", ":", "break", "# Divide by the number of training examples used to compute mean", "if", "results_accum", "is", "not", "None", ":", "results_accum", "=", "tuple", "(", "[", "np", ".", "array", "(", "r", ")", ".", "astype", "(", "float", ")", "/", "n_samples_accum", "for", "r", "in", "results_accum", "]", ")", "return", "results_accum" ]
Apply a function to all the samples that are accessed as mini-batches obtained from an iterator. Returns the across-samples mean of the results returned by `func` The `sum_axis` arguments tells `mean_batch_map` how to process the results of `func` before accumulating them: - If `sum_axis` is `None`, `func` should return the across-samples SUM of the results of operating on the mini-batch the sum of the values for the samples, e.g. for loss and error it should return `(sum([loss0, loss1, ... lossN]), sum([err0, err1, ... errN]))` - Otherwise, `sum_axis` should specify the axis or axes over which the the batch results should be summed, e.g. if `func` returns a per-sample loss and error in two arrays `[[loss0, loss1, ... lossN], [err0, err1, ... errN]`, give `sum_axis` a value of `0` to sum over axis 0 to get the per-batch loss and error. These results will be accumulated and divided by the number of samples at the end to get the mean. Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_iter: data set iterator Iterator that generates mini-batches of data progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. sum_axis: (default=`None`) int, tuple of ints or None If an integer or a tuple of integers, the results returned by `func` will be summed across this axis / these axes before being accumulated; e.g. 
if `func` returns an array of per-sample losses, with axis 0 being the sample dimension, passing a value of `0` as `sum_axis` will cause these results to be summed along axis 0 to get the per-batch sum before accumulating the losses. The total summed loss will be divided by the number of samples at the end in order to compute the mean loss. n_batches: [optional] integer that specifies the number of mini-batches to process before returning prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The sum of the results of the function `fn` divided by the number of samples processed, e.g. `(sum(outA_per_batch) / n_samples, sum(outB_per_batch) / n_samples, ...)` Examples -------- The following examples will demonstrate the use of `mean_batch_map` to compute binary cross entropy loss over a data set. A few variants will be demonstrated: - the default behaviour in which the function being applied should return the sum over the batch sample axis - having the function return per sample results and maving `mean_batch_map` perform the sum operation. This is easier to understand but less efficient as a Theano function would have to move more data back from the GPU. - limiting the number of batches that will be processed in order to get partial results when dealing with a large data set Define a function to compute the per-sample binary cross entropy loss: >>> def binary_crossentropy_loss(pred, target): ... e = -target * np.log(pred) - (1 - target) * np.log(1 - pred) ... return e.mean(axis=1) Now define a function that computes the *SUM* of the binary cross entropy losses over the sample axis (axis 0), as the default behaviour of `mean_batch_map` will sum them up and divide by the number of samples at the end: >>> def binary_crossentropy_loss_sum(pred, target): ... 
return binary_crossentropy_loss(pred, target).sum() Construct prediction and target data >>> pred = np.random.uniform(0.1, 0.9, size=(7, 10)) >>> tgt = np.random.uniform(0.1, 0.9, size=(7, 10)) >>> ds = ArrayDataSource([pred, tgt]) Apply the loss sum function defined above: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> loss = batch_map_mean(binary_crossentropy_loss_sum, batch_iter) >>> assert np.allclose( ... loss, binary_crossentropy_loss(pred, tgt).mean()) Have `mean_batch_map` sum over axis 0: >>> batch_iter = ds.batch_iterator(batch_size=5) >>> loss = batch_map_mean(binary_crossentropy_loss, batch_iter, ... sum_axis=0) >>> assert np.allclose( ... loss, binary_crossentropy_loss(pred, tgt).mean()) Construct a large data set and use `batch >>> pred_large = np.random.uniform(0.1, 0.9, size=(100, 10)) >>> tgt_large = np.random.uniform(0.1, 0.9, size=(100, 10)) >>> ds_large = ArrayDataSource([pred_large, tgt_large]) >>> iter_large = ds_large.batch_iterator(batch_size=5) >>> for i in range(10): ... partial_loss = batch_map_mean(binary_crossentropy_loss_sum, ... iter_large, n_batches=2) ... j = i * 10 ... assert np.allclose( ... partial_loss, binary_crossentropy_loss( ... pred_large[j:j + 10], tgt_large[j:j + 10]).mean())
[ "Apply", "a", "function", "to", "all", "the", "samples", "that", "are", "accessed", "as", "mini", "-", "batches", "obtained", "from", "an", "iterator", ".", "Returns", "the", "across", "-", "samples", "mean", "of", "the", "results", "returned", "by", "func" ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/data_source.py#L1602-L1779
train
Britefury/batchup
batchup/data_source.py
coerce_data_source
def coerce_data_source(x): """ Helper function to coerce an object into a data source, selecting the appropriate data source class for the given object. If `x` is already a data source it is returned as is. Parameters ---------- x: any The object to coerce. If `x` is a data source, it is returned as is. If it is a list or tuple of array-like objects they will be wrapped in an `ArrayDataSource` that will be returned. If `x` is an iterator it will be wrapped in an `IteratorDataSource`. If it is a callable it will be wrapped in a `CallableDataSource`. Returns ------- `x` coerced into a data source Raises ------ `TypeError` if `x` is not a data souce, a list or tuple of array-like objects, an iterator or a callable. """ if isinstance(x, AbstractDataSource): return x elif isinstance(x, (list, tuple)): # Sequence of array-likes items = [] for item in x: if _is_array_like(item): items.append(item) else: raise TypeError( 'Cannot convert x to a data source; x is a sequence and ' 'one of the elements is not an array-like object, rather ' 'a {}'.format(type(item))) if len(items) == 0: raise ValueError('Cannot convert x to a data source; x is an ' 'empty sequence') return ArrayDataSource(items) elif isinstance(x, collections.Iterator): return IteratorDataSource(x) elif callable(x): return CallableDataSource(x) else: raise TypeError('Cannot convert x to a data source; can only handle ' 'iterators, callables, non-empty sequences of ' 'array-like objects; cannot ' 'handle {}'.format(type(x)))
python
def coerce_data_source(x): """ Helper function to coerce an object into a data source, selecting the appropriate data source class for the given object. If `x` is already a data source it is returned as is. Parameters ---------- x: any The object to coerce. If `x` is a data source, it is returned as is. If it is a list or tuple of array-like objects they will be wrapped in an `ArrayDataSource` that will be returned. If `x` is an iterator it will be wrapped in an `IteratorDataSource`. If it is a callable it will be wrapped in a `CallableDataSource`. Returns ------- `x` coerced into a data source Raises ------ `TypeError` if `x` is not a data souce, a list or tuple of array-like objects, an iterator or a callable. """ if isinstance(x, AbstractDataSource): return x elif isinstance(x, (list, tuple)): # Sequence of array-likes items = [] for item in x: if _is_array_like(item): items.append(item) else: raise TypeError( 'Cannot convert x to a data source; x is a sequence and ' 'one of the elements is not an array-like object, rather ' 'a {}'.format(type(item))) if len(items) == 0: raise ValueError('Cannot convert x to a data source; x is an ' 'empty sequence') return ArrayDataSource(items) elif isinstance(x, collections.Iterator): return IteratorDataSource(x) elif callable(x): return CallableDataSource(x) else: raise TypeError('Cannot convert x to a data source; can only handle ' 'iterators, callables, non-empty sequences of ' 'array-like objects; cannot ' 'handle {}'.format(type(x)))
[ "def", "coerce_data_source", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "AbstractDataSource", ")", ":", "return", "x", "elif", "isinstance", "(", "x", ",", "(", "list", ",", "tuple", ")", ")", ":", "# Sequence of array-likes", "items", "=", "[", "]", "for", "item", "in", "x", ":", "if", "_is_array_like", "(", "item", ")", ":", "items", ".", "append", "(", "item", ")", "else", ":", "raise", "TypeError", "(", "'Cannot convert x to a data source; x is a sequence and '", "'one of the elements is not an array-like object, rather '", "'a {}'", ".", "format", "(", "type", "(", "item", ")", ")", ")", "if", "len", "(", "items", ")", "==", "0", ":", "raise", "ValueError", "(", "'Cannot convert x to a data source; x is an '", "'empty sequence'", ")", "return", "ArrayDataSource", "(", "items", ")", "elif", "isinstance", "(", "x", ",", "collections", ".", "Iterator", ")", ":", "return", "IteratorDataSource", "(", "x", ")", "elif", "callable", "(", "x", ")", ":", "return", "CallableDataSource", "(", "x", ")", "else", ":", "raise", "TypeError", "(", "'Cannot convert x to a data source; can only handle '", "'iterators, callables, non-empty sequences of '", "'array-like objects; cannot '", "'handle {}'", ".", "format", "(", "type", "(", "x", ")", ")", ")" ]
Helper function to coerce an object into a data source, selecting the appropriate data source class for the given object. If `x` is already a data source it is returned as is. Parameters ---------- x: any The object to coerce. If `x` is a data source, it is returned as is. If it is a list or tuple of array-like objects they will be wrapped in an `ArrayDataSource` that will be returned. If `x` is an iterator it will be wrapped in an `IteratorDataSource`. If it is a callable it will be wrapped in a `CallableDataSource`. Returns ------- `x` coerced into a data source Raises ------ `TypeError` if `x` is not a data souce, a list or tuple of array-like objects, an iterator or a callable.
[ "Helper", "function", "to", "coerce", "an", "object", "into", "a", "data", "source", "selecting", "the", "appropriate", "data", "source", "class", "for", "the", "given", "object", ".", "If", "x", "is", "already", "a", "data", "source", "it", "is", "returned", "as", "is", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/data_source.py#L1800-L1849
train
Britefury/batchup
batchup/data_source.py
AbstractDataSource.batch_map_concat
def batch_map_concat(self, func, batch_size, progress_iter_func=None, n_batches=None, prepend_args=None, **kwargs): """A batch oriented implementation of `map`. Applies a function to all the samples in this data source by breaking the data into mini-batches and applying the function to each mini-batch. Returns the per-sample results. This method is a wrapper around the :func:`batch_map` function; please see its documentation for more information and examples. The function `func` should return the result for each sample in the mini-batch as an array. To return multiple results (e.g. loss and errors) return a tuple of arrays (e.g. `(loss_array, error_array)`) Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_size: int The mini-batch size progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. n_batches: [optional] integer that specifies the number of mini-batches to process before returning prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The per-sample sum of the results of the function `func` e.g. `(batch_A, batch_B, ...)` Returns an empty tuple if there were 0 samples in the data set. Examples -------- Define a function to apply to samples: >>> def sqr_sum(x): ... 
return (x ** 2).sum(axis=1) Construct data to process and create a data source: >>> X = np.random.normal(size=(7, 10)) >>> ds = ArrayDataSource([X]) Apply the function defined above: >>> X_sqr_sum = ds.batch_map_concat(sqr_sum, batch_size=5) >>> assert (X_sqr_sum[0] == (X ** 2).sum(axis=1)).all() """ if n_batches is None: n = self.num_samples(**kwargs) if n == np.inf: raise ValueError('Data set has infinite size or sampler will ' 'generate infinite samples but no n_batches ' 'limit specified') elif n is not None: n_batches = sampling.num_batches(n, batch_size) batch_iter = self.batch_iterator(batch_size, **kwargs) return batch_map_concat(func, batch_iter, progress_iter_func, n_batches, prepend_args)
python
def batch_map_concat(self, func, batch_size, progress_iter_func=None, n_batches=None, prepend_args=None, **kwargs): """A batch oriented implementation of `map`. Applies a function to all the samples in this data source by breaking the data into mini-batches and applying the function to each mini-batch. Returns the per-sample results. This method is a wrapper around the :func:`batch_map` function; please see its documentation for more information and examples. The function `func` should return the result for each sample in the mini-batch as an array. To return multiple results (e.g. loss and errors) return a tuple of arrays (e.g. `(loss_array, error_array)`) Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_size: int The mini-batch size progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. n_batches: [optional] integer that specifies the number of mini-batches to process before returning prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The per-sample sum of the results of the function `func` e.g. `(batch_A, batch_B, ...)` Returns an empty tuple if there were 0 samples in the data set. Examples -------- Define a function to apply to samples: >>> def sqr_sum(x): ... 
return (x ** 2).sum(axis=1) Construct data to process and create a data source: >>> X = np.random.normal(size=(7, 10)) >>> ds = ArrayDataSource([X]) Apply the function defined above: >>> X_sqr_sum = ds.batch_map_concat(sqr_sum, batch_size=5) >>> assert (X_sqr_sum[0] == (X ** 2).sum(axis=1)).all() """ if n_batches is None: n = self.num_samples(**kwargs) if n == np.inf: raise ValueError('Data set has infinite size or sampler will ' 'generate infinite samples but no n_batches ' 'limit specified') elif n is not None: n_batches = sampling.num_batches(n, batch_size) batch_iter = self.batch_iterator(batch_size, **kwargs) return batch_map_concat(func, batch_iter, progress_iter_func, n_batches, prepend_args)
[ "def", "batch_map_concat", "(", "self", ",", "func", ",", "batch_size", ",", "progress_iter_func", "=", "None", ",", "n_batches", "=", "None", ",", "prepend_args", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "n_batches", "is", "None", ":", "n", "=", "self", ".", "num_samples", "(", "*", "*", "kwargs", ")", "if", "n", "==", "np", ".", "inf", ":", "raise", "ValueError", "(", "'Data set has infinite size or sampler will '", "'generate infinite samples but no n_batches '", "'limit specified'", ")", "elif", "n", "is", "not", "None", ":", "n_batches", "=", "sampling", ".", "num_batches", "(", "n", ",", "batch_size", ")", "batch_iter", "=", "self", ".", "batch_iterator", "(", "batch_size", ",", "*", "*", "kwargs", ")", "return", "batch_map_concat", "(", "func", ",", "batch_iter", ",", "progress_iter_func", ",", "n_batches", ",", "prepend_args", ")" ]
A batch oriented implementation of `map`. Applies a function to all the samples in this data source by breaking the data into mini-batches and applying the function to each mini-batch. Returns the per-sample results. This method is a wrapper around the :func:`batch_map` function; please see its documentation for more information and examples. The function `func` should return the result for each sample in the mini-batch as an array. To return multiple results (e.g. loss and errors) return a tuple of arrays (e.g. `(loss_array, error_array)`) Parameters ---------- func: callable `func(*batch) -> results` The function to call on each mini-batch. Note that the results must be `None`, a tuple or a NumPy array batch_size: int The mini-batch size progress_iter_func: [optional] callable `progress_iter_func(iterator, total=total, leave=leave)` A `tqdm` style function that will be passed the iterator that generates training batches along with the total number of batches and `False` for the `leave` parameter. By passing either `tqdm.tqdm` or `tqdm.tqdm_notebook` as this argument you can have the training loop display a progress bar. n_batches: [optional] integer that specifies the number of mini-batches to process before returning prepend_args: [optional] tuple Arguments to prepend to the arguments passed to `func` Returns ------- tuple The per-sample sum of the results of the function `func` e.g. `(batch_A, batch_B, ...)` Returns an empty tuple if there were 0 samples in the data set. Examples -------- Define a function to apply to samples: >>> def sqr_sum(x): ... return (x ** 2).sum(axis=1) Construct data to process and create a data source: >>> X = np.random.normal(size=(7, 10)) >>> ds = ArrayDataSource([X]) Apply the function defined above: >>> X_sqr_sum = ds.batch_map_concat(sqr_sum, batch_size=5) >>> assert (X_sqr_sum[0] == (X ** 2).sum(axis=1)).all()
[ "A", "batch", "oriented", "implementation", "of", "map", ".", "Applies", "a", "function", "to", "all", "the", "samples", "in", "this", "data", "source", "by", "breaking", "the", "data", "into", "mini", "-", "batches", "and", "applying", "the", "function", "to", "each", "mini", "-", "batch", ".", "Returns", "the", "per", "-", "sample", "results", "." ]
3fc2304e629f813c05f9e7a85a18acef3581a536
https://github.com/Britefury/batchup/blob/3fc2304e629f813c05f9e7a85a18acef3581a536/batchup/data_source.py#L123-L188
train