Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- vllm/lib/python3.10/site-packages/tqdm/__init__.py +38 -0
- vllm/lib/python3.10/site-packages/tqdm/_main.py +9 -0
- vllm/lib/python3.10/site-packages/tqdm/_monitor.py +95 -0
- vllm/lib/python3.10/site-packages/tqdm/_tqdm_gui.py +9 -0
- vllm/lib/python3.10/site-packages/tqdm/_tqdm_notebook.py +9 -0
- vllm/lib/python3.10/site-packages/tqdm/_tqdm_pandas.py +24 -0
- vllm/lib/python3.10/site-packages/tqdm/_utils.py +11 -0
- vllm/lib/python3.10/site-packages/tqdm/asyncio.py +93 -0
- vllm/lib/python3.10/site-packages/tqdm/auto.py +40 -0
- vllm/lib/python3.10/site-packages/tqdm/autonotebook.py +29 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/__init__.py +92 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/__pycache__/itertools.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/__pycache__/slack.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/bells.py +26 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/concurrent.py +105 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/discord.py +156 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/itertools.py +35 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/slack.py +120 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/telegram.py +153 -0
- vllm/lib/python3.10/site-packages/tqdm/contrib/utils_worker.py +38 -0
- vllm/lib/python3.10/site-packages/tqdm/dask.py +44 -0
- vllm/lib/python3.10/site-packages/tqdm/notebook.py +317 -0
- vllm/lib/python3.10/site-packages/tqdm/rich.py +151 -0
- vllm/lib/python3.10/site-packages/tqdm/std.py +1524 -0
- vllm/lib/python3.10/site-packages/tqdm/tqdm.1 +314 -0
- vllm/lib/python3.10/site-packages/wandb/cli/__init__.py +0 -0
- vllm/lib/python3.10/site-packages/wandb/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/cli/__pycache__/beta.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/cli/__pycache__/cli.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/cli/beta.py +181 -0
- vllm/lib/python3.10/site-packages/wandb/cli/cli.py +2810 -0
- vllm/lib/python3.10/site-packages/wandb/old/__init__.py +0 -0
- vllm/lib/python3.10/site-packages/wandb/old/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/old/__pycache__/core.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/old/__pycache__/settings.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/old/__pycache__/summary.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/old/core.py +53 -0
- vllm/lib/python3.10/site-packages/wandb/old/settings.py +173 -0
- vllm/lib/python3.10/site-packages/wandb/old/summary.py +440 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__init__.py +28 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/bar.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/confusion_matrix.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/custom_chart.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/histogram.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/line.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/line_series.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/pr_curve.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/roc_curve.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/scatter.cpython-310.pyc +0 -0
vllm/lib/python3.10/site-packages/tqdm/__init__.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ._monitor import TMonitor, TqdmSynchronisationWarning
|
| 2 |
+
from ._tqdm_pandas import tqdm_pandas
|
| 3 |
+
from .cli import main # TODO: remove in v5.0.0
|
| 4 |
+
from .gui import tqdm as tqdm_gui # TODO: remove in v5.0.0
|
| 5 |
+
from .gui import trange as tgrange # TODO: remove in v5.0.0
|
| 6 |
+
from .std import (
|
| 7 |
+
TqdmDeprecationWarning, TqdmExperimentalWarning, TqdmKeyError, TqdmMonitorWarning,
|
| 8 |
+
TqdmTypeError, TqdmWarning, tqdm, trange)
|
| 9 |
+
from .version import __version__
|
| 10 |
+
|
| 11 |
+
__all__ = ['tqdm', 'tqdm_gui', 'trange', 'tgrange', 'tqdm_pandas',
|
| 12 |
+
'tqdm_notebook', 'tnrange', 'main', 'TMonitor',
|
| 13 |
+
'TqdmTypeError', 'TqdmKeyError',
|
| 14 |
+
'TqdmWarning', 'TqdmDeprecationWarning',
|
| 15 |
+
'TqdmExperimentalWarning',
|
| 16 |
+
'TqdmMonitorWarning', 'TqdmSynchronisationWarning',
|
| 17 |
+
'__version__']
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def tqdm_notebook(*args, **kwargs): # pragma: no cover
|
| 21 |
+
"""See tqdm.notebook.tqdm for full documentation"""
|
| 22 |
+
from warnings import warn
|
| 23 |
+
|
| 24 |
+
from .notebook import tqdm as _tqdm_notebook
|
| 25 |
+
warn("This function will be removed in tqdm==5.0.0\n"
|
| 26 |
+
"Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`",
|
| 27 |
+
TqdmDeprecationWarning, stacklevel=2)
|
| 28 |
+
return _tqdm_notebook(*args, **kwargs)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def tnrange(*args, **kwargs): # pragma: no cover
|
| 32 |
+
"""Shortcut for `tqdm.notebook.tqdm(range(*args), **kwargs)`."""
|
| 33 |
+
from warnings import warn
|
| 34 |
+
|
| 35 |
+
from .notebook import trange as _tnrange
|
| 36 |
+
warn("Please use `tqdm.notebook.trange` instead of `tqdm.tnrange`",
|
| 37 |
+
TqdmDeprecationWarning, stacklevel=2)
|
| 38 |
+
return _tnrange(*args, **kwargs)
|
vllm/lib/python3.10/site-packages/tqdm/_main.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from warnings import warn
|
| 2 |
+
|
| 3 |
+
from .cli import * # NOQA
|
| 4 |
+
from .cli import __all__ # NOQA
|
| 5 |
+
from .std import TqdmDeprecationWarning
|
| 6 |
+
|
| 7 |
+
warn("This function will be removed in tqdm==5.0.0\n"
|
| 8 |
+
"Please use `tqdm.cli.*` instead of `tqdm._main.*`",
|
| 9 |
+
TqdmDeprecationWarning, stacklevel=2)
|
vllm/lib/python3.10/site-packages/tqdm/_monitor.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import atexit
|
| 2 |
+
from threading import Event, Thread, current_thread
|
| 3 |
+
from time import time
|
| 4 |
+
from warnings import warn
|
| 5 |
+
|
| 6 |
+
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TqdmSynchronisationWarning(RuntimeWarning):
|
| 10 |
+
"""tqdm multi-thread/-process errors which may cause incorrect nesting
|
| 11 |
+
but otherwise no adverse effects"""
|
| 12 |
+
pass
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TMonitor(Thread):
|
| 16 |
+
"""
|
| 17 |
+
Monitoring thread for tqdm bars.
|
| 18 |
+
Monitors if tqdm bars are taking too much time to display
|
| 19 |
+
and readjusts miniters automatically if necessary.
|
| 20 |
+
|
| 21 |
+
Parameters
|
| 22 |
+
----------
|
| 23 |
+
tqdm_cls : class
|
| 24 |
+
tqdm class to use (can be core tqdm or a submodule).
|
| 25 |
+
sleep_interval : float
|
| 26 |
+
Time to sleep between monitoring checks.
|
| 27 |
+
"""
|
| 28 |
+
_test = {} # internal vars for unit testing
|
| 29 |
+
|
| 30 |
+
def __init__(self, tqdm_cls, sleep_interval):
|
| 31 |
+
Thread.__init__(self)
|
| 32 |
+
self.daemon = True # kill thread when main killed (KeyboardInterrupt)
|
| 33 |
+
self.woken = 0 # last time woken up, to sync with monitor
|
| 34 |
+
self.tqdm_cls = tqdm_cls
|
| 35 |
+
self.sleep_interval = sleep_interval
|
| 36 |
+
self._time = self._test.get("time", time)
|
| 37 |
+
self.was_killed = self._test.get("Event", Event)()
|
| 38 |
+
atexit.register(self.exit)
|
| 39 |
+
self.start()
|
| 40 |
+
|
| 41 |
+
def exit(self):
|
| 42 |
+
self.was_killed.set()
|
| 43 |
+
if self is not current_thread():
|
| 44 |
+
self.join()
|
| 45 |
+
return self.report()
|
| 46 |
+
|
| 47 |
+
def get_instances(self):
|
| 48 |
+
# returns a copy of started `tqdm_cls` instances
|
| 49 |
+
return [i for i in self.tqdm_cls._instances.copy()
|
| 50 |
+
# Avoid race by checking that the instance started
|
| 51 |
+
if hasattr(i, 'start_t')]
|
| 52 |
+
|
| 53 |
+
def run(self):
|
| 54 |
+
cur_t = self._time()
|
| 55 |
+
while True:
|
| 56 |
+
# After processing and before sleeping, notify that we woke
|
| 57 |
+
# Need to be done just before sleeping
|
| 58 |
+
self.woken = cur_t
|
| 59 |
+
# Sleep some time...
|
| 60 |
+
self.was_killed.wait(self.sleep_interval)
|
| 61 |
+
# Quit if killed
|
| 62 |
+
if self.was_killed.is_set():
|
| 63 |
+
return
|
| 64 |
+
# Then monitor!
|
| 65 |
+
# Acquire lock (to access _instances)
|
| 66 |
+
with self.tqdm_cls.get_lock():
|
| 67 |
+
cur_t = self._time()
|
| 68 |
+
# Check tqdm instances are waiting too long to print
|
| 69 |
+
instances = self.get_instances()
|
| 70 |
+
for instance in instances:
|
| 71 |
+
# Check event in loop to reduce blocking time on exit
|
| 72 |
+
if self.was_killed.is_set():
|
| 73 |
+
return
|
| 74 |
+
# Only if mininterval > 1 (else iterations are just slow)
|
| 75 |
+
# and last refresh exceeded maxinterval
|
| 76 |
+
if (
|
| 77 |
+
instance.miniters > 1
|
| 78 |
+
and (cur_t - instance.last_print_t) >= instance.maxinterval
|
| 79 |
+
):
|
| 80 |
+
# force bypassing miniters on next iteration
|
| 81 |
+
# (dynamic_miniters adjusts mininterval automatically)
|
| 82 |
+
instance.miniters = 1
|
| 83 |
+
# Refresh now! (works only for manual tqdm)
|
| 84 |
+
instance.refresh(nolock=True)
|
| 85 |
+
# Remove accidental long-lived strong reference
|
| 86 |
+
del instance
|
| 87 |
+
if instances != self.get_instances(): # pragma: nocover
|
| 88 |
+
warn("Set changed size during iteration" +
|
| 89 |
+
" (see https://github.com/tqdm/tqdm/issues/481)",
|
| 90 |
+
TqdmSynchronisationWarning, stacklevel=2)
|
| 91 |
+
# Remove accidental long-lived strong references
|
| 92 |
+
del instances
|
| 93 |
+
|
| 94 |
+
def report(self):
|
| 95 |
+
return not self.was_killed.is_set()
|
vllm/lib/python3.10/site-packages/tqdm/_tqdm_gui.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from warnings import warn
|
| 2 |
+
|
| 3 |
+
from .gui import * # NOQA
|
| 4 |
+
from .gui import __all__ # NOQA
|
| 5 |
+
from .std import TqdmDeprecationWarning
|
| 6 |
+
|
| 7 |
+
warn("This function will be removed in tqdm==5.0.0\n"
|
| 8 |
+
"Please use `tqdm.gui.*` instead of `tqdm._tqdm_gui.*`",
|
| 9 |
+
TqdmDeprecationWarning, stacklevel=2)
|
vllm/lib/python3.10/site-packages/tqdm/_tqdm_notebook.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from warnings import warn
|
| 2 |
+
|
| 3 |
+
from .notebook import * # NOQA
|
| 4 |
+
from .notebook import __all__ # NOQA
|
| 5 |
+
from .std import TqdmDeprecationWarning
|
| 6 |
+
|
| 7 |
+
warn("This function will be removed in tqdm==5.0.0\n"
|
| 8 |
+
"Please use `tqdm.notebook.*` instead of `tqdm._tqdm_notebook.*`",
|
| 9 |
+
TqdmDeprecationWarning, stacklevel=2)
|
vllm/lib/python3.10/site-packages/tqdm/_tqdm_pandas.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
__author__ = "github.com/casperdcl"
|
| 4 |
+
__all__ = ['tqdm_pandas']
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def tqdm_pandas(tclass, **tqdm_kwargs):
|
| 8 |
+
"""
|
| 9 |
+
Registers the given `tqdm` instance with
|
| 10 |
+
`pandas.core.groupby.DataFrameGroupBy.progress_apply`.
|
| 11 |
+
"""
|
| 12 |
+
from tqdm import TqdmDeprecationWarning
|
| 13 |
+
|
| 14 |
+
if isinstance(tclass, type) or (getattr(tclass, '__name__', '').startswith(
|
| 15 |
+
'tqdm_')): # delayed adapter case
|
| 16 |
+
TqdmDeprecationWarning(
|
| 17 |
+
"Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm, ...)`.",
|
| 18 |
+
fp_write=getattr(tqdm_kwargs.get('file', None), 'write', sys.stderr.write))
|
| 19 |
+
tclass.pandas(**tqdm_kwargs)
|
| 20 |
+
else:
|
| 21 |
+
TqdmDeprecationWarning(
|
| 22 |
+
"Please use `tqdm.pandas(...)` instead of `tqdm_pandas(tqdm(...))`.",
|
| 23 |
+
fp_write=getattr(tclass.fp, 'write', sys.stderr.write))
|
| 24 |
+
type(tclass).pandas(deprecated_t=tclass)
|
vllm/lib/python3.10/site-packages/tqdm/_utils.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from warnings import warn
|
| 2 |
+
|
| 3 |
+
from .std import TqdmDeprecationWarning
|
| 4 |
+
from .utils import ( # NOQA, pylint: disable=unused-import
|
| 5 |
+
CUR_OS, IS_NIX, IS_WIN, RE_ANSI, Comparable, FormatReplace, SimpleTextIOWrapper,
|
| 6 |
+
_environ_cols_wrapper, _is_ascii, _is_utf, _screen_shape_linux, _screen_shape_tput,
|
| 7 |
+
_screen_shape_windows, _screen_shape_wrapper, _supports_unicode, _term_move_up, colorama)
|
| 8 |
+
|
| 9 |
+
warn("This function will be removed in tqdm==5.0.0\n"
|
| 10 |
+
"Please use `tqdm.utils.*` instead of `tqdm._utils.*`",
|
| 11 |
+
TqdmDeprecationWarning, stacklevel=2)
|
vllm/lib/python3.10/site-packages/tqdm/asyncio.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Asynchronous progressbar decorator for iterators.
|
| 3 |
+
Includes a default `range` iterator printing to `stderr`.
|
| 4 |
+
|
| 5 |
+
Usage:
|
| 6 |
+
>>> from tqdm.asyncio import trange, tqdm
|
| 7 |
+
>>> async for i in trange(10):
|
| 8 |
+
... ...
|
| 9 |
+
"""
|
| 10 |
+
import asyncio
|
| 11 |
+
from sys import version_info
|
| 12 |
+
|
| 13 |
+
from .std import tqdm as std_tqdm
|
| 14 |
+
|
| 15 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 16 |
+
__all__ = ['tqdm_asyncio', 'tarange', 'tqdm', 'trange']
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class tqdm_asyncio(std_tqdm):
|
| 20 |
+
"""
|
| 21 |
+
Asynchronous-friendly version of tqdm.
|
| 22 |
+
"""
|
| 23 |
+
def __init__(self, iterable=None, *args, **kwargs):
|
| 24 |
+
super().__init__(iterable, *args, **kwargs)
|
| 25 |
+
self.iterable_awaitable = False
|
| 26 |
+
if iterable is not None:
|
| 27 |
+
if hasattr(iterable, "__anext__"):
|
| 28 |
+
self.iterable_next = iterable.__anext__
|
| 29 |
+
self.iterable_awaitable = True
|
| 30 |
+
elif hasattr(iterable, "__next__"):
|
| 31 |
+
self.iterable_next = iterable.__next__
|
| 32 |
+
else:
|
| 33 |
+
self.iterable_iterator = iter(iterable)
|
| 34 |
+
self.iterable_next = self.iterable_iterator.__next__
|
| 35 |
+
|
| 36 |
+
def __aiter__(self):
|
| 37 |
+
return self
|
| 38 |
+
|
| 39 |
+
async def __anext__(self):
|
| 40 |
+
try:
|
| 41 |
+
if self.iterable_awaitable:
|
| 42 |
+
res = await self.iterable_next()
|
| 43 |
+
else:
|
| 44 |
+
res = self.iterable_next()
|
| 45 |
+
self.update()
|
| 46 |
+
return res
|
| 47 |
+
except StopIteration:
|
| 48 |
+
self.close()
|
| 49 |
+
raise StopAsyncIteration
|
| 50 |
+
except BaseException:
|
| 51 |
+
self.close()
|
| 52 |
+
raise
|
| 53 |
+
|
| 54 |
+
def send(self, *args, **kwargs):
|
| 55 |
+
return self.iterable.send(*args, **kwargs)
|
| 56 |
+
|
| 57 |
+
@classmethod
|
| 58 |
+
def as_completed(cls, fs, *, loop=None, timeout=None, total=None, **tqdm_kwargs):
|
| 59 |
+
"""
|
| 60 |
+
Wrapper for `asyncio.as_completed`.
|
| 61 |
+
"""
|
| 62 |
+
if total is None:
|
| 63 |
+
total = len(fs)
|
| 64 |
+
kwargs = {}
|
| 65 |
+
if version_info[:2] < (3, 10):
|
| 66 |
+
kwargs['loop'] = loop
|
| 67 |
+
yield from cls(asyncio.as_completed(fs, timeout=timeout, **kwargs),
|
| 68 |
+
total=total, **tqdm_kwargs)
|
| 69 |
+
|
| 70 |
+
@classmethod
|
| 71 |
+
async def gather(cls, *fs, loop=None, timeout=None, total=None, **tqdm_kwargs):
|
| 72 |
+
"""
|
| 73 |
+
Wrapper for `asyncio.gather`.
|
| 74 |
+
"""
|
| 75 |
+
async def wrap_awaitable(i, f):
|
| 76 |
+
return i, await f
|
| 77 |
+
|
| 78 |
+
ifs = [wrap_awaitable(i, f) for i, f in enumerate(fs)]
|
| 79 |
+
res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
|
| 80 |
+
total=total, **tqdm_kwargs)]
|
| 81 |
+
return [i for _, i in sorted(res)]
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def tarange(*args, **kwargs):
|
| 85 |
+
"""
|
| 86 |
+
A shortcut for `tqdm.asyncio.tqdm(range(*args), **kwargs)`.
|
| 87 |
+
"""
|
| 88 |
+
return tqdm_asyncio(range(*args), **kwargs)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
# Aliases
|
| 92 |
+
tqdm = tqdm_asyncio
|
| 93 |
+
trange = tarange
|
vllm/lib/python3.10/site-packages/tqdm/auto.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Enables multiple commonly used features.
|
| 3 |
+
|
| 4 |
+
Method resolution order:
|
| 5 |
+
|
| 6 |
+
- `tqdm.autonotebook` without import warnings
|
| 7 |
+
- `tqdm.asyncio`
|
| 8 |
+
- `tqdm.std` base class
|
| 9 |
+
|
| 10 |
+
Usage:
|
| 11 |
+
>>> from tqdm.auto import trange, tqdm
|
| 12 |
+
>>> for i in trange(10):
|
| 13 |
+
... ...
|
| 14 |
+
"""
|
| 15 |
+
import warnings
|
| 16 |
+
|
| 17 |
+
from .std import TqdmExperimentalWarning
|
| 18 |
+
|
| 19 |
+
with warnings.catch_warnings():
|
| 20 |
+
warnings.simplefilter("ignore", category=TqdmExperimentalWarning)
|
| 21 |
+
from .autonotebook import tqdm as notebook_tqdm
|
| 22 |
+
|
| 23 |
+
from .asyncio import tqdm as asyncio_tqdm
|
| 24 |
+
from .std import tqdm as std_tqdm
|
| 25 |
+
|
| 26 |
+
if notebook_tqdm != std_tqdm:
|
| 27 |
+
class tqdm(notebook_tqdm, asyncio_tqdm): # pylint: disable=inconsistent-mro
|
| 28 |
+
pass
|
| 29 |
+
else:
|
| 30 |
+
tqdm = asyncio_tqdm
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def trange(*args, **kwargs):
|
| 34 |
+
"""
|
| 35 |
+
A shortcut for `tqdm.auto.tqdm(range(*args), **kwargs)`.
|
| 36 |
+
"""
|
| 37 |
+
return tqdm(range(*args), **kwargs)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
__all__ = ["tqdm", "trange"]
|
vllm/lib/python3.10/site-packages/tqdm/autonotebook.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Automatically choose between `tqdm.notebook` and `tqdm.std`.
|
| 3 |
+
|
| 4 |
+
Usage:
|
| 5 |
+
>>> from tqdm.autonotebook import trange, tqdm
|
| 6 |
+
>>> for i in trange(10):
|
| 7 |
+
... ...
|
| 8 |
+
"""
|
| 9 |
+
import sys
|
| 10 |
+
from warnings import warn
|
| 11 |
+
|
| 12 |
+
try:
|
| 13 |
+
get_ipython = sys.modules['IPython'].get_ipython
|
| 14 |
+
if 'IPKernelApp' not in get_ipython().config: # pragma: no cover
|
| 15 |
+
raise ImportError("console")
|
| 16 |
+
from .notebook import WARN_NOIPYW, IProgress
|
| 17 |
+
if IProgress is None:
|
| 18 |
+
from .std import TqdmWarning
|
| 19 |
+
warn(WARN_NOIPYW, TqdmWarning, stacklevel=2)
|
| 20 |
+
raise ImportError('ipywidgets')
|
| 21 |
+
except Exception:
|
| 22 |
+
from .std import tqdm, trange
|
| 23 |
+
else: # pragma: no cover
|
| 24 |
+
from .notebook import tqdm, trange
|
| 25 |
+
from .std import TqdmExperimentalWarning
|
| 26 |
+
warn("Using `tqdm.autonotebook.tqdm` in notebook mode."
|
| 27 |
+
" Use `tqdm.tqdm` instead to force console mode"
|
| 28 |
+
" (e.g. in jupyter console)", TqdmExperimentalWarning, stacklevel=2)
|
| 29 |
+
__all__ = ["tqdm", "trange"]
|
vllm/lib/python3.10/site-packages/tqdm/contrib/__init__.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Thin wrappers around common functions.
|
| 3 |
+
|
| 4 |
+
Subpackages contain potentially unstable extensions.
|
| 5 |
+
"""
|
| 6 |
+
from warnings import warn
|
| 7 |
+
|
| 8 |
+
from ..auto import tqdm as tqdm_auto
|
| 9 |
+
from ..std import TqdmDeprecationWarning, tqdm
|
| 10 |
+
from ..utils import ObjectWrapper
|
| 11 |
+
|
| 12 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 13 |
+
__all__ = ['tenumerate', 'tzip', 'tmap']
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class DummyTqdmFile(ObjectWrapper):
|
| 17 |
+
"""Dummy file-like that will write to tqdm"""
|
| 18 |
+
|
| 19 |
+
def __init__(self, wrapped):
|
| 20 |
+
super().__init__(wrapped)
|
| 21 |
+
self._buf = []
|
| 22 |
+
|
| 23 |
+
def write(self, x, nolock=False):
|
| 24 |
+
nl = b"\n" if isinstance(x, bytes) else "\n"
|
| 25 |
+
pre, sep, post = x.rpartition(nl)
|
| 26 |
+
if sep:
|
| 27 |
+
blank = type(nl)()
|
| 28 |
+
tqdm.write(blank.join(self._buf + [pre, sep]),
|
| 29 |
+
end=blank, file=self._wrapped, nolock=nolock)
|
| 30 |
+
self._buf = [post]
|
| 31 |
+
else:
|
| 32 |
+
self._buf.append(x)
|
| 33 |
+
|
| 34 |
+
def __del__(self):
|
| 35 |
+
if self._buf:
|
| 36 |
+
blank = type(self._buf[0])()
|
| 37 |
+
try:
|
| 38 |
+
tqdm.write(blank.join(self._buf), end=blank, file=self._wrapped)
|
| 39 |
+
except (OSError, ValueError):
|
| 40 |
+
pass
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def builtin_iterable(func):
|
| 44 |
+
"""Returns `func`"""
|
| 45 |
+
warn("This function has no effect, and will be removed in tqdm==5.0.0",
|
| 46 |
+
TqdmDeprecationWarning, stacklevel=2)
|
| 47 |
+
return func
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def tenumerate(iterable, start=0, total=None, tqdm_class=tqdm_auto, **tqdm_kwargs):
|
| 51 |
+
"""
|
| 52 |
+
Equivalent of `numpy.ndenumerate` or builtin `enumerate`.
|
| 53 |
+
|
| 54 |
+
Parameters
|
| 55 |
+
----------
|
| 56 |
+
tqdm_class : [default: tqdm.auto.tqdm].
|
| 57 |
+
"""
|
| 58 |
+
try:
|
| 59 |
+
import numpy as np
|
| 60 |
+
except ImportError:
|
| 61 |
+
pass
|
| 62 |
+
else:
|
| 63 |
+
if isinstance(iterable, np.ndarray):
|
| 64 |
+
return tqdm_class(np.ndenumerate(iterable), total=total or iterable.size,
|
| 65 |
+
**tqdm_kwargs)
|
| 66 |
+
return enumerate(tqdm_class(iterable, total=total, **tqdm_kwargs), start)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def tzip(iter1, *iter2plus, **tqdm_kwargs):
|
| 70 |
+
"""
|
| 71 |
+
Equivalent of builtin `zip`.
|
| 72 |
+
|
| 73 |
+
Parameters
|
| 74 |
+
----------
|
| 75 |
+
tqdm_class : [default: tqdm.auto.tqdm].
|
| 76 |
+
"""
|
| 77 |
+
kwargs = tqdm_kwargs.copy()
|
| 78 |
+
tqdm_class = kwargs.pop("tqdm_class", tqdm_auto)
|
| 79 |
+
for i in zip(tqdm_class(iter1, **kwargs), *iter2plus):
|
| 80 |
+
yield i
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def tmap(function, *sequences, **tqdm_kwargs):
|
| 84 |
+
"""
|
| 85 |
+
Equivalent of builtin `map`.
|
| 86 |
+
|
| 87 |
+
Parameters
|
| 88 |
+
----------
|
| 89 |
+
tqdm_class : [default: tqdm.auto.tqdm].
|
| 90 |
+
"""
|
| 91 |
+
for i in tzip(*sequences, **tqdm_kwargs):
|
| 92 |
+
yield function(*i)
|
vllm/lib/python3.10/site-packages/tqdm/contrib/__pycache__/itertools.cpython-310.pyc
ADDED
|
Binary file (944 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/tqdm/contrib/__pycache__/slack.cpython-310.pyc
ADDED
|
Binary file (4.31 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/tqdm/contrib/bells.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Even more features than `tqdm.auto` (all the bells & whistles):
|
| 3 |
+
|
| 4 |
+
- `tqdm.auto`
|
| 5 |
+
- `tqdm.tqdm.pandas`
|
| 6 |
+
- `tqdm.contrib.telegram`
|
| 7 |
+
+ uses `${TQDM_TELEGRAM_TOKEN}` and `${TQDM_TELEGRAM_CHAT_ID}`
|
| 8 |
+
- `tqdm.contrib.discord`
|
| 9 |
+
+ uses `${TQDM_DISCORD_TOKEN}` and `${TQDM_DISCORD_CHANNEL_ID}`
|
| 10 |
+
"""
|
| 11 |
+
__all__ = ['tqdm', 'trange']
|
| 12 |
+
import warnings
|
| 13 |
+
from os import getenv
|
| 14 |
+
|
| 15 |
+
if getenv("TQDM_SLACK_TOKEN") and getenv("TQDM_SLACK_CHANNEL"):
|
| 16 |
+
from .slack import tqdm, trange
|
| 17 |
+
elif getenv("TQDM_TELEGRAM_TOKEN") and getenv("TQDM_TELEGRAM_CHAT_ID"):
|
| 18 |
+
from .telegram import tqdm, trange
|
| 19 |
+
elif getenv("TQDM_DISCORD_TOKEN") and getenv("TQDM_DISCORD_CHANNEL_ID"):
|
| 20 |
+
from .discord import tqdm, trange
|
| 21 |
+
else:
|
| 22 |
+
from ..auto import tqdm, trange
|
| 23 |
+
|
| 24 |
+
with warnings.catch_warnings():
|
| 25 |
+
warnings.simplefilter("ignore", category=FutureWarning)
|
| 26 |
+
tqdm.pandas()
|
vllm/lib/python3.10/site-packages/tqdm/contrib/concurrent.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Thin wrappers around `concurrent.futures`.
|
| 3 |
+
"""
|
| 4 |
+
from contextlib import contextmanager
|
| 5 |
+
from operator import length_hint
|
| 6 |
+
from os import cpu_count
|
| 7 |
+
|
| 8 |
+
from ..auto import tqdm as tqdm_auto
|
| 9 |
+
from ..std import TqdmWarning
|
| 10 |
+
|
| 11 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 12 |
+
__all__ = ['thread_map', 'process_map']
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@contextmanager
|
| 16 |
+
def ensure_lock(tqdm_class, lock_name=""):
|
| 17 |
+
"""get (create if necessary) and then restore `tqdm_class`'s lock"""
|
| 18 |
+
old_lock = getattr(tqdm_class, '_lock', None) # don't create a new lock
|
| 19 |
+
lock = old_lock or tqdm_class.get_lock() # maybe create a new lock
|
| 20 |
+
lock = getattr(lock, lock_name, lock) # maybe subtype
|
| 21 |
+
tqdm_class.set_lock(lock)
|
| 22 |
+
yield lock
|
| 23 |
+
if old_lock is None:
|
| 24 |
+
del tqdm_class._lock
|
| 25 |
+
else:
|
| 26 |
+
tqdm_class.set_lock(old_lock)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _executor_map(PoolExecutor, fn, *iterables, **tqdm_kwargs):
|
| 30 |
+
"""
|
| 31 |
+
Implementation of `thread_map` and `process_map`.
|
| 32 |
+
|
| 33 |
+
Parameters
|
| 34 |
+
----------
|
| 35 |
+
tqdm_class : [default: tqdm.auto.tqdm].
|
| 36 |
+
max_workers : [default: min(32, cpu_count() + 4)].
|
| 37 |
+
chunksize : [default: 1].
|
| 38 |
+
lock_name : [default: "":str].
|
| 39 |
+
"""
|
| 40 |
+
kwargs = tqdm_kwargs.copy()
|
| 41 |
+
if "total" not in kwargs:
|
| 42 |
+
kwargs["total"] = length_hint(iterables[0])
|
| 43 |
+
tqdm_class = kwargs.pop("tqdm_class", tqdm_auto)
|
| 44 |
+
max_workers = kwargs.pop("max_workers", min(32, cpu_count() + 4))
|
| 45 |
+
chunksize = kwargs.pop("chunksize", 1)
|
| 46 |
+
lock_name = kwargs.pop("lock_name", "")
|
| 47 |
+
with ensure_lock(tqdm_class, lock_name=lock_name) as lk:
|
| 48 |
+
# share lock in case workers are already using `tqdm`
|
| 49 |
+
with PoolExecutor(max_workers=max_workers, initializer=tqdm_class.set_lock,
|
| 50 |
+
initargs=(lk,)) as ex:
|
| 51 |
+
return list(tqdm_class(ex.map(fn, *iterables, chunksize=chunksize), **kwargs))
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def thread_map(fn, *iterables, **tqdm_kwargs):
|
| 55 |
+
"""
|
| 56 |
+
Equivalent of `list(map(fn, *iterables))`
|
| 57 |
+
driven by `concurrent.futures.ThreadPoolExecutor`.
|
| 58 |
+
|
| 59 |
+
Parameters
|
| 60 |
+
----------
|
| 61 |
+
tqdm_class : optional
|
| 62 |
+
`tqdm` class to use for bars [default: tqdm.auto.tqdm].
|
| 63 |
+
max_workers : int, optional
|
| 64 |
+
Maximum number of workers to spawn; passed to
|
| 65 |
+
`concurrent.futures.ThreadPoolExecutor.__init__`.
|
| 66 |
+
[default: max(32, cpu_count() + 4)].
|
| 67 |
+
"""
|
| 68 |
+
from concurrent.futures import ThreadPoolExecutor
|
| 69 |
+
return _executor_map(ThreadPoolExecutor, fn, *iterables, **tqdm_kwargs)
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def process_map(fn, *iterables, **tqdm_kwargs):
|
| 73 |
+
"""
|
| 74 |
+
Equivalent of `list(map(fn, *iterables))`
|
| 75 |
+
driven by `concurrent.futures.ProcessPoolExecutor`.
|
| 76 |
+
|
| 77 |
+
Parameters
|
| 78 |
+
----------
|
| 79 |
+
tqdm_class : optional
|
| 80 |
+
`tqdm` class to use for bars [default: tqdm.auto.tqdm].
|
| 81 |
+
max_workers : int, optional
|
| 82 |
+
Maximum number of workers to spawn; passed to
|
| 83 |
+
`concurrent.futures.ProcessPoolExecutor.__init__`.
|
| 84 |
+
[default: min(32, cpu_count() + 4)].
|
| 85 |
+
chunksize : int, optional
|
| 86 |
+
Size of chunks sent to worker processes; passed to
|
| 87 |
+
`concurrent.futures.ProcessPoolExecutor.map`. [default: 1].
|
| 88 |
+
lock_name : str, optional
|
| 89 |
+
Member of `tqdm_class.get_lock()` to use [default: mp_lock].
|
| 90 |
+
"""
|
| 91 |
+
from concurrent.futures import ProcessPoolExecutor
|
| 92 |
+
if iterables and "chunksize" not in tqdm_kwargs:
|
| 93 |
+
# default `chunksize=1` has poor performance for large iterables
|
| 94 |
+
# (most time spent dispatching items to workers).
|
| 95 |
+
longest_iterable_len = max(map(length_hint, iterables))
|
| 96 |
+
if longest_iterable_len > 1000:
|
| 97 |
+
from warnings import warn
|
| 98 |
+
warn("Iterable length %d > 1000 but `chunksize` is not set."
|
| 99 |
+
" This may seriously degrade multiprocess performance."
|
| 100 |
+
" Set `chunksize=1` or more." % longest_iterable_len,
|
| 101 |
+
TqdmWarning, stacklevel=2)
|
| 102 |
+
if "lock_name" not in tqdm_kwargs:
|
| 103 |
+
tqdm_kwargs = tqdm_kwargs.copy()
|
| 104 |
+
tqdm_kwargs["lock_name"] = "mp_lock"
|
| 105 |
+
return _executor_map(ProcessPoolExecutor, fn, *iterables, **tqdm_kwargs)
|
vllm/lib/python3.10/site-packages/tqdm/contrib/discord.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Sends updates to a Discord bot.
|
| 3 |
+
|
| 4 |
+
Usage:
|
| 5 |
+
>>> from tqdm.contrib.discord import tqdm, trange
|
| 6 |
+
>>> for i in trange(10, token='{token}', channel_id='{channel_id}'):
|
| 7 |
+
... ...
|
| 8 |
+
|
| 9 |
+

|
| 10 |
+
"""
|
| 11 |
+
from os import getenv
|
| 12 |
+
from warnings import warn
|
| 13 |
+
|
| 14 |
+
from requests import Session
|
| 15 |
+
from requests.utils import default_user_agent
|
| 16 |
+
|
| 17 |
+
from ..auto import tqdm as tqdm_auto
|
| 18 |
+
from ..std import TqdmWarning
|
| 19 |
+
from ..version import __version__
|
| 20 |
+
from .utils_worker import MonoWorker
|
| 21 |
+
|
| 22 |
+
__author__ = {"github.com/": ["casperdcl", "guigoruiz1"]}
|
| 23 |
+
__all__ = ['DiscordIO', 'tqdm_discord', 'tdrange', 'tqdm', 'trange']
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class DiscordIO(MonoWorker):
    """Non-blocking file-like IO using a Discord Bot."""
    API = "https://discord.com/api/v10"
    UA = f"tqdm (https://tqdm.github.io, {__version__}) {default_user_agent()}"

    def __init__(self, token, channel_id):
        """Creates a new message in the given `channel_id`."""
        super().__init__()
        self.token = token
        self.channel_id = channel_id
        self.session = Session()
        self.text = self.__class__.__name__
        self.message_id  # property access eagerly posts the initial message

    def _headers(self):
        """Auth + user-agent headers shared by every API request."""
        return {'Authorization': f'Bot {self.token}', 'User-Agent': self.UA}

    @property
    def message_id(self):
        """Lazily-created ID of the Discord message this bar updates.

        Returns `None` (without caching) when creation failed or was
        rate-limited, so a later access retries.
        """
        try:
            return self._message_id
        except AttributeError:
            pass
        try:
            res = self.session.post(
                f'{self.API}/channels/{self.channel_id}/messages',
                headers=self._headers(),
                json={'content': f"`{self.text}`"}).json()
        except Exception as e:
            tqdm_auto.write(str(e))
            return None
        if res.get('error_code') == 429:
            warn("Creation rate limit: try increasing `mininterval`.",
                 TqdmWarning, stacklevel=2)
            return None
        self._message_id = res['id']
        return self._message_id

    def write(self, s):
        """Replaces internal `message_id`'s text with `s`."""
        if not s:
            s = "..."
        s = s.replace('\r', '').strip()
        if s == self.text:
            return  # avoid duplicate message Bot error
        mid = self.message_id
        if mid is None:
            return
        self.text = s
        try:
            future = self.submit(
                self.session.patch,
                f'{self.API}/channels/{self.channel_id}/messages/{mid}',
                headers=self._headers(),
                json={'content': f"`{self.text}`"})
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future

    def delete(self):
        """Deletes internal `message_id`."""
        try:
            future = self.submit(
                self.session.delete,
                f'{self.API}/channels/{self.channel_id}/messages/{self.message_id}',
                headers=self._headers())
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class tqdm_discord(tqdm_auto):
    """
    Standard `tqdm.auto.tqdm` but also sends updates to a Discord Bot.
    May take a few seconds to create (`__init__`).

    - create a discord bot (not public, no requirement of OAuth2 code
      grant, only send message permissions) & invite it to a channel:
      <https://discordpy.readthedocs.io/en/latest/discord.html>
    - copy the bot `{token}` & `{channel_id}` and paste below

    >>> from tqdm.contrib.discord import tqdm, trange
    >>> for i in tqdm(iterable, token='{token}', channel_id='{channel_id}'):
    ...     ...
    """
    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        token  : str, required. Discord bot token
            [default: ${TQDM_DISCORD_TOKEN}].
        channel_id  : int, required. Discord channel ID
            [default: ${TQDM_DISCORD_CHANNEL_ID}].

        See `tqdm.auto.tqdm.__init__` for other parameters.
        """
        if not kwargs.get('disable'):
            kwargs = dict(kwargs)  # don't mutate the caller's dict
            token = kwargs.pop('token', getenv('TQDM_DISCORD_TOKEN'))
            channel = kwargs.pop('channel_id', getenv('TQDM_DISCORD_CHANNEL_ID'))
            self.dio = DiscordIO(token, channel)
        super().__init__(*args, **kwargs)

    def display(self, **kwargs):
        """Renders locally, then mirrors the meter text to Discord."""
        super().display(**kwargs)
        fmt = self.format_dict
        bar_fmt = fmt.get('bar_format', None)
        if bar_fmt:
            # force the 10-char unicode bar variant for Discord
            fmt['bar_format'] = bar_fmt.replace(
                '<bar/>', '{bar:10u}').replace('{bar}', '{bar:10u}')
        else:
            fmt['bar_format'] = '{l_bar}{bar:10u}{r_bar}'
        self.dio.write(self.format_meter(**fmt))

    def clear(self, *args, **kwargs):
        """Clears the local bar and blanks the Discord message."""
        super().clear(*args, **kwargs)
        if not self.disable:
            self.dio.write("")

    def close(self):
        """Closes the bar; deletes the Discord message unless leaving it."""
        if self.disable:
            return
        super().close()
        keep = self.leave or (self.leave is None and self.pos == 0)
        if not keep:
            self.dio.delete()
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def tdrange(*args, **kwargs):
    """Shortcut for `tqdm.contrib.discord.tqdm(range(*args), **kwargs)`."""
    rng = range(*args)
    return tqdm_discord(rng, **kwargs)


# Aliases
tqdm = tqdm_discord
trange = tdrange
|
vllm/lib/python3.10/site-packages/tqdm/contrib/itertools.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Thin wrappers around `itertools`.
|
| 3 |
+
"""
|
| 4 |
+
import itertools
|
| 5 |
+
|
| 6 |
+
from ..auto import tqdm as tqdm_auto
|
| 7 |
+
|
| 8 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 9 |
+
__all__ = ['product']
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def product(*iterables, **tqdm_kwargs):
    """
    Equivalent of `itertools.product`, wrapped in a progress bar.

    Parameters
    ----------
    iterables  : the sequences to take the Cartesian product of.
    tqdm_class  : [default: tqdm.auto.tqdm].

    Yields
    ------
    tuple  : each combination, exactly as `itertools.product` would.
    """
    from math import prod  # local import: keeps module-level deps unchanged
    kwargs = tqdm_kwargs.copy()
    tqdm_class = kwargs.pop("tqdm_class", tqdm_auto)
    try:
        # total is the product of the input lengths; unsized inputs
        # (e.g. generators) raise TypeError and leave total unknown
        lens = list(map(len, iterables))
    except TypeError:
        total = None
    else:
        total = prod(lens)
    kwargs.setdefault("total", total)
    with tqdm_class(**kwargs) as t:
        it = itertools.product(*iterables)
        for i in it:
            yield i
            t.update()
|
vllm/lib/python3.10/site-packages/tqdm/contrib/slack.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Sends updates to a Slack app.
|
| 3 |
+
|
| 4 |
+
Usage:
|
| 5 |
+
>>> from tqdm.contrib.slack import tqdm, trange
|
| 6 |
+
>>> for i in trange(10, token='{token}', channel='{channel}'):
|
| 7 |
+
... ...
|
| 8 |
+
|
| 9 |
+

|
| 10 |
+
"""
|
| 11 |
+
import logging
|
| 12 |
+
from os import getenv
|
| 13 |
+
|
| 14 |
+
try:
|
| 15 |
+
from slack_sdk import WebClient
|
| 16 |
+
except ImportError:
|
| 17 |
+
raise ImportError("Please `pip install slack-sdk`")
|
| 18 |
+
|
| 19 |
+
from ..auto import tqdm as tqdm_auto
|
| 20 |
+
from .utils_worker import MonoWorker
|
| 21 |
+
|
| 22 |
+
__author__ = {"github.com/": ["0x2b3bfa0", "casperdcl"]}
|
| 23 |
+
__all__ = ['SlackIO', 'tqdm_slack', 'tsrange', 'tqdm', 'trange']
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class SlackIO(MonoWorker):
    """Non-blocking file-like IO using a Slack app."""
    def __init__(self, token, channel):
        """Creates a new message in the given `channel`."""
        super().__init__()
        self.client = WebClient(token=token)
        self.text = self.__class__.__name__
        try:
            self.message = self.client.chat_postMessage(channel=channel, text=self.text)
        except Exception as e:
            # creation failed: remember there's nothing to update
            tqdm_auto.write(str(e))
            self.message = None

    def write(self, s):
        """Replaces internal `message`'s text with `s`."""
        if not s:
            s = "..."
        s = s.replace('\r', '').strip()
        if s == self.text:
            return  # skip duplicate message
        msg = self.message
        if msg is None:
            return
        self.text = s
        try:
            future = self.submit(self.client.chat_update, channel=msg['channel'],
                                 ts=msg['ts'], text='`' + s + '`')
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class tqdm_slack(tqdm_auto):
    """
    Standard `tqdm.auto.tqdm` but also sends updates to a Slack app.
    May take a few seconds to create (`__init__`).

    - create a Slack app with the `chat:write` scope & invite it to a
      channel: <https://api.slack.com/authentication/basics>
    - copy the bot `{token}` & `{channel}` and paste below
    >>> from tqdm.contrib.slack import tqdm, trange
    >>> for i in tqdm(iterable, token='{token}', channel='{channel}'):
    ...     ...
    """
    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        token  : str, required. Slack token
            [default: ${TQDM_SLACK_TOKEN}].
        channel  : int, required. Slack channel
            [default: ${TQDM_SLACK_CHANNEL}].
        mininterval  : float, optional.
          Minimum of [default: 1.5] to avoid rate limit.

        See `tqdm.auto.tqdm.__init__` for other parameters.
        """
        if not kwargs.get('disable'):
            kwargs = dict(kwargs)  # don't mutate the caller's dict
            # silence the Slack SDK's HTTP logging
            logging.getLogger("HTTPClient").setLevel(logging.WARNING)
            token = kwargs.pop('token', getenv("TQDM_SLACK_TOKEN"))
            channel = kwargs.pop('channel', getenv("TQDM_SLACK_CHANNEL"))
            self.sio = SlackIO(token, channel)
            # never refresh faster than every 1.5s (Slack rate limit)
            kwargs['mininterval'] = max(1.5, kwargs.get('mininterval', 1.5))
        super().__init__(*args, **kwargs)

    def display(self, **kwargs):
        """Renders locally, then mirrors the meter text to Slack."""
        super().display(**kwargs)
        fmt = self.format_dict
        bar_fmt = fmt.get('bar_format', None)
        if bar_fmt:
            fmt['bar_format'] = bar_fmt.replace(
                '<bar/>', '`{bar:10}`').replace('{bar}', '`{bar:10u}`')
        else:
            fmt['bar_format'] = '{l_bar}`{bar:10}`{r_bar}'
        if fmt['ascii'] is False:
            # Slack renders these emoji better than unicode block chars
            fmt['ascii'] = [":black_square:", ":small_blue_diamond:",
                            ":large_blue_diamond:", ":large_blue_square:"]
        fmt['ncols'] = 336
        self.sio.write(self.format_meter(**fmt))

    def clear(self, *args, **kwargs):
        """Clears the local bar and blanks the Slack message."""
        super().clear(*args, **kwargs)
        if not self.disable:
            self.sio.write("")
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def tsrange(*args, **kwargs):
    """Shortcut for `tqdm.contrib.slack.tqdm(range(*args), **kwargs)`."""
    rng = range(*args)
    return tqdm_slack(rng, **kwargs)


# Aliases
tqdm = tqdm_slack
trange = tsrange
|
vllm/lib/python3.10/site-packages/tqdm/contrib/telegram.py
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Sends updates to a Telegram bot.
|
| 3 |
+
|
| 4 |
+
Usage:
|
| 5 |
+
>>> from tqdm.contrib.telegram import tqdm, trange
|
| 6 |
+
>>> for i in trange(10, token='{token}', chat_id='{chat_id}'):
|
| 7 |
+
... ...
|
| 8 |
+
|
| 9 |
+

|
| 10 |
+
"""
|
| 11 |
+
from os import getenv
|
| 12 |
+
from warnings import warn
|
| 13 |
+
|
| 14 |
+
from requests import Session
|
| 15 |
+
|
| 16 |
+
from ..auto import tqdm as tqdm_auto
|
| 17 |
+
from ..std import TqdmWarning
|
| 18 |
+
from .utils_worker import MonoWorker
|
| 19 |
+
|
| 20 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 21 |
+
__all__ = ['TelegramIO', 'tqdm_telegram', 'ttgrange', 'tqdm', 'trange']
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class TelegramIO(MonoWorker):
    """Non-blocking file-like IO using a Telegram Bot."""
    API = 'https://api.telegram.org/bot'

    def __init__(self, token, chat_id):
        """Creates a new message in the given `chat_id`."""
        super().__init__()
        self.token = token
        self.chat_id = chat_id
        self.session = Session()
        self.text = self.__class__.__name__
        self.message_id  # property access eagerly posts the initial message

    def _method_url(self, method):
        """Full bot-API URL for `method` (e.g. 'sendMessage')."""
        return self.API + '%s/%s' % (self.token, method)

    @property
    def message_id(self):
        """Lazily-created ID of the Telegram message this bar updates.

        Returns `None` (without caching) when creation failed or was
        rate-limited, so a later access retries.
        """
        try:
            return self._message_id
        except AttributeError:
            pass
        try:
            res = self.session.post(
                self._method_url('sendMessage'),
                data={'text': '`' + self.text + '`', 'chat_id': self.chat_id,
                      'parse_mode': 'MarkdownV2'}).json()
        except Exception as e:
            tqdm_auto.write(str(e))
            return None
        if res.get('error_code') == 429:
            warn("Creation rate limit: try increasing `mininterval`.",
                 TqdmWarning, stacklevel=2)
            return None
        self._message_id = res['result']['message_id']
        return self._message_id

    def write(self, s):
        """Replaces internal `message_id`'s text with `s`."""
        if not s:
            s = "..."
        s = s.replace('\r', '').strip()
        if s == self.text:
            return  # avoid duplicate message Bot error
        mid = self.message_id
        if mid is None:
            return
        self.text = s
        try:
            future = self.submit(
                self.session.post, self._method_url('editMessageText'),
                data={'text': '`' + s + '`', 'chat_id': self.chat_id,
                      'message_id': mid, 'parse_mode': 'MarkdownV2'})
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future

    def delete(self):
        """Deletes internal `message_id`."""
        try:
            future = self.submit(
                self.session.post, self._method_url('deleteMessage'),
                data={'chat_id': self.chat_id, 'message_id': self.message_id})
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            return future
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
class tqdm_telegram(tqdm_auto):
    """
    Standard `tqdm.auto.tqdm` but also sends updates to a Telegram Bot.
    May take a few seconds to create (`__init__`).

    - create a bot <https://core.telegram.org/bots#6-botfather>
    - copy its `{token}`
    - add the bot to a chat and send it a message such as `/start`
    - go to <https://api.telegram.org/bot`{token}`/getUpdates> to find out
      the `{chat_id}`
    - paste the `{token}` & `{chat_id}` below

    >>> from tqdm.contrib.telegram import tqdm, trange
    >>> for i in tqdm(iterable, token='{token}', chat_id='{chat_id}'):
    ...     ...
    """
    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        token  : str, required. Telegram token
            [default: ${TQDM_TELEGRAM_TOKEN}].
        chat_id  : str, required. Telegram chat ID
            [default: ${TQDM_TELEGRAM_CHAT_ID}].

        See `tqdm.auto.tqdm.__init__` for other parameters.
        """
        if not kwargs.get('disable'):
            kwargs = dict(kwargs)  # don't mutate the caller's dict
            token = kwargs.pop('token', getenv('TQDM_TELEGRAM_TOKEN'))
            chat_id = kwargs.pop('chat_id', getenv('TQDM_TELEGRAM_CHAT_ID'))
            self.tgio = TelegramIO(token, chat_id)
        super().__init__(*args, **kwargs)

    def display(self, **kwargs):
        """Renders locally, then mirrors the meter text to Telegram."""
        super().display(**kwargs)
        fmt = self.format_dict
        bar_fmt = fmt.get('bar_format', None)
        if bar_fmt:
            # force the 10-char unicode bar variant for Telegram
            fmt['bar_format'] = bar_fmt.replace(
                '<bar/>', '{bar:10u}').replace('{bar}', '{bar:10u}')
        else:
            fmt['bar_format'] = '{l_bar}{bar:10u}{r_bar}'
        self.tgio.write(self.format_meter(**fmt))

    def clear(self, *args, **kwargs):
        """Clears the local bar and blanks the Telegram message."""
        super().clear(*args, **kwargs)
        if not self.disable:
            self.tgio.write("")

    def close(self):
        """Closes the bar; deletes the Telegram message unless leaving it."""
        if self.disable:
            return
        super().close()
        keep = self.leave or (self.leave is None and self.pos == 0)
        if not keep:
            self.tgio.delete()
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def ttgrange(*args, **kwargs):
    """Shortcut for `tqdm.contrib.telegram.tqdm(range(*args), **kwargs)`."""
    rng = range(*args)
    return tqdm_telegram(rng, **kwargs)


# Aliases
tqdm = tqdm_telegram
trange = ttgrange
|
vllm/lib/python3.10/site-packages/tqdm/contrib/utils_worker.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
IO/concurrency helpers for `tqdm.contrib`.
|
| 3 |
+
"""
|
| 4 |
+
from collections import deque
|
| 5 |
+
from concurrent.futures import ThreadPoolExecutor
|
| 6 |
+
|
| 7 |
+
from ..auto import tqdm as tqdm_auto
|
| 8 |
+
|
| 9 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 10 |
+
__all__ = ['MonoWorker']
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class MonoWorker(object):
    """
    Supports one running task and one waiting task.
    The waiting task is the most recent submitted (others are discarded).
    """
    def __init__(self):
        # single-thread pool => at most one task executes at a time
        self.pool = ThreadPoolExecutor(max_workers=1)
        # holds at most two futures: [running, waiting]
        self.futures = deque([], 2)

    def submit(self, func, *args, **kwargs):
        """`func(*args, **kwargs)` may replace currently waiting task."""
        queue = self.futures
        if len(queue) == queue.maxlen:
            head = queue.popleft()
            if not head.done():
                # head is still running: drop the stale waiting task
                if queue:
                    queue.pop().cancel()
                queue.appendleft(head)  # re-insert the running task
        try:
            fut = self.pool.submit(func, *args, **kwargs)
        except Exception as e:
            tqdm_auto.write(str(e))
        else:
            queue.append(fut)
            return fut
|
vllm/lib/python3.10/site-packages/tqdm/dask.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from functools import partial
|
| 2 |
+
|
| 3 |
+
from dask.callbacks import Callback
|
| 4 |
+
|
| 5 |
+
from .auto import tqdm as tqdm_auto
|
| 6 |
+
|
| 7 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 8 |
+
__all__ = ['TqdmCallback']
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class TqdmCallback(Callback):
    """Dask callback for task progress."""
    def __init__(self, start=None, pretask=None, tqdm_class=tqdm_auto,
                 **tqdm_kwargs):
        """
        Parameters
        ----------
        tqdm_class  : optional
            `tqdm` class to use for bars [default: `tqdm.auto.tqdm`].
        tqdm_kwargs  : optional
            Any other arguments used for all bars.
        """
        super().__init__(start=start, pretask=pretask)
        if tqdm_kwargs:
            tqdm_class = partial(tqdm_class, **tqdm_kwargs)
        self.tqdm_class = tqdm_class

    def _start_state(self, _, state):
        # one bar sized by every task dask currently tracks
        self.pbar = self.tqdm_class(total=sum(
            len(state[k]) for k in ['ready', 'waiting', 'running', 'finished']))

    def _posttask(self, *_, **__):
        self.pbar.update()

    def _finish(self, *_, **__):
        self.pbar.close()

    def display(self):
        """Displays in the current cell in Notebooks."""
        # BUG FIX: the bar is stored as `self.pbar` (see `_start_state`);
        # the previous `self.bar` always raised AttributeError here.
        container = getattr(self.pbar, 'container', None)
        if container is None:
            return
        from .notebook import display
        display(container)
|
vllm/lib/python3.10/site-packages/tqdm/notebook.py
ADDED
|
@@ -0,0 +1,317 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
IPython/Jupyter Notebook progressbar decorator for iterators.
|
| 3 |
+
Includes a default `range` iterator printing to `stderr`.
|
| 4 |
+
|
| 5 |
+
Usage:
|
| 6 |
+
>>> from tqdm.notebook import trange, tqdm
|
| 7 |
+
>>> for i in trange(10):
|
| 8 |
+
... ...
|
| 9 |
+
"""
|
| 10 |
+
# import compatibility functions and utilities
|
| 11 |
+
import re
|
| 12 |
+
import sys
|
| 13 |
+
from html import escape
|
| 14 |
+
from weakref import proxy
|
| 15 |
+
|
| 16 |
+
# to inherit from the tqdm class
|
| 17 |
+
from .std import tqdm as std_tqdm
|
| 18 |
+
|
| 19 |
+
if True: # pragma: no cover
|
| 20 |
+
# import IPython/Jupyter base widget and display utilities
|
| 21 |
+
IPY = 0
|
| 22 |
+
try: # IPython 4.x
|
| 23 |
+
import ipywidgets
|
| 24 |
+
IPY = 4
|
| 25 |
+
except ImportError: # IPython 3.x / 2.x
|
| 26 |
+
IPY = 32
|
| 27 |
+
import warnings
|
| 28 |
+
with warnings.catch_warnings():
|
| 29 |
+
warnings.filterwarnings(
|
| 30 |
+
'ignore', message=".*The `IPython.html` package has been deprecated.*")
|
| 31 |
+
try:
|
| 32 |
+
import IPython.html.widgets as ipywidgets # NOQA: F401
|
| 33 |
+
except ImportError:
|
| 34 |
+
pass
|
| 35 |
+
|
| 36 |
+
try: # IPython 4.x / 3.x
|
| 37 |
+
if IPY == 32:
|
| 38 |
+
from IPython.html.widgets import HTML
|
| 39 |
+
from IPython.html.widgets import FloatProgress as IProgress
|
| 40 |
+
from IPython.html.widgets import HBox
|
| 41 |
+
IPY = 3
|
| 42 |
+
else:
|
| 43 |
+
from ipywidgets import HTML
|
| 44 |
+
from ipywidgets import FloatProgress as IProgress
|
| 45 |
+
from ipywidgets import HBox
|
| 46 |
+
except ImportError:
|
| 47 |
+
try: # IPython 2.x
|
| 48 |
+
from IPython.html.widgets import HTML
|
| 49 |
+
from IPython.html.widgets import ContainerWidget as HBox
|
| 50 |
+
from IPython.html.widgets import FloatProgressWidget as IProgress
|
| 51 |
+
IPY = 2
|
| 52 |
+
except ImportError:
|
| 53 |
+
IPY = 0
|
| 54 |
+
IProgress = None
|
| 55 |
+
HBox = object
|
| 56 |
+
|
| 57 |
+
try:
|
| 58 |
+
from IPython.display import display # , clear_output
|
| 59 |
+
except ImportError:
|
| 60 |
+
pass
|
| 61 |
+
|
| 62 |
+
__author__ = {"github.com/": ["lrq3000", "casperdcl", "alexanderkuk"]}
|
| 63 |
+
__all__ = ['tqdm_notebook', 'tnrange', 'tqdm', 'trange']
|
| 64 |
+
WARN_NOIPYW = ("IProgress not found. Please update jupyter and ipywidgets."
|
| 65 |
+
" See https://ipywidgets.readthedocs.io/en/stable"
|
| 66 |
+
"/user_install.html")
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class TqdmHBox(HBox):
    """`ipywidgets.HBox` with a pretty representation"""
    def _json_(self, pretty=None):
        """Format-dict of the attached bar (empty dict if none)."""
        bar = getattr(self, 'pbar', None)
        if bar is None:
            return {}
        fmt = bar.format_dict
        if pretty is not None:
            fmt["ascii"] = not pretty
        return fmt

    def __repr__(self, pretty=False):
        """Plain-text meter of the attached bar (or the default repr)."""
        bar = getattr(self, 'pbar', None)
        if bar is None:
            return super().__repr__()
        return bar.format_meter(**self._json_(pretty))

    def _repr_pretty_(self, pp, *_, **__):
        pp.text(self.__repr__(True))
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
class tqdm_notebook(std_tqdm):
|
| 91 |
+
"""
|
| 92 |
+
Experimental IPython/Jupyter Notebook widget using tqdm!
|
| 93 |
+
"""
|
| 94 |
+
@staticmethod
|
| 95 |
+
def status_printer(_, total=None, desc=None, ncols=None):
|
| 96 |
+
"""
|
| 97 |
+
Manage the printing of an IPython/Jupyter Notebook progress bar widget.
|
| 98 |
+
"""
|
| 99 |
+
# Fallback to text bar if there's no total
|
| 100 |
+
# DEPRECATED: replaced with an 'info' style bar
|
| 101 |
+
# if not total:
|
| 102 |
+
# return super(tqdm_notebook, tqdm_notebook).status_printer(file)
|
| 103 |
+
|
| 104 |
+
# fp = file
|
| 105 |
+
|
| 106 |
+
# Prepare IPython progress bar
|
| 107 |
+
if IProgress is None: # #187 #451 #558 #872
|
| 108 |
+
raise ImportError(WARN_NOIPYW)
|
| 109 |
+
if total:
|
| 110 |
+
pbar = IProgress(min=0, max=total)
|
| 111 |
+
else: # No total? Show info style bar with no progress tqdm status
|
| 112 |
+
pbar = IProgress(min=0, max=1)
|
| 113 |
+
pbar.value = 1
|
| 114 |
+
pbar.bar_style = 'info'
|
| 115 |
+
if ncols is None:
|
| 116 |
+
pbar.layout.width = "20px"
|
| 117 |
+
|
| 118 |
+
ltext = HTML()
|
| 119 |
+
rtext = HTML()
|
| 120 |
+
if desc:
|
| 121 |
+
ltext.value = desc
|
| 122 |
+
container = TqdmHBox(children=[ltext, pbar, rtext])
|
| 123 |
+
# Prepare layout
|
| 124 |
+
if ncols is not None: # use default style of ipywidgets
|
| 125 |
+
# ncols could be 100, "100px", "100%"
|
| 126 |
+
ncols = str(ncols) # ipywidgets only accepts string
|
| 127 |
+
try:
|
| 128 |
+
if int(ncols) > 0: # isnumeric and positive
|
| 129 |
+
ncols += 'px'
|
| 130 |
+
except ValueError:
|
| 131 |
+
pass
|
| 132 |
+
pbar.layout.flex = '2'
|
| 133 |
+
container.layout.width = ncols
|
| 134 |
+
container.layout.display = 'inline-flex'
|
| 135 |
+
container.layout.flex_flow = 'row wrap'
|
| 136 |
+
|
| 137 |
+
return container
|
| 138 |
+
|
| 139 |
+
    def display(self, msg=None, pos=None,
                # additional signals
                close=False, bar_style=None, check_delay=True):
        """Push the bar's current state into the notebook widgets.

        Parameters
        ----------
        msg  : str, optional. Pre-formatted meter text; when falsy (and
            not closing) it is regenerated from `format_dict`.
        pos  : unused in this override (kept for signature compatibility).
        close  : bool. Hide/close the widget container.
        bar_style  : str, optional. ipywidgets bar style to apply.
        check_delay  : bool. Whether to honour `delay` by displaying the
            container on the first real output.
        """
        # Note: contrary to native tqdm, msg='' does NOT clear bar
        # goal is to keep all infos if error happens so user knows
        # at which iteration the loop failed.

        # Clear previous output (really necessary?)
        # clear_output(wait=1)

        if not msg and not close:
            d = self.format_dict
            # remove {bar}: the widget renders the bar itself
            d['bar_format'] = (d['bar_format'] or "{l_bar}<bar/>{r_bar}").replace(
                "{bar}", "<bar/>")
            msg = self.format_meter(**d)

        ltext, pbar, rtext = self.container.children
        pbar.value = self.n

        if msg:
            msg = msg.replace(' ', u'\u2007')  # fix html space padding
            # html escape special characters (like '&')
            if '<bar/>' in msg:
                # split the text around the bar placeholder (left/right labels)
                left, right = map(escape, re.split(r'\|?<bar/>\|?', msg, maxsplit=1))
            else:
                left, right = '', escape(msg)

            # Update description
            ltext.value = left
            # never clear the bar (signal: msg='')
            if right:
                rtext.value = right

        # Change bar style
        if bar_style:
            # Hack-ish way to avoid the danger bar_style being overridden by
            # success because the bar gets closed after the error...
            if pbar.bar_style != 'danger' or bar_style != 'success':
                pbar.bar_style = bar_style

        # Special signal to close the bar
        if close and pbar.bar_style != 'danger':  # hide only if no error
            try:
                self.container.close()
            except AttributeError:
                self.container.visible = False
            self.container.layout.visibility = 'hidden'  # IPYW>=8

        # delayed first display: show the container once output starts
        if check_delay and self.delay > 0 and not self.displayed:
            display(self.container)
            self.displayed = True
|
| 191 |
+
|
| 192 |
+
@property
|
| 193 |
+
def colour(self):
|
| 194 |
+
if hasattr(self, 'container'):
|
| 195 |
+
return self.container.children[-2].style.bar_color
|
| 196 |
+
|
| 197 |
+
@colour.setter
|
| 198 |
+
def colour(self, bar_color):
|
| 199 |
+
if hasattr(self, 'container'):
|
| 200 |
+
self.container.children[-2].style.bar_color = bar_color
|
| 201 |
+
|
| 202 |
+
    def __init__(self, *args, **kwargs):
        """
        Supports the usual `tqdm.tqdm` parameters as well as those listed below.

        Parameters
        ----------
        display  : Whether to call `display(self.container)` immediately
            [default: True].
        """
        kwargs = kwargs.copy()
        # Setup default output: notebook output goes to stdout rather than
        # the default stderr
        file_kwarg = kwargs.get('file', sys.stderr)
        if file_kwarg is sys.stderr or file_kwarg is None:
            kwargs['file'] = sys.stdout  # avoid the red block in IPython
        # Initialize parent class + avoid printing by using gui=True
        kwargs['gui'] = True
        # convert disable = None to False
        kwargs['disable'] = bool(kwargs.get('disable', False))
        # pop subclass-specific kwargs before handing the rest to std tqdm
        colour = kwargs.pop('colour', None)
        display_here = kwargs.pop('display', True)
        super().__init__(*args, **kwargs)
        # NOTE: kwargs['gui'] was forced True above, so this reduces to
        # `if self.disable`; the extra check is kept for safety
        if self.disable or not kwargs['gui']:
            self.disp = lambda *_, **__: None  # no-op display
            return

        # Get bar width
        self.ncols = '100%' if self.dynamic_ncols else kwargs.get("ncols", None)

        # Replace with IPython progress bar display (with correct total)
        unit_scale = 1 if self.unit_scale is True else self.unit_scale or 1
        total = self.total * unit_scale if self.total else self.total
        self.container = self.status_printer(self.fp, total, self.desc, self.ncols)
        # weak proxy: avoid a reference cycle between the widget and the bar
        self.container.pbar = proxy(self)
        self.displayed = False
        # honour `delay`: only show the widget now if no delay was requested
        if display_here and self.delay <= 0:
            display(self.container)
            self.displayed = True
        self.disp = self.display
        self.colour = colour

        # Print initial bar state
        if not self.disable:
            self.display(check_delay=False)
|
| 246 |
+
|
| 247 |
+
    def __iter__(self):
        """Iterate like std `tqdm`, painting the bar red on any failure."""
        try:
            it = super().__iter__()
            for obj in it:
                # return super(tqdm...) will not catch exception
                yield obj
        # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
        except:  # NOQA
            self.disp(bar_style='danger')
            raise
        # NB: don't `finally: close()`
        # since this could be a shared bar which the user will `reset()`
|
| 259 |
+
|
| 260 |
+
    def update(self, n=1):
        """Same contract as `tqdm.update`; flags the widget red on error."""
        try:
            return super().update(n=n)
        # NB: except ... [ as ...] breaks IPython async KeyboardInterrupt
        except:  # NOQA
            # cannot catch KeyboardInterrupt when using manual tqdm
            # as the interrupt will most likely happen on another statement
            self.disp(bar_style='danger')
            raise
        # NB: don't `finally: close()`
        # since this could be a shared bar which the user will `reset()`
|
| 271 |
+
|
| 272 |
+
    def close(self):
        """Finalise the widget: green on success, red on apparent failure,
        hidden when `leave=False`."""
        if self.disable:
            return
        super().close()
        # Try to detect if there was an error or KeyboardInterrupt
        # in manual mode: if n < total, things probably got wrong
        if self.total and self.n < self.total:
            self.disp(bar_style='danger', check_delay=False)
        else:
            if self.leave:
                self.disp(bar_style='success', check_delay=False)
            else:
                # special close signal: hide the container entirely
                self.disp(close=True, check_delay=False)
|
| 285 |
+
|
| 286 |
+
def clear(self, *_, **__):
|
| 287 |
+
pass
|
| 288 |
+
|
| 289 |
+
    def reset(self, total=None):
        """
        Resets to 0 iterations for repeated use.

        Consider combining with `leave=True`.

        Parameters
        ----------
        total  : int or float, optional. Total to use for the new bar.
        """
        if self.disable:
            return super().reset(total=total)
        _, pbar, _ = self.container.children
        # drop any success/danger styling left over from a previous run
        pbar.bar_style = ''
        if total is not None:
            pbar.max = total
            if not self.total and self.ncols is None:  # no longer unknown total
                pbar.layout.width = None  # reset width
        return super().reset(total=total)
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def tnrange(*args, **kwargs):
    """Convenience wrapper: `tqdm.notebook.tqdm(range(*args), **kwargs)`."""
    rng = range(*args)
    return tqdm_notebook(rng, **kwargs)
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
# Aliases
tqdm = tqdm_notebook  # `tqdm.notebook.tqdm` mirrors the top-level `tqdm` name
trange = tnrange  # `tqdm.notebook.trange` mirrors the top-level `trange` name
|
vllm/lib/python3.10/site-packages/tqdm/rich.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
`rich.progress` decorator for iterators.
|
| 3 |
+
|
| 4 |
+
Usage:
|
| 5 |
+
>>> from tqdm.rich import trange, tqdm
|
| 6 |
+
>>> for i in trange(10):
|
| 7 |
+
... ...
|
| 8 |
+
"""
|
| 9 |
+
from warnings import warn
|
| 10 |
+
|
| 11 |
+
from rich.progress import (
|
| 12 |
+
BarColumn, Progress, ProgressColumn, Text, TimeElapsedColumn, TimeRemainingColumn, filesize)
|
| 13 |
+
|
| 14 |
+
from .std import TqdmExperimentalWarning
|
| 15 |
+
from .std import tqdm as std_tqdm
|
| 16 |
+
|
| 17 |
+
__author__ = {"github.com/": ["casperdcl"]}
|
| 18 |
+
__all__ = ['tqdm_rich', 'trrange', 'tqdm', 'trange']
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class FractionColumn(ProgressColumn):
    """Renders completed/total, e.g. '0.5/2.3 G'."""
    def __init__(self, unit_scale=False, unit_divisor=1000):
        self.unit_scale = unit_scale
        self.unit_divisor = unit_divisor
        super().__init__()

    def render(self, task):
        """Calculate common unit for completed and total."""
        done, total = int(task.completed), int(task.total)
        if self.unit_scale:
            # SI/binary scaling: pick the unit from the *total* so both
            # numbers share the same suffix
            unit, suffix = filesize.pick_unit_and_suffix(
                total,
                ["", "K", "M", "G", "T", "P", "E", "Z", "Y"],
                self.unit_divisor,
            )
        else:
            unit, suffix = filesize.pick_unit_and_suffix(total, [""], 1)
        digits = 0 if unit == 1 else 1
        text = f"{done/unit:,.{digits}f}/{total/unit:,.{digits}f} {suffix}"
        return Text(text, style="progress.download")
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class RateColumn(ProgressColumn):
    """Renders human readable transfer speed."""
    def __init__(self, unit="", unit_scale=False, unit_divisor=1000):
        self.unit = unit
        self.unit_scale = unit_scale
        self.unit_divisor = unit_divisor
        super().__init__()

    def render(self, task):
        """Show data transfer speed."""
        speed = task.speed
        if speed is None:
            # rate not measurable yet (e.g. first sample)
            return Text(f"? {self.unit}/s", style="progress.data.speed")
        if self.unit_scale:
            unit, suffix = filesize.pick_unit_and_suffix(
                speed,
                ["", "K", "M", "G", "T", "P", "E", "Z", "Y"],
                self.unit_divisor,
            )
        else:
            unit, suffix = filesize.pick_unit_and_suffix(speed, [""], 1)
        digits = 0 if unit == 1 else 1
        text = f"{speed/unit:,.{digits}f} {suffix}{self.unit}/s"
        return Text(text, style="progress.data.speed")
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class tqdm_rich(std_tqdm):  # pragma: no cover
    """Experimental rich.progress GUI version of tqdm!"""
    # TODO: @classmethod: write()?
    def __init__(self, *args, **kwargs):
        """
        This class accepts the following parameters *in addition* to
        the parameters accepted by `tqdm`.

        Parameters
        ----------
        progress  : tuple, optional
            arguments for `rich.progress.Progress()`.
        options  : dict, optional
            keyword arguments for `rich.progress.Progress()`.
        """
        kwargs = kwargs.copy()
        # gui=True suppresses the parent's terminal rendering
        kwargs['gui'] = True
        # convert disable = None to False
        kwargs['disable'] = bool(kwargs.get('disable', False))
        progress = kwargs.pop('progress', None)
        options = kwargs.pop('options', {}).copy()
        super().__init__(*args, **kwargs)

        if self.disable:
            return

        warn("rich is experimental/alpha", TqdmExperimentalWarning, stacklevel=2)
        d = self.format_dict
        if progress is None:
            # default column layout approximating the classic tqdm meter
            progress = (
                "[progress.description]{task.description}"
                "[progress.percentage]{task.percentage:>4.0f}%",
                BarColumn(bar_width=None),
                FractionColumn(
                    unit_scale=d['unit_scale'], unit_divisor=d['unit_divisor']),
                "[", TimeElapsedColumn(), "<", TimeRemainingColumn(),
                ",", RateColumn(unit=d['unit'], unit_scale=d['unit_scale'],
                                unit_divisor=d['unit_divisor']), "]"
            )
        # transient bars vanish on exit unless the user asked to `leave`
        options.setdefault('transient', not self.leave)
        self._prog = Progress(*progress, **options)
        self._prog.__enter__()  # start live rendering; undone in close()
        self._task_id = self._prog.add_task(self.desc or "", **d)

    def close(self):
        if self.disable:
            return
        self.display()  # print 100%, vis #1306
        super().close()
        self._prog.__exit__(None, None, None)

    def clear(self, *_, **__):
        # no-op: rich manages its own redraw/erase
        pass

    def display(self, *_, **__):
        # guard: __init__ may have returned early (disabled bar)
        if not hasattr(self, '_prog'):
            return
        self._prog.update(self._task_id, completed=self.n, description=self.desc)

    def reset(self, total=None):
        """
        Resets to 0 iterations for repeated use.

        Parameters
        ----------
        total  : int or float, optional. Total to use for the new bar.
        """
        if hasattr(self, '_prog'):
            self._prog.reset(total=total)
        super().reset(total=total)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def trrange(*args, **kwargs):
    """Convenience wrapper: `tqdm.rich.tqdm(range(*args), **kwargs)`."""
    rng = range(*args)
    return tqdm_rich(rng, **kwargs)
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
# Aliases
tqdm = tqdm_rich  # `tqdm.rich.tqdm` mirrors the top-level `tqdm` name
trange = trrange  # `tqdm.rich.trange` mirrors the top-level `trange` name
|
vllm/lib/python3.10/site-packages/tqdm/std.py
ADDED
|
@@ -0,0 +1,1524 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Customisable progressbar decorator for iterators.
|
| 3 |
+
Includes a default `range` iterator printing to `stderr`.
|
| 4 |
+
|
| 5 |
+
Usage:
|
| 6 |
+
>>> from tqdm import trange, tqdm
|
| 7 |
+
>>> for i in trange(10):
|
| 8 |
+
... ...
|
| 9 |
+
"""
|
| 10 |
+
import sys
|
| 11 |
+
from collections import OrderedDict, defaultdict
|
| 12 |
+
from contextlib import contextmanager
|
| 13 |
+
from datetime import datetime, timedelta, timezone
|
| 14 |
+
from numbers import Number
|
| 15 |
+
from time import time
|
| 16 |
+
from warnings import warn
|
| 17 |
+
from weakref import WeakSet
|
| 18 |
+
|
| 19 |
+
from ._monitor import TMonitor
|
| 20 |
+
from .utils import (
|
| 21 |
+
CallbackIOWrapper, Comparable, DisableOnWriteError, FormatReplace, SimpleTextIOWrapper,
|
| 22 |
+
_is_ascii, _screen_shape_wrapper, _supports_unicode, _term_move_up, disp_len, disp_trim,
|
| 23 |
+
envwrap)
|
| 24 |
+
|
| 25 |
+
__author__ = "https://github.com/tqdm/tqdm#contributions"
|
| 26 |
+
__all__ = ['tqdm', 'trange',
|
| 27 |
+
'TqdmTypeError', 'TqdmKeyError', 'TqdmWarning',
|
| 28 |
+
'TqdmExperimentalWarning', 'TqdmDeprecationWarning',
|
| 29 |
+
'TqdmMonitorWarning']
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class TqdmTypeError(TypeError):
    # tqdm-specific TypeError; kept distinct so callers can catch tqdm's
    # own type errors separately from generic ones
    pass
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class TqdmKeyError(KeyError):
    # tqdm-specific KeyError; kept distinct so callers can catch tqdm's
    # own key errors separately from generic ones
    pass
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class TqdmWarning(Warning):
    """base class for all tqdm warnings.

    Used for non-external-code-breaking errors, such as garbled printing.
    """
    def __init__(self, msg, fp_write=None, *a, **k):
        # With `fp_write`, the warning is written directly to that stream
        # instead of going through Python's warning machinery.
        if fp_write is None:
            super().__init__(msg, *a, **k)
        else:
            name = self.__class__.__name__
            fp_write(f"\n{name}: {str(msg).rstrip()}\n")
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class TqdmExperimentalWarning(TqdmWarning, FutureWarning):
    """beta feature, unstable API and behaviour"""
    # FutureWarning base makes it visible by default (not filtered out)
    pass
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning):
    # not suppressed if raised
    pass
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
class TqdmMonitorWarning(TqdmWarning, RuntimeWarning):
    """tqdm monitor errors which do not affect external functionality"""
    pass
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def TRLock(*args, **kwargs):
    """Return `threading.RLock(*args, **kwargs)`, or None on platforms
    where a threading lock cannot be created."""
    try:
        import threading
        return threading.RLock(*args, **kwargs)
    except (ImportError, OSError):  # pragma: no cover
        return None
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class TqdmDefaultWriteLock(object):
    """
    Provide a default write lock for thread and multiprocessing safety.
    Works only on platforms supporting `fork` (so Windows is excluded).
    You must initialise a `tqdm` or `TqdmDefaultWriteLock` instance
    before forking in order for the write lock to work.
    On Windows, you need to supply the lock from the parent to the children as
    an argument to joblib or the parallelism lib you use.
    """
    # global thread lock so no setup required for multithreading.
    # NB: Do not create multiprocessing lock as it sets the multiprocessing
    # context, disallowing `spawn()`/`forkserver()`
    th_lock = TRLock()

    def __init__(self):
        # Create global parallelism locks to avoid racing issues with parallel
        # bars works only if fork available (Linux/MacOSX, but not Windows)
        cls = type(self)
        root_lock = cls.th_lock
        # hold the class-level thread lock while lazily creating the
        # (class-level) multiprocessing lock, so concurrent __init__ is safe
        if root_lock is not None:
            root_lock.acquire()
        cls.create_mp_lock()
        # keep only the locks that could actually be created on this platform
        self.locks = [lk for lk in [cls.mp_lock, cls.th_lock] if lk is not None]
        if root_lock is not None:
            root_lock.release()

    def acquire(self, *a, **k):
        # acquire every lock, in a fixed order
        for lock in self.locks:
            lock.acquire(*a, **k)

    def release(self):
        for lock in self.locks[::-1]:  # Release in inverse order of acquisition
            lock.release()

    def __enter__(self):
        self.acquire()

    def __exit__(self, *exc):
        self.release()

    @classmethod
    def create_mp_lock(cls):
        # lazily create the shared multiprocessing lock exactly once
        if not hasattr(cls, 'mp_lock'):
            try:
                from multiprocessing import RLock
                cls.mp_lock = RLock()
            except (ImportError, OSError):  # pragma: no cover
                cls.mp_lock = None

    @classmethod
    def create_th_lock(cls):
        # kept for backward compatibility only; th_lock is now a class attr
        assert hasattr(cls, 'th_lock')
        warn("create_th_lock not needed anymore", TqdmDeprecationWarning, stacklevel=2)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class Bar(object):
    """
    `str.format`-able bar with format specifiers: `[width][type]`

    - `width`
      + unspecified (default): use `self.default_len`
      + `int >= 0`: overrides `self.default_len`
      + `int < 0`: subtract from `self.default_len`
    - `type`
      + `a`: ascii (`charset=self.ASCII` override)
      + `u`: unicode (`charset=self.UTF` override)
      + `b`: blank (`charset=" "` override)
    """
    # charsets: index 0 is "empty", index -1 is "full", intermediate indices
    # are partial-fill glyphs
    ASCII = " 123456789#"
    # U+2587..U+258F: left-partial block elements, finest to coarsest
    UTF = u" " + u''.join(map(chr, range(0x258F, 0x2587, -1)))
    BLANK = "  "
    COLOUR_RESET = '\x1b[0m'
    COLOUR_RGB = '\x1b[38;2;%d;%d;%dm'  # 24-bit ANSI foreground
    COLOURS = {'BLACK': '\x1b[30m', 'RED': '\x1b[31m', 'GREEN': '\x1b[32m',
               'YELLOW': '\x1b[33m', 'BLUE': '\x1b[34m', 'MAGENTA': '\x1b[35m',
               'CYAN': '\x1b[36m', 'WHITE': '\x1b[37m'}

    def __init__(self, frac, default_len=10, charset=UTF, colour=None):
        # frac: filled fraction in [0, 1]; out-of-range values are clamped
        if not 0 <= frac <= 1:
            warn("clamping frac to range [0, 1]", TqdmWarning, stacklevel=2)
            frac = max(0, min(1, frac))
        assert default_len > 0
        self.frac = frac
        self.default_len = default_len
        self.charset = charset
        self.colour = colour  # goes through the validating property below

    @property
    def colour(self):
        return self._colour

    @colour.setter
    def colour(self, value):
        # Accept a named colour (case-insensitive) or a '#rrggbb' hex string;
        # anything else warns and falls back to no colour.
        if not value:
            self._colour = None
            return
        try:
            if value.upper() in self.COLOURS:
                self._colour = self.COLOURS[value.upper()]
            elif value[0] == '#' and len(value) == 7:
                self._colour = self.COLOUR_RGB % tuple(
                    int(i, 16) for i in (value[1:3], value[3:5], value[5:7]))
            else:
                raise KeyError
        except (KeyError, AttributeError):
            warn("Unknown colour (%s); valid choices: [hex (#00ff00), %s]" % (
                value, ", ".join(self.COLOURS)),
                TqdmWarning, stacklevel=2)
            self._colour = None

    def __format__(self, format_spec):
        # Parse the optional `[width][type]` spec (see class docstring).
        if format_spec:
            _type = format_spec[-1].lower()
            try:
                charset = {'a': self.ASCII, 'u': self.UTF, 'b': self.BLANK}[_type]
            except KeyError:
                # last char was not a type code: whole spec is the width
                charset = self.charset
            else:
                format_spec = format_spec[:-1]
            if format_spec:
                N_BARS = int(format_spec)
                if N_BARS < 0:
                    N_BARS += self.default_len
            else:
                N_BARS = self.default_len
        else:
            charset = self.charset
            N_BARS = self.default_len

        # split the filled amount into whole glyphs + one partial glyph index
        nsyms = len(charset) - 1
        bar_length, frac_bar_length = divmod(int(self.frac * N_BARS * nsyms), nsyms)

        res = charset[-1] * bar_length
        if bar_length < N_BARS:  # whitespace padding
            res = res + charset[frac_bar_length] + charset[0] * (N_BARS - bar_length - 1)
        # wrap in ANSI colour codes only when a colour was set
        return self.colour + res + self.COLOUR_RESET if self.colour else res
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
class EMA(object):
    """
    Exponential moving average: smoothing to give progressively lower
    weights to older values.

    Parameters
    ----------
    smoothing  : float, optional
        Smoothing factor in range [0, 1], [default: 0.3].
        Increase to give more weight to recent values.
        Ranges from 0 (yields old value) to 1 (yields new value).
    """
    def __init__(self, smoothing=0.3):
        self.alpha = smoothing
        self.last = 0  # running (biased) average
        self.calls = 0  # number of samples folded in so far

    def __call__(self, x=None):
        """
        Fold `x` (if given) into the average, then return the
        bias-corrected current estimate.

        Parameters
        ----------
        x  : float
            New value to include in EMA.
        """
        if x is not None:
            self.last = self.alpha * x + (1 - self.alpha) * self.last
            self.calls += 1
        if not self.calls:
            return self.last
        # de-bias: scale up early estimates to offset the zero initialisation
        return self.last / (1 - (1 - self.alpha) ** self.calls)
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
class tqdm(Comparable):
|
| 246 |
+
"""
|
| 247 |
+
Decorate an iterable object, returning an iterator which acts exactly
|
| 248 |
+
like the original iterable, but prints a dynamically updating
|
| 249 |
+
progressbar every time a value is requested.
|
| 250 |
+
|
| 251 |
+
Parameters
|
| 252 |
+
----------
|
| 253 |
+
iterable : iterable, optional
|
| 254 |
+
Iterable to decorate with a progressbar.
|
| 255 |
+
Leave blank to manually manage the updates.
|
| 256 |
+
desc : str, optional
|
| 257 |
+
Prefix for the progressbar.
|
| 258 |
+
total : int or float, optional
|
| 259 |
+
The number of expected iterations. If unspecified,
|
| 260 |
+
len(iterable) is used if possible. If float("inf") or as a last
|
| 261 |
+
resort, only basic progress statistics are displayed
|
| 262 |
+
(no ETA, no progressbar).
|
| 263 |
+
If `gui` is True and this parameter needs subsequent updating,
|
| 264 |
+
specify an initial arbitrary large positive number,
|
| 265 |
+
e.g. 9e9.
|
| 266 |
+
leave : bool, optional
|
| 267 |
+
If [default: True], keeps all traces of the progressbar
|
| 268 |
+
upon termination of iteration.
|
| 269 |
+
If `None`, will leave only if `position` is `0`.
|
| 270 |
+
file : `io.TextIOWrapper` or `io.StringIO`, optional
|
| 271 |
+
Specifies where to output the progress messages
|
| 272 |
+
(default: sys.stderr). Uses `file.write(str)` and `file.flush()`
|
| 273 |
+
methods. For encoding, see `write_bytes`.
|
| 274 |
+
ncols : int, optional
|
| 275 |
+
The width of the entire output message. If specified,
|
| 276 |
+
dynamically resizes the progressbar to stay within this bound.
|
| 277 |
+
If unspecified, attempts to use environment width. The
|
| 278 |
+
fallback is a meter width of 10 and no limit for the counter and
|
| 279 |
+
statistics. If 0, will not print any meter (only stats).
|
| 280 |
+
mininterval : float, optional
|
| 281 |
+
Minimum progress display update interval [default: 0.1] seconds.
|
| 282 |
+
maxinterval : float, optional
|
| 283 |
+
Maximum progress display update interval [default: 10] seconds.
|
| 284 |
+
Automatically adjusts `miniters` to correspond to `mininterval`
|
| 285 |
+
after long display update lag. Only works if `dynamic_miniters`
|
| 286 |
+
or monitor thread is enabled.
|
| 287 |
+
miniters : int or float, optional
|
| 288 |
+
Minimum progress display update interval, in iterations.
|
| 289 |
+
If 0 and `dynamic_miniters`, will automatically adjust to equal
|
| 290 |
+
`mininterval` (more CPU efficient, good for tight loops).
|
| 291 |
+
If > 0, will skip display of specified number of iterations.
|
| 292 |
+
Tweak this and `mininterval` to get very efficient loops.
|
| 293 |
+
If your progress is erratic with both fast and slow iterations
|
| 294 |
+
(network, skipping items, etc) you should set miniters=1.
|
| 295 |
+
ascii : bool or str, optional
|
| 296 |
+
If unspecified or False, use unicode (smooth blocks) to fill
|
| 297 |
+
the meter. The fallback is to use ASCII characters " 123456789#".
|
| 298 |
+
disable : bool, optional
|
| 299 |
+
Whether to disable the entire progressbar wrapper
|
| 300 |
+
[default: False]. If set to None, disable on non-TTY.
|
| 301 |
+
unit : str, optional
|
| 302 |
+
String that will be used to define the unit of each iteration
|
| 303 |
+
[default: it].
|
| 304 |
+
unit_scale : bool or int or float, optional
|
| 305 |
+
If 1 or True, the number of iterations will be reduced/scaled
|
| 306 |
+
automatically and a metric prefix following the
|
| 307 |
+
International System of Units standard will be added
|
| 308 |
+
(kilo, mega, etc.) [default: False]. If any other non-zero
|
| 309 |
+
number, will scale `total` and `n`.
|
| 310 |
+
dynamic_ncols : bool, optional
|
| 311 |
+
If set, constantly alters `ncols` and `nrows` to the
|
| 312 |
+
environment (allowing for window resizes) [default: False].
|
| 313 |
+
smoothing : float, optional
|
| 314 |
+
Exponential moving average smoothing factor for speed estimates
|
| 315 |
+
(ignored in GUI mode). Ranges from 0 (average speed) to 1
|
| 316 |
+
(current/instantaneous speed) [default: 0.3].
|
| 317 |
+
bar_format : str, optional
|
| 318 |
+
Specify a custom bar string formatting. May impact performance.
|
| 319 |
+
[default: '{l_bar}{bar}{r_bar}'], where
|
| 320 |
+
l_bar='{desc}: {percentage:3.0f}%|' and
|
| 321 |
+
r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
|
| 322 |
+
'{rate_fmt}{postfix}]'
|
| 323 |
+
Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
|
| 324 |
+
percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
|
| 325 |
+
rate, rate_fmt, rate_noinv, rate_noinv_fmt,
|
| 326 |
+
rate_inv, rate_inv_fmt, postfix, unit_divisor,
|
| 327 |
+
remaining, remaining_s, eta.
|
| 328 |
+
Note that a trailing ": " is automatically removed after {desc}
|
| 329 |
+
if the latter is empty.
|
| 330 |
+
initial : int or float, optional
|
| 331 |
+
The initial counter value. Useful when restarting a progress
|
| 332 |
+
bar [default: 0]. If using float, consider specifying `{n:.3f}`
|
| 333 |
+
or similar in `bar_format`, or specifying `unit_scale`.
|
| 334 |
+
position : int, optional
|
| 335 |
+
Specify the line offset to print this bar (starting from 0)
|
| 336 |
+
Automatic if unspecified.
|
| 337 |
+
Useful to manage multiple bars at once (eg, from threads).
|
| 338 |
+
postfix : dict or *, optional
|
| 339 |
+
Specify additional stats to display at the end of the bar.
|
| 340 |
+
Calls `set_postfix(**postfix)` if possible (dict).
|
| 341 |
+
unit_divisor : float, optional
|
| 342 |
+
[default: 1000], ignored unless `unit_scale` is True.
|
| 343 |
+
write_bytes : bool, optional
|
| 344 |
+
Whether to write bytes. If (default: False) will write unicode.
|
| 345 |
+
lock_args : tuple, optional
|
| 346 |
+
Passed to `refresh` for intermediate output
|
| 347 |
+
(initialisation, iterating, and updating).
|
| 348 |
+
nrows : int, optional
|
| 349 |
+
The screen height. If specified, hides nested bars outside this
|
| 350 |
+
bound. If unspecified, attempts to use environment height.
|
| 351 |
+
The fallback is 20.
|
| 352 |
+
colour : str, optional
|
| 353 |
+
Bar colour (e.g. 'green', '#00ff00').
|
| 354 |
+
delay : float, optional
|
| 355 |
+
Don't display until [default: 0] seconds have elapsed.
|
| 356 |
+
gui : bool, optional
|
| 357 |
+
WARNING: internal parameter - do not use.
|
| 358 |
+
Use tqdm.gui.tqdm(...) instead. If set, will attempt to use
|
| 359 |
+
matplotlib animations for a graphical output [default: False].
|
| 360 |
+
|
| 361 |
+
Returns
|
| 362 |
+
-------
|
| 363 |
+
out : decorated iterator.
|
| 364 |
+
"""
|
| 365 |
+
|
| 366 |
+
monitor_interval = 10 # set to 0 to disable the thread
|
| 367 |
+
monitor = None
|
| 368 |
+
_instances = WeakSet()
|
| 369 |
+
|
| 370 |
+
@staticmethod
|
| 371 |
+
def format_sizeof(num, suffix='', divisor=1000):
|
| 372 |
+
"""
|
| 373 |
+
Formats a number (greater than unity) with SI Order of Magnitude
|
| 374 |
+
prefixes.
|
| 375 |
+
|
| 376 |
+
Parameters
|
| 377 |
+
----------
|
| 378 |
+
num : float
|
| 379 |
+
Number ( >= 1) to format.
|
| 380 |
+
suffix : str, optional
|
| 381 |
+
Post-postfix [default: ''].
|
| 382 |
+
divisor : float, optional
|
| 383 |
+
Divisor between prefixes [default: 1000].
|
| 384 |
+
|
| 385 |
+
Returns
|
| 386 |
+
-------
|
| 387 |
+
out : str
|
| 388 |
+
Number with Order of Magnitude SI unit postfix.
|
| 389 |
+
"""
|
| 390 |
+
for unit in ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z']:
|
| 391 |
+
if abs(num) < 999.5:
|
| 392 |
+
if abs(num) < 99.95:
|
| 393 |
+
if abs(num) < 9.995:
|
| 394 |
+
return f'{num:1.2f}{unit}{suffix}'
|
| 395 |
+
return f'{num:2.1f}{unit}{suffix}'
|
| 396 |
+
return f'{num:3.0f}{unit}{suffix}'
|
| 397 |
+
num /= divisor
|
| 398 |
+
return f'{num:3.1f}Y{suffix}'
|
| 399 |
+
|
| 400 |
+
@staticmethod
|
| 401 |
+
def format_interval(t):
|
| 402 |
+
"""
|
| 403 |
+
Formats a number of seconds as a clock time, [H:]MM:SS
|
| 404 |
+
|
| 405 |
+
Parameters
|
| 406 |
+
----------
|
| 407 |
+
t : int
|
| 408 |
+
Number of seconds.
|
| 409 |
+
|
| 410 |
+
Returns
|
| 411 |
+
-------
|
| 412 |
+
out : str
|
| 413 |
+
[H:]MM:SS
|
| 414 |
+
"""
|
| 415 |
+
mins, s = divmod(int(t), 60)
|
| 416 |
+
h, m = divmod(mins, 60)
|
| 417 |
+
return f'{h:d}:{m:02d}:{s:02d}' if h else f'{m:02d}:{s:02d}'
|
| 418 |
+
|
| 419 |
+
@staticmethod
|
| 420 |
+
def format_num(n):
|
| 421 |
+
"""
|
| 422 |
+
Intelligent scientific notation (.3g).
|
| 423 |
+
|
| 424 |
+
Parameters
|
| 425 |
+
----------
|
| 426 |
+
n : int or float or Numeric
|
| 427 |
+
A Number.
|
| 428 |
+
|
| 429 |
+
Returns
|
| 430 |
+
-------
|
| 431 |
+
out : str
|
| 432 |
+
Formatted number.
|
| 433 |
+
"""
|
| 434 |
+
f = f'{n:.3g}'.replace('e+0', 'e+').replace('e-0', 'e-')
|
| 435 |
+
n = str(n)
|
| 436 |
+
return f if len(f) < len(n) else n
|
| 437 |
+
|
| 438 |
+
    @staticmethod
    def status_printer(file):
        """
        Manage the printing and in-place updating of a line of characters.
        Note that if the string is longer than a line, then in-place
        updating may not work (it will print a new line at each refresh).

        Returns a `print_status(s)` closure that rewrites the current
        terminal line with `s` on each call.
        """
        fp = file
        # tolerate file-like objects without flush()
        fp_flush = getattr(fp, 'flush', lambda: None)  # pragma: no cover
        if fp in (sys.stderr, sys.stdout):
            # flush both std streams so prior output is not interleaved
            # with the progress line
            getattr(sys.stderr, 'flush', lambda: None)()
            getattr(sys.stdout, 'flush', lambda: None)()

        def fp_write(s):
            # write-and-flush so the bar appears immediately
            fp.write(str(s))
            fp_flush()

        # mutable cell: display width of the previously printed status,
        # shared with the closure below
        last_len = [0]

        def print_status(s):
            # pad with spaces to erase leftovers from a longer prior line;
            # '\r' returns the cursor to the start of the line
            len_s = disp_len(s)
            fp_write('\r' + s + (' ' * max(last_len[0] - len_s, 0)))
            last_len[0] = len_s

        return print_status
|
| 463 |
+
|
| 464 |
+
    @staticmethod
    def format_meter(n, total, elapsed, ncols=None, prefix='', ascii=False, unit='it',
                     unit_scale=False, rate=None, bar_format=None, postfix=None,
                     unit_divisor=1000, initial=0, colour=None, **extra_kwargs):
        """
        Return a string-based progress bar given some parameters

        Parameters
        ----------
        n  : int or float
            Number of finished iterations.
        total  : int or float
            The expected total number of iterations. If meaningless (None),
            only basic progress statistics are displayed (no ETA).
        elapsed  : float
            Number of seconds passed since start.
        ncols  : int, optional
            The width of the entire output message. If specified,
            dynamically resizes `{bar}` to stay within this bound
            [default: None]. If `0`, will not print any bar (only stats).
            The fallback is `{bar:10}`.
        prefix  : str, optional
            Prefix message (included in total width) [default: ''].
            Use as {desc} in bar_format string.
        ascii  : bool, optional or str, optional
            If not set, use unicode (smooth blocks) to fill the meter
            [default: False]. The fallback is to use ASCII characters
            " 123456789#".
        unit  : str, optional
            The iteration unit [default: 'it'].
        unit_scale  : bool or int or float, optional
            If 1 or True, the number of iterations will be printed with an
            appropriate SI metric prefix (k = 10^3, M = 10^6, etc.)
            [default: False]. If any other non-zero number, will scale
            `total` and `n`.
        rate  : float, optional
            Manual override for iteration rate.
            If [default: None], uses n/elapsed.
        bar_format  : str, optional
            Specify a custom bar string formatting. May impact performance.
            [default: '{l_bar}{bar}{r_bar}'], where
            l_bar='{desc}: {percentage:3.0f}%|' and
            r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
              '{rate_fmt}{postfix}]'
            Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
              percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
              rate, rate_fmt, rate_noinv, rate_noinv_fmt,
              rate_inv, rate_inv_fmt, postfix, unit_divisor,
              remaining, remaining_s, eta.
            Note that a trailing ": " is automatically removed after {desc}
            if the latter is empty.
        postfix  : *, optional
            Similar to `prefix`, but placed at the end
            (e.g. for additional stats).
            Note: postfix is usually a string (not a dict) for this method,
            and will if possible be set to postfix = ', ' + postfix.
            However other types are supported (#382).
        unit_divisor  : float, optional
            [default: 1000], ignored unless `unit_scale` is True.
        initial  : int or float, optional
            The initial counter value [default: 0].
        colour  : str, optional
            Bar colour (e.g. 'green', '#00ff00').

        Returns
        -------
        out  : Formatted meter and stats, ready to display.
        """

        # sanity check: total
        if total and n >= (total + 0.5):  # allow float imprecision (#849)
            total = None

        # apply custom scale if necessary
        if unit_scale and unit_scale not in (True, 1):
            if total:
                total *= unit_scale
            n *= unit_scale
            if rate:
                rate *= unit_scale  # by default rate = self.avg_dn / self.avg_dt
            # from here on, treat the already-scaled values as unscaled
            unit_scale = False

        elapsed_str = tqdm.format_interval(elapsed)

        # if unspecified, attempt to use rate = average speed
        # (we allow manual override since predicting time is an arcane art)
        if rate is None and elapsed:
            rate = (n - initial) / elapsed
        inv_rate = 1 / rate if rate else None
        format_sizeof = tqdm.format_sizeof
        rate_noinv_fmt = ((format_sizeof(rate) if unit_scale else f'{rate:5.2f}')
                          if rate else '?') + unit + '/s'
        rate_inv_fmt = (
            (format_sizeof(inv_rate) if unit_scale else f'{inv_rate:5.2f}')
            if inv_rate else '?') + 's/' + unit
        # show seconds-per-item when slower than 1 item/s (more readable)
        rate_fmt = rate_inv_fmt if inv_rate and inv_rate > 1 else rate_noinv_fmt

        if unit_scale:
            n_fmt = format_sizeof(n, divisor=unit_divisor)
            total_fmt = format_sizeof(total, divisor=unit_divisor) if total is not None else '?'
        else:
            n_fmt = str(n)
            total_fmt = str(total) if total is not None else '?'

        # postfix is usually a str; non-str types that reject concatenation
        # are passed through unchanged (#382)
        try:
            postfix = ', ' + postfix if postfix else ''
        except TypeError:
            pass

        remaining = (total - n) / rate if rate and total else 0
        remaining_str = tqdm.format_interval(remaining) if rate else '?'
        try:
            eta_dt = (datetime.now() + timedelta(seconds=remaining)
                      if rate and total else datetime.fromtimestamp(0, timezone.utc))
        except OverflowError:
            # `remaining` too large for a datetime: clamp to the maximum
            eta_dt = datetime.max

        # format the stats displayed to the left and right sides of the bar
        if prefix:
            # old prefix setup work around
            bool_prefix_colon_already = (prefix[-2:] == ": ")
            l_bar = prefix if bool_prefix_colon_already else prefix + ": "
        else:
            l_bar = ''

        r_bar = f'| {n_fmt}/{total_fmt} [{elapsed_str}<{remaining_str}, {rate_fmt}{postfix}]'

        # Custom bar formatting
        # Populate a dict with all available progress indicators
        format_dict = {
            # slight extension of self.format_dict
            'n': n, 'n_fmt': n_fmt, 'total': total, 'total_fmt': total_fmt,
            'elapsed': elapsed_str, 'elapsed_s': elapsed,
            'ncols': ncols, 'desc': prefix or '', 'unit': unit,
            'rate': inv_rate if inv_rate and inv_rate > 1 else rate,
            'rate_fmt': rate_fmt, 'rate_noinv': rate,
            'rate_noinv_fmt': rate_noinv_fmt, 'rate_inv': inv_rate,
            'rate_inv_fmt': rate_inv_fmt,
            'postfix': postfix, 'unit_divisor': unit_divisor,
            'colour': colour,
            # plus more useful definitions
            'remaining': remaining_str, 'remaining_s': remaining,
            'l_bar': l_bar, 'r_bar': r_bar, 'eta': eta_dt,
            **extra_kwargs}

        # total is known: we can predict some stats
        if total:
            # fractional and percentage progress
            frac = n / total
            percentage = frac * 100

            l_bar += f'{percentage:3.0f}%|'

            if ncols == 0:
                # no room for a meter: splice stats together without one
                return l_bar[:-1] + r_bar[1:]

            format_dict.update(l_bar=l_bar)
            if bar_format:
                format_dict.update(percentage=percentage)

                # auto-remove colon for empty `{desc}`
                if not prefix:
                    bar_format = bar_format.replace("{desc}: ", '')
            else:
                bar_format = "{l_bar}{bar}{r_bar}"

            # first pass: render with a placeholder to measure non-bar width
            full_bar = FormatReplace()
            nobar = bar_format.format(bar=full_bar, **format_dict)
            if not full_bar.format_called:
                return nobar  # no `{bar}`; nothing else to do

            # Formatting progress bar space available for bar's display
            full_bar = Bar(frac,
                           max(1, ncols - disp_len(nobar)) if ncols else 10,
                           charset=Bar.ASCII if ascii is True else ascii or Bar.UTF,
                           colour=colour)
            if not _is_ascii(full_bar.charset) and _is_ascii(bar_format):
                bar_format = str(bar_format)
            res = bar_format.format(bar=full_bar, **format_dict)
            return disp_trim(res, ncols) if ncols else res

        elif bar_format:
            # user-specified bar_format but no total
            l_bar += '|'
            format_dict.update(l_bar=l_bar, percentage=0)
            full_bar = FormatReplace()
            nobar = bar_format.format(bar=full_bar, **format_dict)
            if not full_bar.format_called:
                return nobar
            # unknown total: render a blank (indeterminate) bar
            full_bar = Bar(0,
                           max(1, ncols - disp_len(nobar)) if ncols else 10,
                           charset=Bar.BLANK, colour=colour)
            res = bar_format.format(bar=full_bar, **format_dict)
            return disp_trim(res, ncols) if ncols else res
        else:
            # no total: no progressbar, ETA, just progress stats
            return (f'{(prefix + ": ") if prefix else ""}'
                    f'{n_fmt}{unit} [{elapsed_str}, {rate_fmt}{postfix}]')
|
| 662 |
+
|
| 663 |
+
    def __new__(cls, *_, **__):
        """
        Create the instance, register it in the class-wide `_instances`
        set and (re)start the monitoring thread if needed.
        """
        instance = object.__new__(cls)
        with cls.get_lock():  # also constructs lock if non-existent
            cls._instances.add(instance)
            # create monitoring thread
            # (re-created if the previous one died, per `report()`)
            if cls.monitor_interval and (cls.monitor is None
                                         or not cls.monitor.report()):
                try:
                    cls.monitor = TMonitor(cls, cls.monitor_interval)
                except Exception as e:  # pragma: nocover
                    # thread creation can fail (e.g. restricted environments):
                    # warn once and permanently disable monitoring
                    warn("tqdm:disabling monitor support"
                         " (monitor_interval = 0) due to:\n" + str(e),
                         TqdmMonitorWarning, stacklevel=2)
                    cls.monitor_interval = 0
        return instance
|
| 678 |
+
|
| 679 |
+
@classmethod
|
| 680 |
+
def _get_free_pos(cls, instance=None):
|
| 681 |
+
"""Skips specified instance."""
|
| 682 |
+
positions = {abs(inst.pos) for inst in cls._instances
|
| 683 |
+
if inst is not instance and hasattr(inst, "pos")}
|
| 684 |
+
return min(set(range(len(positions) + 1)).difference(positions))
|
| 685 |
+
|
| 686 |
+
    @classmethod
    def _decr_instances(cls, instance):
        """
        Remove from list and reposition another unfixed bar
        to fill the new gap.

        This means that by default (where all nested bars are unfixed),
        order is not maintained but screen flicker/blank space is minimised.
        (tqdm<=4.44.1 moved ALL subsequent unfixed bars up.)
        """
        with cls._lock:
            try:
                cls._instances.remove(instance)
            except KeyError:
                # if not instance.gui: # pragma: no cover
                #     raise
                pass  # py2: maybe magically removed already
            # else:
            if not instance.gui:
                # last visible row (fallback screen height is 20)
                last = (instance.nrows or 20) - 1
                # find unfixed (`pos >= 0`) overflow (`pos >= nrows - 1`)
                instances = list(filter(
                    lambda i: hasattr(i, "pos") and last <= i.pos,
                    cls._instances))
                # set first found to current `pos`
                if instances:
                    inst = min(instances, key=lambda i: i.pos)
                    # clear its old line before moving it into the freed slot
                    inst.clear(nolock=True)
                    inst.pos = abs(instance.pos)
|
| 715 |
+
|
| 716 |
+
@classmethod
|
| 717 |
+
def write(cls, s, file=None, end="\n", nolock=False):
|
| 718 |
+
"""Print a message via tqdm (without overlap with bars)."""
|
| 719 |
+
fp = file if file is not None else sys.stdout
|
| 720 |
+
with cls.external_write_mode(file=file, nolock=nolock):
|
| 721 |
+
# Write the message
|
| 722 |
+
fp.write(s)
|
| 723 |
+
fp.write(end)
|
| 724 |
+
|
| 725 |
+
    @classmethod
    @contextmanager
    def external_write_mode(cls, file=None, nolock=False):
        """
        Disable tqdm within context and refresh tqdm when exits.
        Useful when writing to standard output stream

        Parameters
        ----------
        file  : output stream being written to [default: sys.stdout].
        nolock  : if True, do not acquire/release the global lock.
        """
        fp = file if file is not None else sys.stdout

        try:
            if not nolock:
                cls.get_lock().acquire()
            # Clear all bars
            inst_cleared = []
            for inst in getattr(cls, '_instances', []):
                # Clear instance if in the target output file
                # or if write output + tqdm output are both either
                # sys.stdout or sys.stderr (because both are mixed in terminal)
                # (`start_t` check skips half-constructed instances)
                if hasattr(inst, "start_t") and (inst.fp == fp or all(
                        f in (sys.stdout, sys.stderr) for f in (fp, inst.fp))):
                    inst.clear(nolock=True)
                    inst_cleared.append(inst)
            yield
            # Force refresh display of bars we cleared
            for inst in inst_cleared:
                inst.refresh(nolock=True)
        finally:
            # always release, even if the caller's body raised
            if not nolock:
                cls._lock.release()
|
| 754 |
+
|
| 755 |
+
    @classmethod
    def set_lock(cls, lock):
        """
        Set the global lock shared by all bars.

        Parameters
        ----------
        lock  : lock object used to serialise all bar output
            (e.g. a `multiprocessing` lock for child processes).
        """
        cls._lock = lock
|
| 759 |
+
|
| 760 |
+
    @classmethod
    def get_lock(cls):
        """Get the global lock. Construct it if it does not exist."""
        # created lazily so importing tqdm has no side effects
        if not hasattr(cls, '_lock'):
            cls._lock = TqdmDefaultWriteLock()
        return cls._lock
|
| 766 |
+
|
| 767 |
+
    @classmethod
    def pandas(cls, **tqdm_kwargs):
        """
        Registers the current `tqdm` class with
        pandas.core.
        ( frame.DataFrame
        | series.Series
        | groupby.(generic.)DataFrameGroupBy
        | groupby.(generic.)SeriesGroupBy
        ).progress_apply

        A new instance will be created every time `progress_apply` is called,
        and each instance will automatically `close()` upon completion.

        Parameters
        ----------
        tqdm_kwargs  : arguments for the tqdm instance

        Examples
        --------
        >>> import pandas as pd
        >>> import numpy as np
        >>> from tqdm import tqdm
        >>> from tqdm.gui import tqdm as tqdm_gui
        >>>
        >>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
        >>> tqdm.pandas(ncols=50)  # can use tqdm_gui, optional kwargs, etc
        >>> # Now you can use `progress_apply` instead of `apply`
        >>> df.groupby(0).progress_apply(lambda x: x**2)

        References
        ----------
        <https://stackoverflow.com/questions/18603270/\
        progress-indicator-during-pandas-operations-python>
        """
        from warnings import catch_warnings, simplefilter

        from pandas.core.frame import DataFrame
        from pandas.core.series import Series
        # The cascade of try/except imports below locates the same classes
        # across many pandas versions; each branch is tagged with the
        # version range it serves.
        try:
            with catch_warnings():
                simplefilter("ignore", category=FutureWarning)
                from pandas import Panel
        except ImportError:  # pandas>=1.2.0
            Panel = None
        Rolling, Expanding = None, None
        try:  # pandas>=1.0.0
            from pandas.core.window.rolling import _Rolling_and_Expanding
        except ImportError:
            try:  # pandas>=0.18.0
                from pandas.core.window import _Rolling_and_Expanding
            except ImportError:  # pandas>=1.2.0
                try:  # pandas>=1.2.0
                    from pandas.core.window.expanding import Expanding
                    from pandas.core.window.rolling import Rolling
                    _Rolling_and_Expanding = Rolling, Expanding
                except ImportError:  # pragma: no cover
                    _Rolling_and_Expanding = None
        try:  # pandas>=0.25.0
            from pandas.core.groupby.generic import SeriesGroupBy  # , NDFrameGroupBy
            from pandas.core.groupby.generic import DataFrameGroupBy
        except ImportError:  # pragma: no cover
            try:  # pandas>=0.23.0
                from pandas.core.groupby.groupby import DataFrameGroupBy, SeriesGroupBy
            except ImportError:
                from pandas.core.groupby import DataFrameGroupBy, SeriesGroupBy
        try:  # pandas>=0.23.0
            from pandas.core.groupby.groupby import GroupBy
        except ImportError:  # pragma: no cover
            from pandas.core.groupby import GroupBy

        try:  # pandas>=0.23.0
            from pandas.core.groupby.groupby import PanelGroupBy
        except ImportError:
            try:
                from pandas.core.groupby import PanelGroupBy
            except ImportError:  # pandas>=0.25.0
                PanelGroupBy = None

        # copy so repeated `pandas()` calls don't mutate the caller's dict
        tqdm_kwargs = tqdm_kwargs.copy()
        # single-element list: a mutable cell readable from the closures below
        deprecated_t = [tqdm_kwargs.pop('deprecated_t', None)]

        def inner_generator(df_function='apply'):
            # Build a `progress_<df_function>` method bound to `df_function`.
            def inner(df, func, *args, **kwargs):
                """
                Parameters
                ----------
                df  : (DataFrame|Series)[GroupBy]
                    Data (may be grouped).
                func  : function
                    To be applied on the (grouped) data.
                **kwargs  : optional
                    Transmitted to `df.apply()`.
                """

                # Precompute total iterations
                total = tqdm_kwargs.pop("total", getattr(df, 'ngroups', None))
                if total is None:  # not grouped
                    if df_function == 'applymap':
                        total = df.size
                    elif isinstance(df, Series):
                        total = len(df)
                    elif (_Rolling_and_Expanding is None or
                          not isinstance(df, _Rolling_and_Expanding)):
                        # DataFrame or Panel
                        axis = kwargs.get('axis', 0)
                        if axis == 'index':
                            axis = 0
                        elif axis == 'columns':
                            axis = 1
                        # when axis=0, total is shape[axis1]
                        total = df.size // df.shape[axis]

                # Init bar
                if deprecated_t[0] is not None:
                    # legacy API: caller supplied a ready-made bar; use it once
                    t = deprecated_t[0]
                    deprecated_t[0] = None
                else:
                    t = cls(total=total, **tqdm_kwargs)

                if len(args) > 0:
                    # *args intentionally not supported (see #244, #299)
                    TqdmDeprecationWarning(
                        "Except func, normal arguments are intentionally" +
                        " not supported by" +
                        " `(DataFrame|Series|GroupBy).progress_apply`." +
                        " Use keyword arguments instead.",
                        fp_write=getattr(t.fp, 'write', sys.stderr.write))

                try:  # pandas>=1.3.0
                    from pandas.core.common import is_builtin_func
                except ImportError:
                    is_builtin_func = df._is_builtin_func
                try:
                    func = is_builtin_func(func)
                except TypeError:
                    pass

                # Define bar updating wrapper
                def wrapper(*args, **kwargs):
                    # update tbar correctly
                    # it seems `pandas apply` calls `func` twice
                    # on the first column/row to decide whether it can
                    # take a fast or slow code path; so stop when t.total==t.n
                    t.update(n=1 if not t.total or t.n < t.total else 0)
                    return func(*args, **kwargs)

                # Apply the provided function (in **kwargs)
                # on the df using our wrapper (which provides bar updating)
                try:
                    return getattr(df, df_function)(wrapper, **kwargs)
                finally:
                    # always close the bar, even if `func` raised
                    t.close()

            return inner

        # Monkeypatch pandas to provide easy methods
        # Enable custom tqdm progress in pandas!
        Series.progress_apply = inner_generator()
        SeriesGroupBy.progress_apply = inner_generator()
        Series.progress_map = inner_generator('map')
        SeriesGroupBy.progress_map = inner_generator('map')

        DataFrame.progress_apply = inner_generator()
        DataFrameGroupBy.progress_apply = inner_generator()
        DataFrame.progress_applymap = inner_generator('applymap')
        DataFrame.progress_map = inner_generator('map')
        DataFrameGroupBy.progress_map = inner_generator('map')

        if Panel is not None:
            Panel.progress_apply = inner_generator()
        if PanelGroupBy is not None:
            PanelGroupBy.progress_apply = inner_generator()

        GroupBy.progress_apply = inner_generator()
        GroupBy.progress_aggregate = inner_generator('aggregate')
        GroupBy.progress_transform = inner_generator('transform')

        if Rolling is not None and Expanding is not None:
            Rolling.progress_apply = inner_generator()
            Expanding.progress_apply = inner_generator()
        elif _Rolling_and_Expanding is not None:
            _Rolling_and_Expanding.progress_apply = inner_generator()
|
| 950 |
+
|
| 951 |
+
# override defaults via env vars
|
| 952 |
+
# override defaults via env vars (e.g. TQDM_NCOLS=80)
@envwrap("TQDM_", is_method=True, types={'total': float, 'ncols': int, 'miniters': float,
                                         'position': int, 'nrows': int})
def __init__(self, iterable=None, desc=None, total=None, leave=True, file=None,
             ncols=None, mininterval=0.1, maxinterval=10.0, miniters=None,
             ascii=None, disable=False, unit='it', unit_scale=False,
             dynamic_ncols=False, smoothing=0.3, bar_format=None, initial=0,
             position=None, postfix=None, unit_divisor=1000, write_bytes=False,
             lock_args=None, nrows=None, colour=None, delay=0.0, gui=False,
             **kwargs):
    """see tqdm.tqdm for arguments"""
    if file is None:
        file = sys.stderr

    if write_bytes:
        # Despite coercing unicode into bytes, py2 sys.std* streams
        # should have bytes written to them.
        file = SimpleTextIOWrapper(
            file, encoding=getattr(file, 'encoding', None) or 'utf-8')

    # wrap the stream so a write failure disables this bar instead of crashing
    file = DisableOnWriteError(file, tqdm_instance=self)

    # disable=None means "auto": disable when the stream is not a TTY
    if disable is None and hasattr(file, "isatty") and not file.isatty():
        disable = True

    if total is None and iterable is not None:
        try:
            total = len(iterable)
        except (TypeError, AttributeError):
            total = None
    if total == float("inf"):
        # Infinite iterations, behave same as unknown
        total = None

    if disable:
        # minimal state only; bar is removed from the instance registry
        self.iterable = iterable
        self.disable = disable
        with self._lock:
            self.pos = self._get_free_pos(self)
            self._instances.remove(self)
        self.n = initial
        self.total = total
        self.leave = leave
        return

    if kwargs:
        # unknown keyword arguments: disable cleanly, then raise
        self.disable = True
        with self._lock:
            self.pos = self._get_free_pos(self)
            self._instances.remove(self)
        raise (
            TqdmDeprecationWarning(
                "`nested` is deprecated and automated.\n"
                "Use `position` instead for manual control.\n",
                fp_write=getattr(file, 'write', sys.stderr.write))
            if "nested" in kwargs else
            TqdmKeyError("Unknown argument(s): " + str(kwargs)))

    # Preprocess the arguments
    if (
        (ncols is None or nrows is None) and (file in (sys.stderr, sys.stdout))
    ) or dynamic_ncols:  # pragma: no cover
        if dynamic_ncols:
            # keep a callable so the size is re-queried on every render
            dynamic_ncols = _screen_shape_wrapper()
            if dynamic_ncols:
                ncols, nrows = dynamic_ncols(file)
        else:
            # one-shot query; only fill in dimensions the caller left unset
            _dynamic_ncols = _screen_shape_wrapper()
            if _dynamic_ncols:
                _ncols, _nrows = _dynamic_ncols(file)
                if ncols is None:
                    ncols = _ncols
                if nrows is None:
                    nrows = _nrows

    if miniters is None:
        # auto-adjust miniters from the observed iteration rate
        miniters = 0
        dynamic_miniters = True
    else:
        dynamic_miniters = False

    if mininterval is None:
        mininterval = 0

    if maxinterval is None:
        maxinterval = 0

    if ascii is None:
        ascii = not _supports_unicode(file)

    if bar_format and ascii is not True and not _is_ascii(ascii):
        # Convert bar format into unicode since terminal uses unicode
        bar_format = str(bar_format)

    if smoothing is None:
        smoothing = 0

    # Store the arguments
    self.iterable = iterable
    self.desc = desc or ''
    self.total = total
    self.leave = leave
    self.fp = file
    self.ncols = ncols
    self.nrows = nrows
    self.mininterval = mininterval
    self.maxinterval = maxinterval
    self.miniters = miniters
    self.dynamic_miniters = dynamic_miniters
    self.ascii = ascii
    self.disable = disable
    self.unit = unit
    self.unit_scale = unit_scale
    self.unit_divisor = unit_divisor
    self.initial = initial
    self.lock_args = lock_args
    self.delay = delay
    self.gui = gui
    self.dynamic_ncols = dynamic_ncols
    self.smoothing = smoothing
    # exponential-moving-average estimators for rate and miniters
    self._ema_dn = EMA(smoothing)
    self._ema_dt = EMA(smoothing)
    self._ema_miniters = EMA(smoothing)
    self.bar_format = bar_format
    self.postfix = None
    self.colour = colour
    self._time = time
    if postfix:
        try:
            self.set_postfix(refresh=False, **postfix)
        except TypeError:
            # postfix is not a mapping; store it verbatim
            self.postfix = postfix

    # Init the iterations counters
    self.last_print_n = initial
    self.n = initial

    # if nested, at initial sp() call we replace '\r' by '\n' to
    # not overwrite the outer progress bar
    with self._lock:
        # mark fixed positions as negative
        self.pos = self._get_free_pos(self) if position is None else -position

    if not gui:
        # Initialize the screen printer
        self.sp = self.status_printer(self.fp)
        if delay <= 0:
            self.refresh(lock_args=self.lock_args)

    # Init the time counter
    self.last_print_t = self._time()
    # NB: Avoid race conditions by setting start_t at the very end of init
    self.start_t = self.last_print_t
|
| 1104 |
+
|
| 1105 |
+
def __bool__(self):
|
| 1106 |
+
if self.total is not None:
|
| 1107 |
+
return self.total > 0
|
| 1108 |
+
if self.iterable is None:
|
| 1109 |
+
raise TypeError('bool() undefined when iterable == total == None')
|
| 1110 |
+
return bool(self.iterable)
|
| 1111 |
+
|
| 1112 |
+
def __len__(self):
|
| 1113 |
+
return (
|
| 1114 |
+
self.total if self.iterable is None
|
| 1115 |
+
else self.iterable.shape[0] if hasattr(self.iterable, "shape")
|
| 1116 |
+
else len(self.iterable) if hasattr(self.iterable, "__len__")
|
| 1117 |
+
else self.iterable.__length_hint__() if hasattr(self.iterable, "__length_hint__")
|
| 1118 |
+
else getattr(self, "total", None))
|
| 1119 |
+
|
| 1120 |
+
def __reversed__(self):
    """Iterate over `reversed(self.iterable)` while updating this bar.

    Temporarily swaps the wrapped iterable for its reverse, delegates
    to `__iter__`, then restores the original reference so the bar can
    still be reused afterwards.
    """
    try:
        orig = self.iterable
    except AttributeError:
        # no iterable stored at all (e.g. manual-mode bar)
        raise TypeError("'tqdm' object is not reversible")
    else:
        # reversed() itself raises TypeError if the iterable is not reversible
        self.iterable = reversed(self.iterable)
        return self.__iter__()
    finally:
        # always restore, even though the returned generator keeps
        # its own reference to the reversed iterator
        self.iterable = orig
|
| 1130 |
+
|
| 1131 |
+
def __contains__(self, item):
|
| 1132 |
+
contains = getattr(self.iterable, '__contains__', None)
|
| 1133 |
+
return contains(item) if contains is not None else item in self.__iter__()
|
| 1134 |
+
|
| 1135 |
+
def __enter__(self):
|
| 1136 |
+
return self
|
| 1137 |
+
|
| 1138 |
+
def __exit__(self, exc_type, exc_value, traceback):
    """Context-manager exit: close the bar, tolerating eager teardown.

    During interpreter/thread shutdown triggered by an external error,
    attributes may already be gone; in that case the AttributeError is
    downgraded to a warning instead of masking the original exception.
    """
    try:
        self.close()
    except AttributeError:
        # only swallow when we are unwinding an external exception
        if exc_type is None and exc_value is None and traceback is None:
            raise
        warn("AttributeError ignored", TqdmWarning, stacklevel=2)
|
| 1146 |
+
|
| 1147 |
+
def __del__(self):
    """Ensure cleanup when the bar is garbage-collected."""
    self.close()
|
| 1149 |
+
|
| 1150 |
+
def __str__(self):
    """Render the bar's current state as a formatted meter line."""
    return self.format_meter(**self.format_dict)
|
| 1152 |
+
|
| 1153 |
+
@property
|
| 1154 |
+
def _comparable(self):
|
| 1155 |
+
return abs(getattr(self, "pos", 1 << 31))
|
| 1156 |
+
|
| 1157 |
+
def __hash__(self):
|
| 1158 |
+
return id(self)
|
| 1159 |
+
|
| 1160 |
+
def __iter__(self):
    """Backward-compatibility to use: for x in tqdm(iterable)"""

    # Inlining instance variables as locals (speed optimisation)
    iterable = self.iterable

    # If the bar is disabled, then just walk the iterable
    # (note: keep this check outside the loop for performance)
    if self.disable:
        for obj in iterable:
            yield obj
        return

    # local copies to avoid attribute lookups in the hot loop
    mininterval = self.mininterval
    last_print_t = self.last_print_t
    last_print_n = self.last_print_n
    min_start_t = self.start_t + self.delay
    n = self.n
    time = self._time

    try:
        for obj in iterable:
            # yield first so the consumer's work counts toward the timing
            yield obj
            # Update and possibly print the progressbar.
            # Note: does not call self.update(1) for speed optimisation.
            n += 1

            # cheap integer check before the comparatively expensive time()
            if n - last_print_n >= self.miniters:
                cur_t = time()
                dt = cur_t - last_print_t
                if dt >= mininterval and cur_t >= min_start_t:
                    self.update(n - last_print_n)
                    # update() may have adjusted these; re-sync locals
                    last_print_n = self.last_print_n
                    last_print_t = self.last_print_t
    finally:
        # persist the local counter and finalise the display even on
        # early exit (break / exception / generator close)
        self.n = n
        self.close()
|
| 1197 |
+
|
| 1198 |
+
def update(self, n=1):
    """
    Manually update the progress bar, useful for streams
    such as reading files.
    E.g.:
    >>> t = tqdm(total=filesize) # Initialise
    >>> for current_buffer in stream:
    ...    ...
    ...    t.update(len(current_buffer))
    >>> t.close()
    The last line is highly recommended, but possibly not necessary if
    `t.update()` will be called in such a way that `filesize` will be
    exactly reached and printed.

    Parameters
    ----------
    n  : int or float, optional
        Increment to add to the internal counter of iterations
        [default: 1]. If using float, consider specifying `{n:.3f}`
        or similar in `bar_format`, or specifying `unit_scale`.

    Returns
    -------
    out  : bool or None
        True if a `display()` was triggered.
    """
    if self.disable:
        return

    if n < 0:
        self.last_print_n += n  # for auto-refresh logic to work
    self.n += n

    # check counter first to reduce calls to time()
    if self.n - self.last_print_n >= self.miniters:
        cur_t = self._time()
        dt = cur_t - self.last_print_t
        if dt >= self.mininterval and cur_t >= self.start_t + self.delay:
            # re-read the clock right before rendering
            cur_t = self._time()
            dn = self.n - self.last_print_n  # >= n
            if self.smoothing and dt and dn:
                # EMA (not just overall average)
                self._ema_dn(dn)
                self._ema_dt(dt)
            self.refresh(lock_args=self.lock_args)
            if self.dynamic_miniters:
                # If no `miniters` was specified, adjust automatically to the
                # maximum iteration rate seen so far between two prints.
                # e.g.: After running `tqdm.update(5)`, subsequent
                # calls to `tqdm.update()` will only cause an update after
                # at least 5 more iterations.
                if self.maxinterval and dt >= self.maxinterval:
                    self.miniters = dn * (self.mininterval or self.maxinterval) / dt
                elif self.smoothing:
                    # EMA miniters update
                    self.miniters = self._ema_miniters(
                        dn * (self.mininterval / dt if self.mininterval and dt
                              else 1))
                else:
                    # max iters between two prints
                    self.miniters = max(self.miniters, dn)

            # Store old values for next call
            self.last_print_n = self.n
            self.last_print_t = cur_t
            return True
|
| 1264 |
+
|
| 1265 |
+
def close(self):
    """Cleanup and (if leave=False) close the progressbar."""
    if self.disable:
        return

    # Prevent multiple closures
    self.disable = True

    # decrement instance pos and remove from internal set
    pos = abs(self.pos)
    self._decr_instances(self)

    if self.last_print_t < self.start_t + self.delay:
        # haven't ever displayed; nothing to clear
        return

    # GUI mode
    if getattr(self, 'sp', None) is None:
        return

    # annoyingly, _supports_unicode isn't good enough
    def fp_write(s):
        self.fp.write(str(s))

    # probe the stream: a closed file raises ValueError('... closed ...')
    try:
        fp_write('')
    except ValueError as e:
        if 'closed' in str(e):
            return
        raise  # pragma: no cover

    # leave=None means: keep the bar only if it sits on the first line
    leave = pos == 0 if self.leave is None else self.leave

    with self._lock:
        if leave:
            # stats for overall rate (no weighted average)
            self._ema_dt = lambda: None
            self.display(pos=0)
            fp_write('\n')
        else:
            # clear previous display
            if self.display(msg='', pos=pos) and not pos:
                fp_write('\r')
|
| 1308 |
+
|
| 1309 |
+
def clear(self, nolock=False):
|
| 1310 |
+
"""Clear current bar display."""
|
| 1311 |
+
if self.disable:
|
| 1312 |
+
return
|
| 1313 |
+
|
| 1314 |
+
if not nolock:
|
| 1315 |
+
self._lock.acquire()
|
| 1316 |
+
pos = abs(self.pos)
|
| 1317 |
+
if pos < (self.nrows or 20):
|
| 1318 |
+
self.moveto(pos)
|
| 1319 |
+
self.sp('')
|
| 1320 |
+
self.fp.write('\r') # place cursor back at the beginning of line
|
| 1321 |
+
self.moveto(-pos)
|
| 1322 |
+
if not nolock:
|
| 1323 |
+
self._lock.release()
|
| 1324 |
+
|
| 1325 |
+
def refresh(self, nolock=False, lock_args=None):
|
| 1326 |
+
"""
|
| 1327 |
+
Force refresh the display of this bar.
|
| 1328 |
+
|
| 1329 |
+
Parameters
|
| 1330 |
+
----------
|
| 1331 |
+
nolock : bool, optional
|
| 1332 |
+
If `True`, does not lock.
|
| 1333 |
+
If [default: `False`]: calls `acquire()` on internal lock.
|
| 1334 |
+
lock_args : tuple, optional
|
| 1335 |
+
Passed to internal lock's `acquire()`.
|
| 1336 |
+
If specified, will only `display()` if `acquire()` returns `True`.
|
| 1337 |
+
"""
|
| 1338 |
+
if self.disable:
|
| 1339 |
+
return
|
| 1340 |
+
|
| 1341 |
+
if not nolock:
|
| 1342 |
+
if lock_args:
|
| 1343 |
+
if not self._lock.acquire(*lock_args):
|
| 1344 |
+
return False
|
| 1345 |
+
else:
|
| 1346 |
+
self._lock.acquire()
|
| 1347 |
+
self.display()
|
| 1348 |
+
if not nolock:
|
| 1349 |
+
self._lock.release()
|
| 1350 |
+
return True
|
| 1351 |
+
|
| 1352 |
+
def unpause(self):
|
| 1353 |
+
"""Restart tqdm timer from last print time."""
|
| 1354 |
+
if self.disable:
|
| 1355 |
+
return
|
| 1356 |
+
cur_t = self._time()
|
| 1357 |
+
self.start_t += cur_t - self.last_print_t
|
| 1358 |
+
self.last_print_t = cur_t
|
| 1359 |
+
|
| 1360 |
+
def reset(self, total=None):
|
| 1361 |
+
"""
|
| 1362 |
+
Resets to 0 iterations for repeated use.
|
| 1363 |
+
|
| 1364 |
+
Consider combining with `leave=True`.
|
| 1365 |
+
|
| 1366 |
+
Parameters
|
| 1367 |
+
----------
|
| 1368 |
+
total : int or float, optional. Total to use for the new bar.
|
| 1369 |
+
"""
|
| 1370 |
+
self.n = 0
|
| 1371 |
+
if total is not None:
|
| 1372 |
+
self.total = total
|
| 1373 |
+
if self.disable:
|
| 1374 |
+
return
|
| 1375 |
+
self.last_print_n = 0
|
| 1376 |
+
self.last_print_t = self.start_t = self._time()
|
| 1377 |
+
self._ema_dn = EMA(self.smoothing)
|
| 1378 |
+
self._ema_dt = EMA(self.smoothing)
|
| 1379 |
+
self._ema_miniters = EMA(self.smoothing)
|
| 1380 |
+
self.refresh()
|
| 1381 |
+
|
| 1382 |
+
def set_description(self, desc=None, refresh=True):
|
| 1383 |
+
"""
|
| 1384 |
+
Set/modify description of the progress bar.
|
| 1385 |
+
|
| 1386 |
+
Parameters
|
| 1387 |
+
----------
|
| 1388 |
+
desc : str, optional
|
| 1389 |
+
refresh : bool, optional
|
| 1390 |
+
Forces refresh [default: True].
|
| 1391 |
+
"""
|
| 1392 |
+
self.desc = desc + ': ' if desc else ''
|
| 1393 |
+
if refresh:
|
| 1394 |
+
self.refresh()
|
| 1395 |
+
|
| 1396 |
+
def set_description_str(self, desc=None, refresh=True):
|
| 1397 |
+
"""Set/modify description without ': ' appended."""
|
| 1398 |
+
self.desc = desc or ''
|
| 1399 |
+
if refresh:
|
| 1400 |
+
self.refresh()
|
| 1401 |
+
|
| 1402 |
+
def set_postfix(self, ordered_dict=None, refresh=True, **kwargs):
|
| 1403 |
+
"""
|
| 1404 |
+
Set/modify postfix (additional stats)
|
| 1405 |
+
with automatic formatting based on datatype.
|
| 1406 |
+
|
| 1407 |
+
Parameters
|
| 1408 |
+
----------
|
| 1409 |
+
ordered_dict : dict or OrderedDict, optional
|
| 1410 |
+
refresh : bool, optional
|
| 1411 |
+
Forces refresh [default: True].
|
| 1412 |
+
kwargs : dict, optional
|
| 1413 |
+
"""
|
| 1414 |
+
# Sort in alphabetical order to be more deterministic
|
| 1415 |
+
postfix = OrderedDict([] if ordered_dict is None else ordered_dict)
|
| 1416 |
+
for key in sorted(kwargs.keys()):
|
| 1417 |
+
postfix[key] = kwargs[key]
|
| 1418 |
+
# Preprocess stats according to datatype
|
| 1419 |
+
for key in postfix.keys():
|
| 1420 |
+
# Number: limit the length of the string
|
| 1421 |
+
if isinstance(postfix[key], Number):
|
| 1422 |
+
postfix[key] = self.format_num(postfix[key])
|
| 1423 |
+
# Else for any other type, try to get the string conversion
|
| 1424 |
+
elif not isinstance(postfix[key], str):
|
| 1425 |
+
postfix[key] = str(postfix[key])
|
| 1426 |
+
# Else if it's a string, don't need to preprocess anything
|
| 1427 |
+
# Stitch together to get the final postfix
|
| 1428 |
+
self.postfix = ', '.join(key + '=' + postfix[key].strip()
|
| 1429 |
+
for key in postfix.keys())
|
| 1430 |
+
if refresh:
|
| 1431 |
+
self.refresh()
|
| 1432 |
+
|
| 1433 |
+
def set_postfix_str(self, s='', refresh=True):
|
| 1434 |
+
"""
|
| 1435 |
+
Postfix without dictionary expansion, similar to prefix handling.
|
| 1436 |
+
"""
|
| 1437 |
+
self.postfix = str(s)
|
| 1438 |
+
if refresh:
|
| 1439 |
+
self.refresh()
|
| 1440 |
+
|
| 1441 |
+
def moveto(self, n):
    # TODO: private method
    # move the cursor n lines down (or -n lines up via terminal codes)
    self.fp.write('\n' * n + _term_move_up() * -n)
    flush = getattr(self.fp, 'flush', None)
    if flush is not None:
        flush()
|
| 1445 |
+
|
| 1446 |
+
@property
def format_dict(self):
    """Public API for read-only member access."""
    # a disabled bar that was never fully initialised (no `unit` attr)
    # exposes only minimal stats; missing keys default to None
    if self.disable and not hasattr(self, 'unit'):
        return defaultdict(lambda: None, {
            'n': self.n, 'total': self.total, 'elapsed': 0, 'unit': 'it'})
    if self.dynamic_ncols:
        # re-query the terminal size on every render (window resizes)
        self.ncols, self.nrows = self.dynamic_ncols(self.fp)
    return {
        'n': self.n, 'total': self.total,
        'elapsed': self._time() - self.start_t if hasattr(self, 'start_t') else 0,
        'ncols': self.ncols, 'nrows': self.nrows, 'prefix': self.desc,
        'ascii': self.ascii, 'unit': self.unit, 'unit_scale': self.unit_scale,
        # smoothed rate; None when no time has been accumulated yet
        'rate': self._ema_dn() / self._ema_dt() if self._ema_dt() else None,
        'bar_format': self.bar_format, 'postfix': self.postfix,
        'unit_divisor': self.unit_divisor, 'initial': self.initial,
        'colour': self.colour}
|
| 1463 |
+
|
| 1464 |
+
def display(self, msg=None, pos=None):
    """
    Use `self.sp` to display `msg` in the specified `pos`.

    Consider overloading this function when inheriting to use e.g.:
    `self.some_frontend(**self.format_dict)` instead of `self.sp`.

    Parameters
    ----------
    msg  : str, optional. What to display (default: `repr(self)`).
    pos  : int, optional. Position to `moveto`
      (default: `abs(self.pos)`).

    Returns
    -------
    out  : bool
        False if the bar lies entirely off-screen; True otherwise.
    """
    if pos is None:
        pos = abs(self.pos)

    nrows = self.nrows or 20
    if pos >= nrows - 1:
        # off-screen bars are not drawn at all
        if pos >= nrows:
            return False
        if msg or msg is None:  # override at `nrows - 1`
            msg = " ... (more hidden) ..."

    if not hasattr(self, "sp"):
        # `sp` is only set when gui=False; GUI subclasses must override
        raise TqdmDeprecationWarning(
            "Please use `tqdm.gui.tqdm(...)`"
            " instead of `tqdm(..., gui=True)`\n",
            fp_write=getattr(self.fp, 'write', sys.stderr.write))

    if pos:
        self.moveto(pos)
    self.sp(self.__str__() if msg is None else msg)
    if pos:
        self.moveto(-pos)
    return True
|
| 1499 |
+
|
| 1500 |
+
@classmethod
@contextmanager
def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs):
    """
    stream  : file-like object.
    method  : str, "read" or "write". The result of `read()` and
        the first argument of `write()` should have a `len()`.

    >>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj:
    ...     while True:
    ...         chunk = fobj.read(chunk_size)
    ...         if not chunk:
    ...             break
    """
    with cls(total=total, **tqdm_kwargs) as t:
        if bytes:
            # byte-oriented defaults: binary unit scaling
            t.unit, t.unit_scale, t.unit_divisor = "B", True, 1024
        yield CallbackIOWrapper(t.update, stream, method)
|
| 1520 |
+
|
| 1521 |
+
|
| 1522 |
+
def trange(*args, **kwargs):
    """Shortcut for tqdm(range(*args), **kwargs).

    Positional arguments are passed to `range` (start, stop, step);
    keyword arguments are passed to `tqdm`.
    """
    return tqdm(range(*args), **kwargs)
|
vllm/lib/python3.10/site-packages/tqdm/tqdm.1
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.\" Automatically generated by Pandoc 1.19.2
|
| 2 |
+
.\"
|
| 3 |
+
.TH "TQDM" "1" "2015\-2021" "tqdm User Manuals" ""
|
| 4 |
+
.hy
|
| 5 |
+
.SH NAME
|
| 6 |
+
.PP
|
| 7 |
+
tqdm \- fast, extensible progress bar for Python and CLI
|
| 8 |
+
.SH SYNOPSIS
|
| 9 |
+
.PP
|
| 10 |
+
tqdm [\f[I]options\f[]]
|
| 11 |
+
.SH DESCRIPTION
|
| 12 |
+
.PP
|
| 13 |
+
See <https://github.com/tqdm/tqdm>.
|
| 14 |
+
Can be used as a pipe:
|
| 15 |
+
.IP
|
| 16 |
+
.nf
|
| 17 |
+
\f[C]
|
| 18 |
+
$\ #\ count\ lines\ of\ code
|
| 19 |
+
$\ cat\ *.py\ |\ tqdm\ |\ wc\ \-l
|
| 20 |
+
327it\ [00:00,\ 981773.38it/s]
|
| 21 |
+
327
|
| 22 |
+
|
| 23 |
+
$\ #\ find\ all\ files
|
| 24 |
+
$\ find\ .\ \-name\ "*.py"\ |\ tqdm\ |\ wc\ \-l
|
| 25 |
+
432it\ [00:00,\ 833842.30it/s]
|
| 26 |
+
432
|
| 27 |
+
|
| 28 |
+
#\ ...\ and\ more\ info
|
| 29 |
+
$\ find\ .\ \-name\ \[aq]*.py\[aq]\ \-exec\ wc\ \-l\ \\{}\ \\;\ \\
|
| 30 |
+
\ \ |\ tqdm\ \-\-total\ 432\ \-\-unit\ files\ \-\-desc\ counting\ \\
|
| 31 |
+
\ \ |\ awk\ \[aq]{\ sum\ +=\ $1\ };\ END\ {\ print\ sum\ }\[aq]
|
| 32 |
+
counting:\ 100%|█████████|\ 432/432\ [00:00<00:00,\ 794361.83files/s]
|
| 33 |
+
131998
|
| 34 |
+
\f[]
|
| 35 |
+
.fi
|
| 36 |
+
.SH OPTIONS
|
| 37 |
+
.TP
|
| 38 |
+
.B \-h, \-\-help
|
| 39 |
+
Print this help and exit.
|
| 40 |
+
.RS
|
| 41 |
+
.RE
|
| 42 |
+
.TP
|
| 43 |
+
.B \-v, \-\-version
|
| 44 |
+
Print version and exit.
|
| 45 |
+
.RS
|
| 46 |
+
.RE
|
| 47 |
+
.TP
|
| 48 |
+
.B \-\-desc=\f[I]desc\f[]
|
| 49 |
+
str, optional.
|
| 50 |
+
Prefix for the progressbar.
|
| 51 |
+
.RS
|
| 52 |
+
.RE
|
| 53 |
+
.TP
|
| 54 |
+
.B \-\-total=\f[I]total\f[]
|
| 55 |
+
int or float, optional.
|
| 56 |
+
The number of expected iterations.
|
| 57 |
+
If unspecified, len(iterable) is used if possible.
|
| 58 |
+
If float("inf") or as a last resort, only basic progress statistics are
|
| 59 |
+
displayed (no ETA, no progressbar).
|
| 60 |
+
If \f[C]gui\f[] is True and this parameter needs subsequent updating,
|
| 61 |
+
specify an initial arbitrary large positive number, e.g.
|
| 62 |
+
9e9.
|
| 63 |
+
.RS
|
| 64 |
+
.RE
|
| 65 |
+
.TP
|
| 66 |
+
.B \-\-leave
|
| 67 |
+
bool, optional.
|
| 68 |
+
If [default: True], keeps all traces of the progressbar upon termination
|
| 69 |
+
of iteration.
|
| 70 |
+
If \f[C]None\f[], will leave only if \f[C]position\f[] is \f[C]0\f[].
|
| 71 |
+
.RS
|
| 72 |
+
.RE
|
| 73 |
+
.TP
|
| 74 |
+
.B \-\-ncols=\f[I]ncols\f[]
|
| 75 |
+
int, optional.
|
| 76 |
+
The width of the entire output message.
|
| 77 |
+
If specified, dynamically resizes the progressbar to stay within this
|
| 78 |
+
bound.
|
| 79 |
+
If unspecified, attempts to use environment width.
|
| 80 |
+
The fallback is a meter width of 10 and no limit for the counter and
|
| 81 |
+
statistics.
|
| 82 |
+
If 0, will not print any meter (only stats).
|
| 83 |
+
.RS
|
| 84 |
+
.RE
|
| 85 |
+
.TP
|
| 86 |
+
.B \-\-mininterval=\f[I]mininterval\f[]
|
| 87 |
+
float, optional.
|
| 88 |
+
Minimum progress display update interval [default: 0.1] seconds.
|
| 89 |
+
.RS
|
| 90 |
+
.RE
|
| 91 |
+
.TP
|
| 92 |
+
.B \-\-maxinterval=\f[I]maxinterval\f[]
|
| 93 |
+
float, optional.
|
| 94 |
+
Maximum progress display update interval [default: 10] seconds.
|
| 95 |
+
Automatically adjusts \f[C]miniters\f[] to correspond to
|
| 96 |
+
\f[C]mininterval\f[] after long display update lag.
|
| 97 |
+
Only works if \f[C]dynamic_miniters\f[] or monitor thread is enabled.
|
| 98 |
+
.RS
|
| 99 |
+
.RE
|
| 100 |
+
.TP
|
| 101 |
+
.B \-\-miniters=\f[I]miniters\f[]
|
| 102 |
+
int or float, optional.
|
| 103 |
+
Minimum progress display update interval, in iterations.
|
| 104 |
+
If 0 and \f[C]dynamic_miniters\f[], will automatically adjust to equal
|
| 105 |
+
\f[C]mininterval\f[] (more CPU efficient, good for tight loops).
|
| 106 |
+
If > 0, will skip display of specified number of iterations.
|
| 107 |
+
Tweak this and \f[C]mininterval\f[] to get very efficient loops.
|
| 108 |
+
If your progress is erratic with both fast and slow iterations (network,
|
| 109 |
+
skipping items, etc) you should set miniters=1.
|
| 110 |
+
.RS
|
| 111 |
+
.RE
|
| 112 |
+
.TP
|
| 113 |
+
.B \-\-ascii=\f[I]ascii\f[]
|
| 114 |
+
bool or str, optional.
|
| 115 |
+
If unspecified or False, use unicode (smooth blocks) to fill the meter.
|
| 116 |
+
The fallback is to use ASCII characters " 123456789#".
|
| 117 |
+
.RS
|
| 118 |
+
.RE
|
| 119 |
+
.TP
|
| 120 |
+
.B \-\-disable
|
| 121 |
+
bool, optional.
|
| 122 |
+
Whether to disable the entire progressbar wrapper [default: False].
|
| 123 |
+
If set to None, disable on non\-TTY.
|
| 124 |
+
.RS
|
| 125 |
+
.RE
|
| 126 |
+
.TP
|
| 127 |
+
.B \-\-unit=\f[I]unit\f[]
|
| 128 |
+
str, optional.
|
| 129 |
+
String that will be used to define the unit of each iteration [default:
|
| 130 |
+
it].
|
| 131 |
+
.RS
|
| 132 |
+
.RE
|
| 133 |
+
.TP
|
| 134 |
+
.B \-\-unit\-scale=\f[I]unit_scale\f[]
|
| 135 |
+
bool or int or float, optional.
|
| 136 |
+
If 1 or True, the number of iterations will be reduced/scaled
|
| 137 |
+
automatically and a metric prefix following the International System of
|
| 138 |
+
Units standard will be added (kilo, mega, etc.) [default: False].
|
| 139 |
+
If any other non\-zero number, will scale \f[C]total\f[] and \f[C]n\f[].
|
| 140 |
+
.RS
|
| 141 |
+
.RE
|
| 142 |
+
.TP
|
| 143 |
+
.B \-\-dynamic\-ncols
|
| 144 |
+
bool, optional.
|
| 145 |
+
If set, constantly alters \f[C]ncols\f[] and \f[C]nrows\f[] to the
|
| 146 |
+
environment (allowing for window resizes) [default: False].
|
| 147 |
+
.RS
|
| 148 |
+
.RE
|
| 149 |
+
.TP
|
| 150 |
+
.B \-\-smoothing=\f[I]smoothing\f[]
|
| 151 |
+
float, optional.
|
| 152 |
+
Exponential moving average smoothing factor for speed estimates (ignored
|
| 153 |
+
in GUI mode).
|
| 154 |
+
Ranges from 0 (average speed) to 1 (current/instantaneous speed)
|
| 155 |
+
[default: 0.3].
|
| 156 |
+
.RS
|
| 157 |
+
.RE
|
| 158 |
+
.TP
|
| 159 |
+
.B \-\-bar\-format=\f[I]bar_format\f[]
|
| 160 |
+
str, optional.
|
| 161 |
+
Specify a custom bar string formatting.
|
| 162 |
+
May impact performance.
|
| 163 |
+
[default: \[aq]{l_bar}{bar}{r_bar}\[aq]], where l_bar=\[aq]{desc}:
|
| 164 |
+
{percentage:3.0f}%|\[aq] and r_bar=\[aq]| {n_fmt}/{total_fmt}
|
| 165 |
+
[{elapsed}<{remaining}, \[aq] \[aq]{rate_fmt}{postfix}]\[aq] Possible
|
| 166 |
+
vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt, percentage,
|
| 167 |
+
elapsed, elapsed_s, ncols, nrows, desc, unit, rate, rate_fmt,
|
| 168 |
+
rate_noinv, rate_noinv_fmt, rate_inv, rate_inv_fmt, postfix,
|
| 169 |
+
unit_divisor, remaining, remaining_s, eta.
|
| 170 |
+
Note that a trailing ": " is automatically removed after {desc} if the
|
| 171 |
+
latter is empty.
|
| 172 |
+
.RS
|
| 173 |
+
.RE
|
| 174 |
+
.TP
|
| 175 |
+
.B \-\-initial=\f[I]initial\f[]
|
| 176 |
+
int or float, optional.
|
| 177 |
+
The initial counter value.
|
| 178 |
+
Useful when restarting a progress bar [default: 0].
|
| 179 |
+
If using float, consider specifying \f[C]{n:.3f}\f[] or similar in
|
| 180 |
+
\f[C]bar_format\f[], or specifying \f[C]unit_scale\f[].
|
| 181 |
+
.RS
|
| 182 |
+
.RE
|
| 183 |
+
.TP
|
| 184 |
+
.B \-\-position=\f[I]position\f[]
|
| 185 |
+
int, optional.
|
| 186 |
+
Specify the line offset to print this bar (starting from 0) Automatic if
|
| 187 |
+
unspecified.
|
| 188 |
+
Useful to manage multiple bars at once (eg, from threads).
|
| 189 |
+
.RS
|
| 190 |
+
.RE
|
| 191 |
+
.TP
|
| 192 |
+
.B \-\-postfix=\f[I]postfix\f[]
|
| 193 |
+
dict or *, optional.
|
| 194 |
+
Specify additional stats to display at the end of the bar.
|
| 195 |
+
Calls \f[C]set_postfix(**postfix)\f[] if possible (dict).
|
| 196 |
+
.RS
|
| 197 |
+
.RE
|
| 198 |
+
.TP
|
| 199 |
+
.B \-\-unit\-divisor=\f[I]unit_divisor\f[]
|
| 200 |
+
float, optional.
|
| 201 |
+
[default: 1000], ignored unless \f[C]unit_scale\f[] is True.
|
| 202 |
+
.RS
|
| 203 |
+
.RE
|
| 204 |
+
.TP
|
| 205 |
+
.B \-\-write\-bytes
|
| 206 |
+
bool, optional.
|
| 207 |
+
Whether to write bytes.
|
| 208 |
+
If (default: False) will write unicode.
|
| 209 |
+
.RS
|
| 210 |
+
.RE
|
| 211 |
+
.TP
|
| 212 |
+
.B \-\-lock\-args=\f[I]lock_args\f[]
|
| 213 |
+
tuple, optional.
|
| 214 |
+
Passed to \f[C]refresh\f[] for intermediate output (initialisation,
|
| 215 |
+
iterating, and updating).
|
| 216 |
+
.RS
|
| 217 |
+
.RE
|
| 218 |
+
.TP
|
| 219 |
+
.B \-\-nrows=\f[I]nrows\f[]
|
| 220 |
+
int, optional.
|
| 221 |
+
The screen height.
|
| 222 |
+
If specified, hides nested bars outside this bound.
|
| 223 |
+
If unspecified, attempts to use environment height.
|
| 224 |
+
The fallback is 20.
|
| 225 |
+
.RS
|
| 226 |
+
.RE
|
| 227 |
+
.TP
|
| 228 |
+
.B \-\-colour=\f[I]colour\f[]
|
| 229 |
+
str, optional.
|
| 230 |
+
Bar colour (e.g.
|
| 231 |
+
\[aq]green\[aq], \[aq]#00ff00\[aq]).
|
| 232 |
+
.RS
|
| 233 |
+
.RE
|
| 234 |
+
.TP
|
| 235 |
+
.B \-\-delay=\f[I]delay\f[]
|
| 236 |
+
float, optional.
|
| 237 |
+
Don\[aq]t display until [default: 0] seconds have elapsed.
|
| 238 |
+
.RS
|
| 239 |
+
.RE
|
| 240 |
+
.TP
|
| 241 |
+
.B \-\-delim=\f[I]delim\f[]
|
| 242 |
+
chr, optional.
|
| 243 |
+
Delimiting character [default: \[aq]\\n\[aq]].
|
| 244 |
+
Use \[aq]\\0\[aq] for null.
|
| 245 |
+
N.B.: on Windows systems, Python converts \[aq]\\n\[aq] to
|
| 246 |
+
\[aq]\\r\\n\[aq].
|
| 247 |
+
.RS
|
| 248 |
+
.RE
|
| 249 |
+
.TP
|
| 250 |
+
.B \-\-buf\-size=\f[I]buf_size\f[]
|
| 251 |
+
int, optional.
|
| 252 |
+
String buffer size in bytes [default: 256] used when \f[C]delim\f[] is
|
| 253 |
+
specified.
|
| 254 |
+
.RS
|
| 255 |
+
.RE
|
| 256 |
+
.TP
|
| 257 |
+
.B \-\-bytes
|
| 258 |
+
bool, optional.
|
| 259 |
+
If true, will count bytes, ignore \f[C]delim\f[], and default
|
| 260 |
+
\f[C]unit_scale\f[] to True, \f[C]unit_divisor\f[] to 1024, and
|
| 261 |
+
\f[C]unit\f[] to \[aq]B\[aq].
|
| 262 |
+
.RS
|
| 263 |
+
.RE
|
| 264 |
+
.TP
|
| 265 |
+
.B \-\-tee
|
| 266 |
+
bool, optional.
|
| 267 |
+
If true, passes \f[C]stdin\f[] to both \f[C]stderr\f[] and
|
| 268 |
+
\f[C]stdout\f[].
|
| 269 |
+
.RS
|
| 270 |
+
.RE
|
| 271 |
+
.TP
|
| 272 |
+
.B \-\-update
|
| 273 |
+
bool, optional.
|
| 274 |
+
If true, will treat input as newly elapsed iterations, i.e.
|
| 275 |
+
numbers to pass to \f[C]update()\f[].
|
| 276 |
+
Note that this is slow (~2e5 it/s) since every input must be decoded as
|
| 277 |
+
a number.
|
| 278 |
+
.RS
|
| 279 |
+
.RE
|
| 280 |
+
.TP
|
| 281 |
+
.B \-\-update\-to
|
| 282 |
+
bool, optional.
|
| 283 |
+
If true, will treat input as total elapsed iterations, i.e.
|
| 284 |
+
numbers to assign to \f[C]self.n\f[].
|
| 285 |
+
Note that this is slow (~2e5 it/s) since every input must be decoded as
|
| 286 |
+
a number.
|
| 287 |
+
.RS
|
| 288 |
+
.RE
|
| 289 |
+
.TP
|
| 290 |
+
.B \-\-null
|
| 291 |
+
bool, optional.
|
| 292 |
+
If true, will discard input (no stdout).
|
| 293 |
+
.RS
|
| 294 |
+
.RE
|
| 295 |
+
.TP
|
| 296 |
+
.B \-\-manpath=\f[I]manpath\f[]
|
| 297 |
+
str, optional.
|
| 298 |
+
Directory in which to install tqdm man pages.
|
| 299 |
+
.RS
|
| 300 |
+
.RE
|
| 301 |
+
.TP
|
| 302 |
+
.B \-\-comppath=\f[I]comppath\f[]
|
| 303 |
+
str, optional.
|
| 304 |
+
Directory in which to place tqdm completion.
|
| 305 |
+
.RS
|
| 306 |
+
.RE
|
| 307 |
+
.TP
|
| 308 |
+
.B \-\-log=\f[I]log\f[]
|
| 309 |
+
str, optional.
|
| 310 |
+
CRITICAL|FATAL|ERROR|WARN(ING)|[default: \[aq]INFO\[aq]]|DEBUG|NOTSET.
|
| 311 |
+
.RS
|
| 312 |
+
.RE
|
| 313 |
+
.SH AUTHORS
|
| 314 |
+
tqdm developers <https://github.com/tqdm>.
|
vllm/lib/python3.10/site-packages/wandb/cli/__init__.py
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/wandb/cli/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (159 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/cli/__pycache__/beta.cpython-310.pyc
ADDED
|
Binary file (4.56 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/cli/__pycache__/cli.cpython-310.pyc
ADDED
|
Binary file (65.1 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/cli/beta.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Beta versions of wandb CLI commands.
|
| 2 |
+
|
| 3 |
+
These commands are experimental and may change or be removed in future versions.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from __future__ import annotations
|
| 7 |
+
|
| 8 |
+
import pathlib
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
import click
|
| 12 |
+
|
| 13 |
+
import wandb
|
| 14 |
+
from wandb.errors import UsageError, WandbCoreNotAvailableError
|
| 15 |
+
from wandb.sdk.wandb_sync import _sync
|
| 16 |
+
from wandb.util import get_core_path
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@click.group()
|
| 20 |
+
def beta():
|
| 21 |
+
"""Beta versions of wandb CLI commands. Requires wandb-core."""
|
| 22 |
+
# this is the future that requires wandb-core!
|
| 23 |
+
import wandb.env
|
| 24 |
+
|
| 25 |
+
wandb._sentry.configure_scope(process_context="wandb_beta")
|
| 26 |
+
|
| 27 |
+
if wandb.env.is_require_legacy_service():
|
| 28 |
+
raise UsageError(
|
| 29 |
+
"wandb beta commands can only be used with wandb-core. "
|
| 30 |
+
f"Please make sure that `{wandb.env._REQUIRE_LEGACY_SERVICE}` is not set."
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
get_core_path()
|
| 35 |
+
except WandbCoreNotAvailableError as e:
|
| 36 |
+
wandb._sentry.exception(f"using `wandb beta`. failed with {e}")
|
| 37 |
+
click.secho(
|
| 38 |
+
(e),
|
| 39 |
+
fg="red",
|
| 40 |
+
err=True,
|
| 41 |
+
)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@beta.command(
|
| 45 |
+
name="sync",
|
| 46 |
+
context_settings={"default_map": {}},
|
| 47 |
+
help="Upload a training run to W&B",
|
| 48 |
+
)
|
| 49 |
+
@click.pass_context
|
| 50 |
+
@click.argument("wandb_dir", nargs=1, type=click.Path(exists=True))
|
| 51 |
+
@click.option("--id", "run_id", help="The run you want to upload to.")
|
| 52 |
+
@click.option("--project", "-p", help="The project you want to upload to.")
|
| 53 |
+
@click.option("--entity", "-e", help="The entity to scope to.")
|
| 54 |
+
@click.option("--skip-console", is_flag=True, default=False, help="Skip console logs")
|
| 55 |
+
@click.option("--append", is_flag=True, default=False, help="Append run")
|
| 56 |
+
@click.option(
|
| 57 |
+
"--include",
|
| 58 |
+
"-i",
|
| 59 |
+
help="Glob to include. Can be used multiple times.",
|
| 60 |
+
multiple=True,
|
| 61 |
+
)
|
| 62 |
+
@click.option(
|
| 63 |
+
"--exclude",
|
| 64 |
+
"-e",
|
| 65 |
+
help="Glob to exclude. Can be used multiple times.",
|
| 66 |
+
multiple=True,
|
| 67 |
+
)
|
| 68 |
+
@click.option(
|
| 69 |
+
"--mark-synced/--no-mark-synced",
|
| 70 |
+
is_flag=True,
|
| 71 |
+
default=True,
|
| 72 |
+
help="Mark runs as synced",
|
| 73 |
+
)
|
| 74 |
+
@click.option(
|
| 75 |
+
"--skip-synced/--no-skip-synced",
|
| 76 |
+
is_flag=True,
|
| 77 |
+
default=True,
|
| 78 |
+
help="Skip synced runs",
|
| 79 |
+
)
|
| 80 |
+
@click.option(
|
| 81 |
+
"--dry-run", is_flag=True, help="Perform a dry run without uploading anything."
|
| 82 |
+
)
|
| 83 |
+
def sync_beta( # noqa: C901
|
| 84 |
+
ctx,
|
| 85 |
+
wandb_dir=None,
|
| 86 |
+
run_id: str | None = None,
|
| 87 |
+
project: str | None = None,
|
| 88 |
+
entity: str | None = None,
|
| 89 |
+
skip_console: bool = False,
|
| 90 |
+
append: bool = False,
|
| 91 |
+
include: str | None = None,
|
| 92 |
+
exclude: str | None = None,
|
| 93 |
+
skip_synced: bool = True,
|
| 94 |
+
mark_synced: bool = True,
|
| 95 |
+
dry_run: bool = False,
|
| 96 |
+
) -> None:
|
| 97 |
+
import concurrent.futures
|
| 98 |
+
from multiprocessing import cpu_count
|
| 99 |
+
|
| 100 |
+
paths = set()
|
| 101 |
+
|
| 102 |
+
# TODO: test file discovery logic
|
| 103 |
+
# include and exclude globs are evaluated relative to the provided base_path
|
| 104 |
+
if include:
|
| 105 |
+
for pattern in include:
|
| 106 |
+
matching_dirs = list(pathlib.Path(wandb_dir).glob(pattern))
|
| 107 |
+
for d in matching_dirs:
|
| 108 |
+
if not d.is_dir():
|
| 109 |
+
continue
|
| 110 |
+
wandb_files = [p for p in d.glob("*.wandb") if p.is_file()]
|
| 111 |
+
if len(wandb_files) > 1:
|
| 112 |
+
wandb.termwarn(
|
| 113 |
+
f"Multiple wandb files found in directory {d}, skipping"
|
| 114 |
+
)
|
| 115 |
+
elif len(wandb_files) == 1:
|
| 116 |
+
paths.add(d)
|
| 117 |
+
else:
|
| 118 |
+
paths.update({p.parent for p in pathlib.Path(wandb_dir).glob("**/*.wandb")})
|
| 119 |
+
|
| 120 |
+
for pattern in exclude:
|
| 121 |
+
matching_dirs = list(pathlib.Path(wandb_dir).glob(pattern))
|
| 122 |
+
for d in matching_dirs:
|
| 123 |
+
if not d.is_dir():
|
| 124 |
+
continue
|
| 125 |
+
if d in paths:
|
| 126 |
+
paths.remove(d)
|
| 127 |
+
|
| 128 |
+
# remove paths that are already synced, if requested
|
| 129 |
+
if skip_synced:
|
| 130 |
+
synced_paths = set()
|
| 131 |
+
for path in paths:
|
| 132 |
+
wandb_synced_files = [p for p in path.glob("*.wandb.synced") if p.is_file()]
|
| 133 |
+
if len(wandb_synced_files) > 1:
|
| 134 |
+
wandb.termwarn(
|
| 135 |
+
f"Multiple wandb.synced files found in directory {path}, skipping"
|
| 136 |
+
)
|
| 137 |
+
elif len(wandb_synced_files) == 1:
|
| 138 |
+
synced_paths.add(path)
|
| 139 |
+
paths -= synced_paths
|
| 140 |
+
|
| 141 |
+
if run_id and len(paths) > 1:
|
| 142 |
+
# TODO: handle this more gracefully
|
| 143 |
+
click.echo("id can only be set for a single run.", err=True)
|
| 144 |
+
sys.exit(1)
|
| 145 |
+
|
| 146 |
+
if not paths:
|
| 147 |
+
click.echo("No runs to sync.")
|
| 148 |
+
return
|
| 149 |
+
|
| 150 |
+
click.echo("Found runs:")
|
| 151 |
+
for path in paths:
|
| 152 |
+
click.echo(f" {path}")
|
| 153 |
+
|
| 154 |
+
if dry_run:
|
| 155 |
+
return
|
| 156 |
+
|
| 157 |
+
wandb.setup()
|
| 158 |
+
|
| 159 |
+
# TODO: make it thread-safe in the Rust code
|
| 160 |
+
with concurrent.futures.ProcessPoolExecutor(
|
| 161 |
+
max_workers=min(len(paths), cpu_count())
|
| 162 |
+
) as executor:
|
| 163 |
+
futures = []
|
| 164 |
+
for path in paths:
|
| 165 |
+
# we already know there is only one wandb file in the directory
|
| 166 |
+
wandb_file = [p for p in path.glob("*.wandb") if p.is_file()][0]
|
| 167 |
+
future = executor.submit(
|
| 168 |
+
_sync,
|
| 169 |
+
wandb_file,
|
| 170 |
+
run_id=run_id,
|
| 171 |
+
project=project,
|
| 172 |
+
entity=entity,
|
| 173 |
+
skip_console=skip_console,
|
| 174 |
+
append=append,
|
| 175 |
+
mark_synced=mark_synced,
|
| 176 |
+
)
|
| 177 |
+
futures.append(future)
|
| 178 |
+
|
| 179 |
+
# Wait for tasks to complete
|
| 180 |
+
for _ in concurrent.futures.as_completed(futures):
|
| 181 |
+
pass
|
vllm/lib/python3.10/site-packages/wandb/cli/cli.py
ADDED
|
@@ -0,0 +1,2810 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import configparser
|
| 3 |
+
import datetime
|
| 4 |
+
import getpass
|
| 5 |
+
import json
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
import shlex
|
| 9 |
+
import shutil
|
| 10 |
+
import subprocess
|
| 11 |
+
import sys
|
| 12 |
+
import tempfile
|
| 13 |
+
import textwrap
|
| 14 |
+
import time
|
| 15 |
+
import traceback
|
| 16 |
+
from functools import wraps
|
| 17 |
+
from typing import Any, Dict, Optional
|
| 18 |
+
|
| 19 |
+
import click
|
| 20 |
+
import yaml
|
| 21 |
+
from click.exceptions import ClickException
|
| 22 |
+
|
| 23 |
+
# pycreds has a find_executable that works in windows
|
| 24 |
+
from dockerpycreds.utils import find_executable
|
| 25 |
+
|
| 26 |
+
import wandb
|
| 27 |
+
import wandb.env
|
| 28 |
+
import wandb.errors
|
| 29 |
+
import wandb.sdk.verify.verify as wandb_verify
|
| 30 |
+
from wandb import Config, Error, env, util, wandb_agent, wandb_sdk
|
| 31 |
+
from wandb.apis import InternalApi, PublicApi
|
| 32 |
+
from wandb.apis.public import RunQueue
|
| 33 |
+
from wandb.errors.links import url_registry
|
| 34 |
+
from wandb.sdk.artifacts._validators import is_artifact_registry_project
|
| 35 |
+
from wandb.sdk.artifacts.artifact_file_cache import get_artifact_file_cache
|
| 36 |
+
from wandb.sdk.internal.internal_api import Api as SDKInternalApi
|
| 37 |
+
from wandb.sdk.launch import utils as launch_utils
|
| 38 |
+
from wandb.sdk.launch._launch_add import _launch_add
|
| 39 |
+
from wandb.sdk.launch.errors import ExecutionError, LaunchError
|
| 40 |
+
from wandb.sdk.launch.sweeps import utils as sweep_utils
|
| 41 |
+
from wandb.sdk.launch.sweeps.scheduler import Scheduler
|
| 42 |
+
from wandb.sdk.lib import filesystem
|
| 43 |
+
from wandb.sync import SyncManager, get_run_from_path, get_runs
|
| 44 |
+
|
| 45 |
+
from .beta import beta
|
| 46 |
+
|
# Send cli logs to wandb/debug-cli.<username>.log by default and fallback to a temp dir.
_wandb_dir = wandb.old.core.wandb_dir(env.get_dir())
if not os.path.exists(_wandb_dir):
    _wandb_dir = tempfile.gettempdir()

try:
    _username = getpass.getuser()
except KeyError:
    # getuser() could raise KeyError in restricted environments like
    # chroot jails or docker containers. Return user id in these cases.
    _username = str(os.getuid())

# All CLI commands log here; the path is surfaced to users on error.
_wandb_log_path = os.path.join(_wandb_dir, f"debug-cli.{_username}.log")

logging.basicConfig(
    filename=_wandb_log_path,
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
# NOTE(review): basicConfig is a no-op once the root logger already has
# handlers (the call above), so this stdout config likely does nothing —
# TODO confirm whether `force=True` or an explicit StreamHandler was intended.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger("wandb")

# Click Contexts shared by the commands below.
CONTEXT = {"default_map": {}}
RUN_CONTEXT = {
    "default_map": {},
    "allow_extra_args": True,
    "ignore_unknown_options": True,
}
| 77 |
+
|
| 78 |
+
|
def cli_unsupported(argument):
    """Print an error for an unsupported CLI argument and exit with status 1."""
    wandb.termerror(f"Unsupported argument `{argument}`")
    sys.exit(1)
| 82 |
+
|
| 83 |
+
|
class ClickWandbException(ClickException):
    """ClickException that formats wandb errors plainly and foreign exceptions
    with their original type and a pointer to the log file.

    Callers are expected to set ``orig_type`` to the exception class that was
    originally raised before this exception is shown.
    """

    def format_message(self):
        # log_file = util.get_log_file_path()
        log_file = ""
        orig_type = f"{self.orig_type.__module__}.{self.orig_type.__name__}"
        if issubclass(self.orig_type, Error):
            # wandb's own errors are user-facing; show just the message in red.
            return click.style(str(self.message), fg="red")
        return (
            f"An Exception was raised, see {log_file} for full traceback.\n"
            f"{orig_type}: {self.message}"
        )
| 96 |
+
|
| 97 |
+
|
def display_error(func):
    """Function decorator for catching common errors and re-raising as wandb.Error.

    Logs the full traceback to the CLI log file, tells the user where to find
    it, and re-raises the error as a ClickWandbException carrying the original
    exception type.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except wandb.Error as e:
            exc_type, exc_value, exc_tb = sys.exc_info()
            logger.error(
                "".join(traceback.format_exception(exc_type, exc_value, exc_tb))
            )
            wandb.termerror(f"Find detailed error logs at: {_wandb_log_path}")
            click_exc = ClickWandbException(e)
            click_exc.orig_type = exc_type
            # Preserve the original traceback so click shows where it came from.
            raise click_exc.with_traceback(exc_tb)

    return wrapper
| 115 |
+
|
| 116 |
+
|
_api = None  # caching api instance allows patching from unit tests


def _get_cling_api(reset=None):
    """Get a reference to the internal api with cling settings.

    The instance is cached at module level; pass ``reset=True`` to tear down
    the existing wandb session and build a fresh api object.
    """
    # TODO: move CLI to wandb-core backend
    wandb.require("legacy-service")

    global _api
    if reset:
        _api = None
        wandb.teardown()
    if _api is None:
        # TODO(jhr): make a settings object that is better for non runs.
        # only override the necessary setting
        wandb.setup(settings=wandb.Settings(x_cli_only_mode=True))
        _api = InternalApi()
    return _api
| 135 |
+
|
| 136 |
+
|
def prompt_for_project(ctx, entity):
    """Ask the user for a project, creating one if necessary."""
    existing = ctx.invoke(projects, entity=entity, display=False)
    api = _get_cling_api()
    try:
        if len(existing) == 0:
            # No projects yet: prompt for a name and create it server-side.
            project = click.prompt("Enter a name for your first project")
            # description = editor()
            project = api.upsert_project(project, entity=entity)["name"]
        else:
            choices = [p["name"] for p in existing] + ["Create New"]
            wandb.termlog("Which project should we use?")
            picked = util.prompt_choices(choices)
            # prompt_choices can return an empty result on click; treat that
            # the same as explicitly choosing "Create New".
            project = picked if picked else "Create New"
            # TODO: check with the server if the project exists
            if project == "Create New":
                project = click.prompt(
                    "Enter a name for your new project", value_proc=api.format_project
                )
                # description = editor()
                project = api.upsert_project(project, entity=entity)["name"]

    except wandb.errors.CommError as e:
        raise ClickException(str(e))

    return project
| 166 |
+
|
| 167 |
+
|
class RunGroup(click.Group):
    """Click group whose command lookup is wrapped in wandb error handling."""

    @display_error
    def get_command(self, ctx, cmd_name):
        # TODO: check if cmd_name is a file in the current dir and not require `run`?
        # The original returned rv when not None and None otherwise, which is
        # exactly the lookup result itself.
        return click.Group.get_command(self, ctx, cmd_name)
| 176 |
+
|
| 177 |
+
|
@click.command(cls=RunGroup, invoke_without_command=True)
@click.version_option(version=wandb.__version__)
@click.pass_context
def cli(ctx):
    # Root command group: with no subcommand, just show the help text.
    # (No docstring on purpose — click would surface it as user-visible help.)
    if ctx.invoked_subcommand is None:
        click.echo(ctx.get_help())
| 184 |
+
|
| 185 |
+
|
@cli.command(context_settings=CONTEXT, help="List projects", hidden=True)
@click.option(
    "--entity",
    "-e",
    default=None,
    envvar=env.ENTITY,
    help="The entity to scope the listing to.",
)
@display_error
def projects(entity, display=True):
    # List the entity's latest projects; echo them unless display=False
    # (prompt_for_project invokes this programmatically to get the raw list).
    api = _get_cling_api()
    projects = api.list_projects(entity=entity)
    if len(projects) == 0:
        message = "No projects found for {}".format(entity)
    else:
        message = 'Latest projects for "{}"'.format(entity)
    if display:
        click.echo(click.style(message, bold=True))
        for project in projects:
            styled_name = click.style(project["name"], fg="blue", bold=True)
            first_desc_line = str(project["description"] or "").split("\n")[0]
            click.echo(f"{styled_name} - {first_desc_line}")
    return projects
| 215 |
+
|
| 216 |
+
|
@cli.command(context_settings=CONTEXT, help="Login to Weights & Biases")
@click.argument("key", nargs=-1)
@click.option("--cloud", is_flag=True, help="Login to the cloud instead of local")
@click.option("--host", default=None, help="Login to a specific instance of W&B")
@click.option(
    "--relogin", default=None, is_flag=True, help="Force relogin if already logged in."
)
@click.option("--anonymously", default=False, is_flag=True, help="Log in anonymously")
@click.option("--verify", default=False, is_flag=True, help="Verify login credentials")
@display_error
def login(key, host, cloud, relogin, anonymously, verify, no_offline=False):
    # TODO: move CLI to wandb-core backend
    wandb.require("legacy-service")

    # TODO: handle no_offline
    anon_mode = "must" if anonymously else "never"

    wandb_sdk.wandb_login._handle_host_wandb_setting(host, cloud)
    # A change in click or the test harness means key can be none...
    # key arrives as a (possibly empty) tuple; reduce it to a single value.
    key = key[0] if key else None
    if key:
        # An explicit key always forces a fresh login.
        relogin = True

    login_settings = {
        "x_cli_only_mode": True,
        "x_disable_viewer": relogin and not verify,
        "anonymous": anon_mode,
        "base_url": host,
    }

    try:
        # Drop unset values so wandb.Settings only sees real overrides.
        wandb.setup(
            settings=wandb.Settings(
                **{k: v for k, v in login_settings.items() if v is not None}
            )
        )
    except TypeError as e:
        wandb.termerror(str(e))
        sys.exit(1)

    wandb.login(
        relogin=relogin,
        key=key,
        anonymous=anon_mode,
        host=host,
        force=True,
        verify=verify,
    )
| 265 |
+
|
| 266 |
+
|
@cli.command(
    context_settings=CONTEXT, help="Run a wandb service", name="service", hidden=True
)
@click.option(
    "--sock-port", default=None, type=int, help="The host port to bind socket service."
)
@click.option("--port-filename", default=None, help="Save allocated port to file.")
@click.option("--address", default=None, help="The address to bind service.")
@click.option("--pid", default=None, type=int, help="The parent process id to monitor.")
@click.option("--debug", is_flag=True, help="log debug info")
@display_error
def service(
    sock_port=None,
    port_filename=None,
    address=None,
    pid=None,
    debug=False,
):
    # Imported lazily: the server machinery is only needed by this command.
    from wandb.sdk.service.server import WandbServer

    # Blocks serving requests until the parent process (pid) goes away.
    WandbServer(
        sock_port=sock_port,
        port_fname=port_filename,
        address=address,
        pid=pid,
        debug=debug,
    ).serve()
| 295 |
+
|
| 296 |
+
|
@cli.command(
    context_settings=CONTEXT, help="Configure a directory with Weights & Biases"
)
@click.option("--project", "-p", help="The project to use.")
@click.option("--entity", "-e", help="The entity to scope the project to.")
# TODO(jhr): Enable these with settings rework
# @click.option("--setting", "-s", help="enable an arbitrary setting.", multiple=True)
# @click.option('--show', is_flag=True, help="Show settings")
@click.option("--reset", is_flag=True, help="Reset settings")
@click.option(
    "--mode",
    "-m",
    help=' Can be "online", "offline" or "disabled". Defaults to online.',
)
@click.pass_context
@display_error
def init(ctx, project, entity, reset, mode):
    # Configure the current directory for W&B: persist entity/project/mode
    # settings and write a wandb/.gitignore. Interactive unless any of the
    # reset/project/entity/mode options are given.
    from wandb.old.core import __stage_dir__, _set_stage_dir, wandb_dir

    if __stage_dir__ is None:
        _set_stage_dir("wandb")

    # non-interactive init
    if reset or project or entity or mode:
        api = InternalApi()
        if reset:
            api.clear_setting("entity", persist=True)
            api.clear_setting("project", persist=True)
            api.clear_setting("mode", persist=True)
            # TODO(jhr): clear more settings?
        if entity:
            api.set_setting("entity", entity, persist=True)
        if project:
            api.set_setting("project", project, persist=True)
        if mode:
            api.set_setting("mode", mode, persist=True)
        return

    # Interactive path: confirm reconfiguration if settings already exist.
    if os.path.isdir(wandb_dir()) and os.path.exists(
        os.path.join(wandb_dir(), "settings")
    ):
        click.confirm(
            click.style(
                "This directory has been configured previously, should we re-configure it?",
                bold=True,
            ),
            abort=True,
        )
    else:
        click.echo(
            click.style("Let's setup this directory for W&B!", fg="green", bold=True)
        )
    api = _get_cling_api()
    if api.api_key is None:
        ctx.invoke(login)
        api = _get_cling_api(reset=True)

    viewer = api.viewer()

    # Viewer can be `None` in case your API information became invalid, or
    # in testing if you switch hosts.
    if not viewer:
        click.echo(
            click.style(
                "Your login information seems to be invalid: can you log in again please?",
                fg="red",
                bold=True,
            )
        )
        ctx.invoke(login)
        api = _get_cling_api(reset=True)

        # This shouldn't happen.
        viewer = api.viewer()
        if not viewer:
            click.echo(
                click.style(
                    "We're sorry, there was a problem logging you in. "
                    "Please send us a note at support@wandb.com and tell us how this happened.",
                    fg="red",
                    bold=True,
                )
            )
            sys.exit(1)

    # At this point we should be logged in successfully.
    if len(viewer["teams"]["edges"]) > 1:
        team_names = [e["node"]["name"] for e in viewer["teams"]["edges"]] + [
            "Manual entry"
        ]
        wandb.termlog(
            "Which team should we use?",
        )
        result = util.prompt_choices(team_names)
        # result can be empty on click
        entity = result if result else "Manual Entry"
        if entity == "Manual Entry":
            entity = click.prompt("Enter the name of the team you want to use")
    else:
        entity = viewer.get("entity") or click.prompt(
            "What username or team should we use?"
        )

    # TODO: this error handling sucks and the output isn't pretty
    try:
        project = prompt_for_project(ctx, entity)
    except ClickWandbException:
        raise ClickException(f"Could not find team: {entity}")

    api.set_setting("entity", entity, persist=True)
    api.set_setting("project", project, persist=True)
    api.set_setting("base_url", api.settings().get("base_url"), persist=True)

    # Keep the wandb dir out of version control except for the settings file.
    filesystem.mkdir_exists_ok(wandb_dir())
    with open(os.path.join(wandb_dir(), ".gitignore"), "w") as file:
        file.write("*\n!settings")

    click.echo(
        click.style("This directory is configured! Next, track a run:\n", fg="green")
        + textwrap.dedent(
            """\
            * In your training script:
                {code1}
                {code2}
            * then `{run}`.
            """
        ).format(
            code1=click.style("import wandb", bold=True),
            code2=click.style('wandb.init(project="{}")'.format(project), bold=True),
            run=click.style("python <train.py>", bold=True),
        )
    )
| 432 |
+
|
| 433 |
+
|
@cli.command(
    context_settings=CONTEXT, help="Upload an offline training directory to W&B"
)
@click.pass_context
@click.argument("path", nargs=-1, type=click.Path(exists=True))
@click.option("--view", is_flag=True, default=False, help="View runs", hidden=True)
@click.option("--verbose", is_flag=True, default=False, help="Verbose", hidden=True)
@click.option("--id", "run_id", help="The run you want to upload to.")
@click.option("--project", "-p", help="The project you want to upload to.")
@click.option("--entity", "-e", help="The entity to scope to.")
@click.option(
    "--job_type",
    "job_type",
    help="Specifies the type of run for grouping related runs together.",
)
@click.option(
    "--sync-tensorboard/--no-sync-tensorboard",
    is_flag=True,
    default=None,
    help="Stream tfevent files to wandb.",
)
@click.option("--include-globs", help="Comma separated list of globs to include.")
@click.option("--exclude-globs", help="Comma separated list of globs to exclude.")
@click.option(
    "--include-online/--no-include-online",
    is_flag=True,
    default=None,
    help="Include online runs",
)
@click.option(
    "--include-offline/--no-include-offline",
    is_flag=True,
    default=None,
    help="Include offline runs",
)
@click.option(
    "--include-synced/--no-include-synced",
    is_flag=True,
    default=None,
    help="Include synced runs",
)
@click.option(
    "--mark-synced/--no-mark-synced",
    is_flag=True,
    default=True,
    help="Mark runs as synced",
)
@click.option("--sync-all", is_flag=True, default=False, help="Sync all runs")
@click.option("--clean", is_flag=True, default=False, help="Delete synced runs")
@click.option(
    "--clean-old-hours",
    default=24,
    help="Delete runs created before this many hours. To be used alongside --clean flag.",
    type=int,
)
@click.option(
    "--clean-force",
    is_flag=True,
    default=False,
    help="Clean without confirmation prompt.",
)
@click.option("--ignore", hidden=True)
@click.option("--show", default=5, help="Number of runs to show")
@click.option("--append", is_flag=True, default=False, help="Append run")
@click.option("--skip-console", is_flag=True, default=False, help="Skip console logs")
@display_error
def sync(
    ctx,
    path=None,
    view=None,
    verbose=None,
    run_id=None,
    project=None,
    entity=None,
    job_type=None,  # trace this back to SyncManager
    sync_tensorboard=None,
    include_globs=None,
    exclude_globs=None,
    include_online=None,
    include_offline=None,
    include_synced=None,
    mark_synced=None,
    sync_all=None,
    ignore=None,
    show=None,
    clean=None,
    clean_old_hours=24,
    clean_force=None,
    append=None,
    skip_console=None,
):
    # Sync offline run directories to the server, clean up synced ones, or
    # print a summary — dispatched at the bottom on sync_all / clean / path.
    api = _get_cling_api()
    if not api.is_authenticated:
        wandb.termlog("Login to W&B to sync offline runs")
        ctx.invoke(login, no_offline=True)
        api = _get_cling_api(reset=True)

    # --ignore is a hidden alias for --exclude-globs.
    if ignore:
        exclude_globs = ignore
    if include_globs:
        include_globs = include_globs.split(",")
    if exclude_globs:
        exclude_globs = exclude_globs.split(",")

    def _summary():
        # Report what would be synced plus hints about synced/unsynced runs.
        all_items = get_runs(
            include_online=True,
            include_offline=True,
            include_synced=True,
            include_unsynced=True,
        )
        sync_items = get_runs(
            include_online=include_online if include_online is not None else True,
            include_offline=include_offline if include_offline is not None else True,
            include_synced=include_synced if include_synced is not None else False,
            include_unsynced=True,
            exclude_globs=exclude_globs,
            include_globs=include_globs,
        )
        synced = []
        unsynced = []
        for item in all_items:
            (synced if item.synced else unsynced).append(item)
        if sync_items:
            wandb.termlog(f"Number of runs to be synced: {len(sync_items)}")
            if show and show < len(sync_items):
                wandb.termlog(f"Showing {show} runs to be synced:")
            for item in sync_items[: (show or len(sync_items))]:
                wandb.termlog(f" {item}")
        else:
            wandb.termlog("No runs to be synced.")
        if synced:
            clean_cmd = click.style("wandb sync --clean", fg="yellow")
            wandb.termlog(
                f"NOTE: use {clean_cmd} to delete {len(synced)} synced runs from local directory."
            )
        if unsynced:
            sync_cmd = click.style("wandb sync --sync-all", fg="yellow")
            wandb.termlog(
                f"NOTE: use {sync_cmd} to sync {len(unsynced)} unsynced runs from local directory."
            )

    def _sync_path(_path, _sync_tensorboard):
        # Drive a SyncManager over every entry in _path until it finishes.
        if run_id and len(_path) > 1:
            wandb.termerror("id can only be set for a single run.")
            sys.exit(1)
        sm = SyncManager(
            project=project,
            entity=entity,
            run_id=run_id,
            job_type=job_type,
            mark_synced=mark_synced,
            app_url=api.app_url,
            view=view,
            verbose=verbose,
            sync_tensorboard=_sync_tensorboard,
            log_path=_wandb_log_path,
            append=append,
            skip_console=skip_console,
        )
        for p in _path:
            sm.add(p)
        sm.start()
        while not sm.is_done():
            _ = sm.poll()

    def _sync_all():
        sync_items = get_runs(
            include_online=include_online if include_online is not None else True,
            include_offline=include_offline if include_offline is not None else True,
            include_synced=include_synced if include_synced is not None else False,
            include_unsynced=True,
            exclude_globs=exclude_globs,
            include_globs=include_globs,
        )
        if not sync_items:
            wandb.termerror("Nothing to sync.")
        else:
            # When syncing run directories, default to not syncing tensorboard
            sync_tb = sync_tensorboard if sync_tensorboard is not None else False
            _sync_path(sync_items, sync_tb)

    def _clean():
        # Delete synced run directories, either the given paths or any synced
        # run older than clean_old_hours.
        if path:
            runs = list(map(get_run_from_path, path))
            if not clean_force:
                click.confirm(
                    click.style(
                        f"Are you sure you want to remove {len(runs)} runs?",
                        bold=True,
                    ),
                    abort=True,
                )
            for run in runs:
                shutil.rmtree(run.path)
            click.echo(click.style("Success!", fg="green"))
            return
        runs = get_runs(
            include_online=include_online if include_online is not None else True,
            include_offline=include_offline if include_offline is not None else True,
            include_synced=include_synced if include_synced is not None else True,
            include_unsynced=False,
            exclude_globs=exclude_globs,
            include_globs=include_globs,
        )
        since = datetime.datetime.now() - datetime.timedelta(hours=clean_old_hours)
        old_runs = [run for run in runs if run.datetime < since]
        old_runs.sort(key=lambda _run: _run.datetime)
        if old_runs:
            click.echo(
                f"Found {len(runs)} runs, {len(old_runs)} are older than {clean_old_hours} hours"
            )
            for run in old_runs:
                click.echo(run.path)
            if not clean_force:
                click.confirm(
                    click.style(
                        f"Are you sure you want to remove {len(old_runs)} runs?",
                        bold=True,
                    ),
                    abort=True,
                )
            for run in old_runs:
                shutil.rmtree(run.path)
            click.echo(click.style("Success!", fg="green"))
        else:
            click.echo(
                click.style(
                    f"No runs older than {clean_old_hours} hours found", fg="red"
                )
            )

    if sync_all:
        _sync_all()
    elif clean:
        _clean()
    elif path:
        # When syncing a specific path, default to syncing tensorboard
        sync_tb = sync_tensorboard if sync_tensorboard is not None else True
        _sync_path(path, sync_tb)
    else:
        _summary()
| 676 |
+
|
| 677 |
+
|
| 678 |
+
@cli.command(
|
| 679 |
+
context_settings=CONTEXT,
|
| 680 |
+
help="Initialize a hyperparameter sweep. Search for hyperparameters that optimizes a cost function of a machine learning model by testing various combinations.",
|
| 681 |
+
)
|
| 682 |
+
@click.option(
|
| 683 |
+
"--project",
|
| 684 |
+
"-p",
|
| 685 |
+
default=None,
|
| 686 |
+
help="""The name of the project where W&B runs created from the sweep are sent to. If the project is not specified, the run is sent to a project labeled Uncategorized.""",
|
| 687 |
+
)
|
| 688 |
+
@click.option(
|
| 689 |
+
"--entity",
|
| 690 |
+
"-e",
|
| 691 |
+
default=None,
|
| 692 |
+
help="""The username or team name where you want to send W&B runs created by the sweep to. Ensure that the entity you specify already exists. If you don't specify an entity, the run will be sent to your default entity, which is usually your username.""",
|
| 693 |
+
)
|
| 694 |
+
@click.option("--controller", is_flag=True, default=False, help="Run local controller")
|
| 695 |
+
@click.option("--verbose", is_flag=True, default=False, help="Display verbose output")
|
| 696 |
+
@click.option(
|
| 697 |
+
"--name",
|
| 698 |
+
default=None,
|
| 699 |
+
help="The name of the sweep. The sweep ID is used if no name is specified.",
|
| 700 |
+
)
|
| 701 |
+
@click.option("--program", default=None, help="Set sweep program")
|
| 702 |
+
@click.option("--settings", default=None, help="Set sweep settings", hidden=True)
|
| 703 |
+
@click.option("--update", default=None, help="Update pending sweep")
|
| 704 |
+
@click.option(
|
| 705 |
+
"--stop",
|
| 706 |
+
is_flag=True,
|
| 707 |
+
default=False,
|
| 708 |
+
help="Finish a sweep to stop running new runs and let currently running runs finish.",
|
| 709 |
+
)
|
| 710 |
+
@click.option(
|
| 711 |
+
"--cancel",
|
| 712 |
+
is_flag=True,
|
| 713 |
+
default=False,
|
| 714 |
+
help="Cancel a sweep to kill all running runs and stop running new runs.",
|
| 715 |
+
)
|
| 716 |
+
@click.option(
|
| 717 |
+
"--pause",
|
| 718 |
+
is_flag=True,
|
| 719 |
+
default=False,
|
| 720 |
+
help="Pause a sweep to temporarily stop running new runs.",
|
| 721 |
+
)
|
| 722 |
+
@click.option(
|
| 723 |
+
"--resume",
|
| 724 |
+
is_flag=True,
|
| 725 |
+
default=False,
|
| 726 |
+
help="Resume a sweep to continue running new runs.",
|
| 727 |
+
)
|
| 728 |
+
@click.option(
|
| 729 |
+
"--prior_run",
|
| 730 |
+
"-R",
|
| 731 |
+
"prior_runs",
|
| 732 |
+
multiple=True,
|
| 733 |
+
default=None,
|
| 734 |
+
help="ID of an existing run to add to this sweep",
|
| 735 |
+
)
|
| 736 |
+
@click.argument("config_yaml_or_sweep_id")
|
| 737 |
+
@click.pass_context
|
| 738 |
+
@display_error
|
| 739 |
+
def sweep(
|
| 740 |
+
ctx,
|
| 741 |
+
project,
|
| 742 |
+
entity,
|
| 743 |
+
controller,
|
| 744 |
+
verbose,
|
| 745 |
+
name,
|
| 746 |
+
program,
|
| 747 |
+
settings,
|
| 748 |
+
update,
|
| 749 |
+
stop,
|
| 750 |
+
cancel,
|
| 751 |
+
pause,
|
| 752 |
+
resume,
|
| 753 |
+
prior_runs,
|
| 754 |
+
config_yaml_or_sweep_id,
|
| 755 |
+
):
|
| 756 |
+
state_args = "stop", "cancel", "pause", "resume"
|
| 757 |
+
lcls = locals()
|
| 758 |
+
is_state_change_command = sum(lcls[k] for k in state_args)
|
| 759 |
+
if is_state_change_command > 1:
|
| 760 |
+
raise Exception("Only one state flag (stop/cancel/pause/resume) is allowed.")
|
| 761 |
+
elif is_state_change_command == 1:
|
| 762 |
+
sweep_id = config_yaml_or_sweep_id
|
| 763 |
+
api = _get_cling_api()
|
| 764 |
+
if not api.is_authenticated:
|
| 765 |
+
wandb.termlog("Login to W&B to use the sweep feature")
|
| 766 |
+
ctx.invoke(login, no_offline=True)
|
| 767 |
+
api = _get_cling_api(reset=True)
|
| 768 |
+
parts = dict(entity=entity, project=project, name=sweep_id)
|
| 769 |
+
err = sweep_utils.parse_sweep_id(parts)
|
| 770 |
+
if err:
|
| 771 |
+
wandb.termerror(err)
|
| 772 |
+
return
|
| 773 |
+
entity = parts.get("entity") or entity
|
| 774 |
+
project = parts.get("project") or project
|
| 775 |
+
sweep_id = parts.get("name") or sweep_id
|
| 776 |
+
state = [s for s in state_args if lcls[s]][0]
|
| 777 |
+
ings = {
|
| 778 |
+
"stop": "Stopping",
|
| 779 |
+
"cancel": "Cancelling",
|
| 780 |
+
"pause": "Pausing",
|
| 781 |
+
"resume": "Resuming",
|
| 782 |
+
}
|
| 783 |
+
wandb.termlog(f"{ings[state]} sweep {entity}/{project}/{sweep_id}")
|
| 784 |
+
getattr(api, "{}_sweep".format(state))(sweep_id, entity=entity, project=project)
|
| 785 |
+
wandb.termlog("Done.")
|
| 786 |
+
return
|
| 787 |
+
else:
|
| 788 |
+
config_yaml = config_yaml_or_sweep_id
|
| 789 |
+
|
| 790 |
+
def _parse_settings(settings):
|
| 791 |
+
"""Parse settings from json or comma separated assignments."""
|
| 792 |
+
ret = {}
|
| 793 |
+
# TODO(jhr): merge with magic:_parse_magic
|
| 794 |
+
if settings.find("=") > 0:
|
| 795 |
+
for item in settings.split(","):
|
| 796 |
+
kv = item.split("=")
|
| 797 |
+
if len(kv) != 2:
|
| 798 |
+
wandb.termwarn(
|
| 799 |
+
"Unable to parse sweep settings key value pair", repeat=False
|
| 800 |
+
)
|
| 801 |
+
ret.update(dict([kv]))
|
| 802 |
+
return ret
|
| 803 |
+
wandb.termwarn("Unable to parse settings parameter", repeat=False)
|
| 804 |
+
return ret
|
| 805 |
+
|
| 806 |
+
api = _get_cling_api()
|
| 807 |
+
if not api.is_authenticated:
|
| 808 |
+
wandb.termlog("Login to W&B to use the sweep feature")
|
| 809 |
+
ctx.invoke(login, no_offline=True)
|
| 810 |
+
api = _get_cling_api(reset=True)
|
| 811 |
+
|
| 812 |
+
sweep_obj_id = None
|
| 813 |
+
if update:
|
| 814 |
+
parts = dict(entity=entity, project=project, name=update)
|
| 815 |
+
err = sweep_utils.parse_sweep_id(parts)
|
| 816 |
+
if err:
|
| 817 |
+
wandb.termerror(err)
|
| 818 |
+
return
|
| 819 |
+
entity = parts.get("entity") or entity
|
| 820 |
+
project = parts.get("project") or project
|
| 821 |
+
sweep_id = parts.get("name") or update
|
| 822 |
+
|
| 823 |
+
has_project = (project or api.settings("project")) is not None
|
| 824 |
+
has_entity = (entity or api.settings("entity")) is not None
|
| 825 |
+
|
| 826 |
+
termerror_msg = (
|
| 827 |
+
"Sweep lookup requires a valid %s, and none was specified. \n"
|
| 828 |
+
"Either set a default %s in wandb/settings, or, if invoking \n`wandb sweep` "
|
| 829 |
+
"from the command line, specify the full sweep path via: \n\n"
|
| 830 |
+
" wandb sweep {username}/{projectname}/{sweepid}\n\n"
|
| 831 |
+
)
|
| 832 |
+
|
| 833 |
+
if not has_entity:
|
| 834 |
+
wandb.termerror(termerror_msg % (("entity",) * 2))
|
| 835 |
+
return
|
| 836 |
+
|
| 837 |
+
if not has_project:
|
| 838 |
+
wandb.termerror(termerror_msg % (("project",) * 2))
|
| 839 |
+
return
|
| 840 |
+
|
| 841 |
+
found = api.sweep(sweep_id, "{}", entity=entity, project=project)
|
| 842 |
+
if not found:
|
| 843 |
+
wandb.termerror(f"Could not find sweep {entity}/{project}/{sweep_id}")
|
| 844 |
+
return
|
| 845 |
+
sweep_obj_id = found["id"]
|
| 846 |
+
|
| 847 |
+
action = "Updating" if sweep_obj_id else "Creating"
|
| 848 |
+
wandb.termlog(f"{action} sweep from: {config_yaml}")
|
| 849 |
+
config = sweep_utils.load_sweep_config(config_yaml)
|
| 850 |
+
|
| 851 |
+
# Set or override parameters
|
| 852 |
+
if name:
|
| 853 |
+
config["name"] = name
|
| 854 |
+
if program:
|
| 855 |
+
config["program"] = program
|
| 856 |
+
if settings:
|
| 857 |
+
settings = _parse_settings(settings)
|
| 858 |
+
if settings:
|
| 859 |
+
config.setdefault("settings", {})
|
| 860 |
+
config["settings"].update(settings)
|
| 861 |
+
if controller:
|
| 862 |
+
config.setdefault("controller", {})
|
| 863 |
+
config["controller"]["type"] = "local"
|
| 864 |
+
|
| 865 |
+
is_local = config.get("controller", {}).get("type") == "local"
|
| 866 |
+
if is_local:
|
| 867 |
+
from wandb import controller as wandb_controller
|
| 868 |
+
|
| 869 |
+
tuner = wandb_controller()
|
| 870 |
+
err = tuner._validate(config)
|
| 871 |
+
if err:
|
| 872 |
+
wandb.termerror(f"Error in sweep file: {err}")
|
| 873 |
+
return
|
| 874 |
+
|
| 875 |
+
env = os.environ
|
| 876 |
+
entity = (
|
| 877 |
+
entity
|
| 878 |
+
or env.get("WANDB_ENTITY")
|
| 879 |
+
or config.get("entity")
|
| 880 |
+
or api.settings("entity")
|
| 881 |
+
)
|
| 882 |
+
project = (
|
| 883 |
+
project
|
| 884 |
+
or env.get("WANDB_PROJECT")
|
| 885 |
+
or config.get("project")
|
| 886 |
+
or api.settings("project")
|
| 887 |
+
or util.auto_project_name(config.get("program"))
|
| 888 |
+
)
|
| 889 |
+
|
| 890 |
+
sweep_id, warnings = api.upsert_sweep(
|
| 891 |
+
config,
|
| 892 |
+
project=project,
|
| 893 |
+
entity=entity,
|
| 894 |
+
obj_id=sweep_obj_id,
|
| 895 |
+
prior_runs=prior_runs,
|
| 896 |
+
)
|
| 897 |
+
sweep_utils.handle_sweep_config_violations(warnings)
|
| 898 |
+
|
| 899 |
+
# Log nicely formatted sweep information
|
| 900 |
+
styled_id = click.style(sweep_id, fg="yellow")
|
| 901 |
+
wandb.termlog(f"{action} sweep with ID: {styled_id}")
|
| 902 |
+
|
| 903 |
+
sweep_url = wandb_sdk.wandb_sweep._get_sweep_url(api, sweep_id)
|
| 904 |
+
if sweep_url:
|
| 905 |
+
styled_url = click.style(sweep_url, underline=True, fg="blue")
|
| 906 |
+
wandb.termlog(f"View sweep at: {styled_url}")
|
| 907 |
+
|
| 908 |
+
# re-probe entity and project if it was auto-detected by upsert_sweep
|
| 909 |
+
entity = entity or env.get("WANDB_ENTITY")
|
| 910 |
+
project = project or env.get("WANDB_PROJECT")
|
| 911 |
+
|
| 912 |
+
if entity and project:
|
| 913 |
+
sweep_path = f"{entity}/{project}/{sweep_id}"
|
| 914 |
+
elif project:
|
| 915 |
+
sweep_path = f"{project}/{sweep_id}"
|
| 916 |
+
else:
|
| 917 |
+
sweep_path = sweep_id
|
| 918 |
+
|
| 919 |
+
if sweep_path.find(" ") >= 0:
|
| 920 |
+
sweep_path = f"{sweep_path!r}"
|
| 921 |
+
|
| 922 |
+
styled_path = click.style(f"wandb agent {sweep_path}", fg="yellow")
|
| 923 |
+
wandb.termlog(f"Run sweep agent with: {styled_path}")
|
| 924 |
+
if controller:
|
| 925 |
+
wandb.termlog("Starting wandb controller...")
|
| 926 |
+
from wandb import controller as wandb_controller
|
| 927 |
+
|
| 928 |
+
tuner = wandb_controller(sweep_id)
|
| 929 |
+
tuner.run(verbose=verbose)
|
| 930 |
+
|
| 931 |
+
|
| 932 |
+
@cli.command(
    context_settings=CONTEXT,
    no_args_is_help=True,
    help="Run a W&B launch sweep (Experimental).",
)
@click.option(
    "--queue",
    "-q",
    default=None,
    help="The name of a queue to push the sweep to",
)
@click.option(
    "--project",
    "-p",
    default=None,
    help="Name of the project which the agent will watch. "
    "If passed in, will override the project value passed in using a config file",
)
@click.option(
    "--entity",
    "-e",
    default=None,
    help="The entity to use. Defaults to current logged-in user",
)
@click.option(
    "--resume_id",
    "-r",
    default=None,
    help="Resume a launch sweep by passing an 8-char sweep id. Queue required",
)
@click.option(
    "--prior_run",
    "-R",
    "prior_runs",
    multiple=True,
    default=None,
    help="ID of an existing run to add to this sweep",
)
@click.argument("config", required=False, type=click.Path(exists=True))
@click.pass_context
@display_error
def launch_sweep(
    ctx,
    project,
    entity,
    queue,
    config,
    resume_id,
    prior_runs,
):
    """Create (or resume) a launch-based sweep and push its scheduler to a queue.

    Builds a launch spec for the sweep Scheduler from the user's sweep config
    (or from a previously stored sweep when --resume_id is given), upserts the
    sweep via the internal API, and enqueues the scheduler on the given queue.
    """
    api = _get_cling_api()
    env = os.environ
    if not api.is_authenticated:
        # Interactive login, then rebuild the API client with fresh credentials.
        wandb.termlog("Login to W&B to use the sweep feature")
        ctx.invoke(login, no_offline=True)
        api = _get_cling_api(reset=True)

    # Resolution order: CLI flag > environment variable > saved settings.
    entity = entity or env.get("WANDB_ENTITY") or api.settings("entity")
    if entity is None:
        wandb.termerror("Must specify entity when using launch")
        return

    project = project or env.get("WANDB_PROJECT") or api.settings("project")
    if project is None:
        wandb.termerror("A project must be configured when using launch")
        return

    # get personal username, not team name or service account, default to entity
    author = api.viewer().get("username") or entity

    # if not sweep_config XOR resume_id
    if not (config or resume_id):
        wandb.termerror("'config' and/or 'resume_id' required")
        return

    parsed_user_config = sweep_utils.load_launch_sweep_config(config)
    # Rip special keys out of config, store in scheduler run_config
    launch_args: Dict[str, Any] = parsed_user_config.pop("launch", {})
    scheduler_args: Dict[str, Any] = parsed_user_config.pop("scheduler", {})
    settings: Dict[str, Any] = scheduler_args.pop("settings", {})

    scheduler_job: Optional[str] = scheduler_args.get("job")
    if scheduler_job:
        wandb.termwarn(
            "Using a scheduler job for launch sweeps is *experimental* and may change without warning"
        )
    # CLI --queue wins over a 'queue' key in the config's 'launch' section.
    queue: Optional[str] = queue or launch_args.get("queue")

    sweep_config, sweep_obj_id = None, None
    if not resume_id:
        sweep_config = parsed_user_config

        # check method
        method = sweep_config.get("method")
        if scheduler_job and not method:
            # Scheduler jobs own the optimization strategy, so default the
            # sweep method to "custom".
            sweep_config["method"] = "custom"
        elif scheduler_job and method != "custom":
            # TODO(gst): Check if using Anaconda2
            wandb.termwarn(
                "Use 'method': 'custom' in the sweep config when using scheduler jobs, "
                "or omit it entirely. For jobs using the wandb optimization engine (WandbScheduler), "
                "set the method in the sweep config under scheduler.settings.method "
            )
            settings["method"] = method

        if settings.get("method"):
            # assume WandbScheduler, and user is using this right
            sweep_config["method"] = settings["method"]

    else:  # Resuming an existing sweep
        found = api.sweep(resume_id, "{}", entity=entity, project=project)
        if not found:
            wandb.termerror(f"Could not find sweep {entity}/{project}/{resume_id}")
            return

        if found.get("state") == "RUNNING":
            wandb.termerror(
                f"Cannot resume sweep {entity}/{project}/{resume_id}, it is already running"
            )
            return

        sweep_obj_id = found["id"]
        # The stored sweep config replaces anything the user passed on disk.
        sweep_config = yaml.safe_load(found["config"])
        wandb.termlog(f"Resuming from existing sweep {entity}/{project}/{resume_id}")
        if len(parsed_user_config.keys()) > 0:
            wandb.termwarn(
                "Sweep parameters loaded from resumed sweep, ignoring provided config"
            )

        # Stored scheduler state may be absent; fall back to empty JSON.
        prev_scheduler = json.loads(found.get("scheduler") or "{}")
        run_spec = json.loads(prev_scheduler.get("run_spec", "{}"))
        if (
            scheduler_job
            and run_spec.get("job")
            and run_spec.get("job") != scheduler_job
        ):
            wandb.termerror(
                f"Resuming a launch sweep with a different scheduler job is not supported. Job loaded from sweep: {run_spec.get('job')}, job in config: {scheduler_job}"
            )
            return

        prev_scheduler_args, prev_settings = sweep_utils.get_previous_args(run_spec)
        # Passed in scheduler_args and settings override previous
        # NOTE(review): dict.update overwrites the receiver with the argument,
        # so as written the *previous* values win — the opposite of what the
        # comment above claims. Confirm intended precedence before changing.
        scheduler_args.update(prev_scheduler_args)
        settings.update(prev_settings)
    if not queue:
        wandb.termerror(
            "Launch-sweeps require setting a 'queue', use --queue option or a 'queue' key in the 'launch' section in the config"
        )
        return

    # A scheduler job carries its own entrypoint; otherwise use the default.
    entrypoint = Scheduler.ENTRYPOINT if not scheduler_job else None
    args = sweep_utils.construct_scheduler_args(
        return_job=scheduler_job is not None,
        sweep_config=sweep_config,
        queue=queue,
        project=project,
        author=author,
    )
    if not args:
        return

    # validate training job existence
    # NOTE(review): these two branches `return False` while the rest of the
    # function uses bare `return`; click ignores the value, but the
    # inconsistency is worth cleaning up.
    if not sweep_utils.check_job_exists(PublicApi(), sweep_config.get("job")):
        return False

    # validate scheduler job existence, if present
    if not sweep_utils.check_job_exists(PublicApi(), scheduler_job):
        return False

    # Set run overrides for the Scheduler
    overrides = {"run_config": {}}
    if launch_args:
        overrides["run_config"]["launch"] = launch_args
    if scheduler_args:
        overrides["run_config"]["scheduler"] = scheduler_args
    if settings:
        overrides["run_config"]["settings"] = settings

    if scheduler_job:
        overrides["run_config"]["sweep_args"] = args
    else:
        overrides["args"] = args

    # configure scheduler job resource
    resource = scheduler_args.get("resource")
    if resource:
        if resource == "local-process" and scheduler_job:
            wandb.termerror(
                "Scheduler jobs cannot be run with the 'local-process' resource"
            )
            return
        if resource == "local-process" and scheduler_args.get("docker_image"):
            wandb.termerror(
                "Scheduler jobs cannot be run with the 'local-process' resource and a docker image"
            )
            return
    else:  # no resource set, default local-process if not scheduler job, else container
        resource = "local-process" if not scheduler_job else "local-container"

    # Launch job spec for the Scheduler
    launch_scheduler_spec = launch_utils.construct_launch_spec(
        uri=Scheduler.PLACEHOLDER_URI,
        api=api,
        name="Scheduler.WANDB_SWEEP_ID",
        project=project,
        entity=entity,
        docker_image=scheduler_args.get("docker_image"),
        resource=resource,
        entry_point=entrypoint,
        resource_args=scheduler_args.get("resource_args", {}),
        repository=launch_args.get("registry", {}).get("url", None),
        job=scheduler_job,
        version=None,
        launch_config={"overrides": overrides},
        run_id="WANDB_SWEEP_ID",  # scheduler inits run with sweep_id=run_id
        author=None,  # author gets passed into scheduler override args
    )
    launch_scheduler_with_queue = json.dumps(
        {
            "queue": queue,
            "run_queue_project": launch_utils.LAUNCH_DEFAULT_PROJECT,
            "run_spec": json.dumps(launch_scheduler_spec),
        }
    )

    sweep_id, warnings = api.upsert_sweep(
        sweep_config,
        project=project,
        entity=entity,
        obj_id=sweep_obj_id,  # if resuming
        launch_scheduler=launch_scheduler_with_queue,
        state="PENDING",
        prior_runs=prior_runs,
        template_variable_values=scheduler_args.get("template_variables", None),
    )
    sweep_utils.handle_sweep_config_violations(warnings)
    # Log nicely formatted sweep information
    styled_id = click.style(sweep_id, fg="yellow")
    wandb.termlog(f"{'Resumed' if resume_id else 'Created'} sweep with ID: {styled_id}")
    sweep_url = wandb_sdk.wandb_sweep._get_sweep_url(api, sweep_id)
    if sweep_url:
        styled_url = click.style(sweep_url, underline=True, fg="blue")
        wandb.termlog(f"View sweep at: {styled_url}")
    wandb.termlog(f"Scheduler added to launch queue ({queue})")
|
| 1177 |
+
|
| 1178 |
+
|
| 1179 |
+
@cli.command(help=f"Launch or queue a W&B Job. See {url_registry.url('wandb-launch')}")
@click.option(
    "--uri",
    "-u",
    metavar="(str)",
    default=None,
    help="Local path or git repo uri to launch. If provided this command will "
    "create a job from the specified uri.",
)
@click.option(
    "--job",
    "-j",
    metavar="(str)",
    default=None,
    help="Name of the job to launch. If passed in, launch does not require a uri.",
)
@click.option(
    "--entry-point",
    "-E",
    metavar="NAME",
    default=None,
    help="""Entry point within project. [default: main]. If the entry point is not found,
    attempts to run the project file with the specified name as a script,
    using 'python' to run .py files and the default shell (specified by
    environment variable $SHELL) to run .sh files. If passed in, will override the entrypoint value passed in using a config file.""",
)
@click.option(
    "--git-version",
    "-g",
    metavar="GIT-VERSION",
    hidden=True,
    help="Version of the project to run, as a Git commit reference for Git projects.",
)
@click.option(
    "--build-context",
    metavar="(str)",
    help="Path to the build context within the source code. Defaults to the "
    "root of the source code. Compatible only with -u.",
)
@click.option(
    "--job-name",
    "-J",
    metavar="(str)",
    default=None,
    hidden=True,
    help="Name for the job created if the -u,--uri flag is passed in.",
)
@click.option(
    "--name",
    envvar="WANDB_NAME",
    help="""Name of the run under which to launch the run. If not
    specified, a random run name will be used to launch run. If passed in, will override the name passed in using a config file.""",
)
@click.option(
    "--entity",
    "-e",
    metavar="(str)",
    default=None,
    help="""Name of the target entity which the new run will be sent to. Defaults to using the entity set by local wandb/settings folder.
    If passed in, will override the entity value passed in using a config file.""",
)
@click.option(
    "--project",
    "-p",
    metavar="(str)",
    default=None,
    help="""Name of the target project which the new run will be sent to. Defaults to using the project name given by the source uri
    or for github runs, the git repo name. If passed in, will override the project value passed in using a config file.""",
)
@click.option(
    "--resource",
    "-r",
    metavar="BACKEND",
    default=None,
    help="""Execution resource to use for run. Supported values: 'local-process', 'local-container', 'kubernetes', 'sagemaker', 'gcp-vertex'.
    This is now a required parameter if pushing to a queue with no resource configuration.
    If passed in, will override the resource value passed in using a config file.""",
)
@click.option(
    "--docker-image",
    "-d",
    default=None,
    metavar="DOCKER IMAGE",
    help="""Specific docker image you'd like to use. In the form name:tag.
    If passed in, will override the docker image value passed in using a config file.""",
)
@click.option(
    "--base-image",
    "-B",
    default=None,
    metavar="BASE IMAGE",
    help="""Docker image to run job code in. Incompatible with --docker-image.""",
)
@click.option(
    "--config",
    "-c",
    metavar="FILE",
    help="""Path to JSON file (must end in '.json') or JSON string which will be passed
    as a launch config. Dictation how the launched run will be configured.""",
)
@click.option(
    "--set-var",
    "-v",
    "cli_template_vars",
    default=None,
    multiple=True,
    help="""Set template variable values for queues with allow listing enabled,
    as key-value pairs e.g. `--set-var key1=value1 --set-var key2=value2`""",
)
@click.option(
    "--queue",
    "-q",
    is_flag=False,
    flag_value="default",
    default=None,
    help="""Name of run queue to push to. If none, launches single run directly. If supplied without
    an argument (`--queue`), defaults to queue 'default'. Else, if name supplied, specified run queue must exist under the
    project and entity supplied.""",
)
@click.option(
    "--async",
    "run_async",
    is_flag=True,
    help="""Flag to run the job asynchronously. Defaults to false, i.e. unless --async is set, wandb launch will wait for
    the job to finish. This option is incompatible with --queue; asynchronous options when running with an agent should be
    set on wandb launch-agent.""",
)
@click.option(
    "--resource-args",
    "-R",
    metavar="FILE",
    help="""Path to JSON file (must end in '.json') or JSON string which will be passed
    as resource args to the compute resource. The exact content which should be
    provided is different for each execution backend. See documentation for layout of this file.""",
)
@click.option(
    "--build",
    "-b",
    is_flag=True,
    hidden=True,
    help="Flag to build an associated job and push to queue as an image job.",
)
@click.option(
    "--repository",
    "-rg",
    is_flag=False,
    default=None,
    hidden=True,
    help="Name of a remote repository. Will be used to push a built image to.",
)
# TODO: this is only included for back compat. But we should remove this in the future
@click.option(
    "--project-queue",
    "-pq",
    default=None,
    hidden=True,
    help="Name of the project containing the queue to push to. If none, defaults to entity level queues.",
)
@click.option(
    "--dockerfile",
    "-D",
    default=None,
    help="Path to the Dockerfile used to build the job, relative to the job's root",
)
@click.option(
    "--priority",
    "-P",
    default=None,
    type=click.Choice(["critical", "high", "medium", "low"]),
    help="""When --queue is passed, set the priority of the job. Launch jobs with higher priority
    are served first. The order, from highest to lowest priority, is: critical, high, medium, low""",
)
@display_error
def launch(
    uri,
    job,
    entry_point,
    git_version,
    build_context,
    name,
    resource,
    entity,
    project,
    docker_image,
    base_image,
    config,
    cli_template_vars,
    queue,
    run_async,
    resource_args,
    build,
    repository,
    project_queue,
    dockerfile,
    priority,
    job_name,
):
    """Start a W&B run from the given URI.

    The URI can be a wandb URI, a GitHub repo uri, or a local path. In the case of a
    wandb URI the arguments used in the original run will be used by default. These
    arguments can be overridden using the args option, or specifying those arguments in
    the config's 'overrides' key, 'args' field as a list of strings.

    Running `wandb launch [URI]` will launch the run directly. To add the run to a
    queue, run `wandb launch [URI] --queue [optional queuename]`.
    """
    logger.info(
        f"=== Launch called with kwargs {locals()} CLI Version: {wandb.__version__}==="
    )
    # Deferred imports: keep heavy launch machinery off the CLI's import path.
    from wandb.sdk.launch._launch import _launch
    from wandb.sdk.launch.create_job import _create_job
    from wandb.sdk.launch.utils import _is_git_uri

    api = _get_cling_api()
    wandb._sentry.configure_scope(process_context="launch_cli")

    # --- Validate mutually exclusive / dependent flag combinations. ---
    if run_async and queue is not None:
        raise LaunchError(
            "Cannot use both --async and --queue with wandb launch, see help for details."
        )

    if queue and docker_image and not project:
        raise LaunchError(
            "Cannot use --queue and --docker together without a project. Please specify a project with --project or -p."
        )

    if priority is not None and queue is None:
        raise LaunchError("--priority flag requires --queue to be set")

    # --resource-args may be a JSON/YAML file path or an inline string.
    if resource_args is not None:
        resource_args = util.load_json_yaml_dict(resource_args)
        if resource_args is None:
            raise LaunchError("Invalid format for resource-args")
    else:
        resource_args = {}

    # Split the entry point string into argv-style tokens.
    if entry_point is not None:
        entry_point = shlex.split(entry_point)

    if config is not None:
        config = util.load_json_yaml_dict(config)
        if config is None:
            raise LaunchError("Invalid format for config")
    else:
        config = {}

    # CLI flag wins over the config file's 'resource' key.
    resource = resource or config.get("resource")

    if build and queue is None:
        raise LaunchError("Build flag requires a queue to be set")

    # Best-effort login check: report the failure but keep going.
    try:
        launch_utils.check_logged_in(api)
    except Exception:
        wandb.termerror(f"Error running job: {traceback.format_exc()}")

    run_id = config.get("run_id")

    # If URI was provided, we need to create a job from it.
    if uri:
        if entry_point is None:
            raise LaunchError(
                "Cannot provide a uri without an entry point. Please provide an "
                "entry point with --entry-point or -E."
            )
        if job is not None:
            raise LaunchError("Cannot provide both a uri and a job name.")
        job_type = (
            "git" if _is_git_uri(uri) else "code"
        )  # TODO: Add support for local URIs with git.
        if entity is None:
            entity = launch_utils.get_default_entity(api, config)
        artifact, _, _ = _create_job(
            api,
            job_type,
            uri,
            entrypoint=" ".join(entry_point),
            git_hash=git_version,
            name=job_name,
            project=project,
            base_image=base_image,
            build_context=build_context,
            dockerfile=dockerfile,
            entity=entity,
        )
        if artifact is None:
            raise LaunchError(f"Failed to create job from uri: {uri}")
        # From here on, launch the freshly created job by its full path.
        job = f"{entity}/{project}/{artifact.name}"

    if dockerfile:
        if "overrides" in config:
            config["overrides"]["dockerfile"] = dockerfile
        else:
            config["overrides"] = {"dockerfile": dockerfile}

    # Map the textual priority choice onto the backend's numeric scale
    # (0 is most urgent).
    if priority is not None:
        priority_map = {
            "critical": 0,
            "high": 1,
            "medium": 2,
            "low": 3,
        }
        priority = priority_map[priority.lower()]

    template_variables = None
    if cli_template_vars:
        if queue is None:
            raise LaunchError("'--set-var' flag requires queue to be set")
        if entity is None:
            entity = launch_utils.get_default_entity(api, config)
        public_api = PublicApi()
        runqueue = RunQueue(client=public_api.client, name=queue, entity=entity)
        template_variables = launch_utils.fetch_and_validate_template_variables(
            runqueue, cli_template_vars
        )

    if queue is None:
        # direct launch
        try:
            run = asyncio.run(
                _launch(
                    api,
                    job,
                    project=project,
                    entity=entity,
                    docker_image=docker_image,
                    name=name,
                    entry_point=entry_point,
                    version=git_version,
                    resource=resource,
                    resource_args=resource_args,
                    launch_config=config,
                    synchronous=(not run_async),
                    run_id=run_id,
                    repository=repository,
                )
            )
            # Mirror the launched run's failure as a non-zero CLI exit code.
            if asyncio.run(run.get_status()).state in [
                "failed",
                "stopped",
                "preempted",
            ]:
                wandb.termerror("Launched run exited with non-zero status")
                sys.exit(1)
        except LaunchError as e:
            logger.error("=== %s ===", e)
            wandb._sentry.exception(e)
            sys.exit(e)
        except ExecutionError as e:
            logger.error("=== %s ===", e)
            wandb._sentry.exception(e)
            sys.exit(e)
        except asyncio.CancelledError:
            # User interruption of the event loop is a clean exit.
            sys.exit(0)
    else:
        # Queued launch: push the job spec onto the run queue for an agent.
        try:
            _launch_add(
                api,
                job,
                config,
                template_variables,
                project,
                entity,
                queue,
                resource,
                entry_point,
                name,
                git_version,
                docker_image,
                project_queue,
                resource_args,
                build=build,
                run_id=run_id,
                repository=repository,
                priority=priority,
            )

        except Exception as e:
            # Report to Sentry, then propagate for @display_error to render.
            wandb._sentry.exception(e)
            raise e
|
| 1560 |
+
|
| 1561 |
+
|
| 1562 |
+
@cli.command(
    context_settings=CONTEXT,
    help="Run a W&B launch agent.",
)
@click.pass_context
@click.option(
    "--queue",
    "-q",
    "queues",
    default=None,
    multiple=True,
    help="The name of a queue for the agent to watch. Multiple -q flags supported.",
)
@click.option(
    "--entity",
    "-e",
    default=None,
    help="The entity to use. Defaults to current logged-in user",
)
@click.option(
    "--log-file",
    "-l",
    default=None,
    help=(
        "Destination for internal agent logs. Use - for stdout. "
        "By default all agents logs will go to debug.log in your wandb/ "
        "subdirectory or WANDB_DIR if set."
    ),
)
@click.option(
    "--max-jobs",
    "-j",
    default=None,
    help="The maximum number of launch jobs this agent can run in parallel. Defaults to 1. Set to -1 for no upper limit",
)
@click.option(
    "--config", "-c", default=None, help="path to the agent config yaml to use"
)
@click.option(
    "--url",
    "-u",
    default=None,
    hidden=True,
    help="a wandb client registration URL, this is generated in the UI",
)
@click.option("--verbose", "-v", count=True, help="Display verbose output")
@display_error
def launch_agent(
    ctx,
    entity=None,
    queues=None,
    max_jobs=None,
    config=None,
    url=None,
    log_file=None,
    verbose=0,
):
    """Start a launch agent that polls the given queue(s) and executes jobs.

    Resolves the agent configuration from CLI flags plus an optional YAML
    config file, verifies a queue is configured and the user is logged in,
    then hands control to the launch agent loop (blocks until it exits).

    Raises:
        LaunchError: if the deprecated --url flag is used or no queue is
            configured; any agent failure is reported to Sentry and re-raised.
    """
    logger.info(
        f"=== Launch-agent called with kwargs {locals()} CLI Version: {wandb.__version__} ==="
    )
    # --url predates queue-based registration and is no longer supported.
    if url is not None:
        raise LaunchError(
            "--url is not supported in this version, upgrade with: pip install -u wandb"
        )

    # Deferred import: keep heavy launch machinery off the CLI's import path.
    import wandb.sdk.launch._launch as _launch

    if log_file is not None:
        _launch.set_launch_logfile(log_file)

    api = _get_cling_api()
    wandb._sentry.configure_scope(process_context="launch_agent")
    # CLI flags are merged with (and take precedence over) the YAML config;
    # the API client may be rebuilt if the config names a different entity.
    agent_config, api = _launch.resolve_agent_config(
        entity, max_jobs, queues, config, verbose
    )

    if len(agent_config.get("queues")) == 0:
        raise LaunchError(
            "To launch an agent please specify a queue or a list of queues in the configuration file or cli."
        )

    launch_utils.check_logged_in(api)

    wandb.termlog("Starting launch agent ✨")
    try:
        _launch.create_and_run_agent(api, agent_config)
    except Exception as e:
        # Report to Sentry, then re-raise with a bare `raise` so the original
        # traceback is preserved (fix: was `raise e`, which appends this frame).
        wandb._sentry.exception(e)
        raise
|
| 1651 |
+
|
| 1652 |
+
|
| 1653 |
+
@cli.command(context_settings=CONTEXT, help="Run the W&B agent")
@click.pass_context
@click.option(
    "--project",
    "-p",
    default=None,
    help="""The name of the project where W&B runs created from the sweep are sent to. If the project is not specified, the run is sent to a project labeled 'Uncategorized'.""",
)
@click.option(
    "--entity",
    "-e",
    default=None,
    help="""The username or team name where you want to send W&B runs created by the sweep to. Ensure that the entity you specify already exists. If you don't specify an entity, the run will be sent to your default entity, which is usually your username.""",
)
@click.option(
    "--count", default=None, type=int, help="The max number of runs for this agent."
)
@click.argument("sweep_id")
@display_error
def agent(ctx, project, entity, count, sweep_id):
    """Run a classic sweep agent for SWEEP_ID (blocks until done or count reached)."""
    api = _get_cling_api()
    if not api.is_authenticated:
        # Interactive login, then rebuild the API client with fresh credentials.
        wandb.termlog("Login to W&B to use the sweep agent feature")
        ctx.invoke(login, no_offline=True)
        api = _get_cling_api(reset=True)

    wandb.termlog("Starting wandb agent 🕵️")
    # Delegates the polling/execution loop to the agent module.
    wandb_agent.agent(sweep_id, entity=entity, project=project, count=count)
|
| 1681 |
+
|
| 1682 |
+
# you can send local commands like so:
|
| 1683 |
+
# agent_api.command({'type': 'run', 'program': 'train.py',
|
| 1684 |
+
# 'args': ['--max_epochs=10']})
|
| 1685 |
+
|
| 1686 |
+
|
| 1687 |
+
@cli.command(
    context_settings=RUN_CONTEXT, help="Run a W&B launch sweep scheduler (Experimental)"
)
@click.pass_context
@click.argument("sweep_id")
@display_error
def scheduler(
    ctx,
    sweep_id,
):
    """Run a launch sweep scheduler for ``sweep_id`` (experimental).

    Extra ``--key value`` pairs left on the command line are forwarded as
    keyword arguments to the scheduler constructor; purely numeric values
    are coerced to ``int``.
    """
    api = InternalApi()
    if not api.is_authenticated:
        wandb.termlog("Login to W&B to use the sweep scheduler feature")
        ctx.invoke(login, no_offline=True)
        api = InternalApi(reset=True)

    wandb._sentry.configure_scope(process_context="sweep_scheduler")
    wandb.termlog("Starting a Launch Scheduler 🚀")
    from wandb.sdk.launch.sweeps import load_scheduler

    # TODO(gst): remove this monstrosity
    # Future-proofing hack to pull any kwargs that get passed in through the CLI
    kwargs = {}
    for i, _arg in enumerate(ctx.args):
        if isinstance(_arg, str) and _arg.startswith("--"):
            # Fix: a trailing flag with no value used to raise IndexError on
            # ctx.args[i + 1]; skip it instead of crashing.
            if i + 1 >= len(ctx.args):
                continue
            # convert input kwargs from hyphens to underscores
            _key = _arg[2:].replace("-", "_")
            _args = ctx.args[i + 1]
            if _args.isdigit():
                _args = int(_args)
            kwargs[_key] = _args
    try:
        sweep_type = kwargs.get("sweep_type", "wandb")
        _scheduler = load_scheduler(scheduler_type=sweep_type)(
            api,
            sweep_id=sweep_id,
            **kwargs,
        )
        _scheduler.start()
    except Exception as e:
        wandb._sentry.exception(e)
        raise e
|
| 1729 |
+
|
| 1730 |
+
|
| 1731 |
+
@cli.group(help="Commands for managing and viewing W&B jobs")
def job() -> None:
    """Parent command group for the ``wandb job`` subcommands."""
|
| 1734 |
+
|
| 1735 |
+
|
| 1736 |
+
@job.command("list", help="List jobs in a project")
@click.option(
    "--project",
    "-p",
    envvar=env.PROJECT,
    help="The project you want to list jobs from.",
)
@click.option(
    "--entity",
    "-e",
    default="models",
    envvar=env.ENTITY,
    help="The entity the jobs belong to",
)
def _list(project, entity):
    """Print every job in ``entity/project`` along with its version aliases."""
    wandb.termlog(f"Listing jobs in {entity}/{project}")
    public_api = PublicApi()
    try:
        jobs = public_api.list_jobs(entity=entity, project=project)
    except wandb.errors.CommError as e:
        wandb.termerror(f"{e}")
        return

    if len(jobs) == 0:
        wandb.termlog("No jobs found")
        return

    # "listing" rather than "job" so we don't shadow the `job` command group.
    for listing in jobs:
        edges = listing["edges"]
        if len(edges) == 0:
            # deleted?
            continue

        name = edges[0]["node"]["artifactSequence"]["name"]
        aliases = [
            alias["alias"]
            for version in edges
            for alias in version["node"]["aliases"]
        ]

        # only list the most recent 10 job versions
        aliases_str = ",".join(aliases[::-1])
        wandb.termlog(f"{name} -- versions ({len(aliases)}): {aliases_str}")
|
| 1776 |
+
|
| 1777 |
+
|
| 1778 |
+
@job.command(
    help="Describe a launch job. Provide the launch job in the form of: entity/project/job-name:alias-or-version"
)
@click.argument("job")
def describe(job):
    """Fetch a launch job and print its public (non-underscore) info fields."""
    public_api = PublicApi()
    try:
        job = public_api.job(name=job)
    except wandb.errors.CommError as e:
        wandb.termerror(f"{e}")
        return

    # Skip private bookkeeping keys; print everything else verbatim.
    info = job._job_info
    for key in info:
        if not key.startswith("_"):
            wandb.termlog(f"{key}: {info[key]}")
|
| 1794 |
+
|
| 1795 |
+
|
| 1796 |
+
@job.command(
    no_args_is_help=True,
)
@click.option(
    "--project",
    "-p",
    envvar=env.PROJECT,
    help="The project you want to list jobs from.",
)
@click.option(
    "--entity",
    "-e",
    envvar=env.ENTITY,
    help="The entity the jobs belong to",
)
@click.option(
    "--name",
    "-n",
    help="Name for the job",
)
@click.option(
    "--description",
    "-d",
    help="Description for the job",
)
@click.option(
    "--alias",
    "-a",
    "aliases",
    help="Alias for the job",
    multiple=True,
    default=tuple(),
)
@click.option(
    "--entry-point",
    "-E",
    "entrypoint",
    help="Entrypoint to the script, including an executable and an entrypoint "
    "file. Required for code or repo jobs. If --build-context is provided, "
    "paths in the entrypoint command will be relative to the build context.",
)
@click.option(
    "--git-hash",
    "-g",
    "git_hash",
    type=str,
    help="Commit reference to use as the source for git jobs",
)
@click.option(
    "--runtime",
    "-r",
    type=str,
    help="Python runtime to execute the job",
)
@click.option(
    "--build-context",
    "-b",
    type=str,
    help="Path to the build context from the root of the job source code. If "
    "provided, this is used as the base path for the Dockerfile and entrypoint.",
)
@click.option(
    "--base-image",
    "-B",
    type=str,
    help="Base image to use for the job. Incompatible with image jobs.",
)
@click.option(
    "--dockerfile",
    "-D",
    type=str,
    help="Path to the Dockerfile for the job. If --build-context is provided, "
    "the Dockerfile path will be relative to the build context.",
)
@click.argument(
    "job_type",
    type=click.Choice(("git", "code", "image")),
)
@click.argument("path")
def create(
    path,
    project,
    entity,
    name,
    job_type,
    description,
    aliases,
    entrypoint,
    git_hash,
    runtime,
    build_context,
    base_image,
    dockerfile,
):
    """Create a job from a source, without a wandb run.

    Jobs can be of three types, git, code, or image.

    git: A git source, with an entrypoint either in the path or provided explicitly pointing to the main python executable.
    code: A code path, containing a requirements.txt file.
    image: A docker image.
    """
    from wandb.sdk.launch.create_job import _create_job

    api = _get_cling_api()
    wandb._sentry.configure_scope(process_context="job_create")

    # Resolve entity/project from flags, env vars, then API defaults.
    entity = entity or os.getenv("WANDB_ENTITY") or api.default_entity
    if not entity:
        wandb.termerror("No entity provided, use --entity or set WANDB_ENTITY")
        return

    project = project or os.getenv("WANDB_PROJECT")
    if not project:
        wandb.termerror("No project provided, use --project or set WANDB_PROJECT")
        return

    # git/code jobs need an entrypoint; fall back to main.py with a warning.
    if entrypoint is None and job_type in ("git", "code"):
        wandb.termwarn(
            f"No entrypoint provided for {job_type} job, defaulting to main.py"
        )
        entrypoint = "main.py"

    if job_type == "image" and base_image:
        wandb.termerror("Cannot provide --base-image/-B for an `image` job")
        return

    artifact, action, aliases = _create_job(
        api=api,
        path=path,
        entity=entity,
        project=project,
        name=name,
        job_type=job_type,
        description=description,
        aliases=list(aliases),
        entrypoint=entrypoint,
        git_hash=git_hash,
        runtime=runtime,
        build_context=build_context,
        base_image=base_image,
        dockerfile=dockerfile,
    )
    if not artifact:
        wandb.termerror("Job creation failed")
        return

    # Report what was created, with the alias list if any were applied.
    artifact_path = f"{entity}/{project}/{artifact.name}"
    msg = f"{action} job: {click.style(artifact_path, fg='yellow')}"
    if aliases:
        label = "alias" if len(aliases) == 1 else "aliases"
        alias_str = click.style(", ".join(aliases), fg="yellow")
        msg += f", with {label}: {alias_str}"

    wandb.termlog(msg)
    web_url = util.app_url(api.settings().get("base_url"))
    url = click.style(f"{web_url}/{entity}/{project}/jobs", underline=True)
    wandb.termlog(f"View all jobs in project '{project}' here: {url}\n")
|
| 1956 |
+
|
| 1957 |
+
|
| 1958 |
+
@cli.command(context_settings=CONTEXT, help="Run the W&B local sweep controller")
@click.option("--verbose", is_flag=True, default=False, help="Display verbose output")
@click.argument("sweep_id")
@display_error
def controller(verbose, sweep_id):
    """Drive the sweep ``sweep_id`` from a local controller process."""
    click.echo("Starting wandb controller...")
    # Imported lazily so the CLI stays fast when this command is unused.
    from wandb import controller as wandb_controller

    wandb_controller(sweep_id).run(verbose=verbose)
|
| 1968 |
+
|
| 1969 |
+
|
| 1970 |
+
@cli.command(context_settings=RUN_CONTEXT, name="docker-run")
@click.pass_context
@click.argument("docker_run_args", nargs=-1)
def docker_run(ctx, docker_run_args):
    """Wrap `docker run` and adds WANDB_API_KEY and WANDB_DOCKER environment variables.

    This will also set the runtime to nvidia if the nvidia-docker executable is present
    on the system and --runtime wasn't set.

    See `docker run --help` for more details.
    """
    api = InternalApi()
    args = list(docker_run_args)
    # Tolerate users typing `wandb docker-run run ...` out of habit.
    if len(args) > 0 and args[0] == "run":
        args.pop(0)
    # Default to the nvidia runtime when available and not explicitly set.
    if not any(a.startswith("--runtime") for a in args) and find_executable(
        "nvidia-docker"
    ):
        args = ["--runtime", "nvidia"] + args
    # TODO: image_from_docker_args uses heuristics to find the docker image arg, there are likely cases
    # where this won't work
    image = util.image_from_docker_args(args)
    resolved_image = None
    if image:
        resolved_image = wandb.docker.image_id(image)
    if resolved_image:
        args = ["-e", f"WANDB_DOCKER={resolved_image}"] + args
    else:
        wandb.termlog(
            "Couldn't detect image argument, running command without the WANDB_DOCKER env variable"
        )
    if api.api_key:
        args = ["-e", f"WANDB_API_KEY={api.api_key}"] + args
    else:
        wandb.termlog(
            "Not logged in, run `wandb login` from the host machine to enable result logging"
        )
    subprocess.call(["docker", "run"] + args)
|
| 2008 |
+
|
| 2009 |
+
|
| 2010 |
+
@cli.command(context_settings=RUN_CONTEXT)
@click.pass_context
@click.argument("docker_run_args", nargs=-1)
@click.argument("docker_image", required=False)
@click.option(
    "--nvidia/--no-nvidia",
    default=find_executable("nvidia-docker") is not None,
    help="Use the nvidia runtime, defaults to nvidia if nvidia-docker is present",
)
@click.option(
    "--digest", is_flag=True, default=False, help="Output the image digest and exit"
)
@click.option(
    "--jupyter/--no-jupyter", default=False, help="Run jupyter lab in the container"
)
@click.option(
    "--dir", default="/app", help="Which directory to mount the code in the container"
)
@click.option("--no-dir", is_flag=True, help="Don't mount the current directory")
@click.option(
    "--shell", default="/bin/bash", help="The shell to start the container with"
)
@click.option("--port", default="8888", help="The host port to bind jupyter on")
@click.option("--cmd", help="The command to run in the container")
@click.option(
    "--no-tty", is_flag=True, default=False, help="Run the command without a tty"
)
@display_error
def docker(
    ctx,
    docker_run_args,
    docker_image,
    nvidia,
    digest,
    jupyter,
    dir,
    no_dir,
    shell,
    port,
    cmd,
    no_tty,
):
    """Run your code in a docker container.

    W&B docker lets you run your code in a docker image ensuring wandb is configured. It
    adds the WANDB_DOCKER and WANDB_API_KEY environment variables to your container and
    mounts the current directory in /app by default. You can pass additional args which
    will be added to `docker run` before the image name is declared, we'll choose a
    default image for you if one isn't passed:

    ```sh
    wandb docker -v /mnt/dataset:/app/data
    wandb docker gcr.io/kubeflow-images-public/tensorflow-1.12.0-notebook-cpu:v0.4.0 --jupyter
    wandb docker wandb/deepo:keras-gpu --no-tty --cmd "python train.py --epochs=5"
    ```

    By default, we override the entrypoint to check for the existence of wandb and
    install it if not present. If you pass the --jupyter flag we will ensure jupyter is
    installed and start jupyter lab on port 8888. If we detect nvidia-docker on your
    system we will use the nvidia runtime. If you just want wandb to set environment
    variable to an existing docker run command, see the wandb docker-run command.
    """
    api = InternalApi()
    if not find_executable("docker"):
        raise ClickException("Docker not installed, install it from https://docker.com")
    args = list(docker_run_args)
    image = docker_image or ""
    # remove run for users used to nvidia-docker
    if args and args[0] == "run":
        args.pop(0)
    if not image and args:
        image = args.pop(0)
    # If the user adds docker args without specifying an image (should be rare)
    if not util.docker_image_regex(image.split("@")[0]):
        if image:
            # The token we popped wasn't an image name after all; put it back.
            args = args + [image]
        image = wandb.docker.default_image(gpu=nvidia)
        subprocess.call(["docker", "pull", image])
    _, repo_name, tag = wandb.docker.parse(image)

    resolved_image = wandb.docker.image_id(image)
    if resolved_image is None:
        raise ClickException(
            "Couldn't find image locally or in a registry, try running `docker pull {}`".format(
                image
            )
        )
    if digest:
        sys.stdout.write(resolved_image)
        exit(0)

    # Offer to attach to an already-running container for this image.
    existing = wandb.docker.shell(
        ["ps", "-f", "ancestor={}".format(resolved_image), "-q"]
    )
    if existing and click.confirm(
        "Found running container with the same image, do you want to attach?"
    ):
        subprocess.call(["docker", "attach", existing.split("\n")[0]])
        exit(0)

    cwd = os.getcwd()
    # Base invocation: custom entrypoint that bootstraps wandb in the container.
    command = [
        "docker", "run",
        "-e", "LANG=C.UTF-8",
        "-e", "WANDB_DOCKER={}".format(resolved_image),
        "--ipc=host",
        "-v", wandb.docker.entrypoint + ":/wandb-entrypoint.sh",
        "--entrypoint", "/wandb-entrypoint.sh",
    ]
    if nvidia:
        command.extend(["--runtime", "nvidia"])
    if not no_dir:
        # TODO: We should default to the working directory if defined
        command.extend(["-v", cwd + ":" + dir, "-w", dir])
    if api.api_key:
        command.extend(["-e", "WANDB_API_KEY={}".format(api.api_key)])
    else:
        wandb.termlog(
            "Couldn't find WANDB_API_KEY, run `wandb login` to enable streaming metrics"
        )
    if jupyter:
        command.extend(["-e", "WANDB_ENSURE_JUPYTER=1", "-p", port + ":8888"])
        no_tty = True
        cmd = "jupyter lab --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --notebook-dir {}".format(
            dir
        )
    command.extend(args)
    if no_tty:
        command.extend([image, shell, "-c", cmd])
    else:
        if cmd:
            command.extend(["-e", "WANDB_COMMAND={}".format(cmd)])
        command.extend(["-it", image, shell])
    wandb.termlog("Launching docker container \U0001f6a2")
    subprocess.call(command)
|
| 2150 |
+
|
| 2151 |
+
|
| 2152 |
+
@cli.command(
    context_settings=RUN_CONTEXT,
    help="Start a local W&B container (deprecated, see wandb server --help)",
    hidden=True,
)
@click.pass_context
@click.option("--port", "-p", default="8080", help="The host port to bind W&B local on")
@click.option(
    "--env", "-e", default=[], multiple=True, help="Env vars to pass to wandb/local"
)
@click.option(
    "--daemon/--no-daemon", default=True, help="Run or don't run in daemon mode"
)
@click.option(
    "--upgrade", is_flag=True, default=False, help="Upgrade to the most recent version"
)
@click.option(
    "--edge", is_flag=True, default=False, help="Run the bleeding edge", hidden=True
)
@display_error
def local(ctx, *args, **kwargs):
    """Deprecated alias that forwards to ``wandb server start``."""
    wandb.termwarn("`wandb local` has been replaced with `wandb server start`.")
    ctx.invoke(start, *args, **kwargs)
|
| 2175 |
+
|
| 2176 |
+
|
| 2177 |
+
@cli.group(help="Commands for operating a local W&B server")
def server():
    """Parent command group for the ``wandb server`` subcommands."""
|
| 2180 |
+
|
| 2181 |
+
|
| 2182 |
+
@server.command(context_settings=RUN_CONTEXT, help="Start a local W&B server")
@click.pass_context
@click.option(
    "--port", "-p", default="8080", help="The host port to bind W&B server on"
)
@click.option(
    "--env", "-e", default=[], multiple=True, help="Env vars to pass to wandb/local"
)
@click.option(
    "--daemon/--no-daemon", default=True, help="Run or don't run in daemon mode"
)
@click.option(
    "--upgrade",
    is_flag=True,
    default=False,
    help="Upgrade to the most recent version",
    hidden=True,
)
@click.option(
    "--edge", is_flag=True, default=False, help="Run the bleeding edge", hidden=True
)
@display_error
def start(ctx, port, env, daemon, upgrade, edge):
    """Start the local W&B server docker container and point the CLI at it.

    Pulls/updates the wandb/local image as requested, refuses to start a
    second wandb-local container, persists the base_url setting, and invokes
    login when no API key is configured yet.
    """
    api = InternalApi()
    if not find_executable("docker"):
        raise ClickException("Docker not installed, install it from https://docker.com")
    # NOTE(review): image_id / image_id_from_registry appear to be able to
    # return None when the image is absent; coalesce to "" so .split doesn't
    # raise AttributeError -- TODO confirm against wandb.docker.
    local_image_sha = (wandb.docker.image_id("wandb/local") or "").split(
        "wandb/local"
    )[-1]
    registry_image_sha = (
        wandb.docker.image_id_from_registry("wandb/local") or ""
    ).split("wandb/local")[-1]
    if local_image_sha != registry_image_sha:
        if upgrade:
            subprocess.call(["docker", "pull", "wandb/local"])
        else:
            wandb.termlog(
                "A new version of the W&B server is available, upgrade by calling `wandb server start --upgrade`"
            )
    running = subprocess.check_output(
        ["docker", "ps", "--filter", "name=wandb-local", "--format", "{{.ID}}"]
    )
    if running != b"":
        if upgrade:
            subprocess.call(["docker", "stop", "wandb-local"])
        else:
            wandb.termerror(
                "A container named wandb-local is already running, run `docker stop wandb-local` if you want to start a new instance"
            )
            exit(1)
    image = "docker.pkg.github.com/wandb/core/local" if edge else "wandb/local"
    username = getpass.getuser()
    env_vars = ["-e", "LOCAL_USERNAME={}".format(username)]
    for e in env:
        env_vars.append("-e")
        env_vars.append(e)
    command = [
        "docker",
        "run",
        "--rm",
        "-v",
        "wandb:/vol",
        "-p",
        port + ":8080",
        "--name",
        "wandb-local",
    ] + env_vars
    host = f"http://localhost:{port}"
    api.set_setting("base_url", host, globally=True, persist=True)
    if daemon:
        command += ["-d"]
    command += [image]

    # subprocess.DEVNULL has existed since Python 3.3; the old py2
    # open(os.devnull) fallback was dead code and has been removed.
    code = subprocess.call(command, stdout=subprocess.DEVNULL)
    if daemon:
        if code != 0:
            wandb.termerror(
                "Failed to launch the W&B server container, see the above error."
            )
            exit(1)
        else:
            wandb.termlog(
                "W&B server started at http://localhost:{} \U0001f680".format(port)
            )
            wandb.termlog("You can stop the server by running `wandb server stop`")
            if not api.api_key:
                # Let the server start before potentially launching a browser
                time.sleep(2)
                ctx.invoke(login, host=host)
|
| 2274 |
+
|
| 2275 |
+
|
| 2276 |
+
@server.command(context_settings=RUN_CONTEXT, help="Stop a local W&B server")
def stop():
    """Stop the wandb-local docker container, if docker is available."""
    if find_executable("docker"):
        subprocess.call(["docker", "stop", "wandb-local"])
    else:
        raise ClickException("Docker not installed, install it from https://docker.com")
|
| 2281 |
+
|
| 2282 |
+
|
| 2283 |
+
@cli.group(help="Commands for interacting with artifacts")
def artifact():
    """Parent command group for the ``wandb artifact`` subcommands."""
|
| 2286 |
+
|
| 2287 |
+
|
| 2288 |
+
@artifact.command(context_settings=CONTEXT, help="Upload an artifact to wandb")
@click.argument("path")
@click.option(
    "--name", "-n", help="The name of the artifact to push: project/artifact_name"
)
@click.option("--description", "-d", help="A description of this artifact")
@click.option("--type", "-t", default="dataset", help="The type of the artifact")
@click.option(
    "--alias",
    "-a",
    default=["latest"],
    multiple=True,
    help="An alias to apply to this artifact",
)
@click.option("--id", "run_id", help="The run you want to upload to.")
@click.option(
    "--resume",
    is_flag=True,
    default=None,
    help="Resume the last run from your current directory.",
)
@click.option(
    "--skip_cache",
    is_flag=True,
    default=False,
    help="Skip caching while uploading artifact files.",
)
@click.option(
    "--policy",
    default="mutable",
    type=click.Choice(["mutable", "immutable"]),
    help="Set the storage policy while uploading artifact files.",
)
@display_error
def put(path, name, description, type, alias, run_id, resume, skip_cache, policy):
    """Upload a file, directory, or URI reference as an artifact.

    Creates (or resumes) a short-lived run with job_type ``cli_put`` to log
    the artifact under ``entity/project``.
    """
    if name is None:
        name = os.path.basename(path)
    public_api = PublicApi()
    entity, project, artifact_name = public_api._parse_artifact_path(name)
    if project is None:
        project = click.prompt("Enter the name of the project you want to use")
    # TODO: settings nightmare...
    api = InternalApi()
    api.set_setting("entity", entity)
    api.set_setting("project", project)
    artifact = wandb.Artifact(name=artifact_name, type=type, description=description)
    artifact_path = f"{entity}/{project}/{artifact_name}:{alias[0]}"
    # Decide how to ingest the source: directory, single file, or reference URI.
    if os.path.isdir(path):
        wandb.termlog(f'Uploading directory {path} to: "{artifact_path}" ({type})')
        artifact.add_dir(path, skip_cache=skip_cache, policy=policy)
    elif os.path.isfile(path):
        wandb.termlog(f'Uploading file {path} to: "{artifact_path}" ({type})')
        artifact.add_file(path, skip_cache=skip_cache, policy=policy)
    elif "://" in path:
        wandb.termlog(
            f'Logging reference artifact from {path} to: "{artifact_path}" ({type})'
        )
        artifact.add_reference(path)
    else:
        raise ClickException("Path argument must be a file or directory")

    with wandb.init(
        entity=entity,
        project=project,
        config={"path": path},
        job_type="cli_put",
        id=run_id,
        resume=resume,
    ) as run:
        run.log_artifact(artifact, aliases=alias)
        artifact.wait()

    wandb.termlog(
        "Artifact uploaded, use this artifact in a run by adding:\n", prefix=False
    )
    wandb.termlog(
        f'    artifact = run.use_artifact("{artifact.source_qualified_name}")\n',
        prefix=False,
    )
|
| 2377 |
+
|
| 2378 |
+
|
| 2379 |
+
@artifact.command(context_settings=CONTEXT, help="Download an artifact from wandb")
@click.argument("path")
@click.option("--root", help="The directory you want to download the artifact to")
@click.option("--type", help="The type of artifact you are downloading")
@display_error
def get(path, root, type):
    """Download the artifact at ``path`` (defaulting to the latest version)."""
    public_api = PublicApi()
    entity, project, artifact_name = public_api._parse_artifact_path(path)
    if project is None:
        project = click.prompt("Enter the name of the project you want to use")

    try:
        # Split off an explicit :alias-or-version, defaulting to latest.
        parts = artifact_name.split(":")
        if len(parts) > 1:
            artifact_name, version = parts[0], parts[1]
        else:
            version = "latest"
        if is_artifact_registry_project(project):
            organization = path.split("/")[0] if path.count("/") == 2 else ""
            # set entity to match the settings since in above code it was potentially set to an org
            settings_entity = public_api.settings["entity"] or public_api.default_entity
            # Registry artifacts are under the org entity. Because we offer a shorthand and alias for this path,
            # we need to fetch the org entity to for the user behind the scenes.
            entity = SDKInternalApi()._resolve_org_entity_name(
                entity=settings_entity, organization=organization
            )
        full_path = f"{entity}/{project}/{artifact_name}:{version}"
        wandb.termlog(
            "Downloading {type} artifact {full_path}".format(
                type=type or "dataset", full_path=full_path
            )
        )
        artifact = public_api.artifact(full_path, type=type)
        path = artifact.download(root=root)
        wandb.termlog("Artifact downloaded to {}".format(path))
    except ValueError:
        raise ClickException("Unable to download artifact")
|
| 2417 |
+
|
| 2418 |
+
|
| 2419 |
+
@artifact.command(
    context_settings=CONTEXT, help="List all artifacts in a wandb project"
)
@click.argument("path")
@click.option("--type", "-t", help="The type of artifacts to list")
@display_error
def ls(path, type):
    """List each artifact collection in the project, one line per collection."""
    public_api = PublicApi()
    # Either a single requested type or every type the project declares.
    types = (
        [public_api.artifact_type(type, path)]
        if type is not None
        else public_api.artifact_types(path)
    )

    for kind in types:
        for collection in kind.collections():
            versions = public_api.artifact_versions(
                kind.type,
                "/".join([kind.entity, kind.project, collection.name]),
                per_page=1,
            )
            latest = next(versions)
            wandb.termlog(
                "{:<15s}{:<15s}{:>15s} {:<20s}".format(
                    kind.type,
                    latest.updated_at,
                    util.to_human_size(latest.size),
                    latest.name,
                )
            )
|
| 2448 |
+
|
| 2449 |
+
|
| 2450 |
+
@artifact.group(help="Commands for interacting with the artifact cache")
def cache():
    """Parent command group for the ``wandb artifact cache`` subcommands."""
|
| 2453 |
+
|
| 2454 |
+
|
| 2455 |
+
@cache.command(
    context_settings=CONTEXT,
    help="Clean up less frequently used files from the artifacts cache",
)
@click.argument("target_size")
@click.option("--remove-temp/--no-remove-temp", default=False, help="Remove temp files")
@display_error
def cleanup(target_size, remove_temp):
    """Shrink the artifact cache toward ``target_size`` (human-readable, e.g. 5GB)."""
    size_bytes = util.from_human_size(target_size)
    reclaimed_bytes = get_artifact_file_cache().cleanup(size_bytes, remove_temp)
    wandb.termlog(f"Reclaimed {util.to_human_size(reclaimed_bytes)} of space")
|
| 2467 |
+
|
| 2468 |
+
|
| 2469 |
+
@cli.command(context_settings=CONTEXT, help="Pull files from Weights & Biases")
@click.argument("run", envvar=env.RUN_ID)
@click.option(
    "--project", "-p", envvar=env.PROJECT, help="The project you want to download."
)
@click.option(
    "--entity",
    "-e",
    default="models",
    envvar=env.ENTITY,
    help="The entity to scope the listing to.",
)
@display_error
def pull(run, project, entity):
    """Download every file of a run, skipping files whose local md5 already matches."""
    api = InternalApi()
    project, run = api.parse_slug(run, project=project)
    urls = api.download_urls(project, run=run, entity=entity)
    if len(urls) == 0:
        raise ClickException("Run has no files")
    click.echo(f"Downloading: {click.style(project, bold=True)}/{run}")

    for name in urls:
        # Unchanged files are skipped via md5 comparison.
        if api.file_current(name, urls[name]["md5"]):
            click.echo("File {} is up to date".format(name))
            continue
        length, response = api.download_file(urls[name]["url"])
        # TODO: I had to add this because some versions in CI broke click.progressbar
        sys.stdout.write("File {}\r".format(name))
        dirname = os.path.dirname(name)
        if dirname != "":
            filesystem.mkdir_exists_ok(dirname)
        with click.progressbar(
            length=length,
            label="File {}".format(name),
            fill_char=click.style("&", fg="green"),
        ) as bar:
            with open(name, "wb") as f:
                # Stream in 4 KiB chunks so large files never load fully in memory.
                for chunk in response.iter_content(chunk_size=4096):
                    f.write(chunk)
                    bar.update(len(chunk))
|
| 2509 |
+
|
| 2510 |
+
|
| 2511 |
+
@cli.command(
    context_settings=CONTEXT, help="Restore code, config and docker state for a run"
)
@click.pass_context
@click.argument("run", envvar=env.RUN_ID)
@click.option("--no-git", is_flag=True, default=False, help="Don't restore git state")
@click.option(
    "--branch/--no-branch",
    default=True,
    help="Whether to create a branch or checkout detached",
)
@click.option(
    "--project", "-p", envvar=env.PROJECT, help="The project you wish to upload to."
)
@click.option(
    "--entity", "-e", envvar=env.ENTITY, help="The entity to scope the listing to."
)
@display_error
def restore(ctx, run, no_git, branch, project, entity):
    """Recreate a run's git checkout, applied diff, config.yaml and (optionally) docker state."""
    from wandb.old.core import wandb_dir

    api = _get_cling_api()
    # Accept slugs like "entity/project:run", "project:run", or "entity/project/run".
    if ":" in run:
        if "/" in run:
            entity, rest = run.split("/", 1)
        else:
            rest = run
        project, run = rest.split(":", 1)
    elif run.count("/") > 1:
        entity, run = run.split("/", 1)

    project, run = api.parse_slug(run, project=project)
    commit, json_config, patch_content, metadata = api.run_config(
        project, run=run, entity=entity
    )
    repo = metadata.get("git", {}).get("repo")
    image = metadata.get("docker")
    restore_message = """`wandb restore` needs to be run from the same git repository as the original run.
Run `git clone {}` and restore from there or pass the --no-git flag.""".format(repo)
    if no_git:
        # Explicitly skip all git work; only config (and docker) are restored.
        commit = None
    elif not api.git.enabled:
        if repo:
            raise ClickException(restore_message)
        elif image:
            wandb.termlog(
                "Original run has no git history.  Just restoring config and docker"
            )

    if commit and api.git.enabled:
        wandb.termlog(f"Fetching origin and finding commit: {commit}")
        subprocess.check_call(["git", "fetch", "--all"])
        try:
            api.git.repo.commit(commit)
        except ValueError:
            # Exact commit not present locally: look for an "upstream_diff_<sha>.patch"
            # file among the run's files and fall back to that upstream commit.
            wandb.termlog(f"Couldn't find original commit: {commit}")
            commit = None
            files = api.download_urls(project, run=run, entity=entity)
            for filename in files:
                if filename.startswith("upstream_diff_") and filename.endswith(
                    ".patch"
                ):
                    commit = filename[len("upstream_diff_") : -len(".patch")]
                    try:
                        api.git.repo.commit(commit)
                    except ValueError:
                        commit = None
                    else:
                        break

            if commit:
                wandb.termlog(f"Falling back to upstream commit: {commit}")
                patch_path, _ = api.download_write_file(files[filename])
            else:
                raise ClickException(restore_message)
        else:
            # Original commit found locally: write the stored diff (if any) to disk.
            if patch_content:
                patch_path = os.path.join(wandb_dir(), "diff.patch")
                with open(patch_path, "w") as f:
                    f.write(patch_content)
            else:
                patch_path = None

        branch_name = "wandb/{}".format(run)
        if branch and branch_name not in api.git.repo.branches:
            api.git.repo.git.checkout(commit, b=branch_name)
            wandb.termlog(
                "Created branch {}".format(click.style(branch_name, bold=True))
            )
        elif branch:
            wandb.termlog(
                "Using existing branch, run `git branch -D {}` from master for a clean checkout".format(
                    branch_name
                )
            )
            api.git.repo.git.checkout(branch_name)
        else:
            wandb.termlog("Checking out {} in detached mode".format(commit))
            api.git.repo.git.checkout(commit)

        if patch_path:
            # we apply the patch from the repository root so git doesn't exclude
            # things outside the current directory
            root = api.git.root
            patch_rel_path = os.path.relpath(patch_path, start=root)
            # --reject is necessary or else this fails any time a binary file
            # occurs in the diff
            exit_code = subprocess.call(
                ["git", "apply", "--reject", patch_rel_path], cwd=root
            )
            if exit_code == 0:
                wandb.termlog("Applied patch")
            else:
                wandb.termerror(
                    "Failed to apply patch, try un-staging any un-committed changes"
                )

    # Rebuild config.yaml from the run's stored json config (internal keys dropped).
    filesystem.mkdir_exists_ok(wandb_dir())
    config_path = os.path.join(wandb_dir(), "config.yaml")
    config = Config()
    for k, v in json_config.items():
        if k not in ("_wandb", "wandb_version"):
            config[k] = v
    s = b"wandb_version: 1"
    s += b"\n\n" + yaml.dump(
        config._as_dict(),
        Dumper=yaml.SafeDumper,
        default_flow_style=False,
        allow_unicode=True,
        encoding="utf-8",
    )
    s = s.decode("utf-8")
    with open(config_path, "w") as f:
        f.write(s)

    wandb.termlog("Restored config variables to {}".format(config_path))
    if image:
        if not metadata["program"].startswith("<") and metadata.get("args") is not None:
            # TODO: we may not want to default to python here.
            runner = util.find_runner(metadata["program"]) or ["python"]
            command = runner + [metadata["program"]] + metadata["args"]
            cmd = " ".join(command)
        else:
            wandb.termlog("Couldn't find original command, just restoring environment")
            cmd = None
        wandb.termlog("Docker image found, attempting to start")
        ctx.invoke(docker, docker_run_args=[image], cmd=cmd)

    return commit, json_config, patch_content, repo, metadata
|
| 2660 |
+
|
| 2661 |
+
|
| 2662 |
+
@cli.command("online", help="Enable W&B sync")
@display_error
def online():
    """Re-enable cloud sync by clearing the persisted "mode" setting."""
    api = InternalApi()
    try:
        api.clear_setting("mode", persist=True)
    except configparser.Error:
        # Best effort: a missing/corrupt settings file shouldn't break the CLI.
        pass
    click.echo(
        "W&B online. Running your script from this directory will now sync to the cloud."
    )
|
| 2673 |
+
|
| 2674 |
+
|
| 2675 |
+
@cli.command("offline", help="Disable W&B sync")
@display_error
def offline():
    """Persist mode=offline so runs in this directory only write local metadata."""
    api = InternalApi()
    try:
        api.set_setting("mode", "offline", persist=True)
        click.echo(
            "W&B offline. Running your script from this directory will only write metadata locally. Use wandb disabled to completely turn off W&B."
        )
    except configparser.Error:
        # Settings file not writable: tell the user the env-var equivalent.
        click.echo(
            "Unable to write config, copy and paste the following in your terminal to turn off W&B:\nexport WANDB_MODE=offline"
        )
|
| 2688 |
+
|
| 2689 |
+
|
| 2690 |
+
@cli.command("on", hidden=True)
@click.pass_context
@display_error
def on(ctx):
    # Hidden alias that simply delegates to `wandb online`.
    ctx.invoke(online)
|
| 2695 |
+
|
| 2696 |
+
|
| 2697 |
+
@cli.command("off", hidden=True)
@click.pass_context
@display_error
def off(ctx):
    # Hidden alias that simply delegates to `wandb offline`.
    ctx.invoke(offline)
|
| 2702 |
+
|
| 2703 |
+
|
| 2704 |
+
@cli.command("status", help="Show configuration settings")
@click.option(
    "--settings/--no-settings", help="Show the current settings", default=True
)
def status(settings):
    """Print the resolved W&B settings as pretty-printed JSON."""
    api = _get_cling_api()
    if settings:
        click.echo(click.style("Current Settings", bold=True))
        # Rebinds the boolean flag to the actual settings mapping once needed.
        settings = api.settings()
        click.echo(
            json.dumps(settings, sort_keys=True, indent=2, separators=(",", ": "))
        )
|
| 2716 |
+
|
| 2717 |
+
|
| 2718 |
+
@cli.command("disabled", help="Disable W&B.")
@click.option(
    "--service",
    is_flag=True,
    show_default=True,
    default=True,
    help="Disable W&B service",
)
def disabled(service):
    """Persist mode=disabled so W&B is completely turned off for future runs."""
    # NOTE(review): the --service flag is accepted but not used in this body —
    # TODO confirm whether it is consumed elsewhere.
    api = InternalApi()
    try:
        api.set_setting("mode", "disabled", persist=True)
        click.echo("W&B disabled.")
    except configparser.Error:
        click.echo(
            "Unable to write config, copy and paste the following in your terminal to turn off W&B:\nexport WANDB_MODE=disabled"
        )
|
| 2735 |
+
|
| 2736 |
+
|
| 2737 |
+
@cli.command("enabled", help="Enable W&B.")
@click.option(
    "--service",
    is_flag=True,
    show_default=True,
    default=True,
    help="Enable W&B service",
)
def enabled(service):
    """Persist mode=online so W&B is re-enabled for future runs."""
    # NOTE(review): the --service flag is accepted but not used in this body —
    # TODO confirm whether it is consumed elsewhere.
    api = InternalApi()
    try:
        api.set_setting("mode", "online", persist=True)
        click.echo("W&B enabled.")
    except configparser.Error:
        click.echo(
            "Unable to write config, copy and paste the following in your terminal to turn on W&B:\nexport WANDB_MODE=online"
        )
|
| 2754 |
+
|
| 2755 |
+
|
| 2756 |
+
@cli.command(context_settings=CONTEXT, help="Verify your local instance")
@click.option("--host", default=None, help="Test a specific instance of W&B")
def verify(host):
    """Run a battery of connectivity/behavior checks against a W&B server.

    Exits with status 1 when host, login, GraphQL-put, large-post, run or
    artifact checks fail.
    """
    # TODO: (kdg) Build this all into a WandbVerify object, and clean this up.
    os.environ["WANDB_SILENT"] = "true"
    os.environ["WANDB_PROJECT"] = "verify"
    api = _get_cling_api()
    reinit = False
    if host is None:
        host = api.settings("base_url")
        wandb.termlog(f"Default host selected: {host}")
    # if the given host does not match the default host, re-run init
    elif host != api.settings("base_url"):
        reinit = True

    # Work inside a scratch directory so test runs/logs don't pollute the cwd.
    tmp_dir = tempfile.mkdtemp()
    wandb.termlog(
        "Find detailed logs for this test at: {}".format(os.path.join(tmp_dir, "wandb"))
    )
    os.chdir(tmp_dir)
    os.environ["WANDB_BASE_URL"] = host
    wandb.login(host=host)
    if reinit:
        api = _get_cling_api(reset=True)
    if not wandb_verify.check_host(host):
        sys.exit(1)
    if not wandb_verify.check_logged_in(api, host):
        sys.exit(1)
    url_success, url = wandb_verify.check_graphql_put(api, host)
    large_post_success = wandb_verify.check_large_post()
    wandb_verify.check_secure_requests(
        api.settings("base_url"),
        "Checking requests to base url",
        "Connections are not made over https. SSL required for secure communications.",
    )
    if url:
        wandb_verify.check_secure_requests(
            url,
            "Checking requests made over signed URLs",
            "Signed URL requests not made over https. SSL is required for secure communications.",
        )
        wandb_verify.check_cors_configuration(url, host)
    wandb_verify.check_wandb_version(api)
    check_run_success = wandb_verify.check_run(api)
    check_artifacts_success = wandb_verify.check_artifacts()
    # Only these four results gate the exit status; the secure-request/CORS/
    # version checks above do not feed into it.
    if not (
        check_artifacts_success
        and check_run_success
        and large_post_success
        and url_success
    ):
        sys.exit(1)
|
| 2808 |
+
|
| 2809 |
+
|
| 2810 |
+
cli.add_command(beta)
|
vllm/lib/python3.10/site-packages/wandb/old/__init__.py
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/wandb/old/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (159 Bytes). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/old/__pycache__/core.cpython-310.pyc
ADDED
|
Binary file (1.36 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/old/__pycache__/settings.cpython-310.pyc
ADDED
|
Binary file (5.31 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/old/__pycache__/summary.cpython-310.pyc
ADDED
|
Binary file (13.2 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/old/core.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Core variables, functions, and classes that we want in the wandb
|
| 2 |
+
module but are also used in modules that import the wandb module.
|
| 3 |
+
|
| 4 |
+
The purpose of this module is to break circular imports.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
import tempfile
|
| 9 |
+
import time
|
| 10 |
+
|
| 11 |
+
import wandb
|
| 12 |
+
from wandb import env
|
| 13 |
+
|
| 14 |
+
# We use the hidden version if it already exists, otherwise non-hidden.
# __stage_dir__ is the staging directory name relative to the project root
# (with a trailing separator); None means no wandb directory exists yet.
if os.path.exists(os.path.join(env.get_dir(os.getcwd()), ".wandb")):
    __stage_dir__ = ".wandb" + os.sep
elif os.path.exists(os.path.join(env.get_dir(os.getcwd()), "wandb")):
    __stage_dir__ = "wandb" + os.sep
else:
    __stage_dir__ = None

# Recorded at import time; exposed on the wandb module for duration bookkeeping.
wandb.START_TIME = time.time()
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def wandb_dir(root_dir=None):
    """Return the wandb staging directory under *root_dir* (derived when omitted)."""
    stage = __stage_dir__ or ("wandb" + os.sep)
    if root_dir is None or root_dir == "":
        try:
            cwd = os.getcwd()
        except OSError:
            wandb.termwarn("os.getcwd() no longer exists, using system temp directory")
            cwd = tempfile.gettempdir()
        root_dir = env.get_dir(cwd)
    path = os.path.join(root_dir, stage)
    if not os.access(root_dir, os.W_OK):
        # Root isn't writable: warn once and fall back to the system temp dir.
        wandb.termwarn(
            f"Path {path} wasn't writable, using system temp directory", repeat=False
        )
        path = os.path.join(tempfile.gettempdir(), stage)
    return path
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _set_stage_dir(stage_dir):
    # Used when initing a new project with "wandb init"
    # Overrides the module-level staging directory detected at import time.
    global __stage_dir__
    __stage_dir__ = stage_dir
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
__all__ = [
|
| 49 |
+
"__stage_dir__",
|
| 50 |
+
"START_TIME",
|
| 51 |
+
"wandb_dir",
|
| 52 |
+
"_set_stage_dir",
|
| 53 |
+
]
|
vllm/lib/python3.10/site-packages/wandb/old/settings.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import configparser
|
| 2 |
+
import getpass
|
| 3 |
+
import os
|
| 4 |
+
import tempfile
|
| 5 |
+
from typing import Any, Optional
|
| 6 |
+
|
| 7 |
+
from wandb import env
|
| 8 |
+
from wandb.old import core
|
| 9 |
+
from wandb.sdk.lib import filesystem
|
| 10 |
+
from wandb.sdk.lib.runid import generate_id
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Settings:
    """Global W&B settings stored under $WANDB_CONFIG_DIR/settings.

    Two configparser layers are maintained: a per-user "global" file and a
    per-project "local" file; local values shadow global ones on lookup.
    """

    DEFAULT_SECTION = "default"

    # Sentinel so callers may pass fallback=None and still get raise-on-missing.
    _UNSET = object()

    def __init__(
        self, load_settings: bool = True, root_dir: Optional[str] = None
    ) -> None:
        self._global_settings = Settings._settings()
        self._local_settings = Settings._settings()
        self.root_dir = root_dir

        if load_settings:
            global_path = Settings._global_path()
            if global_path is not None:
                self._global_settings.read([global_path])
            # Only attempt to read if there is a directory existing
            if os.path.isdir(core.wandb_dir(self.root_dir)):
                self._local_settings.read([Settings._local_path(self.root_dir)])

    def get(self, section: str, key: str, fallback: Any = _UNSET) -> Any:
        """Look up *key*: local settings first, then global, then *fallback*.

        Raises configparser.NoOptionError when the key is absent and no
        fallback was provided.
        """
        try:
            return self._local_settings.get(section, key)
        except configparser.NoOptionError:
            try:
                return self._global_settings.get(section, key)
            except configparser.NoOptionError:
                if fallback is not Settings._UNSET:
                    return fallback
                else:
                    raise

    def _persist_settings(self, settings, settings_path) -> None:
        # Write a temp file in the target directory, then atomically move it
        # over the settings path so readers never observe a partial file.
        target_dir = os.path.dirname(settings_path)
        with tempfile.NamedTemporaryFile(
            "w+", suffix=".tmp", delete=False, dir=target_dir
        ) as fp:
            path = os.path.abspath(fp.name)
        with open(path, "w+") as f:
            settings.write(f)
        try:
            os.replace(path, settings_path)
        except AttributeError:
            # Very old platforms without os.replace; rename is non-atomic there.
            os.rename(path, settings_path)

    def set(self, section, key, value, globally=False, persist=False) -> None:
        """Persist settings to disk if persist = True"""

        def write_setting(settings, settings_path, persist):
            if not settings.has_section(section):
                # BUG FIX: previously added DEFAULT_SECTION here, so setting a
                # key in any other missing section raised NoSectionError below.
                Settings._safe_add_section(settings, section)
            settings.set(section, key, str(value))

            if persist:
                self._persist_settings(settings, settings_path)

        if globally:
            global_path = Settings._global_path()
            if global_path is not None:
                write_setting(self._global_settings, global_path, persist)
        else:
            write_setting(
                self._local_settings, Settings._local_path(self.root_dir), persist
            )

    def clear(self, section, key, globally=False, persist=False) -> None:
        """Remove *key* from the chosen settings layer (and disk when persist=True)."""

        def clear_setting(settings, settings_path, persist):
            settings.remove_option(section, key)
            if persist:
                self._persist_settings(settings, settings_path)

        if globally:
            global_path = Settings._global_path()
            if global_path is not None:
                clear_setting(self._global_settings, global_path, persist)
        else:
            clear_setting(
                self._local_settings, Settings._local_path(self.root_dir), persist
            )

    def items(self, section=None):
        """Return merged options for *section* (local overriding global) plus the section name."""
        section = section if section is not None else Settings.DEFAULT_SECTION

        result = {"section": section}

        try:
            if section in self._global_settings.sections():
                for option in self._global_settings.options(section):
                    result[option] = self._global_settings.get(section, option)
            if section in self._local_settings.sections():
                for option in self._local_settings.options(section):
                    result[option] = self._local_settings.get(section, option)
        except configparser.InterpolationSyntaxError:
            # BUG FIX: this previously called core.termwarn, which does not
            # exist on wandb.old.core and would raise AttributeError. Import
            # wandb locally to avoid a circular import at module load time.
            import wandb

            wandb.termwarn("Unable to parse settings file")

        return result

    @staticmethod
    def _safe_add_section(settings, section):
        # add_section raises DuplicateSectionError on repeats; guard first.
        if not settings.has_section(section):
            settings.add_section(section)

    @staticmethod
    def _settings(default_settings=None):
        """Build a ConfigParser pre-populated with *default_settings* in the default section."""
        # default_settings=None instead of a mutable {} default, so no dict
        # instance is ever shared across calls.
        settings = configparser.ConfigParser()
        Settings._safe_add_section(settings, Settings.DEFAULT_SECTION)
        for key, value in (default_settings or {}).items():
            settings.set(Settings.DEFAULT_SECTION, key, str(value))
        return settings

    @staticmethod
    def _global_path() -> Optional[str]:
        """Path of the user-level settings file, creating a writable config dir.

        Falls back from ~/.config/wandb to temp-dir variants; returns None if
        nothing writable could be arranged. $WANDB_CONFIG_DIR overrides the
        probed location.
        """

        def try_create_dir(path) -> bool:
            try:
                os.makedirs(path, exist_ok=True)
                if os.access(path, os.W_OK):
                    return True
            except OSError:
                pass
            return False

        def get_username() -> str:
            try:
                return getpass.getuser()
            except (ImportError, KeyError):
                # No passwd entry (e.g. some containers): use a random id.
                return generate_id()

        try:
            home_config_dir = os.path.join(os.path.expanduser("~"), ".config", "wandb")

            if not try_create_dir(home_config_dir):
                temp_config_dir = os.path.join(
                    tempfile.gettempdir(), ".config", "wandb"
                )

                if not try_create_dir(temp_config_dir):
                    username = get_username()
                    config_dir = os.path.join(
                        tempfile.gettempdir(), username, ".config", "wandb"
                    )
                    try_create_dir(config_dir)
                else:
                    config_dir = temp_config_dir
            else:
                config_dir = home_config_dir

            # An explicit WANDB_CONFIG_DIR always wins over probed locations.
            config_dir = os.environ.get(env.CONFIG_DIR, config_dir)
            return os.path.join(config_dir, "settings")
        except Exception:
            return None

    @staticmethod
    def _local_path(root_dir=None):
        """Path of the project-level settings file, creating the wandb dir if needed."""
        filesystem.mkdir_exists_ok(core.wandb_dir(root_dir))
        return os.path.join(core.wandb_dir(root_dir), "settings")
|
vllm/lib/python3.10/site-packages/wandb/old/summary.py
ADDED
|
@@ -0,0 +1,440 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import os
|
| 3 |
+
import time
|
| 4 |
+
|
| 5 |
+
from wandb_gql import gql
|
| 6 |
+
|
| 7 |
+
import wandb
|
| 8 |
+
from wandb import util
|
| 9 |
+
from wandb.apis.internal import Api
|
| 10 |
+
from wandb.sdk import lib as wandb_lib
|
| 11 |
+
from wandb.sdk.data_types.utils import val_to_json
|
| 12 |
+
|
| 13 |
+
DEEP_SUMMARY_FNAME = "wandb.h5"
|
| 14 |
+
H5_TYPES = ("numpy.ndarray", "tensorflow.Tensor", "torch.Tensor")
|
| 15 |
+
h5py = util.get_module("h5py")
|
| 16 |
+
np = util.get_module("numpy")
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class SummarySubDict:
|
| 20 |
+
"""Nested dict-like object that proxies read and write operations through a root object.
|
| 21 |
+
|
| 22 |
+
This lets us do synchronous serialization and lazy loading of large values.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
    def __init__(self, root=None, path=()):
        # Node at *path* within the summary tree; with root=None this node is
        # its own root. Attribute assignments below route through __setattr__,
        # which passes underscore-prefixed names straight to object.__setattr__.
        self._path = tuple(path)
        if root is None:
            self._root = self
            self._json_dict = {}
        else:
            self._root = root
            json_dict = root._json_dict
            # Walk down to this node's serialized sub-dict (empty if absent).
            for k in path:
                json_dict = json_dict.get(k, {})

            self._json_dict = json_dict
        # Local cache of materialized values; _json_dict is the full picture.
        self._dict = {}

        # We use this to track which keys the user has set explicitly
        # so that we don't automatically overwrite them when we update
        # the summary from the history.
        self._locked_keys = set()
|
| 43 |
+
|
| 44 |
+
def __setattr__(self, k, v):
|
| 45 |
+
k = k.strip()
|
| 46 |
+
if k.startswith("_"):
|
| 47 |
+
object.__setattr__(self, k, v)
|
| 48 |
+
else:
|
| 49 |
+
self[k] = v
|
| 50 |
+
|
| 51 |
+
def __getattr__(self, k):
|
| 52 |
+
k = k.strip()
|
| 53 |
+
if k.startswith("_"):
|
| 54 |
+
return object.__getattribute__(self, k)
|
| 55 |
+
else:
|
| 56 |
+
return self[k]
|
| 57 |
+
|
| 58 |
+
    def _root_get(self, path, child_dict):
        """Load a value at a particular path from the root.

        This should only be implemented by the "_root" child class.

        We pass the child_dict so the item can be set on it or not as
        appropriate. Returning None for a nonexistent path wouldn't be
        distinguishable from that path being set to the value None.
        """
        # Abstract hook; subclasses that act as a tree root must override it.
        raise NotImplementedError
|
| 68 |
+
|
| 69 |
+
    def _root_set(self, path, new_keys_values):
        """Set a value at a particular path in the root.

        This should only be implemented by the "_root" child class.
        """
        # Abstract hook; subclasses that act as a tree root must override it.
        raise NotImplementedError
|
| 75 |
+
|
| 76 |
+
    def _root_del(self, path):
        """Delete a value at a particular path in the root.

        This should only be implemented by the "_root" child class.
        """
        # Abstract hook; subclasses that act as a tree root must override it.
        raise NotImplementedError
|
| 82 |
+
|
| 83 |
+
    def _write(self, commit=False):
        # should only be implemented on the root summary
        # Abstract hook: persists the summary; called after every mutation.
        raise NotImplementedError
|
| 86 |
+
|
| 87 |
+
def keys(self):
|
| 88 |
+
# _json_dict has the full set of keys, including those for h5 objects
|
| 89 |
+
# that may not have been loaded yet
|
| 90 |
+
return self._json_dict.keys()
|
| 91 |
+
|
| 92 |
+
def get(self, k, default=None):
|
| 93 |
+
if isinstance(k, str):
|
| 94 |
+
k = k.strip()
|
| 95 |
+
if k not in self._dict:
|
| 96 |
+
self._root._root_get(self._path + (k,), self._dict)
|
| 97 |
+
return self._dict.get(k, default)
|
| 98 |
+
|
| 99 |
+
def items(self):
|
| 100 |
+
# not all items may be loaded into self._dict, so we
|
| 101 |
+
# have to build the sequence of items from scratch
|
| 102 |
+
for k in self.keys():
|
| 103 |
+
yield k, self[k]
|
| 104 |
+
|
| 105 |
+
def __getitem__(self, k):
|
| 106 |
+
if isinstance(k, str):
|
| 107 |
+
k = k.strip()
|
| 108 |
+
|
| 109 |
+
self.get(k) # load the value into _dict if it should be there
|
| 110 |
+
res = self._dict[k]
|
| 111 |
+
|
| 112 |
+
return res
|
| 113 |
+
|
| 114 |
+
def __contains__(self, k):
|
| 115 |
+
if isinstance(k, str):
|
| 116 |
+
k = k.strip()
|
| 117 |
+
|
| 118 |
+
return k in self._json_dict
|
| 119 |
+
|
| 120 |
+
def __setitem__(self, k, v):
|
| 121 |
+
if isinstance(k, str):
|
| 122 |
+
k = k.strip()
|
| 123 |
+
|
| 124 |
+
path = self._path
|
| 125 |
+
|
| 126 |
+
if isinstance(v, dict):
|
| 127 |
+
self._dict[k] = SummarySubDict(self._root, path + (k,))
|
| 128 |
+
self._root._root_set(path, [(k, {})])
|
| 129 |
+
self._dict[k].update(v)
|
| 130 |
+
else:
|
| 131 |
+
self._dict[k] = v
|
| 132 |
+
self._root._root_set(path, [(k, v)])
|
| 133 |
+
|
| 134 |
+
self._locked_keys.add(k)
|
| 135 |
+
|
| 136 |
+
self._root._write()
|
| 137 |
+
|
| 138 |
+
return v
|
| 139 |
+
|
| 140 |
+
def __delitem__(self, k):
|
| 141 |
+
k = k.strip()
|
| 142 |
+
del self._dict[k]
|
| 143 |
+
self._root._root_del(self._path + (k,))
|
| 144 |
+
|
| 145 |
+
self._root._write()
|
| 146 |
+
|
| 147 |
+
def __repr__(self):
|
| 148 |
+
# use a copy of _dict, except add placeholders for h5 objects, etc.
|
| 149 |
+
# that haven't been loaded yet
|
| 150 |
+
repr_dict = dict(self._dict)
|
| 151 |
+
for k in self._json_dict:
|
| 152 |
+
v = self._json_dict[k]
|
| 153 |
+
if (
|
| 154 |
+
k not in repr_dict
|
| 155 |
+
and isinstance(v, dict)
|
| 156 |
+
and v.get("_type") in H5_TYPES
|
| 157 |
+
):
|
| 158 |
+
# unloaded h5 objects may be very large. use a placeholder for them
|
| 159 |
+
# if we haven't already loaded them
|
| 160 |
+
repr_dict[k] = "..."
|
| 161 |
+
else:
|
| 162 |
+
repr_dict[k] = self[k]
|
| 163 |
+
|
| 164 |
+
return repr(repr_dict)
|
| 165 |
+
|
| 166 |
+
def update(self, key_vals=None, overwrite=True):
|
| 167 |
+
"""Locked keys will be overwritten unless overwrite=False.
|
| 168 |
+
|
| 169 |
+
Otherwise, written keys will be added to the "locked" list.
|
| 170 |
+
"""
|
| 171 |
+
if key_vals:
|
| 172 |
+
write_items = self._update(key_vals, overwrite)
|
| 173 |
+
self._root._root_set(self._path, write_items)
|
| 174 |
+
self._root._write(commit=True)
|
| 175 |
+
|
| 176 |
+
def _update(self, key_vals, overwrite):
    """Merge ``key_vals`` into ``_dict`` without notifying the root.

    Returns the list of (key, value) pairs actually written so the
    caller (update()) can mirror them into the root, or None when
    ``key_vals`` is empty.
    """
    if not key_vals:
        return
    # Keys are mangled: surrounding whitespace is stripped.
    # NOTE(review): unlike get/__getitem__, this strips unconditionally,
    # so non-string keys would raise AttributeError — confirm intended.
    key_vals = {k.strip(): v for k, v in key_vals.items()}
    if overwrite:
        write_items = list(key_vals.items())
        self._locked_keys.update(key_vals.keys())
    else:
        # Skip keys already locked by an explicit set or prior overwrite.
        write_keys = set(key_vals.keys()) - self._locked_keys
        write_items = [(k, key_vals[k]) for k in write_keys]

    for key, value in write_items:
        if isinstance(value, dict):
            # Nested dicts become child SummarySubDicts, merged recursively.
            self._dict[key] = SummarySubDict(self._root, self._path + (key,))
            self._dict[key]._update(value, overwrite)
        else:
            self._dict[key] = value

    return write_items
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
class Summary(SummarySubDict):
    """Store summary metrics (eg. accuracy) during and after a run.

    You can manipulate this as if it's a Python dictionary but the keys
    get mangled. .strip() is called on them, so spaces at the beginning
    and end are removed.

    Acts as the "_root" for a tree of SummarySubDicts: implements the
    _root_get/_root_set/_root_del hooks against a mirrored JSON dict,
    spilling large values into a lazily-opened h5 file.
    """

    def __init__(self, run, summary=None):
        # run: provides .dir, where the deep-summary h5 file lives.
        # summary: optional pre-loaded JSON dict to seed the summary with.
        super().__init__()
        self._run = run
        self._h5_path = os.path.join(self._run.dir, DEEP_SUMMARY_FNAME)
        # Lazy load the h5 file
        self._h5 = None

        # Mirrored version of self._dict with versions of values that get written
        # to JSON kept up to date by self._root_set() and self._root_del().
        self._json_dict = {}

        if summary is not None:
            self._json_dict = summary

    def _json_get(self, path):
        # Intentionally a no-op stub; kept for interface compatibility.
        pass

    def _root_get(self, path, child_dict):
        """Load the decoded value at `path` into `child_dict`, if it exists.

        Setting (or not setting) the key on `child_dict` is what lets a
        missing path be distinguished from a path whose value is None.
        """
        json_dict = self._json_dict
        for key in path[:-1]:
            json_dict = json_dict[key]

        key = path[-1]
        if key in json_dict:
            child_dict[key] = self._decode(path, json_dict[key])

    def _root_del(self, path):
        """Delete the JSON-mirror entry at `path`, plus its h5 data if any."""
        json_dict = self._json_dict
        for key in path[:-1]:
            json_dict = json_dict[key]

        val = json_dict[path[-1]]
        del json_dict[path[-1]]
        if isinstance(val, dict) and val.get("_type") in H5_TYPES:
            if not h5py:
                wandb.termerror("Deleting tensors in summary requires h5py")
            else:
                self.open_h5()
                h5_key = "summary/" + ".".join(path)
                del self._h5[h5_key]
                self._h5.flush()

    def _root_set(self, path, new_keys_values):
        """Encode and store (key, value) pairs into the JSON mirror at `path`."""
        json_dict = self._json_dict
        for key in path:
            json_dict = json_dict[key]

        for new_key, new_value in new_keys_values:
            json_dict[new_key] = self._encode(new_value, path + (new_key,))

    def write_h5(self, path, val):
        """Write `val` under summary/<dotted path> in the h5 file, replacing any prior value."""
        # ensure the file is open
        self.open_h5()

        if not self._h5:
            wandb.termerror("Storing tensors in summary requires h5py")
        else:
            # h5py has no overwrite-in-place for datasets: delete first.
            try:
                del self._h5["summary/" + ".".join(path)]
            except KeyError:
                pass
            self._h5["summary/" + ".".join(path)] = val
            self._h5.flush()

    def read_h5(self, path, val=None):
        """Read summary/<dotted path> from the h5 file; `val` is the fallback.

        Returns None (implicitly) when h5py is unavailable.
        """
        # ensure the file is open
        self.open_h5()

        if not self._h5:
            wandb.termerror("Reading tensors from summary requires h5py")
        else:
            return self._h5.get("summary/" + ".".join(path), val)

    def open_h5(self):
        # Lazily open the h5 file in append mode; silently a no-op when
        # h5py isn't installed (callers report their own errors).
        if not self._h5 and h5py:
            self._h5 = h5py.File(self._h5_path, "a", libver="latest")

    def _decode(self, path, json_value):
        """Decode a `dict` encoded by `Summary._encode()`, loading h5 objects.

        h5 objects may be very large, so we won't have loaded them automatically.
        """
        if isinstance(json_value, dict):
            if json_value.get("_type") in H5_TYPES:
                return self.read_h5(path, json_value)
            elif json_value.get("_type") == "data-frame":
                wandb.termerror(
                    "This data frame was saved via the wandb data API. Contact support@wandb.com for help."
                )
                return None
            # TODO: transform wandb objects and plots
            else:
                # Plain nested dict: wrap in a lazy child view rooted here.
                return SummarySubDict(self, path)
        else:
            return json_value

    def _encode(self, value, path_from_root):
        """Normalize, compress, and encode sub-objects for backend storage.

        value: Object to encode.
        path_from_root: `tuple` of key strings from the top-level summary to the
            current `value`.

        Returns:
            A new tree of dict's with large objects replaced with dictionaries
            with "_type" entries that say which type the original data was.
        """

        # Constructs a new `dict` tree in `json_value` that discards and/or
        # encodes objects that aren't JSON serializable.

        if isinstance(value, dict):
            json_value = {}
            # NOTE(review): the loop variable shadows `value`; harmless
            # because .items() is evaluated once, but easy to misread.
            for key, value in value.items():
                json_value[key] = self._encode(value, path_from_root + (key,))
            return json_value
        else:
            path = ".".join(path_from_root)
            # `converted` (whether json_friendly changed the value) is unused here.
            friendly_value, converted = util.json_friendly(
                val_to_json(self._run, path, value, namespace="summary")
            )
            json_value, compressed = util.maybe_compress_summary(
                friendly_value, util.get_h5_typename(value)
            )
            if compressed:
                # The JSON mirror keeps only a placeholder; real data goes to h5.
                self.write_h5(path_from_root, friendly_value)

            return json_value
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
def download_h5(run_id, entity=None, project=None, out_dir=None):
    """Download a run's deep-summary h5 file and return its local path.

    Falls back to the API's configured entity/project when not given.
    Returns None (implicitly) when the run has no stored h5 summary
    (no md5 in the download metadata).
    """
    api = Api()
    meta = api.download_url(
        project or api.settings("project"),
        DEEP_SUMMARY_FNAME,
        entity=entity or api.settings("entity"),
        run=run_id,
    )
    if not (meta and "md5" in meta and meta["md5"] is not None):
        return None
    # TODO: make this non-blocking
    wandb.termlog("Downloading summary data...")
    path, _ = api.download_write_file(meta, out_dir=out_dir)
    return path
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def upload_h5(file, run_id, entity=None, project=None):
    """Push a local h5 summary file to the backend for the given run."""
    api = Api()
    wandb.termlog("Uploading summary data...")
    with open(file, "rb") as handle:
        payload = {os.path.basename(file): handle}
        api.push(payload, run=run_id, project=project, entity=entity)
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
class FileSummary(Summary):
    """Summary persisted to a local JSON file in the run directory."""

    def __init__(self, run):
        super().__init__(run)
        self._fname = os.path.join(run.dir, wandb_lib.filenames.SUMMARY_FNAME)
        self.load()

    def load(self):
        """(Re)read the summary JSON from disk.

        A missing or corrupt file (OSError / ValueError) resets the
        summary to empty rather than raising.
        """
        try:
            with open(self._fname) as f:
                self._json_dict = json.load(f)
        except (OSError, ValueError):
            self._json_dict = {}

    def _write(self, commit=False):
        """Flush the JSON mirror to disk and close any open h5 handle."""
        # TODO: we just ignore commit to ensure backward capability
        with open(self._fname, "w") as f:
            f.write(util.json_dumps_safer(self._json_dict))
            f.write("\n")
            f.flush()
            # fsync so the summary survives a crash right after a write.
            os.fsync(f.fileno())
        if self._h5:
            # Close so readers see a consistent file; reopened lazily.
            self._h5.close()
            self._h5 = None
        if wandb.run and wandb.run._jupyter_agent:
            # NOTE(review): presumably nudges the jupyter agent to sync
            # the updated summary — confirm against the agent's contract.
            wandb.run._jupyter_agent.start()
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class HTTPSummary(Summary):
    """Summary synced to the wandb backend via a GraphQL client."""

    def __init__(self, run, client, summary=None):
        super().__init__(run, summary=summary)
        self._run = run
        # GraphQL client used by _write() to upsert summary metrics.
        self._client = client
        # Timestamp used to detect h5 files modified during this session.
        self._started = time.time()

    def __delitem__(self, key):
        # NOTE(review): unlike SummarySubDict.__delitem__, this neither
        # strips the key nor removes it from self._dict nor triggers a
        # write — it only drops the JSON-mirror entry. Confirm intended.
        if key not in self._json_dict:
            raise KeyError(key)
        del self._json_dict[key]

    def load(self):
        # Nothing to load eagerly; data arrives via the `summary` argument.
        pass

    def open_h5(self):
        """Fetch the remote h5 summary file before opening it locally."""
        if not self._h5 and h5py:
            download_h5(
                self._run.id,
                entity=self._run.entity,
                project=self._run.project,
                out_dir=self._run.dir,
            )
        super().open_h5()

    def _write(self, commit=False):
        """Upsert summary metrics via GraphQL; returns False when commit=False.

        On commit: closes any open h5 handle, pushes the JSON mirror,
        and uploads the h5 file if it was modified since this summary
        was constructed.
        """
        mutation = gql(
            """
        mutation UpsertBucket( $id: String, $summaryMetrics: JSONString) {
            upsertBucket(input: { id: $id, summaryMetrics: $summaryMetrics}) {
                bucket { id }
            }
        }
        """
        )
        if commit:
            if self._h5:
                # Close before upload so the file on disk is complete.
                self._h5.close()
                self._h5 = None
            res = self._client.execute(
                mutation,
                variable_values={
                    "id": self._run.storage_id,
                    "summaryMetrics": util.json_dumps_safer(self._json_dict),
                },
            )
            assert res["upsertBucket"]["bucket"]["id"]
            entity, project, run = self._run.path
            if (
                os.path.exists(self._h5_path)
                and os.path.getmtime(self._h5_path) >= self._started
            ):
                # Only re-upload if the h5 file changed during this session.
                upload_h5(self._h5_path, run, entity=entity, project=project)
        else:
            return False
|
vllm/lib/python3.10/site-packages/wandb/plot/__init__.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Chart Visualization Utilities
|
| 2 |
+
|
| 3 |
+
This module offers a collection of predefined chart types, along with functionality
|
| 4 |
+
for creating custom charts, enabling flexible visualization of your data beyond the
|
| 5 |
+
built-in options.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"line",
|
| 10 |
+
"histogram",
|
| 11 |
+
"scatter",
|
| 12 |
+
"bar",
|
| 13 |
+
"roc_curve",
|
| 14 |
+
"pr_curve",
|
| 15 |
+
"confusion_matrix",
|
| 16 |
+
"line_series",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
from wandb.plot.bar import bar
|
| 20 |
+
from wandb.plot.confusion_matrix import confusion_matrix
|
| 21 |
+
from wandb.plot.custom_chart import CustomChart, plot_table
|
| 22 |
+
from wandb.plot.histogram import histogram
|
| 23 |
+
from wandb.plot.line import line
|
| 24 |
+
from wandb.plot.line_series import line_series
|
| 25 |
+
from wandb.plot.pr_curve import pr_curve
|
| 26 |
+
from wandb.plot.roc_curve import roc_curve
|
| 27 |
+
from wandb.plot.scatter import scatter
|
| 28 |
+
from wandb.plot.viz import Visualize, visualize
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.03 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/bar.cpython-310.pyc
ADDED
|
Binary file (2.37 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/confusion_matrix.cpython-310.pyc
ADDED
|
Binary file (6.67 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/custom_chart.cpython-310.pyc
ADDED
|
Binary file (4.1 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/histogram.cpython-310.pyc
ADDED
|
Binary file (2.02 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/line.cpython-310.pyc
ADDED
|
Binary file (2.73 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/line_series.cpython-310.pyc
ADDED
|
Binary file (6.21 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/pr_curve.cpython-310.pyc
ADDED
|
Binary file (6.13 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/roc_curve.cpython-310.pyc
ADDED
|
Binary file (5.35 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/wandb/plot/__pycache__/scatter.cpython-310.pyc
ADDED
|
Binary file (2.39 kB). View file
|
|
|