Dataset Viewer (First 5GB)
Auto-converted to Parquet Duplicate
id
int64
1
6.07M
name
stringlengths
1
295
code
stringlengths
12
426k
language
stringclasses
1 value
source_file
stringlengths
5
202
start_line
int64
1
158k
end_line
int64
1
158k
repo
dict
1
lint
def lint(session: nox.Session) -> None:
    """
    Run the pre-commit linter over every file in the repository.
    """
    session.install("pre-commit")
    args = ["run", "--all-files", "--show-diff-on-failure", *session.posargs]
    session.run("pre-commit", *args)
python
noxfile.py
17
24
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
2
pylint
def pylint(session: nox.Session) -> None:
    """
    Run PyLint.
    """
    # PyLint imports the package, so it must be installed into the same
    # environment as the package; that makes this slower than a pre-commit
    # check.
    session.install(".", "pylint")
    cmd = ("pylint", "my_cool_package", *session.posargs)
    session.run(*cmd)
python
noxfile.py
28
35
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
3
tests
def tests(session: nox.Session) -> None:
    """
    Run the unit and regular tests with pytest.
    """
    session.install(".[test]")
    extra = list(session.posargs)
    session.run("pytest", *extra)
python
noxfile.py
39
44
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
4
docs
def docs(session: nox.Session) -> None:
    """
    Build the docs. Pass "--serve" to serve. Pass "-b linkcheck" to check links.

    Any positional arguments this session does not recognize are forwarded
    to sphinx-build / sphinx-autobuild.
    """
    # Parse only the options this session understands; everything else is
    # passed through to Sphinx via ``posargs``.
    parser = argparse.ArgumentParser()
    parser.add_argument("--serve", action="store_true", help="Serve after building")
    parser.add_argument(
        "-b", dest="builder", default="html", help="Build target (default: html)"
    )
    args, posargs = parser.parse_known_args(session.posargs)

    # sphinx-autobuild can only serve the HTML builder's output.
    if args.builder != "html" and args.serve:
        session.error("Must not specify non-HTML builder with --serve")

    # sphinx-autobuild is only needed when serving.
    extra_installs = ["sphinx-autobuild"] if args.serve else []
    session.install("-e.[docs]", *extra_installs)
    session.chdir("docs")

    # linkcheck is a one-shot run with its own output directory.
    if args.builder == "linkcheck":
        session.run(
            "sphinx-build", "-b", "linkcheck", ".", "_build/linkcheck", *posargs
        )
        return

    shared_args = (
        "-n",  # nitpicky mode
        "-T",  # full tracebacks
        f"-b={args.builder}",
        ".",
        f"_build/{args.builder}",
        *posargs,
    )

    if args.serve:
        session.run("sphinx-autobuild", *shared_args)
    else:
        session.run("sphinx-build", "--keep-going", *shared_args)
python
noxfile.py
48
86
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
5
build_api_docs
def build_api_docs(session: nox.Session) -> None:
    """
    Build (regenerate) the API docs with sphinx-apidoc.
    """
    session.install("sphinx")
    session.chdir("docs")
    apidoc_flags = ["--module-first", "--no-toc", "--force"]
    session.run(
        "sphinx-apidoc",
        "-o",
        "api/",
        *apidoc_flags,
        "../src/my_cool_package",
    )
python
noxfile.py
90
105
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
6
build
def build(session: nox.Session) -> None:
    """
    Build an SDist and wheel with the PyPA ``build`` tool.
    """
    # Remove any stale build artifacts first.
    stale = DIR.joinpath("build")
    if stale.exists():
        shutil.rmtree(stale)

    session.install("build")
    session.run("python", "-m", "build")
python
noxfile.py
109
119
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
7
random
def random(
    cls,
    num_particles: int,
    num_dimensions: int,
    mass_mean: float,
    mass_width: float,
    x_width: float,
    p_width: float,
    rng: None | np.random.Generator = None,
) -> System:
    """
    Construct a System of randomly generated gravitating particles.

    Args:
        num_particles (int): How many particles to generate.
        num_dimensions (int): Number of dimensions in each particle's
            positions and momenta.
        mass_mean (float): Mean particle mass; masses follow a gamma
            distribution.
        mass_width (float): The gamma distribution's "theta" (scale)
            parameter; the mass variance is theta times the mean.
        x_width (float): Standard deviation ("sigma") of positions, drawn
            from a zero-mean normal (Gaussian) distribution.
        p_width (float): Standard deviation ("sigma") of momenta, drawn
            from a zero-mean normal (Gaussian) distribution.
        rng (None or np.random.Generator): NumPy random number generator;
            a fresh ``np.random.default_rng()`` is created when None.
    """
    generator = np.random.default_rng() if rng is None else rng

    # Draw masses first, then positions, then momenta, so a seeded
    # generator reproduces the same system every time.
    masses = generator.gamma(mass_mean / mass_width, mass_width, num_particles)
    positions = generator.normal(0, x_width, (num_particles, num_dimensions))
    momenta = generator.normal(0, p_width, (num_particles, num_dimensions))
    return cls(masses, positions, momenta)
python
src/my_cool_package/orbitty.py
51
86
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
8
__init__
def __init__(self, m: npt.ArrayLike, x: npt.ArrayLike, p: npt.ArrayLike):
    """
    Initialize a system of gravitating particles with explicit values.

    Args:
        m (npt.ArrayLike): Ordered collection of masses (1-dimensional).
        x (npt.ArrayLike): Collection of positions in the same order. The
            first axis has one subarray per particle; the second axis has
            one position in each dimension.
        p (npt.ArrayLike): Collection of momenta in the same order. The
            first axis has one subarray per particle; the second axis has
            one momentum in each dimension.

    Raises:
        ValueError: If positions/momenta are not 2-dimensional
            (particles x dimensions), the masses are not 1-dimensional,
            or there are fewer than 2 dimensions.
        TypeError: If any array is neither integer (promoted to float64)
            nor floating-point.
    """
    # Broadcasting lets callers pass data that expands to a common
    # (num_particles, num_dimensions) shape.
    self.x, self.p = np.broadcast_arrays(x, p)
    assert self.x.shape == self.p.shape
    if len(self.x.shape) != 2:
        err = f"arrays of position and momentum must each have 2 components, not {len(self.x.shape)}"  # type: ignore[unreachable]
        raise ValueError(err)
    if self.num_dimensions < 2:
        # BUG FIX: message previously said "at least 1" but the check
        # rejects anything below 2.
        err = "number of dimensions must be at least 2"
        raise ValueError(err)

    # Broadcast masses against one position column so a scalar mass
    # expands to one mass per particle.
    self.m, _ = np.broadcast_arrays(m, self.x[:, 0])
    assert len(self.m) == len(self.x)
    if len(self.m.shape) != 1:
        err = f"array of masses must have only 1 component, not {len(self.m.shape)}"
        raise ValueError(err)

    # Integer input is accepted but promoted to float64 for the dynamics.
    if issubclass(self.m.dtype.type, np.integer):  # type: ignore[unreachable]
        self.m = self.m.astype(np.float64)
    if issubclass(self.x.dtype.type, np.integer):
        self.x = self.x.astype(np.float64)
    if issubclass(self.p.dtype.type, np.integer):
        self.p = self.p.astype(np.float64)

    if not issubclass(self.m.dtype.type, np.floating):
        err = f"masses must have floating-point type, not {self.m.dtype}"
        raise TypeError(err)
    if not issubclass(self.x.dtype.type, np.floating):
        # BUG FIX: previously reported self.m.dtype here.
        err = f"positions must have floating-point type, not {self.x.dtype}"
        raise TypeError(err)
    if not issubclass(self.p.dtype.type, np.floating):
        # BUG FIX: previously reported self.m.dtype here.
        err = f"momenta must have floating-point type, not {self.p.dtype}"
        raise TypeError(err)

    # Record the initial state as step 0 of the history.
    self.history: list[System.Step] = [self.Step(0, self.x, self.p)]
python
src/my_cool_package/orbitty.py
88
133
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
9
__init__
def __init__(
    self,
    t: float,
    x: np.ndarray[tuple[int, int], FloatingPoint],
    p: np.ndarray[tuple[int, int], FloatingPoint],
):
    """Record one snapshot: time ``t`` plus position and momentum arrays."""
    self.t = t
    self.x = x
    self.p = p
python
src/my_cool_package/orbitty.py
140
146
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
10
__repr__
def __repr__(self) -> str:
    """Show the step's time and full particle state, for debugging."""
    x_list = self.x.tolist()
    p_list = self.p.tolist()
    return f"<Step t={self.t} x={x_list} p={p_list}>"
python
src/my_cool_package/orbitty.py
148
149
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
11
__repr__
def __repr__(self) -> str:
    """Summarize the system's size rather than dumping all particle data."""
    n = self.num_particles
    d = self.num_dimensions
    return f"<System of {n} particles in {d} dimensions>"
python
src/my_cool_package/orbitty.py
151
152
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
12
num_particles
def num_particles(self) -> int:
    """
    The number of particles in the System (first axis of ``x``).
    """
    particles, _ = self.x.shape
    return particles
python
src/my_cool_package/orbitty.py
155
160
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
13
num_dimensions
def num_dimensions(self) -> int:
    """
    The number of dimensions in each position and momentum (second axis
    of ``x``).
    """
    _, dimensions = self.x.shape
    return dimensions
python
src/my_cool_package/orbitty.py
163
168
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
14
forces
def forces(self) -> np.ndarray[tuple[int, int], FloatingPoint]: """ The total force, as a vector on each particle, due to gravitational attraction to all other particles in the System. This array has the same shape as ``x`` and ``p``. """ # indexes to pick out (particle 1, particle 2) pairs, for all pairs p1, p2 = np.triu_indices(len(self.x), k=1) # pairwise (pw) displacements between all particle pairs pw_displacement = self.x[p2] - self.x[p1] # pairwise displacement is a sum in quadrature over all dimensions pw_distance = np.maximum( np.sqrt(np.sum(pw_displacement**2, axis=-1)), self.min_distance ) # direction is a unit vector pw_direction = pw_displacement / pw_distance[:, np.newaxis] m1 = self.m[p1, np.newaxis] m2 = self.m[p2, np.newaxis] # 1/r in 2D, 1/r**2 in 3D, 1/r**3 in 4D... power = self.num_dimensions - 1 # law of universal gravitation pw_force = self.G * m1 * m2 * pw_direction / pw_distance[:, np.newaxis] ** power # vector sum over pairs for each particle, np.add.at inverts p1, p2 indexing total_force = np.zeros_like(self.x) np.add.at(total_force, p1, pw_force) np.add.at(total_force, p2, -pw_force) return total_force
python
src/my_cool_package/orbitty.py
171
199
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
15
step
def step(self, dt: float = 0.1) -> None:
    """
    Simulate the System for one time-step.

    Args:
        dt (float): Time interval to simulate. The smaller this value is,
            the more precise the simulation will be.

    Uses a kick-drift-kick method to control numerical error, which
    evaluates ``forces`` twice per call.

    This method changes the state of the System, including its ``history``.
    """
    half_dt = dt / 2

    # kick: advance momenta half a step using forces at the current positions
    self.p = self.p + self.forces * half_dt
    # drift: advance positions a full step using the updated momenta
    self.x = self.x + self.p * dt / self.m[:, np.newaxis]
    # kick: advance momenta the remaining half step at the new positions
    self.p = self.p + self.forces * half_dt

    # append the new state to the history
    new_t = self.history[-1].t + dt
    self.history.append(self.Step(new_t, self.x, self.p))
python
src/my_cool_package/orbitty.py
201
223
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
16
steps
def steps(self, n: int, dt: float = 0.01) -> None:
    """
    Simulate the System for ``n`` time-steps.

    Args:
        n (int): Number of time-steps.
        dt (float): Time interval for each step. The smaller this value
            is, the more precise the simulation will be.

    This method changes the state of the System, including its ``history``.
    """
    remaining = n
    while remaining > 0:
        self.step(dt=dt)
        remaining -= 1
python
src/my_cool_package/orbitty.py
225
238
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
17
t_history
def t_history(self) -> np.ndarray[tuple[int], FloatingPoint]:
    """
    Get the history of time-steps as an array.

    The 1 axis is

    * time-steps
    """
    times = [step.t for step in self.history]
    return np.array(times)
python
src/my_cool_package/orbitty.py
241
250
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
18
x_history
def x_history(self) -> np.ndarray[tuple[int, int, int], FloatingPoint]:
    """
    Get the history of x positions as an array.

    The 3 axes are

    * time-steps
    * particles
    * dimensions

    Note:
        Only the first two dimensions of each position are returned, even
        for systems with more than two dimensions — this array feeds the
        2-d ``plot`` animation.
    """
    # shape is (time-steps, particles, 2): dimensions beyond 2 are dropped
    x = np.empty((len(self.history), self.num_particles, 2))
    for i, step in enumerate(self.history):
        for j in range(self.num_particles):
            x[i, j, :] = step.x[j, :2]
    return x
python
src/my_cool_package/orbitty.py
253
268
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
19
p_history
def p_history(self) -> np.ndarray[tuple[int, int, int], FloatingPoint]:
    """
    Get the history of p momenta as an array.

    The 3 axes are

    * time-steps
    * particles
    * dimensions

    Note:
        Only the first two dimensions of each momentum are returned,
        mirroring ``x_history``.
    """
    # shape is (time-steps, particles, 2): dimensions beyond 2 are dropped
    p = np.empty((len(self.history), self.num_particles, 2))
    for i, step in enumerate(self.history):
        for j in range(self.num_particles):
            p[i, j, :] = step.p[j, :2]
    return p
python
src/my_cool_package/orbitty.py
271
286
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
20
plot
def plot(
    self,
    figsize: tuple[int, int] = (5, 5),
    method: Literal["to_jshtml", "to_html5_video"] = "to_jshtml",
    num_frames: int = 100,
    frame_ms: int = 50,
) -> Any:
    """
    Present the time-evolution of the System as a Matplotlib animation.

    Be sure to call ``step`` or ``steps`` before this function, so that
    there is something to plot!

    Args:
        figsize (tuple[int, int]): Matplotlib figure size.
        method ("to_jshtml", "to_html5_video"): Name of the
            animation-to-HTML method. ``to_jshtml`` always works, and
            ``to_html5_video`` works if video codecs are available
            (JupyterLab but not JupyterLite).
        num_frames (int): Number of frames to render in the animation,
            which can be fewer than the number of simulated time-steps.
        frame_ms (int): Number of milliseconds between each frame.

    Returns:
        An IPython ``HTML`` object wrapping the rendered animation.
    """
    # Imported here so matplotlib/IPython are only required when plotting.
    import matplotlib.pyplot as plt  # pylint: disable=E0401, C0415
    from IPython.display import HTML  # pylint: disable=E0401, C0415
    from matplotlib import animation  # pylint: disable=E0401, C0415

    fig, ax = plt.subplots(figsize=figsize)

    x = self.x_history
    # Center the axes on the mean position; scale from the 75th percentile
    # of per-particle maximum excursion so outliers don't dominate.
    x0 = np.mean(x[:, :, 0])
    y0 = np.mean(x[:, :, 1])
    scale = np.percentile(np.max(abs(x), axis=0), 75) * 1.5
    ax.set(xlim=(x0 - scale, x0 + scale), ylim=(y0 - scale, y0 + scale))

    # Subsample the history so at most roughly num_frames frames are drawn.
    if len(x) > num_frames:
        x = x[:: len(x) // num_frames]

    # One trail line per particle, plus a scatter of current positions.
    lines = []
    for j in range(self.num_particles):
        lines.append(ax.plot(x[:1, j, 0], x[:1, j, 1])[0])
    dots = ax.scatter(x[0, :, 0], x[0, :, 1], color="black")

    def update(i: int) -> list[Any]:
        # FuncAnimation callback: extend trails to frame i, move the dots.
        for j, line in enumerate(lines):
            line.set_xdata(x[:i, j, 0])
            line.set_ydata(x[:i, j, 1])
        dots.set_offsets(x[i, :, :])
        return [*lines, dots]

    ani = animation.FuncAnimation(
        fig=fig, func=update, frames=len(x), interval=frame_ms, blit=True
    )
    out = HTML(getattr(ani, method)())
    plt.close()
    return out
python
src/my_cool_package/orbitty.py
288
345
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
21
update
def update(i: int) -> list[Any]:
    """Matplotlib ``FuncAnimation`` callback: draw state up to frame ``i``.

    Mutates the closed-over ``lines`` (per-particle trails) and ``dots``
    (current positions) artists, returning them for blitting.
    """
    for j, line in enumerate(lines):
        line.set_xdata(x[:i, j, 0])
        line.set_ydata(x[:i, j, 1])
    dots.set_offsets(x[i, :, :])
    return [*lines, dots]
python
src/my_cool_package/orbitty.py
332
337
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
22
to_radians
def to_radians(θ: float) -> float:
    """
    Convert θ from degrees into radians.

    This is a scale transformation in which θ = 360° is returned as 2π.
    """
    scaled = θ * math.pi
    return scaled / 180
python
src/my_cool_package/oldtrig.py
6
13
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
23
sine
def sine(θ: float) -> float:
    """
    In a triangle with one right angle and another angle θ, the sine is the
    length of the side opposite to θ divided by the length of the hypotenuse.

    The word "sine" comes from

    1. the Latin "sinus" ("bosom"),
    2. a translation of Arabic "جَيْب" ("bosom"),
    3. a misidentification of Arabic "جيب" (j-y-b),
    4. which is derived from Sanskrit "ज्या" ("sine" or "bowstring").
    """
    radians = to_radians(θ)
    return math.sin(radians)
python
src/my_cool_package/oldtrig.py
16
29
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
24
cosine
def cosine(θ: float) -> float:
    """
    In a triangle with one right angle and another angle θ, the cosine is the
    length of the side adjacent to θ divided by the length of the hypotenuse.

    The word "cosine" comes from

    1. the Latin "complementi sinus" (1620, Edmund Gunter's Canon Triangulorum)

    and is unrelated to "कोटि-ज्या", despite sounding similar and having the
    same meaning (on a unit circle). From the Surya Siddhanta (5th century CE).
    """
    return math.cos(to_radians(θ))
python
src/my_cool_package/oldtrig.py
32
45
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
25
versine
def versine(θ: float) -> float:
    """
    Versed sine (as in "versus" or "against"), equal to 1 - cosine(θ).

    Called "उत्क्रम-ज्या" in the Surya Siddhanta (5th century CE). It was
    popular before computers because it is always non-negative, making it
    easier to apply tables of logarithms.
    """
    radians = to_radians(θ)
    return 1 - math.cos(radians)
python
src/my_cool_package/oldtrig.py
48
58
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
26
coversine
def coversine(θ: float) -> float:
    """
    Complement of the versed sine, equal to 1 - sine(θ).
    """
    return 1 - math.sin(to_radians(θ))
python
src/my_cool_package/oldtrig.py
64
69
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
27
vercosine
def vercosine(θ: float) -> float:
    """
    Versed complement-sine, equal to 1 + cosine(θ).
    """
    radians = to_radians(θ)
    return 1 + math.cos(radians)
python
src/my_cool_package/oldtrig.py
72
77
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
28
covercosine
def covercosine(θ: float) -> float:
    """
    Complement to the versed complement-sine, equal to 1 + sine(θ).
    """
    radians = to_radians(θ)
    return 1 + math.sin(radians)
python
src/my_cool_package/oldtrig.py
80
85
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
29
haversine
def haversine(θ: float) -> float:
    """
    Half of the versed sine, equal to (1 - cosine(θ)) / 2.
    """
    radians = to_radians(θ)
    return (1 - math.cos(radians)) / 2
python
src/my_cool_package/oldtrig.py
88
93
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
30
hacoversine
def hacoversine(θ: float) -> float:
    """
    Half of the complement of the versed sine, equal to (1 - sine(θ)) / 2.
    """
    radians = to_radians(θ)
    return (1 - math.sin(radians)) / 2
python
src/my_cool_package/oldtrig.py
96
101
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
31
havercosine
def havercosine(θ: float) -> float:
    """
    Half of the versed complement-sine, equal to (1 + cosine(θ)) / 2.
    """
    radians = to_radians(θ)
    return (1 + math.cos(radians)) / 2
python
src/my_cool_package/oldtrig.py
104
109
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
32
hacovercosine
def hacovercosine(θ: float) -> float:
    """
    Half of the complement to the versed complement-sine, equal to
    (1 + sine(θ)) / 2. Sheesh!
    """
    radians = to_radians(θ)
    return (1 + math.sin(radians)) / 2
python
src/my_cool_package/oldtrig.py
112
119
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
33
exsecant
def exsecant(θ: float) -> float:
    """
    External secant, equal to 1/cosine(θ) - 1.

    Introduced in 1855 by American civil engineer Charles Haslett, used to
    design circular sections of the Ohio and Mississippi railroad.

    "Experience has shown, that versed sines and external secants as
    frequently enter into calculations on curves as sines and tangents;
    and by their use, as illustrated in the examples given in this work,
    it is believed that many of the rules in general use are much
    simplified, and many calculations concerning curves and running lines
    made less intricate, and results obtained with more accuracy and far
    less trouble, than by any methods laid down in works of this kind."

    -- The Mechanic's, Machinist's, and Engineer's Practical Book of Reference
    """
    radians = to_radians(θ)
    return 1 / math.cos(radians) - 1
python
src/my_cool_package/oldtrig.py
125
142
{ "name": "jpivarski-talks/my-cool-package", "url": "https://github.com/jpivarski-talks/my-cool-package.git", "license": "BSD-3-Clause", "stars": 2, "forks": 1 }
34
__init__
def __init__(
    self,
    name: str,
    verbose: bool = False,
    log_level: int = logging.INFO,
) -> None:
    """Create a named logger writing timestamped records to ``<name>.log``."""
    self.name = name
    self.verbose = verbose
    self.logger = logging.getLogger(name)

    file_handler = logging.FileHandler(f"{name}.log")
    fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
    file_handler.setFormatter(fmt)
    self.logger.addHandler(file_handler)
    self.logger.setLevel(log_level)
python
tools/cloud-tool.py
49
65
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
35
print
def print( self, *args: Any, sep: str = " ", end: str = "\n", file=None, ) -> None: self.logger.info(sep.join(map(str, args))) if self.verbose: print(*args, sep=sep, end=end, file=file)
python
tools/cloud-tool.py
67
76
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
36
__init__
def __init__(
    self,
    config: GKEConfig,
    verbose: bool = False,
    log_level: int = logging.INFO,
) -> None:
    """Store the GKE config, set up logging, and refresh gcloud components."""
    self.config = config
    self.logger = Logger(__name__.lower(), verbose, log_level)

    self.logger.print(f"Initialized {__name__} CLI")
    self.logger.print(self.config)
    self.update_components()
python
tools/cloud-tool.py
87
99
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
37
update_components
def update_components() -> None:
    """Update all installed gcloud components non-interactively.

    NOTE(review): takes no ``self``/``cls`` yet is invoked as
    ``self.update_components()`` — presumably a ``@staticmethod`` whose
    decorator is not visible in this snippet; confirm.
    """
    subprocess.run(["gcloud", "--quiet", "components", "update"])
python
tools/cloud-tool.py
102
103
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
38
install_components
def install_components() -> None:
    """Install the GKE auth plugin and kubectl via gcloud.

    NOTE(review): no ``self`` parameter — presumably a ``@staticmethod``
    (decorator not visible in this snippet); confirm.
    """
    for component in ["gke-gcloud-auth-plugin", "kubectl"]:
        subprocess.run(["gcloud", "--quiet", "components", "install", component])
python
tools/cloud-tool.py
106
108
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
39
create_cluster
def create_cluster(self) -> None:
    """Create the GKE cluster described by ``self.config`` via gcloud."""
    cfg = self.config
    accelerator = f"type={cfg.accelerator_type},count={cfg.accelerator_count}"
    cmd = [
        "gcloud",
        "container",
        "clusters",
        "create",
        cfg.cluster_name,
        "--num-nodes",
        str(cfg.num_nodes),
        "--machine-type",
        cfg.machine_type,
        "--disk-size",
        cfg.disk_size,
        "--disk-type",
        cfg.disk_type,
        "--accelerator",
        accelerator,
    ]
    subprocess.run(cmd)
python
tools/cloud-tool.py
110
129
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
40
get_cluster_credentials
def get_cluster_credentials(self) -> None:
    """Fetch kubectl credentials for the configured cluster."""
    cmd = [
        "gcloud",
        "container",
        "clusters",
        "get-credentials",
        self.config.cluster_name,
    ]
    subprocess.run(cmd)
python
tools/cloud-tool.py
131
140
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
41
delete_cluster
def delete_cluster(self) -> None:
    """Delete the configured GKE cluster via gcloud."""
    cmd = ["gcloud", "container", "clusters", "delete", self.config.cluster_name]
    subprocess.run(cmd)
python
tools/cloud-tool.py
142
145
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
42
__init__
def __init__(
    self,
    config: GCEConfig,
    verbose: bool = False,
    log_level: int = logging.INFO,
) -> None:
    """Store the GCE config, set up logging, and refresh gcloud components."""
    self.config = config
    self.logger = Logger(__name__.lower(), verbose, log_level)

    self.logger.print(f"Initialized {__name__} CLI")
    self.logger.print(self.config)
    self.update_components()
python
tools/cloud-tool.py
149
161
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
43
update_components
def update_components() -> None:
    """Update all installed gcloud components non-interactively.

    NOTE(review): takes no ``self``/``cls`` yet is invoked as
    ``self.update_components()`` — presumably a ``@staticmethod`` whose
    decorator is not visible in this snippet; confirm.
    """
    subprocess.run(["gcloud", "--quiet", "components", "update"])
python
tools/cloud-tool.py
164
165
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
44
create_vm
def create_vm(self) -> int:
    """Create the VM.

    - The first command creates a VM similar to the one the user can get
      from the GCP marketplace.
    - There is apparently no way to "interact" with the GCP marketplace
      directly.
    - The VMI explicitly asks to install GPU drivers on the first boot,
      so the second command does it.

    NOTE(review): the "second command" (the GPU-driver install over SSH)
    is entirely commented out below and is unreachable after the
    ``return`` — the docstring above describes intent, not current
    behavior.

    :return: the ``gcloud`` subprocess's return code (0 on success).
    """
    cmd = [
        "gcloud",
        "compute",
        "instances",
        "create",
        self.config.instance_name,
        "--machine-type",
        self.config.machine_type,
        "--maintenance-policy",
        self.config.maintenance_policy,
        "--image",
        f"projects/{self.config.project}/global/images/"
        f"{self.config.vm_image_name}",
        "--boot-disk-size",
        self.config.disk_size,
        "--boot-disk-type",
        self.config.disk_type,
        # "--accelerator",
        # f"type={self.config.accelerator_type},"
        # f"count={self.config.accelerator_count}",
    ]
    self.logger.print(" ".join(cmd))
    p = subprocess.run(cmd)
    return p.returncode

    # # Agree to NVIDIA's prompt and install the GPU driver.
    # # This monster below is here bc the yes command
    # # and a gazillion alternatives do not work on circleci.
    # # reverse-engineered from /usr/bin/gcp-ngc-login.sh
    # cmd = [
    #     "gcloud",
    #     "compute",
    #     "ssh",
    #     self.config.instance_name,
    #     "--command",
    #     "source /etc/nvidia-vmi-version.txt; "
    #     'REGISTRY="nvcr.io"; NVIDIA_DIR="/var/tmp/nvidia"; '
    #     "sudo gsutil cp "
    #     "gs://nvidia-ngc-drivers-us-public/TESLA/shim/NVIDIA-Linux-x86_64-"
    #     "${NVIDIA_DRIVER_VERSION}-${NVIDIA_GCP_VERSION}-shim.run "
    #     "${NVIDIA_DIR}; "
    #     "sudo chmod u+x ${NVIDIA_DIR}/NVIDIA-Linux-x86_64-"
    #     "${NVIDIA_DRIVER_VERSION}-${NVIDIA_GCP_VERSION}-shim.run; "
    #     "sudo ${NVIDIA_DIR}/NVIDIA-Linux-x86_64-${NVIDIA_DRIVER_VERSION}-"
    #     "${NVIDIA_GCP_VERSION}-shim.run --no-cc-version-check "
    #     "--kernel-module-only --silent --dkms; "
    #     "sudo dkms add nvidia/${NVIDIA_DRIVER_VERSION} || true; "
    #     "cd /usr/share/doc/NVIDIA_GLX-1.0/samples/; "
    #     "sudo tar xvjf nvidia-persistenced-init.tar.bz2; "
    #     "sudo nvidia-persistenced-init/install.sh && "
    #     "sudo rm -rf nvidia-persistenced-init; ",
    # ]
    # self.logger.print(cmd)
    # for _ in range(6):
    #     p = subprocess.run(cmd)
    #     if p.returncode == 0:
    #         self.logger.print("GPU driver installed")
    #         break
    #     else:
    #         # allow some time for the VM to boot
    #         self.logger.print("Waiting for VM to boot...")
    #         time.sleep(10)
    #
    # return p.returncode
python
tools/cloud-tool.py
167
243
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
45
run
def run(self) -> int:
    """Bootstrap Python tooling on the VM over SSH.

    :return: the ``gcloud compute ssh`` return code.
    """
    remote_script = (
        "sudo apt update; "
        "sudo apt install -y python3-pip; "
        "pip3 install --upgrade pip; "
        "pip3 install --upgrade wheel; "
        "pip3 install --upgrade wandb distributed; "
        # "wandb login; "
    )
    cmd = [
        "gcloud",
        "compute",
        "ssh",
        self.config.instance_name,
        "--command",
        remote_script,
    ]
    self.logger.print(" ".join(cmd))
    completed = subprocess.run(cmd)
    return completed.returncode
python
tools/cloud-tool.py
245
266
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
46
delete_vm
def delete_vm(self) -> int:
    """Delete the VM without prompting.

    :return: the ``gcloud`` return code.
    """
    cmd = [
        "gcloud",
        "compute",
        "instances",
        "delete",
        self.config.instance_name,
        "--quiet",
    ]
    completed = subprocess.run(cmd)
    return completed.returncode
python
tools/cloud-tool.py
268
283
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
47
__init__
def __init__(self) -> None:
    """Start with no parsed sequence items and an empty uuid→message cache."""
    self._items: List[SequenceItem] = []
    self._uuid_messages = {}
python
tools/tracelog-tool.py
29
31
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
48
_parse
def _parse(self, line: str) -> None:
    """Parse one TRACELOG line into a SequenceItem and record it.

    Lines that are not well-formed ``TRACELOG(1)`` records (no marker,
    wrong field count, wrong magic, or an op other than queue/dequeue)
    are silently ignored.
    """
    line = line.strip()
    index = line.find("TRACELOG(")
    if index < 0:
        return
    line = line[index:]
    items = line.split()
    if len(items) != 10:
        return
    # Example fields:
    # ['TRACELOG(1)', '<-', '185542.522061', 'fd1e0e9f4d3f3520', 'dequeue',
    #  'result_q', 'MsgRouterThr', 'poll_exit_response',
    #  '69aed18a893a49d182c7a13b498f805f', '-']
    magic, direct, ts, msg_id, op, resource, thr, msg, uuid, stream = items
    # Remember the first message seen for each uuid so responses logged as
    # "None" can be labeled "return_<original message>".
    self._uuid_messages.setdefault(uuid, msg)
    if magic != "TRACELOG(1)":
        return
    thr = thr.replace("-", "_")
    if op == "queue":
        src = thr
        dst = resource
    elif op == "dequeue":
        dst = thr
        src = resource
    else:
        # BUG FIX: this branch previously fell through with src/dst
        # unbound, raising NameError on any other op. Skip such lines.
        # TODO: handle other ops explicitly.
        return
    request = True
    if direct == "<-":
        request = False
    ts = float(ts)
    if msg == "None":
        # setdefault above guarantees the key exists; the default is
        # purely defensive.
        msg = "return_" + self._uuid_messages.get(uuid, "unknown")
    item = SequenceItem(ts=ts, src=src, request=request, dst=dst, info=msg)
    self.add(item)
python
tools/tracelog-tool.py
33
64
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
49
add
def add(self, item: SequenceItem):
    """Append one sequence item to the parsed-item list."""
    self._items.append(item)
python
tools/tracelog-tool.py
66
67
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
50
output_plantuml
def output_plantuml(self) -> None:
    """Print the parsed trace as a PlantUML sequence diagram on stdout."""
    lines = []
    for item in self._items:
        line = f"{item.src} --> {item.dst}: {item.info}"
        lines.append((item.ts, line))
    print("@startuml")
    # Fixed participant layout matching wandb's thread/queue names.
    header = """
!theme crt-amber
skinparam responseMessageBelowArrow true
box "User Process"
participant User as MainThread
control router as MsgRouterThr
control check_stop as ChkStopThr
control net_stat as NetStatThr
end box
queue record_q as record_q
queue result_q as result_q
box "Internal Process"
control handler as HandlerThread
control stats as StatsThr
queue send_q as send_q
queue write_q as write_q
control writer as WriterThread
control sender as SenderThread
end box
"""
    print(header)
    # TODO: move to common place (sorted sequence items)
    # Emit arrows in timestamp order.
    for _, line in sorted(lines):
        print(line)
    print("@enduml")
python
tools/tracelog-tool.py
69
101
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
51
output_mermaid
def output_mermaid(self) -> None:
    """Print the collected sequence items as a Mermaid sequence diagram.

    Same chronological ordering as output_plantuml, but in Mermaid
    `sequenceDiagram` syntax.  Output goes to stdout.
    """
    lines = []
    for item in self._items:
        line = f"{item.src} ->> {item.dst}: {item.info}"
        lines.append((item.ts, line))
    # Static participant declarations matching the wandb thread/queue layout.
    header = """
sequenceDiagram
participant MainThread as User
participant MsgRouterThr as router
participant ChkStopThr as check_stop
participant NetStatThr as net_stat
participant record_q as record_q
participant result_q as result_q
participant HandlerThread as handler
participant StatsThr as stats
participant send_q as send_q
participant write_q as write_q
participant WriterThread as writer
participant SenderThread as sender
"""
    print(header)
    # TODO: move to common place (sorted sequence items)
    for _, line in sorted(lines):
        print(line)
python
tools/tracelog-tool.py
103
129
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
52
load
def load(self, fname: str) -> None:
    """Feed every line of *fname* through the TRACELOG parser.

    Iterates the file object directly instead of materializing the whole
    file with readlines() — same lines, same order, constant memory.
    """
    with open(fname) as f:
        for line in f:
            self._parse(line)
python
tools/tracelog-tool.py
131
134
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
53
loaddir
def loaddir(self, dname: str) -> None:
    """Load every regular file found directly inside directory *dname*."""
    entries = [p for p in pathlib.Path(dname).iterdir() if p.is_file()]
    for path in entries:
        self.load(path)
python
tools/tracelog-tool.py
136
144
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
54
main
def main():
    """Parse CLI options, load tracelogs, and emit the requested diagram."""
    cli = argparse.ArgumentParser()
    cli.add_argument("--logdir", default="wandb/latest-run/logs/")
    cli.add_argument("--format", default="mermaid")
    args = cli.parse_args()

    tracelog = TracelogParser()
    tracelog.loaddir(args.logdir)

    # Dispatch on output format; unknown formats are a usage error.
    emitters = {
        "plantuml": tracelog.output_plantuml,
        "mermaid": tracelog.output_mermaid,
    }
    emitter = emitters.get(args.format)
    if emitter is None:
        print(f"Unknown format: {args.format}")
        sys.exit(1)
    emitter()
python
tools/tracelog-tool.py
147
161
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
55
get_available_protobuf_versions
def get_available_protobuf_versions() -> List[str]:
    """Get a list of available protobuf versions.

    Asks `pip index versions protobuf` and extracts the version-looking
    tokens from its output.  Returns an empty list if the pip call fails.
    """
    try:
        raw = subprocess.check_output(
            ["pip", "index", "versions", "protobuf"],
        ).decode("utf-8")
    except subprocess.CalledProcessError:
        return []
    # Tokens starting with a digit are version numbers; dedupe via a set.
    candidates = {tok for tok in raw.split() if tok[0].isnumeric()}
    # pip prints comma-separated lists, so strip one trailing comma if present.
    cleaned = [tok[:-1] if tok.endswith(",") else tok for tok in candidates]
    return sorted(cleaned)
python
tools/check-protobuf-version-compatibility.py
11
21
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
56
parse_protobuf_requirements
def parse_protobuf_requirements() -> List[Tuple[str, str]]:
    """Parse protobuf requirements from a requirements.txt file.

    Reads ../requirements.txt, finds lines starting with "protobuf",
    filters them by this system's python version / platform / machine
    markers, and returns the protobuf version constraints as a list of
    (operator, version) tuples, e.g. [(">=", "3.19.0"), ("<", "5")].

    Fix: the two stray debug print() calls (the raw req list and the final
    result) have been removed; this function now only returns data.

    NOTE(review): marker evaluation still uses eval() on strings built from
    requirements.txt via parse_version; the file is repo-local so this is
    low risk, but an operator table would be safer.
    """
    path_requirements = pathlib.Path(__file__).parent.parent / "requirements.txt"
    with open(path_requirements) as f:
        requirements = f.readlines()

    system_python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
    system_platform = sys.platform
    system_machine = platform.machine()

    protobuf_reqs = []
    for line in requirements:
        if line.startswith("protobuf"):
            # "protobuf>=3.19 ; python_version < '3.10'" splits into the
            # version constraints and the environment-marker section.
            version_reqs = line.strip().split(";")[0]
            system_reqs = line.strip().split(";")[1]
            # regex to find quoted python version in system_reqs
            python_version = re.search(
                r"python_version\s+([<>=!]+)\s+[',\"]([2,3]*[.][0-9]+)[',\"]",
                system_reqs,
            )
            if python_version is not None:
                version_check = (
                    f"parse_version({system_python_version!r}) "
                    f"{python_version.group(1)} "
                    f"parse_version({python_version.group(2)!r})"
                )
                if not eval(version_check):
                    continue
            # regex to find quoted platform in system_reqs
            platform_reqs = re.search(
                r"sys_platform\s+([<>=!]+)\s+[',\"]([a-z]+)[',\"]",
                system_reqs,
            )
            if platform_reqs is not None:
                if not eval(
                    f"{system_platform!r} {platform_reqs.group(1)} {platform_reqs.group(2)!r}"
                ):
                    continue
            # regex to find platform machine in system_reqs
            platform_machine = re.search(
                r"platform[.]machine\s+([<>=!]+)\s+[',\"]([a-z]+)[',\"]",
                system_reqs,
            )
            if platform_machine is not None:
                if not eval(
                    f"{system_machine!r} {platform_machine.group(1)} {platform_machine.group(2)!r}"
                ):
                    continue
            # finally, parse the protobuf version requirements: split each
            # constraint at the first digit into (operator, version).
            reqs = version_reqs.split("protobuf")[1].split(",")
            for req in reqs:
                for i, char in enumerate(req):
                    if char.isnumeric():
                        protobuf_reqs.append(
                            (
                                req[:i].strip(),
                                req[i:].strip(),
                            )
                        )
                        break
    return protobuf_reqs
python
tools/check-protobuf-version-compatibility.py
24
91
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
57
get_matching_versions
def get_matching_versions(
    available_protobuf_vs: List[str], protobuf_reqs: List[Tuple[str, str]]
) -> List[str]:
    """Return the sorted, deduplicated versions satisfying every requirement.

    Each requirement is an (operator, version) pair such as (">=", "3.19").
    Fix: the comparison is now done through an explicit operator table
    instead of eval() on a string built from the requirement data — same
    comparisons (via parse_version), no string execution.
    """
    import operator

    ops = {
        "<": operator.lt,
        "<=": operator.le,
        ">": operator.gt,
        ">=": operator.ge,
        "==": operator.eq,
        "!=": operator.ne,
    }
    matching_vs = [
        v
        for v in available_protobuf_vs
        if all(
            ops[op](parse_version(v), parse_version(req)) for op, req in protobuf_reqs
        )
    ]
    return sorted(set(matching_vs))
python
tools/check-protobuf-version-compatibility.py
94
105
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
58
attempt_install_protobuf_version
def attempt_install_protobuf_version(version: str) -> bool:
    """Install protobuf==*version* and check that wandb still imports.

    Returns True when both the install and the import probe succeed.
    """
    steps = (
        ["pip", "install", f"protobuf=={version}"],
        ["python", "-c", "import wandb"],
    )
    try:
        for command in steps:
            subprocess.check_call(command)
    except subprocess.CalledProcessError:
        return False
    return True
python
tools/check-protobuf-version-compatibility.py
108
114
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
59
run_in_env
def run_in_env(command, python_version=None):
    """Run a command in a pyenv environment.

    When *python_version* is given, the command is wrapped in a pyenv shell
    for that version.  Returns the command's decoded stdout; a non-zero exit
    raises subprocess.CalledProcessError.
    """
    if python_version:
        # Initialize pyenv and pin the interpreter inside a subshell.
        command = f'eval "$(pyenv init -)"; (pyenv shell {python_version}; {command})'
    output = subprocess.check_output(command, shell=True)
    return output.decode("utf-8")
python
tools/setup_dev_environment.py
32
36
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
60
installed_versions
def installed_versions(python_version):
    """Return pip's installed packages as 'name==version' tokens."""
    freeze_cmd = "pip list --format=freeze --disable-pip-version-check"
    output = run_in_env(freeze_cmd, python_version=python_version)
    return output.split()
python
tools/setup_dev_environment.py
39
42
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
61
pin_version
def pin_version(package_name, package_version, python_version=None):
    """Install package==version (optionally under a pyenv interpreter),
    skipping the install when that exact version is already present."""
    pinned = f"{package_name}=={package_version}"
    if pinned in installed_versions(python_version):
        return
    # Emit e.g. "Installing tox==3.24.0 for Python 3.9.1 ..." on one line.
    suffix = f"for Python {python_version} " if python_version else ""
    print(f"Installing {pinned} {suffix}...")
    install_command = f"python -m pip install --upgrade {pinned} -qq"
    return run_in_env(install_command, python_version=python_version)
python
tools/setup_dev_environment.py
45
54
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
62
main
def main():
    """Set up the local pyenv-based dev environment.

    Validates the requested Python versions, installs the latest pyenv
    build of each, pins tox into every interpreter, sets `pyenv local`,
    and prints usage hints.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-p",
        "--python-versions",
        nargs="+",
        help="Python versions to use with pyenv.",
    )
    args = parser.parse_args()
    python_versions = args.python_versions
    if python_versions is None:
        python_versions = PYTHON_VERSIONS
    else:
        # Reject anything outside the supported version list up front.
        invalid_versions = [v for v in python_versions if v not in PYTHON_VERSIONS]
        if invalid_versions:
            print(
                f"Requested invalid python versions: {invalid_versions}.\n"
                f"Please select from {PYTHON_VERSIONS}."
            )
            sys.exit(1)
    print(f"{Console.BLUE}Configuring test environment...{Console.END}")
    # installed pyenv versions
    p = subprocess.run(
        ["pyenv", "versions"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    existing_python_versions = set(
        re.findall(r"[*]*\s([\d.]+)", p.stdout.decode("utf-8"))
    )
    # all versions pyenv knows how to build
    p = subprocess.run(
        ["pyenv", "install", "--list"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    all_available_python_versions = re.findall(
        r"\s\s([\d.]+)\n", p.stdout.decode("utf-8")
    )
    installed_python_versions = []
    for python_version in python_versions:
        # Pick the newest patch release matching the requested minor version.
        available_python_versions = [
            v for v in all_available_python_versions if v.startswith(python_version)
        ]
        latest = max(available_python_versions, key=parse_version)
        install_command = ["pyenv", "install", "-s", latest]
        stdin = subprocess.PIPE
        # Python 3.6 on MacOS > 11.2 needs a patch that works up to 3.6.13
        is_3_6_and_macos_gt_11_2 = (
            python_version == "3.6"
            and platform.system() == "Darwin"
            and parse_version(platform.mac_ver()[0]) > parse_version("11.2")
        )
        if is_3_6_and_macos_gt_11_2:
            latest = "3.6.13"
            # Stream the upstream CPython patch into `pyenv install --patch`.
            patch = subprocess.Popen(
                [
                    "curl",
                    "-sSL",
                    "https://github.com/python/cpython/commit/8ea6353.patch",
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
            )
            install_command = [
                "pyenv",
                "install",
                "--patch",
                latest,
            ]
            stdin = patch.stdout
        if latest in existing_python_versions:
            print(f"Already installed: {latest}")
        else:
            print(f"Installing: {latest}...")
            p = subprocess.run(
                install_command,
                stdin=stdin,
                stdout=sys.stdout,
                stderr=subprocess.STDOUT,
            )
            # NOTE(review): a failed install only prints a message; the loop
            # continues and still tries to pin tox into that version.
            if p.returncode != 0:
                print(f"Failed to install {latest}")
        pin_version("tox", TOX_VERSION, python_version=latest)
        installed_python_versions.append(latest)
    print(f"Setting local pyenv versions to: {' '.join(installed_python_versions)}")
    subprocess.run(
        ["pyenv", "local", *installed_python_versions],
        stdout=sys.stdout,
        stderr=subprocess.STDOUT,
        check=True,
    )
    # Also pin tox for the default (outer) interpreter.
    pin_version("tox", TOX_VERSION)
    print(f"{Console.GREEN}Development environment setup!{Console.END}")
    print()
    print("Run all tests in all python environments:")
    print(f"{Console.CODE} tox{Console.END}")
    print("Run a specific test in a specific environment:")
    print(
        f"{Console.CODE} tox -e py37 -- tests/pytest_tests/unit_tests/test_public_api.py -k proj{Console.END}"
    )
    print("Lint code:")
    print(f"{Console.CODE} tox -e format,flake8,mypy{Console.END}")
python
tools/setup_dev_environment.py
57
168
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
63
parse_args
def parse_args():
    """Parse command line arguments."""
    cli = argparse.ArgumentParser()
    cli.add_argument(
        "--push",
        action="store_true",
        help="Push image after creation. This requires that you enter a tag that includes a registry via --tag",
    )
    cli.add_argument("--tag", default="wandb-launch-agent", help="Tag for the image")
    return cli.parse_args()
python
tools/build_launch_agent.py
36
45
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
64
main
def main():
    """Build the launch agent image.

    Writes a temporary Dockerfile/.dockerignore into the repo root, builds
    the image, optionally pushes it, and always removes the generated files.

    Fixes: the generated files are now cleaned up even when build/push
    raises (try/finally); the tag is split with rsplit(":", 1) so registry
    hosts containing a port (registry:5000/img:tag) work; pushing without a
    ":tag" suffix now fails with a clear message instead of a ValueError.
    """
    args = parse_args()
    build_context = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    dockerfile_path = os.path.join(build_context, "Dockerfile")
    dockerignore_path = os.path.join(build_context, ".dockerignore")
    with open(dockerfile_path, "w") as f:
        f.write(DOCKERFILE)
    with open(dockerignore_path, "w") as f:
        f.write(DOCKERIGNORE)
    try:
        build(
            tags=[args.tag],
            file=dockerfile_path,
            context_path=build_context,
        )
        if args.push:
            if ":" not in args.tag:
                raise SystemExit(
                    "--push requires --tag of the form [registry/]image:tag"
                )
            image, tag = args.tag.rsplit(":", 1)
            push(image, tag)
    finally:
        # Remove the generated Dockerfile and .dockerignore.
        os.remove(dockerfile_path)
        os.remove(dockerignore_path)
python
tools/build_launch_agent.py
48
69
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
65
version_problem
def version_problem(current_version):
    """Report an unparseable version string and abort with exit code 1."""
    message = f"Unhandled version string: {current_version}"
    print(message)
    sys.exit(1)
python
tools/bumpversion-tool.py
16
18
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
66
bump_release_to_dev
def bump_release_to_dev(current_version):
    """Bump a released X.Y.Z version to X.Y.(Z+1).dev1 via bumpversion.

    Exits with an error (via version_problem) when the string does not
    look like a three-part release version.
    """
    pieces = current_version.split(".")
    if len(pieces) != 3:
        version_problem(current_version)
    major, minor, patch = pieces
    try:
        patch_num = int(patch)
    except ValueError:
        # version_problem() exits the process, so patch_num stays unused here.
        version_problem(current_version)
    new_version = f"{major}.{minor}.{patch_num + 1}.dev1"
    extra = ["--allow-dirty", "--dry-run", "--verbose"] if args.debug else []
    bumpversion_main(extra + ["--new-version", new_version, "dev"])
python
tools/bumpversion-tool.py
21
39
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
67
bump_release_from_dev
def bump_release_from_dev(current_version):
    """Strip the dev suffix: X.Y.Z.devN becomes release version X.Y.Z."""
    pieces = current_version.split(".")
    if len(pieces) != 4:
        version_problem(current_version)
    major, minor, patch, _dev = pieces
    new_version = f"{major}.{minor}.{patch}"
    extra = ["--allow-dirty", "--dry-run", "--verbose"] if args.debug else []
    bumpversion_main(extra + ["--new-version", new_version, "patch"])
python
tools/bumpversion-tool.py
42
54
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
68
main
def main():
    """Read the current version from setup.cfg and apply the requested bump."""
    config = configparser.ConfigParser()
    config.read("setup.cfg")
    current = config["bumpversion"]["current_version"]
    if args.to_dev:
        bump_release_to_dev(current)
    elif args.from_dev:
        bump_release_from_dev(current)
    else:
        # Neither direction requested: show usage.
        parser.print_help()
python
tools/bumpversion-tool.py
57
67
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
69
poll
def poll(args, pipeline_id=None, workflow_ids=None):
    """Poll CircleCI until no workflow is in the running/failing state.

    Provide either *pipeline_id* (workflow list is re-fetched every
    iteration) or an explicit *workflow_ids* list.

    Bug fix: when only workflow_ids was passed (as status() does), `num`
    was never set, so the `num and done == num` exit condition could never
    become true and the loop polled forever.  `num` is now always the
    number of workflows being checked.
    """
    print(f"Waiting for pipeline to complete (Branch: {args.branch})...")
    while True:
        done = 0
        if pipeline_id:
            url = f"https://circleci.com/api/v2/pipeline/{pipeline_id}/workflow"
            r = requests.get(url, auth=(args.api_token, ""))
            assert r.status_code == 200, f"Error making api request: {r}"
            d = r.json()
            workflow_ids = [item["id"] for item in d["items"]]
        num = len(workflow_ids or [])
        for work_id in workflow_ids or []:
            work_status_url = f"https://circleci.com/api/v2/workflow/{work_id}"
            r = requests.get(work_status_url, auth=(args.api_token, ""))
            assert r.status_code == 200, f"Error making api work request: {r}"
            w = r.json()
            status = w["status"]
            print("Status:", status)
            # Anything other than running/failing counts as finished.
            if status not in ("running", "failing"):
                done += 1
        if num and done == num:
            print("Finished")
            return
        time.sleep(20)
python
tools/circleci-tool.py
74
99
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
70
trigger
def trigger(args):
    """Trigger a CircleCI pipeline for the current branch.

    When any manual-selection option (platform/toxenv/test filters) is set,
    builds the `manual_*` pipeline parameters that select specific jobs;
    otherwise triggers the default pipeline.  Honors --dryrun and waits for
    completion when --wait/--loop is given.
    """
    url = "https://circleci.com/api/v2/project/gh/wandb/wandb/pipeline"
    payload = {
        "branch": args.branch,
    }
    manual: bool = any(
        [args.platform, args.toxenv, args.test_file, args.test_name, args.test_repeat]
    )
    if manual:
        parameters = {"manual": True}
        platforms = args.platform.split(",") if args.platform else ["linux"]
        toxenv = args.toxenv or "py37"
        # Build the tox invocation; extra pytest args go after " --".
        toxcmd = toxenv
        if args.test_file or args.test_repeat:
            toxcmd += " --"
        if args.test_file:
            toxcmd += " " + args.test_file
        if args.test_name:
            toxcmd += " -k " + args.test_name
        if args.test_repeat:
            toxcmd += f" --flake-finder --flake-runs={args.test_repeat}"
        # get last token split by hyphen as python version
        pyver = toxenv.split("-")[-1]
        pyname = py_name_dict.get(pyver)
        assert pyname, f"unknown pyver: {pyver}"
        # handle more complex pyenv (func tests): "<type>-<shard>-<pyver>"
        if pyver != toxenv:
            toxsplit = toxenv.split("-")
            assert len(toxsplit) == 3
            tsttyp, tstshard, tstver = toxsplit
            # Shards prefixed "s_" drop the prefix in the job name.
            prefix = "s_"
            if tstshard.startswith(prefix):
                tstshard = tstshard[len(prefix) :]
            pyname = f"{pyname}-{tsttyp}-{tstshard}"
        pyimage = py_image_dict.get(pyver)
        assert pyimage, f"unknown pyver: {pyver}"
        for p in platforms:
            job = platforms_dict.get(p)
            assert job, f"unknown platform: {p}"
            pshort = platforms_short_dict.get(p)
            jobname = f"{pshort}-{pyname}"
            parameters["manual_" + job] = True
            parameters["manual_" + job + "_name"] = jobname
            # Only the "test" job takes an image/toxenv override.
            if job == "test":
                parameters["manual_" + job + "_image"] = pyimage
                parameters["manual_" + job + "_toxenv"] = toxcmd
        if args.parallelism:
            parameters["manual_parallelism"] = args.parallelism
        if args.xdist:
            parameters["manual_xdist"] = args.xdist
        payload["parameters"] = parameters
    print("Sending to CircleCI:", payload)
    if args.dryrun:
        return
    r = requests.post(url, json=payload, auth=(args.api_token, ""))
    assert r.status_code == 201, "Error making api request"
    d = r.json()
    uuid = d["id"]
    print("CircleCI workflow started:", uuid)
    if args.wait or args.loop:
        poll(args, pipeline_id=uuid)
python
tools/circleci-tool.py
102
162
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
71
trigger_nightly
def trigger_nightly(args):
    """Trigger the CircleCI nightly pipeline for the selected shards.

    Builds a parameter dict with one boolean flag per nightly shard
    (nightly_execute_<shard>), enabling only the requested ones.  Raises
    ValueError for unknown shard names; honors --dryrun and --wait.
    """
    url = "https://circleci.com/api/v2/project/gh/wandb/wandb/pipeline"

    default_shards = set(NIGHTLY_SHARDS)
    # Start with every shard disabled; hyphens become underscores in the
    # CircleCI parameter names.
    shards = {
        f"nightly_execute_{shard.replace('-', '_')}": False for shard in default_shards
    }

    requested_shards = set(args.shards.split(",")) if args.shards else default_shards

    # check that all requested shards are valid and that there is at least one
    if not requested_shards.issubset(default_shards):
        raise ValueError(
            f"Requested invalid shards: {requested_shards}. "
            f"Valid shards are: {default_shards}"
        )

    # flip the requested shards to True
    for shard in requested_shards:
        shards[f"nightly_execute_{shard.replace('-', '_')}"] = True

    payload = {
        "branch": args.branch,
        "parameters": {
            **{
                "manual": True,
                "manual_nightly": True,
                "nightly_git_branch": args.branch,
                "nightly_slack_notify": args.slack_notify,
            },
            **shards,
        },
    }

    print("Sending to CircleCI:", payload)
    if args.dryrun:
        return

    r = requests.post(url, json=payload, auth=(args.api_token, ""))
    assert r.status_code == 201, "Error making api request"
    d = r.json()
    uuid = d["id"]
    number = d["number"]
    print("CircleCI workflow started.")
    print(f"UUID: {uuid}")
    print(f"Number: {number}")
    if args.wait:
        poll(args, pipeline_id=uuid)
python
tools/circleci-tool.py
165
210
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
72
get_ci_builds
def get_ci_builds(args, completed=True): bname = args.branch # TODO: extend pagination if not done url = "https://circleci.com/api/v1.1/project/gh/wandb/wandb?shallow=true&limit=100" if completed: url = url + "&filter=completed" # print("SEND", url) r = requests.get(url, auth=(args.api_token, "")) assert r.status_code == 200, f"Error making api request: {r}" lst = r.json() cfirst = None ret = [] done = False for d in lst: b = d.get("branch") if b != bname: continue v = d.get("vcs_revision") n = d.get("build_num") j = d.get("workflows", {}).get("job_name") w = d.get("workflows", {}).get("workflow_id") # print("DDD", d) cfirst = cfirst or v if cfirst != v: done = True break ret.append((v, n, j, w)) if not done: return return ret
python
tools/circleci-tool.py
213
242
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
73
grab
def grab(args, vhash, bnum):
    """Download the coverage.xml artifact of one build into .circle_cache.

    Skips the download when the cache file for (vhash, bnum) already
    exists.  Equivalent manual commands:
    curl -H "Circle-Token: $CIRCLECI_TOKEN" https://circleci.com/api/v1.1/project/github/wandb/wandb/61238/artifacts
    curl -L -o out.dat -H "Circle-Token: $CIRCLECI_TOKEN" https://61238-86031674-gh.circle-artifacts.com/0/cover-results/.coverage
    """
    cachedir = ".circle_cache"
    cfbase = f"cover-{vhash}-{bnum}.xml"
    cfname = os.path.join(cachedir, cfbase)
    if not os.path.exists(cachedir):
        os.mkdir(cachedir)
    if os.path.exists(cfname):
        return
    url = (
        "https://circleci.com/api/v1.1/project/github/wandb/wandb/{}/artifacts".format(
            bnum
        )
    )
    r = requests.get(url, auth=(args.api_token, ""))
    assert r.status_code == 200, f"Error making api request: {r}"
    lst = r.json()
    if not lst:
        return
    for item in lst:
        p = item.get("path")
        u = item.get("url")
        if p != "cover-results/coverage.xml":
            continue
        # TODO: use tempfile
        # NOTE(review): the API token is interpolated into a shell command
        # line here, where other local processes could observe it.
        print("Downloading circle artifacts...")
        s, o = subprocess.getstatusoutput(
            f'curl -L -o out.dat -H "Circle-Token: {args.api_token}" {u!r}'
        )
        assert s == 0
        os.rename("out.dat", cfname)
python
tools/circleci-tool.py
245
278
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
74
status
def status(args):
    """Find the branch's in-flight build and poll its first workflow."""
    # TODO: check for current git hash only
    builds = get_ci_builds(args, completed=False)
    if not builds:
        print("ERROR: couldn't find job, maybe we should poll?")
        sys.exit(1)
    workflow_ids = [workflow_id for _, _, _, workflow_id in builds]
    poll(args, workflow_ids=[workflow_ids[0]])
python
tools/circleci-tool.py
281
288
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
75
download
def download(args):
    """Fetch coverage artifacts for every completed build on the branch."""
    print(f"Checking for circle artifacts (Branch: {args.branch})...")
    builds = get_ci_builds(args)
    assert builds
    for revision, build_num, _job, _workflow in builds:
        grab(args, revision, build_num)
python
tools/circleci-tool.py
291
296
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
76
process_args
def process_args():
    """Build the CLI: global flags plus trigger/trigger-nightly/status/download
    subcommands.  Returns (parser, parsed_args) so callers can print help."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        dest="action", title="action", description="Action to perform"
    )
    # api_token is filled from the environment, not the command line.
    parser.add_argument("--api_token", help=argparse.SUPPRESS)
    parser.add_argument("--branch", help="git branch (autodetected)")
    parser.add_argument("--dryrun", action="store_true", help="Don't do anything")

    # trigger: manually select platforms/toxenvs/tests to run
    parse_trigger = subparsers.add_parser("trigger")
    parse_trigger.add_argument(
        "--platform", help="comma-separated platform (linux,mac,win)"
    )
    parse_trigger.add_argument("--toxenv", help="single toxenv (py36,py37,py38,py39)")
    parse_trigger.add_argument("--test-file", help="test file (ex: tests/test.py)")
    parse_trigger.add_argument("--test-name", help="test name (ex: test_dummy)")
    parse_trigger.add_argument("--test-repeat", type=int, help="repeat N times (ex: 3)")
    parse_trigger.add_argument("--parallelism", type=int, help="CircleCI parallelism")
    parse_trigger.add_argument("--xdist", type=int, help="pytest xdist parallelism")
    parse_trigger.add_argument("--loop", type=int, help="Outer loop (implies wait)")
    parse_trigger.add_argument(
        "--wait", action="store_true", help="Wait for finish or error"
    )

    # trigger-nightly: run the nightly shard matrix
    parse_trigger_nightly = subparsers.add_parser("trigger-nightly")
    parse_trigger_nightly.add_argument(
        "--slack-notify", action="store_true", help="post notifications to slack"
    )
    parse_trigger_nightly.add_argument(
        "--shards",
        default=",".join(NIGHTLY_SHARDS),
        help="comma-separated shards (standalone-{cpu,gpu,gpu-win},kfp,imports,regression)",
    )
    parse_trigger_nightly.add_argument(
        "--wait", action="store_true", help="Wait for finish or error"
    )

    # status / download share only the --wait flag
    parse_status = subparsers.add_parser("status")
    parse_status.add_argument(
        "--wait", action="store_true", help="Wait for finish or error"
    )
    parse_download = subparsers.add_parser("download")
    parse_download.add_argument(
        "--wait", action="store_true", help="Wait for finish or error"
    )
    args = parser.parse_args()
    return parser, args
python
tools/circleci-tool.py
299
348
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
77
process_environment
def process_environment(args):
    """Populate args.api_token from the CIRCLECI_API_TOKEN env variable.

    Fix: the missing-token case previously used a bare `assert`, which is
    stripped under `python -O`; it now always exits with a clear message.
    """
    api_token = os.environ.get(CIRCLECI_API_TOKEN)
    if not api_token:
        raise SystemExit(f"Set environment variable: {CIRCLECI_API_TOKEN}")
    args.api_token = api_token
python
tools/circleci-tool.py
351
354
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
78
process_workspace
def process_workspace(args):
    """Fill args.branch with the current git branch when not set.

    Fix: the git-failure case previously used a bare `assert`, which is
    stripped under `python -O`; it now always exits with a clear message.
    """
    branch = args.branch
    if not branch:
        code, branch = subprocess.getstatusoutput("git branch --show-current")
        if code != 0:
            raise SystemExit("failed git command")
    args.branch = branch
python
tools/circleci-tool.py
357
362
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
79
main
def main():
    """Entry point: parse args, resolve token/branch, dispatch the action."""
    parser, args = process_args()
    process_environment(args)
    process_workspace(args)

    action = args.action
    if action == "trigger":
        # --loop repeats the trigger; default is a single run.
        total = args.loop or 1
        for attempt in range(total):
            if args.loop:
                print(f"Loop: {attempt + 1} of {args.loop}")
            trigger(args)
    elif action == "trigger-nightly":
        trigger_nightly(args)
    elif action == "status":
        # find my workflow report status, wait on it (if specified)
        status(args)
    elif action == "download":
        download(args)
    else:
        parser.print_help()
python
tools/circleci-tool.py
365
383
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
80
chunk
def chunk(n: int, iterable) -> Iterator[List[Dict]]:
    """Yield successive lists of up to *n* items from *iterable*.

    The final chunk may be shorter than *n*.

    Fixes/generalization: accepts any iterable (the old code called next()
    on the argument directly, so it required an iterator); n <= 0 now
    yields nothing instead of looping forever on empty chunks; the unused
    `done` flag is gone.
    """
    it = iter(iterable)
    while True:
        batch = []
        for _ in range(n):
            try:
                batch.append(next(it))
            except StopIteration:
                break
        if not batch:
            return
        yield batch
python
tools/wandb_export_history.py
28
40
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
81
wandb_export_history
def wandb_export_history(
    *,
    run,
    api=None,
    db_file=None,
    db_table=None,
    db_replace: bool = None,
    history_exclude_prefix=None,
    read_page_size=None,
    write_page_size=None,
) -> int:
    """Export a W&B run's history into a sqlite table; returns rows written.

    Args:
        run: run path accepted by wandb.Api().run().
        api: optional wandb.Api instance (created on demand).
        db_file: sqlite file path (defaults to DB_FILE).
        db_table: destination table name (defaults to "history").
        db_replace: replace an existing table instead of failing.
        history_exclude_prefix: drop history keys with this prefix
            (defaults to "system/").
        read_page_size / write_page_size: paging sizes (default 1000).

    Fixes: the db_table argument was previously accepted but ignored (the
    table name was hardcoded to "history"); the sqlite connection is now
    closed via try/finally; a redundant `db_file = db_file` line removed.
    """
    api = api or wandb.Api()
    db_file = db_file or DB_FILE
    db_table = db_table or "history"
    history_exclude_prefix = history_exclude_prefix or "system/"
    read_page_size = read_page_size or 1000
    write_page_size = write_page_size or 1000

    run = api.run(run)
    keys = run.history_keys.get("keys", [])
    if history_exclude_prefix:
        keys = [k for k in keys if not k.startswith(history_exclude_prefix)]

    db = sqlite3.connect(db_file)
    try:
        history = run.scan_history(page_size=read_page_size)
        if_exists = "replace" if db_replace else "fail"
        written = 0
        for index, rows in enumerate(chunk(write_page_size, history)):
            # Only the first frame pins the column order; later frames append.
            df = pd.DataFrame.from_records(rows, **({} if index else dict(columns=keys)))
            written += df.to_sql(
                db_table, con=db, index=False, if_exists="append" if index else if_exists
            )
    finally:
        db.close()
    return written
python
tools/wandb_export_history.py
43
77
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
82
main
def main() -> None:
    """CLI wrapper: parse export options and run wandb_export_history."""
    parser = argparse.ArgumentParser(
        description="Export W&B run history", allow_abbrev=False
    )
    parser.add_argument("--run", required=True)
    parser.add_argument("--db_file", default=DB_FILE)
    parser.add_argument("--db_table")
    parser.add_argument("--history_exclude_prefix")
    parser.add_argument("--read_page_size", type=int)
    parser.add_argument("--write_page_size", type=int)
    parser.add_argument("--db_replace", action="store_true")
    args = parser.parse_args()
    # Unset options arrive as None; wandb_export_history applies defaults.
    written = wandb_export_history(
        run=args.run,
        db_file=args.db_file,
        db_table=args.db_table,
        db_replace=args.db_replace,
        history_exclude_prefix=args.history_exclude_prefix,
        read_page_size=args.read_page_size,
        write_page_size=args.write_page_size,
    )
    print(f"Wrote {written} records to {args.db_file}")
python
tools/wandb_export_history.py
80
103
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
83
find_list_of_key_locations_and_dicts
def find_list_of_key_locations_and_dicts(data, search_key: str, root=None):
    """Recursively find every dict holding *search_key* with a truthy value.

    Returns:
        List of (location, containing_dict) tuples, where location is the
        list of list-indexes/dict-keys leading from *data* down to the
        dict in which search_key was found.

    Raises:
        RuntimeError: for node types other than list/dict/str/int/float/None.
    """
    if root is None:
        root = []
    matches = []
    if isinstance(data, list):
        for idx, element in enumerate(data):
            matches.extend(
                find_list_of_key_locations_and_dicts(
                    element, search_key, root=root + [idx]
                )
            )
    elif isinstance(data, dict):
        # Record this dict first, then keep descending — nested matches
        # under the same dict are reported too.
        if data.get(search_key):
            matches.append((root, data))
        for key, value in data.items():
            matches.extend(
                find_list_of_key_locations_and_dicts(
                    value, search_key, root=root + [key]
                )
            )
    elif isinstance(data, (str, int, float)) or data is None:
        pass
    else:
        raise RuntimeError(f"unknown type: type={type(data)} data={data}")
    return matches
python
tools/coverage-tool.py
19
51
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
84
find_parallelism_defaults
def find_parallelism_defaults(loc_dict_tuple):
    """Return True when the tuple's dict has parallelism.default as int > 1."""
    containing_dict = loc_dict_tuple[1]
    parallelism = containing_dict.get("parallelism")
    if not isinstance(parallelism, dict):
        return False
    default = parallelism.get("default")
    return isinstance(default, int) and default > 1
python
tools/coverage-tool.py
54
60
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
85
matrix_expand
def matrix_expand(loc_dict_tuple_list):
    """Expand CircleCI matrix blocks into one entry per parameter combo.

    Entries without a "matrix" key pass through unchanged.  For matrix
    entries, every <<matrix.param>> placeholder in "toxenv" is substituted
    for each combination of the matrix parameters.
    """
    expanded = []
    for location, containing_dict in list(loc_dict_tuple_list):
        matrix = containing_dict.get("matrix")
        if not matrix:
            expanded.append((location, containing_dict))
            continue
        # assume any block referencing a matrix is using all parameters
        # could check <<>> and expand syntax
        parameters = matrix.get("parameters")
        axes = [
            [(name, choice) for choice in choices]
            for name, choices in parameters.items()
        ]
        for combo in itertools.product(*axes):
            variant = copy.deepcopy(containing_dict)
            toxenv = variant["toxenv"]
            for name, choice in combo:
                placeholder = f"<<matrix.{name}>>"
                assert placeholder in toxenv, f"Cant find {placeholder} in {toxenv}"
                toxenv = toxenv.replace(placeholder, str(choice))
            variant["toxenv"] = toxenv
            expanded.append((location, variant))
    return expanded
python
tools/coverage-tool.py
63
89
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
86
create_parallelism_defaults_dict
def create_parallelism_defaults_dict(par_defaults_list):
    """Map job name -> parallelism default from (location, dict) tuples.

    Each location must look like ["jobs", <job_name>, "parameters"].
    """
    defaults = {}
    for location, containing_dict in par_defaults_list:
        assert len(location) == 3
        jobs_key, job_name, params_key = location
        assert jobs_key == "jobs"
        assert params_key == "parameters"
        defaults[job_name] = containing_dict["parallelism"]["default"]
    return defaults
python
tools/coverage-tool.py
92
101
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
87
parallelism_expand
def parallelism_expand(cov_list, par_dict):
    """Fan out each (location, dict) entry by its parallelism shard count.

    The count comes from the dict's own "parallelism" value, falling back
    to par_dict keyed by the last location element (default 1).  When the
    count exceeds 1, the shard index is appended to the location.
    """
    expanded = []
    for location, containing_dict in cov_list:
        # Explicit parallelism wins; otherwise look up job defaults.
        count = containing_dict.get("parallelism") or par_dict.get(location[-1], 1)
        for shard in range(count):
            loc = location + [shard] if count > 1 else location[:]
            expanded.append((loc, containing_dict))
    return expanded
python
tools/coverage-tool.py
104
121
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
88
coverage_tasks
def coverage_tasks(args: argparse.Namespace):
    """Compute the coverage-producing CI tasks from the CircleCI config.

    Loads the config YAML, finds toxenvs containing "covercircle", expands
    matrix parameters and parallelism shards, and returns a list of
    (dotted-location, toxenv) tuples — one per coverage upload expected.
    """
    ci_fname = args.circleci_yaml
    with open(ci_fname) as file:
        data = yaml.safe_load(file)
    # Jobs with an int>1 parallelism default contribute multiple shards.
    parallelism = find_list_of_key_locations_and_dicts(data, "parallelism")
    parallelism_defaults = filter(find_parallelism_defaults, parallelism)
    toxenv = find_list_of_key_locations_and_dicts(data, "toxenv")
    # Only toxenvs that upload coverage to circle matter here.
    toxenv_cov = filter(lambda x: "covercircle" in x[1]["toxenv"], toxenv)
    toxenv_cov_matrix = matrix_expand(toxenv_cov)
    par_default_dict = create_parallelism_defaults_dict(parallelism_defaults)
    toxenv_cov_matrix_parallelism = parallelism_expand(
        toxenv_cov_matrix, par_default_dict
    )
    tasks = [
        (".".join(map(str, x[0])), x[1]["toxenv"])
        for x in toxenv_cov_matrix_parallelism
    ]
    return tasks
python
tools/coverage-tool.py
124
143
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
89
coverage_config_check
def coverage_config_check(jobs_count, args):
    """Verify the codecov config expects exactly *jobs_count* coverage builds.

    Reads ``args.codecov_yaml`` and compares every ``after_n_builds`` entry
    against *jobs_count*.  On the first mismatch, prints a diagnostic and
    exits with status 1.
    """
    with open(args.codecov_yaml) as file:
        data = yaml.safe_load(file)
    num_builds_tuple_list = find_list_of_key_locations_and_dicts(
        data, "after_n_builds"
    )
    # Use a distinct loop variable: the original rebound `data`, shadowing
    # the parsed YAML document.
    for _, containing_dict in num_builds_tuple_list:
        num_builds = containing_dict["after_n_builds"]
        if num_builds != jobs_count:
            print(f"Mismatch builds count: {num_builds} (expecting {jobs_count})")
            sys.exit(1)
python
tools/coverage-tool.py
146
158
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
90
coverage_coveragerc_check
def coverage_coveragerc_check(toxenv_list, args):
    """Check .coveragerc's ``[paths] canonicalsrc`` against *toxenv_list*.

    Builds the expected path list — the canonical ``wandb/`` source path
    followed by one ``.tox/.../site-packages/wandb/`` path per unique
    toxenv (sorted) — and compares it with the paths configured in
    ``args.coveragerc``.  Prints both lists and exits with status 1 on
    any mismatch.
    """
    py_prefix = "py"
    canonical_src = "wandb/"

    parser = configparser.ConfigParser()
    parser.read(args.coveragerc)
    seen_paths = parser.get("paths", "canonicalsrc").split()

    expected_paths = [canonical_src]
    for toxenv in sorted(set(toxenv_list)):
        # Only the first comma-separated environment matters for the path.
        toxenv = toxenv.split(",")[0]
        _func, shard, py_ver = toxenv.split("-")
        assert py_ver.startswith(py_prefix)
        version = py_ver[len(py_prefix) :]
        # e.g. "py38" -> "python3.8"
        interpreter = f"python{version[0]}.{version[1:]}"
        expected_paths.append(f".tox/{toxenv}/lib/{interpreter}/site-packages/wandb/")

    if seen_paths != expected_paths:
        print("Mismatch .coveragerc!")
        print("Seen:")
        for path in seen_paths:
            print(f" {path}")
        print("Expected:")
        for path in expected_paths:
            print(f" {path}")
        sys.exit(1)
python
tools/coverage-tool.py
161
196
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
91
process_args
def process_args():
    """Build the CLI parser, parse ``sys.argv``, and return ``(parser, args)``.

    Supports two subcommands: ``jobs`` (list coverage tasks) and ``check``
    (validate coverage configuration files).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--circleci-yaml", default=".circleci/config.yml")
    parser.add_argument("--codecov-yaml", default=".codecov.yml")
    parser.add_argument("--coveragerc", default=".coveragerc")

    subparsers = parser.add_subparsers(
        dest="action", title="action", description="Action to perform"
    )
    for action in ("jobs", "check"):
        subparsers.add_parser(action)

    return parser, parser.parse_args()
python
tools/coverage-tool.py
199
212
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
92
main
def main():
    """Entry point: dispatch to the requested coverage action."""
    parser, args = process_args()
    if args.action == "jobs":
        tasks = coverage_tasks(args)
        width = max(len(task_id) for task_id, _ in tasks)
        for task_id, toxenv in tasks:
            print(f"{task_id:{width}} {toxenv}")
    elif args.action == "check":
        tasks = coverage_tasks(args)
        # Only jobs in the "main" workflow count toward coverage totals.
        main_tasks = [t for t in tasks if t[0].split(".")[1] == "main"]
        func_toxenvs = [
            toxenv for _, toxenv in main_tasks if toxenv.startswith("func-")
        ]
        coverage_config_check(len(main_tasks), args)
        coverage_coveragerc_check(func_toxenvs, args)
    else:
        parser.print_help()
python
tools/coverage-tool.py
215
232
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
93
write_csv
def write_csv(record: str, fields: List[Any]):
    """Write one CSV mapping proto field names to field numbers.

    The output filename comes from the module-level ``args``: the attribute
    named ``output_<record>s`` joined onto ``args.output_dir``.  Fields
    whose names start with an underscore are treated as private and
    skipped.
    """
    fname = os.path.join(args.output_dir, getattr(args, f"output_{record}s"))
    print("Writing:", fname)
    with open(fname, "w") as fp:
        writer = csv.DictWriter(fp, fieldnames=[record, "key"], lineterminator="\n")
        writer.writeheader()
        for field in fields:
            if field.name.startswith("_"):
                continue
            writer.writerow({record: field.name, "key": field.number})
python
tools/telemetry-tool.py
37
48
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
94
main
def main():
    """Emit one CSV per telemetry proto message type."""
    # (record name, proto message) pairs, written in this order.
    records = [
        ("telemetry_record_type", tpb.TelemetryRecord),
        ("import", tpb.Imports),
        ("feature", tpb.Feature),
        ("environment", tpb.Env),
        ("label", tpb.Labels),
        ("deprecated_feature", tpb.Deprecated),
        ("issue", tpb.Issues),
    ]
    for record, message in records:
        write_csv(record=record, fields=list(message.DESCRIPTOR.fields))
python
tools/telemetry-tool.py
51
71
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
95
get_paths
def get_paths() -> List[Path]:
    """Collect the template files to process.

    When ``args.files`` is empty, walk the wandb package tree (pruning
    vendored and cache directories) for files ending in GENERATE_SUFFIX;
    any explicit ``args.files`` entries are always appended afterwards.
    """
    paths: List[Path] = []
    if not args.files:
        skip_dirs = {"vendor", "__pycache__"}
        root_dir = pathlib.Path(__file__).resolve().parent.parent / "wandb"
        for base, subdirs, files in os.walk(root_dir):
            # Prune excluded directories in place so os.walk skips them.
            subdirs[:] = list(set(subdirs) - skip_dirs)
            paths.extend(
                pathlib.PurePath(base, fname)
                for fname in files
                if fname.endswith(GENERATE_SUFFIX)
            )
    for explicit in args.files:
        paths.append(pathlib.Path(explicit))
    return paths
python
tools/generate-tool.py
43
56
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
96
generate_file
def generate_file(generate_path: Path, output_path: Path) -> None:
    """Run a generator script and write its stdout to *output_path*.

    Executes ``python <generate_path>``, asserts a zero exit status
    (surfacing the captured output on failure), and writes the output to
    *output_path* under a DO-NOT-EDIT header.
    """
    status, output = subprocess.getstatusoutput(f"python {generate_path}")
    assert status == 0, f"Error: {output}"
    with open(output_path, "w") as f:
        # Bug fix: the header previously lacked a trailing newline, so the
        # first line of generated output landed on the comment line and was
        # effectively commented out.
        f.write("# DO NOT EDIT -- GENERATED BY: `generate-tool.py --generate`\n")
        f.write(output)
python
tools/generate-tool.py
59
64
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
97
generate_files
def generate_files(paths: List[Path]) -> None:
    """Generate the output file for each template path in *paths*."""
    for template in paths:
        # Derive the generated filename by swapping the suffix.
        target = template.parent / str(template).replace(
            GENERATE_SUFFIX, GENERATED_SUFFIX
        )
        print(f"INFO: Generating {target}...")
        generate_file(template, target)
python
tools/generate-tool.py
67
71
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
98
format_file
def format_file(filename: Path) -> None:
    """Format *filename* in place with black.

    Raises AssertionError (carrying black's output) if black exits
    non-zero.
    """
    # Bug fix: the command string previously contained a literal
    # placeholder instead of interpolating `filename`, so the argument was
    # ignored and black was invoked on the wrong target.
    status, output = subprocess.getstatusoutput(f"black {filename}")
    assert status == 0, f"Error: {output}"
python
tools/generate-tool.py
74
76
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
99
format_files
def format_files(paths: List[Path]) -> None:
    """Run the formatter over the generated counterpart of each template."""
    for template in paths:
        # Derive the generated filename by swapping the suffix.
        generated = template.parent / str(template).replace(
            GENERATE_SUFFIX, GENERATED_SUFFIX
        )
        print(f"INFO: Formatting {generated}...")
        format_file(generated)
python
tools/generate-tool.py
79
83
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
100
temp_fname
def temp_fname() -> Iterator[Path]:
    """Yield the path of a closed temporary file, unlinking it on exit.

    The file is created with ``delete=False`` and closed immediately, so
    the caller may freely reopen or overwrite it; the ``finally`` block
    removes it when the generator is exhausted or closed.
    """
    try:
        handle = tempfile.NamedTemporaryFile(delete=False)
        path = pathlib.Path(handle.name)
        handle.close()
        yield path
    finally:
        os.unlink(path)
python
tools/generate-tool.py
87
94
{ "name": "Git-abouvier/wandb", "url": "https://github.com/Git-abouvier/wandb.git", "license": "MIT", "stars": 0, "forks": 0 }
End of preview. Expand in Data Studio

RealPBT: A Dataset of 50,000+ PBTs Captured from Real-World Code

A large-scale dataset of property-based tests (PBTs) extracted from real-world, permissively licensed Github repos. Each PBT comes with overlapping unit tests, and information about the functions it tests.

This data was scraped by Benchify. We scraped Hypothesis PBTs for about 24 hours, and Typescript PBTs for about 8 hours, using our own proprietary Github scraper. In each case we turned our scraper off when, anecdotally, we felt it had hit an asymptote in terms of finding new PBTs. However, the choice of when to turn the scraper off was unscientific in nature and so the relative sizes of these datasets should not be viewed as a scientific measurement of the popularity of each framework (absolutely or relatively), despite the fact that it probably does roughly reflect that information.

Note: This dataset consists of multiple jsonl files. The HuggingFace dataset viewer only shows the first one, containing Python functions under test. To see the rest, look here.

Dataset Description

This dataset contains code examples from thousands of GitHub repositories, focusing on property-based testing using Hypothesis (Python) and Fast-Check (TypeScript).

Dataset Statistics

  • Property-Based Tests (PBTs): 60,628 tests
    • Python PBTs: 54,345 (with detailed metrics, overlapping unit tests, and dependency information)
    • TypeScript PBTs: 6,283 (without the extra stuff mentioned above)
  • Unit Tests: 6,343,790 (Python only)
  • Functions: 6,845,964 (Python only)
  • Repositories: 27,746+ GitHub repos

Dataset Structure

The dataset consists of four JSONL files (one JSON object per line):

1. Python Property-Based Tests (pbts.jsonl)

Each record contains:

  • id: Unique test identifier
  • name: Test function name
  • code: Complete test source code
  • language: Programming language (always "python")
  • source_file: File path within the repository
  • start_line, end_line: Line numbers in source file
  • dependencies: List of test dependencies (Python only)
  • repo: Repository metadata
    • name: Repository name
    • url: GitHub URL
    • license: License type
    • stars: GitHub stars
    • forks: Fork count
  • metrics: Code quality metrics (Python only) from Radon
    • loc: Lines of code
    • sloc: Source lines of code
    • lloc: Logical lines of code
    • comments: Comment lines
    • avg_complexity: Average cyclomatic complexity
    • max_complexity: Maximum cyclomatic complexity
    • maintainability_index: Maintainability score (0-100)
    • halstead_difficulty: Halstead difficulty metric
    • halstead_effort: Halstead effort metric
  • summary: AI-generated natural language description of test behavior (generated with 4o-mini)

2. TypeScript Property-Based Tests (pbts_typescript.jsonl)

Each record contains:

  • id: Unique test identifier
  • name: Test function name
  • code: Complete test source code
  • language: Programming language (always "typescript")
  • source_file: File path within repository
  • start_line, end_line: Line numbers (null - not available)
  • dependencies: List of test dependencies (empty - no dependency analysis performed)
  • repo: Repository metadata
    • name: Repository name
    • url: GitHub URL
    • license: License type
    • stars: GitHub stars
    • forks: Fork count
  • metrics: Code quality metrics (null - not available)
  • summary: AI-generated natural language description of test behavior
  • mode: Testing framework used (always "fast-check")

3. Unit Tests (unit_tests.jsonl)

Each record contains:

  • id: Unique test identifier
  • name: Test function name
  • code: Complete test source code
  • language: Programming language (always "python")
  • source_file: File path within repository
  • start_line, end_line: Line numbers
  • repo: Repository metadata (same structure as PBTs)

4. Functions (functions.jsonl)

Each record contains:

  • id: Unique function identifier
  • name: Function name
  • code: Complete function source code
  • language: Programming language (always "python")
  • source_file: File path within repository
  • start_line, end_line: Line numbers
  • repo: Repository metadata (same structure as PBTs)

Language Detection

Python code validation:

  1. Uses Python's AST (Abstract Syntax Tree) parser
  2. Attempts to parse code using ast.parse()
  3. On success, labels as "python"

TypeScript code validation:

  1. Checks for fast-check framework patterns (fc.property, fc.assert)
  2. Validates basic syntax structure
  3. Verifies balanced brackets and parentheses
  4. On success, labels as "typescript"

The dataset includes Python (89.6%) and TypeScript (10.4%) PBTs.

Code Metrics

The Python PBT records include code quality metrics:

  • Cyclomatic Complexity: Measures code path complexity
  • Maintainability Index: 0-100 score (higher is better)
  • Halstead Metrics: Measures of code difficulty and effort

License Information

Each record includes the repository's license. Common licenses in this dataset:

  • MIT
  • Apache-2.0
  • BSD-3-Clause
  • GPL variants

We only extracted code from repos with licenses we considered permissive. If you believe we made a mistake (either sucking in a license which does not allow this kind of use, or, incorrectly determining the license of a repository) please don't hesitate to let us know and we will update the dataset accordingly.

Always check individual record licenses before use.

Citation

If you use this dataset in your research, please cite:

@dataset{realPBT,
  title={{RealPBT}: 50,000+ PBTs Captured from Real-World Code},
  author={Max von Hippel and Evan Boehs and Jake Ginesin},
  year={2026},
  publisher={HuggingFace},
  note={Work supported by Benchify, Inc.},
  howpublished={\url{https://huggingface.co/datasets/Benchify/realpbt}}
}

Acknowledgments

We gratefully acknowledge the following contributors who made this dataset possible:

  • Max von Hippel - Led the project and performed data cleaning, dependency analysis, and data publication
  • Evan Boehs and Jake Ginesin - Developed and implemented the web scraper for collecting property-based tests from open-source repositories
  • Juan Castaño - Set up and managed the database infrastructure and AWS instances used for large-scale scraping operations
  • The Dartmouth DALI Lab - Extended the scraper to support TypeScript property-based tests using the fast-check framework
    • Sekpey Herbert Setor Kwame - Helped with Typescript PBT scraping as a DALI Lab intern

Contact

For questions, concerns, etc., please contact max@benchify.com or maxvh@hey.com.

Downloads last month
15