instruction
stringclasses
100 values
code
stringlengths
78
193k
response
stringlengths
259
170k
file
stringlengths
59
203
Document functions with clear intent
import sys import traceback # The following code is borrowed from PyTorch. Basically when a subprocess or thread # throws an exception, you will need to wrap the exception with ExceptionWrapper class # and put it in the queue you are normally retrieving from. # NOTE [ Python Traceback Reference Cycle Problem ] # # W...
--- +++ @@ -1,3 +1,6 @@+"""Exception wrapper classes to properly display exceptions under multithreading or +multiprocessing. +""" import sys import traceback @@ -15,12 +18,14 @@ class KeyErrorMessage(str): + r"""str subclass that returns itself in repr""" def __repr__(self): # pylint: disable=invali...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/utils/exception.py
Document all public functions with docstrings
## # Copyright 2019-2021 Contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or a...
--- +++ @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +"""Functional interface for transform""" # pylint: disable= too-many-lines import copy @@ -97,6 +98,10 @@ def pairwise_squared_distance(x): + """ + x : (n_samples, n_po...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/transforms/functional.py
Document this script properly
from .. import backend as F from .._ffi.function import _init_api class Filter(object): def __init__(self, ids): self._filter = _CAPI_DGLFilterCreateFromSet( F.zerocopy_to_dgl_ndarray(ids) ) def find_included_indices(self, test): return F.zerocopy_from_dgl_ndarray( ...
--- +++ @@ -1,16 +1,51 @@+"""Utilities for finding overlap or missing items in arrays.""" from .. import backend as F from .._ffi.function import _init_api class Filter(object): + """Class used to either find the subset of IDs that are in this + filter, or the subset of IDs that are not in this filter + ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/utils/filter.py
Add docstrings including usage examples
import torch from ..utils import gather_pinned_tensor_rows from .base import register_storage_wrapper from .tensor import BaseTensorStorage def _fetch_cpu(indices, tensor, feature_shape, device, pin_memory, **kwargs): result = torch.empty( indices.shape[0], *feature_shape, dtype=tensor.d...
--- +++ @@ -1,3 +1,4 @@+"""Feature storages for PyTorch tensors.""" import torch @@ -25,6 +26,7 @@ @register_storage_wrapper(torch.Tensor) class PyTorchTensorStorage(BaseTensorStorage): + """Feature storages for slicing a PyTorch tensor.""" def fetch(self, indices, device, pin_memory=False, **kwargs):...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/storages/pytorch_tensor.py
Add professional docstrings to my codebase
import numpy as np # pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers from .... import function as fn from ....base import DGLError from ....utils import expand_as_pair # pylint: disable=W0235 class GraphConv(layers.Layer): def __init__( ...
--- +++ @@ -1,3 +1,4 @@+"""Tensorflow modules for graph convolutions(GCN).""" import numpy as np # pylint: disable= no-member, arguments-differ, invalid-name @@ -12,6 +13,128 @@ class GraphConv(layers.Layer): + r"""Graph convolution from `Semi-Supervised Classification with Graph Convolutional Networks + ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/conv/graphconv.py
Write docstrings for algorithm functions
from __future__ import absolute_import from collections import defaultdict, namedtuple from collections.abc import MutableMapping from . import backend as F from .base import ALL, DGLError from .frame import LazyFeature NodeSpace = namedtuple("NodeSpace", ["data"]) EdgeSpace = namedtuple("EdgeSpace", ["data"]) cla...
--- +++ @@ -1,3 +1,4 @@+"""Views of DGLGraph.""" from __future__ import absolute_import from collections import defaultdict, namedtuple @@ -12,6 +13,7 @@ class HeteroNodeView(object): + """A NodeView class to act as G.nodes for a DGLGraph.""" __slots__ = ["_graph", "_typeid_getter"] @@ -42,6 +44,7 @...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/view.py
Document helper functions with docstrings
from .. import backend as F from .base import FeatureStorage class BaseTensorStorage(FeatureStorage): def __init__(self, tensor): self.storage = tensor def fetch( self, indices, device, pin_memory=False, **kwargs ): # pylint: disable=unused-argument return F.copy_to(F.gather_row...
--- +++ @@ -1,8 +1,12 @@+"""Feature storages for tensors across different frameworks.""" from .. import backend as F from .base import FeatureStorage class BaseTensorStorage(FeatureStorage): + """FeatureStorage that synchronously slices features from a tensor and transfers + it to the given device. + ""...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/storages/tensor.py
Generate NumPy-style docstrings
# pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers from .... import function as fn from ....utils import expand_as_pair class GINConv(layers.Layer): def __init__( self, apply_func, aggregator_type, init_eps=0, learn_eps=False ): ...
--- +++ @@ -1,3 +1,4 @@+"""Tensorflow Module for Graph Isomorphism Network layer""" # pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers @@ -7,6 +8,55 @@ class GINConv(layers.Layer): + r"""Graph Isomorphism Network layer from `How Powerful ar...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/conv/ginconv.py
Document helper functions with docstrings
import torch.nn as nn from .biased_mha import BiasedMHA class GraphormerLayer(nn.Module): def __init__( self, feat_size, hidden_size, num_heads, attn_bias_type="add", norm_first=False, dropout=0.1, attn_dropout=0.1, activation=nn.ReLU(), ...
--- +++ @@ -1,3 +1,4 @@+"""Graphormer Layer""" import torch.nn as nn @@ -5,6 +6,53 @@ class GraphormerLayer(nn.Module): + r"""Graphormer Layer with Dense Multi-Head Attention, as introduced + in `Do Transformers Really Perform Bad for Graph Representation? + <https://arxiv.org/pdf/2106.05234>`__ + + ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/pytorch/gt/graphormer.py
Generate helpful docstrings for debugging
from collections import namedtuple import networkx as nx import scipy as sp from .. import backend as F from ..base import DGLError from . import checks def elist2tensor(elist, idtype): if len(elist) == 0: u, v = [], [] else: u, v = zip(*elist) u = list(u) v = list(v) re...
--- +++ @@ -1,3 +1,4 @@+"""Data utilities.""" from collections import namedtuple @@ -10,6 +11,20 @@ def elist2tensor(elist, idtype): + """Function to convert an edge list to edge tensors. + + Parameters + ---------- + elist : iterable of int pairs + List of (src, dst) node ID pairs. + idt...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/utils/data.py
Create documentation strings for testing functions
import argparse import json import logging import multiprocessing import os import re import signal import stat import subprocess import sys import time from functools import partial from threading import Thread from typing import Optional DEFAULT_PORT = 30050 def cleanup_proc(get_all_remote_pids, conn): print("...
--- +++ @@ -1,3 +1,4 @@+"""Launching tool for DGL distributed training""" import argparse import json import logging @@ -17,6 +18,7 @@ def cleanup_proc(get_all_remote_pids, conn): + """This process tries to clean up the remote training tasks.""" print("cleanupu process runs") # This process should n...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distgraphlaunch.py
Add docstrings to my Python code
# pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers from .... import function as fn from ....base import DGLError from ....utils import check_eq_shape, expand_as_pair class SAGEConv(layers.Layer): def __init__( self, in_feats, ...
--- +++ @@ -1,3 +1,4 @@+"""Tensorflow Module for GraphSAGE layer""" # pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers @@ -8,6 +9,85 @@ class SAGEConv(layers.Layer): + r"""GraphSAGE layer from `Inductive Representation Learning on + Larg...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/conv/sageconv.py
Help me add docstrings to my project
# pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers from .... import function as fn from .. import utils class RelGraphConv(layers.Layer): def __init__( self, in_feat, out_feat, num_rels, regularizer="ba...
--- +++ @@ -1,3 +1,4 @@+"""Tensorflow Module for Relational graph convolution layer""" # pylint: disable= no-member, arguments-differ, invalid-name import tensorflow as tf from tensorflow.keras import layers @@ -7,6 +8,106 @@ class RelGraphConv(layers.Layer): + r"""Relational graph convolution layer from `Mod...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/conv/relgraphconv.py
Please document this code using docstrings
# pylint: disable= no-member, arguments-differ, invalid-name, W0613 import numpy as np import tensorflow as tf from tensorflow.keras import layers from .... import function as fn from ....base import DGLError class SGConv(layers.Layer): def __init__( self, in_feats, out_feats, k=...
--- +++ @@ -1,3 +1,4 @@+"""tf Module for Simplifying Graph Convolution layer""" # pylint: disable= no-member, arguments-differ, invalid-name, W0613 import numpy as np import tensorflow as tf @@ -8,6 +9,80 @@ class SGConv(layers.Layer): + r"""SGC layer from `Simplifying Graph + Convolutional Networks <https...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/conv/sgconv.py
Generate NumPy-style docstrings
import itertools import operator import constants import numpy as np import torch from dist_lookup import DistLookupService from gloo_wrapper import allgather_sizes, alltoallv_cpu from utils import memory_snapshot def get_shuffle_global_nids(rank, world_size, global_nids_ranks, node_data): # build a list of siz...
--- +++ @@ -11,6 +11,28 @@ def get_shuffle_global_nids(rank, world_size, global_nids_ranks, node_data): + """ + For nodes which are not owned by the current rank, whose global_nid <-> shuffle_global-nid mapping + is not present at the current rank, this function retrieves their shuffle_global_ids from the ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/globalids.py
Generate docstrings with parameter types
import tensorflow as tf from tensorflow.keras import layers __all__ = ["HeteroGraphConv"] class HeteroGraphConv(layers.Layer): def __init__(self, mods, aggregate="sum"): super(HeteroGraphConv, self).__init__() self.mods = mods # Do not break if graph has 0-in-degree nodes. # Beca...
--- +++ @@ -1,3 +1,4 @@+"""Heterograph NN modules""" import tensorflow as tf from tensorflow.keras import layers @@ -5,6 +6,126 @@ class HeteroGraphConv(layers.Layer): + r"""A generic module for computing convolution on heterogeneous graphs. + + The heterograph convolution applies sub-modules on their ass...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/hetero.py
Document this script properly
import numpy as np import torch import torch.distributed as dist def allgather_sizes(send_data, world_size, num_parts, return_sizes=False): # Assert on the world_size, num_parts assert (num_parts % world_size) == 0 # compute the length of the local data send_length = len(send_data) out_tensor = ...
--- +++ @@ -4,6 +4,28 @@ def allgather_sizes(send_data, world_size, num_parts, return_sizes=False): + """ + Perform all gather on list lengths, used to compute prefix sums + to determine the offsets on each ranks. This is used to allocate + global ids for edges/nodes on each ranks. + + Parameters + ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/gloo_wrapper.py
Document this code for team use
import copy import gc import logging import os import constants import dgl import dgl.backend as F import dgl.graphbolt as gb import numpy as np import torch as th import torch.distributed as dist from dgl import EID, ETYPE, NID, NTYPE from dgl.distributed.constants import DGL2GB_EID, GB_DST_ID from dgl.distributed.p...
--- +++ @@ -24,6 +24,76 @@ def _get_unique_invidx(srcids, dstids, nids, low_mem=True): + """This function is used to compute a list of unique elements, + and their indices in the input list, which is the concatenation + of srcids, dstids and uniq_nids. In addition, this function will also + compute inve...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/convert_partition.py
Generate docstrings with examples
import gc import logging import os import array_readwriter import constants import numpy as np import pyarrow import pyarrow.parquet as pq import torch import torch.distributed as dist from gloo_wrapper import alltoallv_cpu from utils import ( DATA_TYPE_ID, generate_read_list, get_gid_offsets, get_idr...
--- +++ @@ -24,6 +24,30 @@ def _broadcast_shape( data, rank, world_size, num_parts, is_feat_data, feat_name ): + """Auxiliary function to broadcast the shape of a feature data. + This information is used to figure out the type-ids for the + local features. + + Parameters: + ----------- + data : ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/dataset_utils.py
Document all public functions with docstrings
import json import os import constants import dgl import numpy as np import pyarrow import pyarrow.parquet as pq import pytest import torch from dgl.data.utils import load_tensors from dgl.distributed.partition import ( _etype_str_to_tuple, _etype_tuple_to_str, _get_inner_edge_mask, _get_inner_node_m...
--- +++ @@ -22,6 +22,19 @@ def read_file(fname, ftype): + """Read a file from disk + Parameters: + ----------- + fname : string + specifying the absolute path to the file to read + ftype : string + supported formats are `numpy`, `parquet', `csv` + + Returns: + -------- + numpy ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/verification_utils.py
Add docstrings following best practices
import argparse import logging import os import platform import constants import dgl import numpy as np import pyarrow import pyarrow.parquet as pq import torch as th from dgl.data.utils import load_graphs, load_tensors from dgl.distributed.partition import ( _etype_str_to_tuple, _etype_tuple_to_str, _g...
--- +++ @@ -35,6 +35,19 @@ def _read_graph(schema): + """Read a DGL Graph object from storage using metadata schema, which is + a json object describing the DGL graph on disk. + + Parameters: + ----------- + schema : json object + json object describing the input graph to read from the disk + ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/verify_partitions.py
Provide clean and structured docstrings
import argparse import json import logging import multiprocessing import os import queue import re import signal import subprocess import sys import time from functools import partial from threading import Thread from typing import Optional def cleanup_proc(get_all_remote_pids, conn): print("cleanup process runs"...
--- +++ @@ -1,3 +1,4 @@+"""Launching tool for DGL distributed training""" import argparse import json import logging @@ -15,6 +16,7 @@ def cleanup_proc(get_all_remote_pids, conn): + """This process tries to clean up the remote training tasks.""" print("cleanup process runs") # This process should no...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/launch.py
Add clean documentation to messy code
# pylint: disable= no-member, arguments-differ, invalid-name, W0235 import tensorflow as tf from tensorflow.keras import layers from ...readout import ( max_nodes, mean_nodes, softmax_nodes, sum_nodes, topk_nodes, ) __all__ = [ "SumPooling", "AvgPooling", "MaxPooling", "SortPooling...
--- +++ @@ -1,3 +1,4 @@+"""Tensorflow modules for graph global pooling.""" # pylint: disable= no-member, arguments-differ, invalid-name, W0235 import tensorflow as tf from tensorflow.keras import layers @@ -21,11 +22,33 @@ class SumPooling(layers.Layer): + r"""Apply sum pooling over the nodes in the graph. + ...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/nn/tensorflow/glob.py
Generate descriptive docstrings automatically
import json import logging import os from itertools import cycle import constants import dgl import numpy as np import psutil import pyarrow import torch from dgl.distributed.partition import _dump_part_config from pyarrow import csv DATA_TYPE_ID = { data_type: id for id, data_type in enumerate( [ ...
--- +++ @@ -35,6 +35,29 @@ def read_ntype_partition_files(schema_map, input_dir): + """ + Utility method to read the partition id mapping for each node. + For each node type, there will be an file, in the input directory argument + containing the partition id mapping for a given nodeid. + + Parameter...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/utils.py
Provide clean and structured docstrings
import json from typing import Optional import pydantic as dt from dgl import DGLError class PartitionMeta(dt.BaseModel): # version of metadata JSON. version: Optional[str] = "1.0.0" # number of partitions. num_parts: int # name of partition algorithm. algo_name: str def dump_partition_met...
--- +++ @@ -6,6 +6,24 @@ class PartitionMeta(dt.BaseModel): + """Metadata that describes the partition assignment results. + + Regardless of the choice of partitioning algorithm, a metadata JSON file + will be created in the output directory which includes the meta information + of the partition algorit...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/partition_algo/base.py
Add return value explanations in docstrings
import copy import logging import os import numpy as np import pyarrow import torch from gloo_wrapper import allgather_sizes, alltoallv_cpu from pyarrow import csv from utils import map_partid_rank class DistLookupService: def __init__(self, input_dir, ntype_names, rank, world_size, num_parts): assert o...
--- +++ @@ -11,6 +11,42 @@ class DistLookupService: + """ + This is an implementation of a Distributed Lookup Service to provide the following + services to its users. Map 1) global node-ids to partition-ids, and 2) global node-ids + to shuffle global node-ids (contiguous, within each node for a give no...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/dist_lookup.py
Write documentation strings for class attributes
import abc from abc import abstractmethod import torch as th from ...cuda import nccl from ...nn.pytorch import NodeEmbedding from ...partition import NDArrayPartition from ...utils import ( create_shared_mem_array, gather_pinned_tensor_rows, get_shared_mem_array, pin_memory_inplace, scatter_pinne...
--- +++ @@ -1,3 +1,4 @@+"""Node embedding optimizers""" import abc from abc import abstractmethod @@ -16,6 +17,17 @@ class SparseGradOptimizer(abc.ABC): + r"""The abstract sparse optimizer. + + Note: dgl sparse optimizer only work with dgl.NodeEmbedding + + Parameters + ---------- + params : list...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/optim/pytorch/sparse_optim.py
Add minimal docstrings for each function
import os import re import time import numpy as np from . import backend as F, utils from ._ffi.function import _init_api from .base import EID, ETYPE, NID, NTYPE from .heterograph import DGLGraph from .ndarray import NDArray from .subgraph import edge_subgraph __all__ = [ "metis_partition", "metis_partition...
--- +++ @@ -1,3 +1,4 @@+"""Module for graph partition utilities.""" import os import re import time @@ -19,6 +20,22 @@ def reorder_nodes(g, new_node_ids): + """Generate a new graph with new node IDs. + + We assign each node in the input graph with a new node ID. This results in + a new graph. + + Par...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/partition.py
Please document this code using docstrings
############################################################################## # # In this tutorial, you learn how to train and generate one graph at # a time. You also explore parallelism within the graph embedding operation, which is an # essential building block. The tutorial ends with a simple optimization that # ...
--- +++ @@ -1,3 +1,20 @@+""" +.. _model-dgmg: + +Generative Models of Graphs +=========================================== + +**Author**: `Mufei Li <https://github.com/mufeili>`_, +`Lingfan Yu <https://github.com/ylfdq1118>`_, Zheng Zhang + +.. warning:: + + The tutorial aims at gaining insights into the p...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tutorials/models/3_generative_model/5_dgmg.py
Add docstrings with type hints explained
# ruff: noqa: E402 # Above allows ruff to ignore E402: module level import not at top of file import gc import json import os import re import tempfile from collections import OrderedDict from functools import lru_cache from importlib.resources import files import click import gradio as gr import numpy as np import s...
--- +++ @@ -97,6 +97,7 @@ @gpu_decorator def chat_model_inference(messages, model, tokenizer): + """Generate response using Qwen""" text = tokenizer.apply_chat_template( messages, tokenize=False, @@ -848,6 +849,7 @@ # Modify process_audio_input to generate user input @gpu_d...
https://raw.githubusercontent.com/SWivid/F5-TTS/HEAD/src/f5_tts/infer/infer_gradio.py
Create docstrings for each class method
import json from importlib.resources import files import torch import torch.nn.functional as F import torchaudio from datasets import Dataset as Dataset_ from datasets import load_from_disk from torch import nn from torch.utils.data import Dataset, Sampler from tqdm import tqdm from f5_tts.model.modules import MelSpe...
--- +++ @@ -164,6 +164,13 @@ # Dynamic Batch Sampler class DynamicBatchSampler(Sampler[list[int]]): + """Extension of Sampler that will do the following: + 1. Change the batch size (essentially number of sequences) + in a batch to ensure that the total number of frames are less + than a certain ...
https://raw.githubusercontent.com/SWivid/F5-TTS/HEAD/src/f5_tts/model/dataset.py
Help me comply with documentation standards
from .sparse_matrix import diag, SparseMatrix, val_like def neg(A: SparseMatrix) -> SparseMatrix: return val_like(A, -A.val) def inv(A: SparseMatrix) -> SparseMatrix: num_rows, num_cols = A.shape assert A.is_diag(), "Non-diagonal sparse matrix does not support inversion." assert num_rows == num_cols...
--- +++ @@ -1,11 +1,53 @@+"""DGL unary operators for sparse matrix module.""" from .sparse_matrix import diag, SparseMatrix, val_like def neg(A: SparseMatrix) -> SparseMatrix: + """Returns a new sparse matrix with the negation of the original nonzero + values, equivalent to ``-A``. + + Returns + -----...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/sparse/unary_op.py
Help me document legacy Python code
import operator import torch from .sparse_matrix import SparseMatrix, val_like def sp_broadcast_v(A: SparseMatrix, v: torch.Tensor, op: str) -> SparseMatrix: op = getattr(operator, op) if v.dim() == 1: v = v.view(1, -1) shape_error_message = ( f"Dimension mismatch for broadcasting. Got...
--- +++ @@ -1,3 +1,4 @@+"""DGL broadcast operator module.""" import operator @@ -7,6 +8,67 @@ def sp_broadcast_v(A: SparseMatrix, v: torch.Tensor, op: str) -> SparseMatrix: + """Broadcast operator for sparse matrix and vector. + + :attr:`v` is broadcasted to the shape of :attr:`A` and then the operator i...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/sparse/broadcast.py
Add docstrings to meet PEP guidelines
import torch from .sparse_matrix import SparseMatrix __all__ = ["sddmm", "bsddmm"] # pylint: disable=invalid-name def sddmm(A: SparseMatrix, X1: torch.Tensor, X2: torch.Tensor) -> SparseMatrix: return SparseMatrix(torch.ops.dgl_sparse.sddmm(A.c_sparse_matrix, X1, X2)) # pylint: disable=invalid-name def bsddmm...
--- +++ @@ -1,3 +1,4 @@+"""Sampled Dense-Dense Matrix Multiplication (SDDMM) operator module.""" import torch from .sparse_matrix import SparseMatrix @@ -7,9 +8,97 @@ # pylint: disable=invalid-name def sddmm(A: SparseMatrix, X1: torch.Tensor, X2: torch.Tensor) -> SparseMatrix: + r"""Sampled-Dense-Dense Matrix...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/sparse/sddmm.py
Document my Python code with docstrings
from .. import backend as F from .._ffi.function import _init_api from ..base import DGLError def pin_memory_inplace(tensor): if F.backend_name in ["mxnet", "tensorflow"]: raise DGLError( "The {} backend does not support pinning " "tensors in-place.".format(F.backend_name) ...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions related to pinned memory tensors.""" from .. import backend as F from .._ffi.function import _init_api @@ -5,6 +6,20 @@ def pin_memory_inplace(tensor): + """Register the tensor into pinned memory in-place (i.e. without copying). + Users are required to save the...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/utils/pin_memory.py
Add docstrings that explain purpose and usage
## # Copyright 2019-2021 Contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or a...
--- +++ @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +"""Modules for transform""" # pylint: disable= no-member, arguments-differ, invalid-name, missing-function-docstring from scipy.linalg import expm @@ -57,6 +58,22 @@ def upda...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/transforms/module.py
Turn comments into proper docstrings
# pylint: disable=invalid-name from __future__ import absolute_import, division from collections.abc import Mapping from .. import backend as F from .._ffi.function import _init_api from ..base import DGLError def prepare_tensor(g, data, name): if F.is_tensor(data): if F.dtype(data) != g.idtype: ...
--- +++ @@ -1,3 +1,4 @@+"""Checking and logging utilities.""" # pylint: disable=invalid-name from __future__ import absolute_import, division @@ -9,6 +10,27 @@ def prepare_tensor(g, data, name): + """Convert the data to ID tensor and check its ID type and context. + + If the data is already in tensor type...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/utils/checks.py
Add docstrings following best practices
# Modified from https://github.com/echocatzh/conv-stft/blob/master/conv_stft/conv_stft.py # Copyright (c) 2024, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the...
--- +++ @@ -61,6 +61,25 @@ win_sqrt=False, pad_center=True, ): + """ + Implement of STFT using 1D convolution and 1D transpose convolutions. + Implement of framing the signal in 2 ways, `break` and `continue`. + `break` method is a kaldi-like framing. + `continue...
https://raw.githubusercontent.com/SWivid/F5-TTS/HEAD/src/f5_tts/runtime/triton_trtllm/scripts/conv_stft.py
Document classes and their methods
# pylint: disable=no-value-for-parameter from __future__ import annotations import asyncio from typing import TypedDict import click from typing_extensions import Unpack from gitingest.config import MAX_FILE_SIZE, OUTPUT_FILE_NAME from gitingest.entrypoint import ingest_async # Import logging configuration first t...
--- +++ @@ -1,3 +1,4 @@+"""Command-line interface (CLI) for Gitingest.""" # pylint: disable=no-value-for-parameter from __future__ import annotations @@ -76,6 +77,40 @@ help="Output file path (default: digest.txt in current directory). Use '-' for stdout.", ) def main(**cli_kwargs: Unpack[_CLIArgs]) -> None: ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/__main__.py
Add docstrings to improve readability
import argparse import logging import os import platform from pathlib import Path import array_readwriter import constants import numpy as np import pyarrow import pyarrow.csv as csv from utils import ( generate_read_list, generate_roundrobin_read_list, get_idranges, get_node_types, read_json, ) ...
--- +++ @@ -21,6 +21,18 @@ def get_proc_info(): + """Helper function to get the rank from the + environment when `mpirun` is used to run this python program. + + Please note that for mpi(openmpi) installation the rank is retrieved from the + environment using OMPI_COMM_WORLD_RANK. For mpich it is + r...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/parmetis_preprocess.py
Help me document legacy Python code
import gc import logging import math import os import sys from datetime import timedelta from timeit import default_timer as timer import constants import dgl import numpy as np import torch import torch.distributed as dist import torch.multiprocessing as mp from convert_partition import create_graph_object, create_m...
--- +++ @@ -46,6 +46,72 @@ def gen_node_data( rank, world_size, num_parts, id_lookup, ntid_ntype_map, schema_map ): + """ + For this data processing pipeline, reading node files is not needed. All the needed information about + the nodes can be found in the metadata json file. This function generates the...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/data_shuffle.py
Provide clean and structured docstrings
from __future__ import annotations import asyncio import errno import shutil import stat import sys from contextlib import asynccontextmanager from pathlib import Path from typing import TYPE_CHECKING, AsyncGenerator, Callable from urllib.parse import urlparse from gitingest.clone import clone_repo from gitingest.co...
--- +++ @@ -1,3 +1,4 @@+"""Main entry point for ingesting a source and processing its contents.""" from __future__ import annotations @@ -44,6 +45,47 @@ token: str | None = None, output: str | None = None, ) -> tuple[str, str, str]: + """Ingest a source and process its contents. + + This function a...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/entrypoint.py
Generate consistent documentation across files
from __future__ import annotations import os from dataclasses import dataclass, field from enum import Enum, auto from typing import TYPE_CHECKING from gitingest.utils.compat_func import readlink from gitingest.utils.file_utils import _decodes, _get_preferred_encodings, _read_chunk from gitingest.utils.notebook impo...
--- +++ @@ -1,3 +1,4 @@+"""Schema for the filesystem representation.""" from __future__ import annotations @@ -17,6 +18,7 @@ class FileSystemNodeType(Enum): + """Enum representing the type of a file system node (directory or file).""" DIRECTORY = auto() FILE = auto() @@ -25,6 +27,7 @@ @datacla...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/schemas/filesystem.py
Add structured docstrings to improve clarity
from __future__ import annotations import os from gitingest.utils.git_utils import validate_github_token def resolve_token(token: str | None) -> str | None: token = token or os.getenv("GITHUB_TOKEN") if token: validate_github_token(token) return token
--- +++ @@ -1,3 +1,4 @@+"""Utilities for handling authentication.""" from __future__ import annotations @@ -7,7 +8,20 @@ def resolve_token(token: str | None) -> str | None: + """Resolve the token to use for the query. + + Parameters + ---------- + token : str | None + GitHub personal access ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/auth.py
Add docstrings that explain purpose and usage
class AsyncTimeoutError(Exception): class InvalidNotebookError(Exception): def __init__(self, message: str) -> None: super().__init__(message) class InvalidGitHubTokenError(ValueError): def __init__(self) -> None: msg = ( "Invalid GitHub token format. To generate a token, go ...
--- +++ @@ -1,19 +1,27 @@+"""Custom exceptions for the Gitingest package.""" class AsyncTimeoutError(Exception): + """Exception raised when an async operation exceeds its timeout limit. + + This exception is used by the ``async_timeout`` decorator to signal that the wrapped + asynchronous function has exc...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/exceptions.py
Add docstrings to my Python code
from __future__ import absolute_import, division import glob import os from collections import defaultdict from collections.abc import Iterable, Mapping, Sequence from functools import wraps import numpy as np from .. import backend as F, ndarray as nd from .._ffi.function import _init_api from ..base import dgl_war...
--- +++ @@ -1,3 +1,4 @@+"""Internal utilities.""" from __future__ import absolute_import, division import glob @@ -14,10 +15,12 @@ def is_listlike(data): + """Return if the data is a sequence but not a string.""" return isinstance(data, Sequence) and not isinstance(data, str) class InconsistentDtyp...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/python/dgl/utils/internal.py
Add docstrings for better understanding
from __future__ import annotations from pathlib import Path # noqa: TC003 (typing-only-standard-library-import) needed for type checking (pydantic) from uuid import UUID # noqa: TC003 (typing-only-standard-library-import) needed for type checking (pydantic) from pydantic import BaseModel, Field from gitingest.con...
--- +++ @@ -1,3 +1,4 @@+"""Module containing the dataclasses for the ingestion process.""" from __future__ import annotations @@ -11,6 +12,46 @@ class IngestionQuery(BaseModel): # pylint: disable=too-many-instance-attributes + """Pydantic model to store the parsed details of the repository or file path. + ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/schemas/ingestion.py
Write docstrings for utility functions
from __future__ import annotations from pathlib import Path DEFAULT_IGNORE_PATTERNS: set[str] = { # Python "*.pyc", "*.pyo", "*.pyd", "__pycache__", ".pytest_cache", ".coverage", ".tox", ".nox", ".mypy_cache", ".ruff_cache", ".hypothesis", "poetry.lock", "Pipfi...
--- +++ @@ -1,3 +1,4 @@+"""Default ignore patterns for Gitingest.""" from __future__ import annotations @@ -168,6 +169,26 @@ def load_ignore_patterns(root: Path, filename: str) -> set[str]: + """Load ignore patterns from ``filename`` found under ``root``. + + The loader walks the directory tree, looks fo...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/ignore_patterns.py
Help me add docstrings to my project
from __future__ import annotations from typing import TYPE_CHECKING from pathspec import PathSpec if TYPE_CHECKING: from pathlib import Path def _should_include(path: Path, base_path: Path, include_patterns: set[str]) -> bool: rel_path = _relative_or_none(path, base_path) if rel_path is None: # outsi...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for the ingestion process.""" from __future__ import annotations @@ -10,6 +11,25 @@ def _should_include(path: Path, base_path: Path, include_patterns: set[str]) -> bool: + """Return ``True`` if ``path`` matches any of ``include_patterns``. + + Parameters + ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/ingestion_utils.py
Document classes and their methods
from __future__ import annotations import locale import platform from typing import TYPE_CHECKING if TYPE_CHECKING: from pathlib import Path try: locale.setlocale(locale.LC_ALL, "") except locale.Error: locale.setlocale(locale.LC_ALL, "C") _CHUNK_SIZE = 1024 # bytes def _get_preferred_encodings() ->...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for working with files and directories.""" from __future__ import annotations @@ -17,6 +18,15 @@ def _get_preferred_encodings() -> list[str]: + """Get list of encodings to try, prioritized for the current platform. + + Returns + ------- + list[str] + ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/file_utils.py
Document helper functions with docstrings
import os from pathlib import Path def readlink(path: Path) -> Path: return Path(os.readlink(path)) def removesuffix(s: str, suffix: str) -> str: return s[: -len(suffix)] if s.endswith(suffix) else s
--- +++ @@ -1,11 +1,44 @@+"""Compatibility functions for Python 3.8.""" import os from pathlib import Path def readlink(path: Path) -> Path: + """Read the target of a symlink. + + Compatible with Python 3.8. + + Parameters + ---------- + path : Path + Path to the symlink. + + Returns +...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/compat_func.py
Include argument descriptions in docstrings
from __future__ import annotations import json import logging import os import sys from typing import Any from loguru import logger def json_sink(message: Any) -> None: # noqa: ANN401 record = message.record log_entry = { "timestamp": record["time"].isoformat(), "level": record["level"].n...
--- +++ @@ -1,3 +1,8 @@+"""Logging configuration for gitingest using loguru. + +This module provides structured JSON logging suitable for Kubernetes deployments +while also supporting human-readable logging for development. +""" from __future__ import annotations @@ -11,6 +16,14 @@ def json_sink(message: Any) ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/logging_config.py
Fill in missing docstrings in my code
from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING import git from gitingest.config import DEFAULT_TIMEOUT from gitingest.utils.git_utils import ( check_repo_exists, checkout_partial_clone, create_git_repo, ensure_git_installed, git_auth_context, is_...
--- +++ @@ -1,3 +1,4 @@+"""Module containing functions for cloning a Git repository to a local path.""" from __future__ import annotations @@ -29,6 +30,27 @@ @async_timeout(DEFAULT_TIMEOUT) async def clone_repo(config: CloneConfig, *, token: str | None = None) -> None: + """Clone a repository to a local path...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/clone.py
Include argument descriptions in docstrings
from __future__ import annotations import json from itertools import chain from typing import TYPE_CHECKING, Any from gitingest.utils.exceptions import InvalidNotebookError from gitingest.utils.logging_config import get_logger if TYPE_CHECKING: from pathlib import Path # Initialize logger for this module logge...
--- +++ @@ -1,3 +1,4 @@+"""Utilities for processing Jupyter notebooks.""" from __future__ import annotations @@ -16,6 +17,26 @@ def process_notebook(file: Path, *, include_output: bool = True) -> str: + """Process a Jupyter notebook file and return an executable Python script as a string. + + Parameters ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/notebook.py
Generate missing documentation strings
from __future__ import annotations import asyncio import base64 import re import sys from contextlib import contextmanager from pathlib import Path from typing import TYPE_CHECKING, Final, Generator, Iterable from urllib.parse import urlparse, urlunparse import git from gitingest.utils.compat_func import removesuff...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for interacting with Git repositories.""" from __future__ import annotations @@ -29,11 +30,45 @@ def is_github_host(url: str) -> bool: + """Check if a URL is from a GitHub host (github.com or GitHub Enterprise). + + Parameters + ---------- + url : str + ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/git_utils.py
Add concise docstrings to each method
import asyncio import functools from typing import Awaitable, Callable, TypeVar from gitingest.utils.compat_typing import ParamSpec from gitingest.utils.exceptions import AsyncTimeoutError T = TypeVar("T") P = ParamSpec("P") def async_timeout(seconds: int) -> Callable[[Callable[P, Awaitable[T]]], Callable[P, Await...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for the Gitingest package.""" import asyncio import functools @@ -11,6 +12,25 @@ def async_timeout(seconds: int) -> Callable[[Callable[P, Awaitable[T]]], Callable[P, Awaitable[T]]]: + """Async Timeout decorator. + + This decorator wraps an asynchronous function...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/timeout_wrapper.py
Write docstrings for backend logic
import itertools import os os.environ["DGLBACKEND"] = "pytorch" import dgl import dgl.data import numpy as np import scipy.sparse as sp import torch import torch.nn as nn import torch.nn.functional as F ###################################################################### # Overview of Link Prediction with GNN # -...
--- +++ @@ -1,3 +1,21 @@+""" +Link Prediction using Graph Neural Networks +=========================================== + +In the :doc:`introduction <1_introduction>`, you have already learned +the basic workflow of using GNNs for node classification, +i.e. predicting the category of a node in a graph. This tutorial wil...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tutorials/blitz/4_link_predict.py
Add docstrings that explain logic
from __future__ import annotations import re from typing import Iterable from gitingest.utils.ignore_patterns import DEFAULT_IGNORE_PATTERNS _PATTERN_SPLIT_RE = re.compile(r"[,\s]+") def process_patterns( exclude_patterns: str | set[str] | None = None, include_patterns: str | set[str] | None = None, ) -> ...
--- +++ @@ -1,3 +1,4 @@+"""Pattern utilities for the Gitingest package.""" from __future__ import annotations @@ -13,6 +14,21 @@ exclude_patterns: str | set[str] | None = None, include_patterns: str | set[str] | None = None, ) -> tuple[set[str], set[str] | None]: + """Process include and exclude patte...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/pattern_utils.py
Write docstrings for backend logic
############################################################### # Introducing attention to GCN # ---------------------------- # # The key difference between GAT and GCN is how the information from the one-hop neighborhood is aggregated. # # For GCN, a graph convolution operation produces the normalized sum of the node ...
--- +++ @@ -1,3 +1,37 @@+""" +.. _model-gat: + +Understand Graph Attention Network +======================================= + +**Authors:** `Hao Zhang <https://github.com/sufeidechabei/>`_, `Mufei Li +<https://github.com/mufeili>`_, `Minjie Wang +<https://jermainewang.github.io/>`_ `Zheng Zhang +<https://shanghai.nyu....
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tutorials/models/1_gnn/9_gat.py
Write proper docstrings for these functions
from pathlib import Path async def ensure_directory_exists_or_create(path: Path) -> None: try: path.mkdir(parents=True, exist_ok=True) except OSError as exc: msg = f"Failed to create directory {path}: {exc}" raise OSError(msg) from exc
--- +++ @@ -1,10 +1,24 @@+"""Utility functions for working with the operating system.""" from pathlib import Path async def ensure_directory_exists_or_create(path: Path) -> None: + """Ensure the directory exists, creating it if necessary. + + Parameters + ---------- + path : Path + The path t...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/os_utils.py
Generate docstrings with examples
import uvicorn from fastapi import FastAPI from fastapi.responses import HTMLResponse from prometheus_client import REGISTRY, generate_latest from gitingest.utils.logging_config import get_logger # Create a logger for this module logger = get_logger(__name__) # Create a separate FastAPI app for metrics metrics_app ...
--- +++ @@ -1,3 +1,4 @@+"""Prometheus metrics server running on a separate port.""" import uvicorn from fastapi import FastAPI @@ -20,6 +21,16 @@ @metrics_app.get("/metrics") async def metrics() -> HTMLResponse: + """Serve Prometheus metrics without authentication. + + This endpoint is only accessible from...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/metrics_server.py
Create structured documentation for my script
import os os.environ["DGLBACKEND"] = "pytorch" import dgl import dgl.function as fn import torch import torch.nn as nn import torch.nn.functional as F ###################################################################### # Message passing and GNNs # ------------------------ # # DGL follows the *message passing para...
--- +++ @@ -1,3 +1,22 @@+""" +Write your own GNN module +========================= + +Sometimes, your model goes beyond simply stacking existing GNN modules. +For example, you would like to invent a new way of aggregating neighbor +information by considering node importance or edge weights. + +By the end of this tutori...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tutorials/blitz/3_message_passing.py
Generate missing documentation strings
from __future__ import annotations import os import threading from pathlib import Path import sentry_sdk from dotenv import load_dotenv from fastapi import FastAPI, Request from fastapi.responses import FileResponse, HTMLResponse, JSONResponse from fastapi.staticfiles import StaticFiles from slowapi.errors import Ra...
--- +++ @@ -1,3 +1,4 @@+"""Main module for the FastAPI application.""" from __future__ import annotations @@ -93,26 +94,81 @@ @app.get("/health") async def health_check() -> dict[str, str]: + """Health check endpoint to verify that the server is running. + + **Returns** + + - **dict[str, str]**: A JSON...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/main.py
Document all public functions with docstrings
from __future__ import annotations from enum import Enum from typing import TYPE_CHECKING, Union from pydantic import BaseModel, Field, field_validator from gitingest.utils.compat_func import removesuffix from server.server_config import MAX_FILE_SIZE_KB # needed for type checking (pydantic) if TYPE_CHECKING: ...
--- +++ @@ -1,3 +1,4 @@+"""Pydantic models for the query form.""" from __future__ import annotations @@ -15,12 +16,29 @@ class PatternType(str, Enum): + """Enumeration for pattern types used in file filtering.""" INCLUDE = "include" EXCLUDE = "exclude" class IngestRequest(BaseModel): + "...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/models.py
Help me write clear docstrings
from fastapi import APIRouter, Request from fastapi.responses import HTMLResponse from server.server_config import get_version_info, templates router = APIRouter() @router.get("/{full_path:path}", include_in_schema=False) async def catch_all(request: Request, full_path: str) -> HTMLResponse: context = { ...
--- +++ @@ -1,3 +1,4 @@+"""The dynamic router module defines handlers for dynamic path requests.""" from fastapi import APIRouter, Request from fastapi.responses import HTMLResponse @@ -9,6 +10,25 @@ @router.get("/{full_path:path}", include_in_schema=False) async def catch_all(request: Request, full_path: str) -...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/routers/dynamic.py
Create docstrings for each class method
from __future__ import annotations import hashlib import os from typing import TYPE_CHECKING from urllib.parse import urlparse from uuid import UUID # noqa: TC003 (typing-only-standard-library-import) needed for type checking (pydantic) import boto3 from botocore.exceptions import ClientError from prometheus_client...
--- +++ @@ -1,3 +1,4 @@+"""S3 utility functions for uploading and managing digest files.""" from __future__ import annotations @@ -27,13 +28,16 @@ class S3UploadError(Exception): + """Custom exception for S3 upload failures.""" def is_s3_enabled() -> bool: + """Check if S3 is enabled via environment...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/s3_utils.py
Help me write clear docstrings
from typing import Union from uuid import UUID from fastapi import APIRouter, HTTPException, Request, status from fastapi.responses import FileResponse, JSONResponse, RedirectResponse from prometheus_client import Counter from gitingest.config import TMP_BASE_PATH from server.models import IngestRequest from server....
--- +++ @@ -1,3 +1,4 @@+"""Ingest endpoint for the API.""" from typing import Union from uuid import UUID @@ -24,6 +25,21 @@ request: Request, # noqa: ARG001 (unused-function-argument) # pylint: disable=unused-argument ingest_request: IngestRequest, ) -> JSONResponse: + """Ingest a Git repository and ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/routers/ingest.py
Auto-generate documentation strings for this file
import os ############################################################################## # # In this tutorial, you learn to use Tree-LSTM networks for sentiment analysis. # The Tree-LSTM is a generalization of long short-term memory (LSTM) networks to tree-structured network topologies. # # The Tree-LSTM structure wa...
--- +++ @@ -1,3 +1,21 @@+""" +.. _model-tree-lstm: + +Tree-LSTM in DGL +========================== + +**Author**: Zihao Ye, Qipeng Guo, `Minjie Wang +<https://jermainewang.github.io/>`_, `Jake Zhao +<https://cs.nyu.edu/~jakezhao/>`_, Zheng Zhang + +.. warning:: + + The tutorial aims at gaining insights i...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tutorials/models/2_small_graph/3_tree-lstm.py
Help me write clear docstrings
from __future__ import annotations from typing import Any from fastapi import status from fastapi.responses import JSONResponse from server.models import IngestErrorResponse, IngestSuccessResponse, PatternType from server.query_processor import process_query COMMON_INGEST_RESPONSES: dict[int | str, dict[str, Any]]...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for the ingest endpoints.""" from __future__ import annotations @@ -23,6 +24,10 @@ pattern: str, token: str | None, ) -> JSONResponse: + """Run ``process_query`` and wrap the result in a ``FastAPI`` ``JSONResponse``. + + Consolidates error handling share...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/routers_utils.py
Write docstrings for backend logic
import argparse import logging import os import platform import sys from pathlib import Path import constants from utils import read_json def check_dependencies(): exec_path = os.get_exec_path() mpi_install = False for x in exec_path: if os.path.isfile(os.path.join(x, "mpirun")): mpi...
--- +++ @@ -10,6 +10,9 @@ def check_dependencies(): + """Check if all the dependencies needed for the execution of this file + are installed. + """ exec_path = os.get_exec_path() mpi_install = False @@ -28,6 +31,13 @@ def run_parmetis_wrapper(params): + """Function to execute all the ste...
https://raw.githubusercontent.com/dmlc/dgl/HEAD/tools/distpartitioning/parmetis_wrapper.py
Add standardized docstrings across the file
from fastapi import APIRouter, Request from fastapi.responses import HTMLResponse from server.server_config import EXAMPLE_REPOS, get_version_info, templates router = APIRouter() @router.get("/", response_class=HTMLResponse, include_in_schema=False) async def home(request: Request) -> HTMLResponse: context = {...
--- +++ @@ -1,3 +1,4 @@+"""Module defining the FastAPI router for the home page of the application.""" from fastapi import APIRouter, Request from fastapi.responses import HTMLResponse @@ -9,6 +10,23 @@ @router.get("/", response_class=HTMLResponse, include_in_schema=False) async def home(request: Request) -> HTM...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/routers/index.py
Add well-formatted docstrings
import argparse import gc import logging import queue import socket import struct import threading import traceback import wave from importlib.resources import files import numpy as np import torch import torchaudio from huggingface_hub import hf_hub_download from hydra.utils import get_class from omegaconf import Ome...
--- +++ @@ -30,6 +30,7 @@ class AudioFileWriterThread(threading.Thread): + """Threaded file writer to avoid blocking the TTS streaming process.""" def __init__(self, output_file, sampling_rate): super().__init__() @@ -40,6 +41,7 @@ self.audio_data = [] def run(self): + """Pr...
https://raw.githubusercontent.com/SWivid/F5-TTS/HEAD/src/f5_tts/socket_server.py
Add docstrings explaining edge cases
from __future__ import annotations import ssl from typing import TYPE_CHECKING import requests.exceptions import tiktoken from gitingest.schemas import FileSystemNode, FileSystemNodeType from gitingest.utils.compat_func import readlink from gitingest.utils.logging_config import get_logger if TYPE_CHECKING: fro...
--- +++ @@ -1,3 +1,4 @@+"""Functions to ingest and analyze a codebase directory or single file.""" from __future__ import annotations @@ -24,6 +25,23 @@ def format_node(node: FileSystemNode, query: IngestionQuery) -> tuple[str, str, str]: + """Generate a summary, directory structure, and file contents for a...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/output_formatter.py
Provide clean and structured docstrings
import concurrent.futures import multiprocessing import os import shutil import signal import subprocess import sys from contextlib import contextmanager sys.path.append(os.getcwd()) import argparse import csv import json from importlib.resources import files from pathlib import Path import soundfile as sf import ...
--- +++ @@ -1,3 +1,15 @@+""" +Usage: + python prepare_csv_wavs.py /path/to/metadata.csv /output/dataset/path [--pretrain] [--workers N] + +CSV format (header required, "|" delimiter): + audio_file|text + /path/to/wavs/audio_0001.wav|Yo! Hello? Hello? + /path/to/wavs/audio_0002.wav|Hi, how are you doing toda...
https://raw.githubusercontent.com/SWivid/F5-TTS/HEAD/src/f5_tts/train/datasets/prepare_csv_wavs.py
Generate missing documentation strings
# ruff: noqa: F722 F821 from __future__ import annotations import math import warnings from typing import Optional import torch import torch.nn.functional as F import torchaudio from librosa.filters import mel as librosa_mel_fn from torch import nn from x_transformers.x_transformers import apply_rotary_pos_emb from...
--- +++ @@ -1,3 +1,11 @@+""" +ein notation: +b - batch +n - sequence +nt - text sequence +nw - raw wave length +d - dimension +""" # ruff: noqa: F722 F821 from __future__ import annotations @@ -749,6 +757,14 @@ class MMDiTBlock(nn.Module): + r""" + modified from diffusers/src/diffusers/models/attention.py...
https://raw.githubusercontent.com/SWivid/F5-TTS/HEAD/src/f5_tts/model/modules.py
Create documentation strings for testing functions
from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING from gitingest.config import MAX_DIRECTORY_DEPTH, MAX_FILES, MAX_TOTAL_SIZE_BYTES from gitingest.output_formatter import format_node from gitingest.schemas import FileSystemNode, FileSystemNodeType, FileSystemStats from giti...
--- +++ @@ -1,3 +1,4 @@+"""Functions to ingest and analyze a codebase directory or single file.""" from __future__ import annotations @@ -18,6 +19,28 @@ def ingest_query(query: IngestionQuery) -> tuple[str, str, str]: + """Run the ingestion process for a parsed query. + + This is the main entry point for...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/ingestion.py
Add docstrings for internal functions
# Copyright 2014-2025 the openage authors. See copying.md for legal info. import argparse import importlib import os import shutil import subprocess import sys from .util import log_setup def parse_args(): cli = argparse.ArgumentParser() check_types = cli.add_mutually_exclusive_group() check_types.add...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2014-2025 the openage authors. See copying.md for legal info. +""" +Entry point for the code compliance checker. +""" import argparse import importlib @@ -12,6 +15,7 @@ def parse_args(): + """ Returns the raw argument namespace. """ cli = argparse.ArgumentParser()...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/__main__.py
Write documentation strings for class attributes
from __future__ import annotations import uuid from pathlib import Path from typing import Literal from gitingest.config import TMP_BASE_PATH from gitingest.schemas import IngestionQuery from gitingest.utils.git_utils import fetch_remote_branches_or_tags, resolve_commit from gitingest.utils.logging_config import get...
--- +++ @@ -1,3 +1,4 @@+"""Module containing functions to parse and validate input sources and patterns.""" from __future__ import annotations @@ -22,6 +23,26 @@ async def parse_remote_repo(source: str, token: str | None = None) -> IngestionQuery: + """Parse a repository URL and return an ``IngestionQuery``...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/query_parser.py
Provide docstrings following PEP 257
from __future__ import annotations import shutil from pathlib import Path from typing import TYPE_CHECKING, cast from gitingest.clone import clone_repo from gitingest.ingestion import ingest_query from gitingest.query_parser import parse_remote_repo from gitingest.utils.git_utils import resolve_commit, validate_gith...
--- +++ @@ -1,3 +1,4 @@+"""Process a query by parsing input, cloning a repository, and generating a summary.""" from __future__ import annotations @@ -32,6 +33,7 @@ def _cleanup_repository(clone_config: CloneConfig) -> None: + """Clean up the cloned repository after processing.""" try: local_p...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/query_processor.py
Write docstrings describing functionality
#!/usr/bin/env python3 # # Copyright 2015-2025 the openage authors. See copying.md for legal info. import argparse import os import sys from contextlib import redirect_stdout from multiprocessing import cpu_count from pathlib import Path from Cython.Build import cythonize class LineFilter: # pylint: disable=to...
--- +++ @@ -2,6 +2,9 @@ # # Copyright 2015-2025 the openage authors. See copying.md for legal info. +""" +Runs Cython on all modules that were listed via add_cython_module. +""" import argparse import os @@ -14,6 +17,7 @@ class LineFilter: + """ Proxy for a stream (default stdout) to filter out whole unwa...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/cythonize.py
Write reusable docstrings
from __future__ import annotations import string from typing import TYPE_CHECKING, cast from urllib.parse import ParseResult, unquote, urlparse from gitingest.utils.compat_typing import StrEnum from gitingest.utils.git_utils import _resolve_ref_to_sha, check_repo_exists from gitingest.utils.logging_config import get...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for parsing and validating query parameters.""" from __future__ import annotations @@ -28,6 +29,7 @@ class PathKind(StrEnum): + """Path kind enum.""" TREE = "tree" BLOB = "blob" @@ -36,6 +38,23 @@ async def _fallback_to_root(query: IngestionQuery,...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/utils/query_parser_utils.py
Include argument descriptions in docstrings
# Copyright 2015-2022 the openage authors. See copying.md for legal info. import argparse import importlib.util import os import py_compile import shutil import sys def clone_file_to_dir(sourcefile, input_dir, output_dir): if os.path.samefile(input_dir, output_dir) or sourcefile.startswith(output_dir): ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2015-2022 the openage authors. See copying.md for legal info. +""" +Compiles python modules with cpython to pyc/pyo files. +""" import argparse import importlib.util @@ -10,6 +13,13 @@ def clone_file_to_dir(sourcefile, input_dir, output_dir): + """ + Make a copy of so...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/compilepy.py
Write docstrings for backend logic
# Copyright 2015-2024 the openage authors. See copying.md for legal info. # # pylint: disable=too-many-statements import argparse # TODO remove this once all multiprocessing has been eliminated: import multiprocessing import os import sys from .log import set_loglevel, verbosity_to_level, ENV_VERBOSITY def print_ve...
--- +++ @@ -1,6 +1,12 @@ # Copyright 2015-2024 the openage authors. See copying.md for legal info. # # pylint: disable=too-many-statements +""" +Behold: The central entry point for all of openage. + +This module mostly does argparsing. +Subparsers are initialized by their respective modules. +""" import argparse ...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/__main__.py
Add docstrings for better understanding
# Copyright 2021-2021 the openage authors. See copying.md for legal info. import re from buildsystem.codecompliance.util import issue_str_line from .util import findfiles, readfile GLOBAL_PROFILE_DIREC = re.compile(( # global profiling directive for a file r"^(# cython: .*(profile=True|linetrace=True).*\n...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2021-2021 the openage authors. See copying.md for legal info. +""" +Verifies that Cython directives for profiling are deactivated. +""" import re @@ -20,6 +23,10 @@ def filter_file_list(check_files, dirnames): + """ + Yields all those files in check_files that are in...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/cython.py
Add docstrings for utility scripts
# Copyright 2016-2023 the openage authors. See copying.md for legal info. from mako.template import Template def generate_coord_basetypes(projectdir): # pylint: disable=cell-var-from-loop # this list contains all required member lists. member_lists = [ ["x", "y"], ["x", "y", "z"], ...
--- +++ @@ -1,10 +1,18 @@ # Copyright 2016-2023 the openage authors. See copying.md for legal info. +""" +Generates libopenage/coord/coord_{xy, xyz, ne_se, ne_se_up}.{h, cpp} +""" from mako.template import Template def generate_coord_basetypes(projectdir): + """ + Generates the test/demo method symbol l...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/codegen/coord.py
Add standardized docstrings across the file
# Copyright 2015-2022 the openage authors. See copying.md for legal info. import os from io import UnsupportedOperation from typing import NoReturn from ..util.filelike.readonly import ReadOnlyFileLikeObject from ..util.bytequeue import ByteQueue from ..util.math import INF from .lzxd import LZXDecompressor class...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2015-2022 the openage authors. See copying.md for legal info. +""" +Wraps the LZXDecompressor in a file-like, read-only stream object. +""" import os from io import UnsupportedOperation @@ -13,6 +16,28 @@ class LZXDStream(ReadOnlyFileLikeObject): + """ + Read-only str...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/cabextract/lzxdstream.py
Write proper docstrings for these functions
# Copyright 2019-2022 the openage authors. See copying.md for legal info. from __future__ import annotations import typing from ..converter_object import ConverterObject, ConverterObjectGroup from .genie_tech import CivTeamBonus, CivTechTree if typing.TYPE_CHECKING: from openage.convert.entity_object.conversion...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2019-2022 the openage authors. See copying.md for legal info. +""" +Contains structures and API-like objects for civilization from AoC. +""" from __future__ import annotations import typing @@ -17,6 +20,9 @@ class GenieCivilizationObject(ConverterObject): + """ + Civi...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/convert/entity_object/conversion/aoc/genie_civ.py
Write docstrings that follow conventions
# Copyright 2015-2017 the openage authors. See copying.md for legal info. import re from .util import findfiles, readfile, issue_str_line # spaces missing in `if () {` and `for`, `while`, ... MISSING_SPACES_RE = re.compile( # on of the folowing, the first group is used for the column where # the pointer is ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2015-2017 the openage authors. See copying.md for legal info. +""" +Checks some code style rules for cpp files. +""" import re @@ -46,6 +49,10 @@ def filter_file_list(check_files, dirnames): + """ + Yields all those files in check_files that are in one of the directo...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/cppstyle.py
Add docstrings for utility scripts
from __future__ import annotations from pydantic import BaseModel, Field class CloneConfig(BaseModel): # pylint: disable=too-many-instance-attributes url: str local_path: str commit: str | None = None branch: str | None = None tag: str | None = None subpath: str = Field(default="/") bl...
--- +++ @@ -1,3 +1,4 @@+"""Schema for the cloning process.""" from __future__ import annotations @@ -5,6 +6,31 @@ class CloneConfig(BaseModel): # pylint: disable=too-many-instance-attributes + """Configuration for cloning a Git repository. + + This model holds the necessary parameters for cloning a repo...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/gitingest/schemas/cloning.py
Create docstrings for API functions
# Copyright 2014-2024 the openage authors. See copying.md for legal info. import re import logging from .util import Strlazy def deobfuscate_email(string): replacements = { ' dawt ': '.', ' à ': '@' } for key, value in replacements.items(): string = string.replace(key, value) ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2014-2024 the openage authors. See copying.md for legal info. +""" +Checks whether all authors are properly listed in copying.md. +""" import re @@ -9,6 +12,12 @@ def deobfuscate_email(string): + """ + Should reveal the original email address passed into obfuscate_em...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/authors.py
Add docstrings for internal functions
# Copyright 2015-2021 the openage authors. See copying.md for legal info. def generate_all(projectdir): from .cpp_testlist import generate_testlist generate_testlist(projectdir) from .coord import generate_coord_basetypes generate_coord_basetypes(projectdir)
--- +++ @@ -1,10 +1,16 @@ # Copyright 2015-2021 the openage authors. See copying.md for legal info. +""" +Contains the listing of all code generator invocations. +""" def generate_all(projectdir): + """ + Generates all source files in targetdir. + """ from .cpp_testlist import generate_testlist ...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/codegen/listing.py
Add docstrings to improve code quality
# Copyright 2014-2022 the openage authors. See copying.md for legal info. from datetime import datetime from enum import Enum from io import UnsupportedOperation from itertools import chain import os from sys import modules import sys from typing import Generator from ..log import err from ..util.filelike.fifo impor...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2014-2022 the openage authors. See copying.md for legal info. +""" +Utility and driver module for C++ code generation. +""" from datetime import datetime from enum import Enum @@ -18,6 +21,9 @@ class CodegenMode(Enum): + """ + Modus operandi + """ # pylint d...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/codegen/codegen.py
Turn comments into proper docstrings
# Copyright 2014-2021 the openage authors. See copying.md for legal info. import re from .util import findfiles, readfile class HeaderIssue(Exception): GUARD_RE = re.compile(( # allow any number of comments or empty lines "^(\\n|(#|//).*\\n)*" # the header guard "#pragma once\n" )) NO_GUARD_REQ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2014-2021 the openage authors. See copying.md for legal info. +""" +Verifies the guard macros of all C++ header files. +""" import re @@ -7,6 +10,7 @@ class HeaderIssue(Exception): + """ Some issue was detected with the Header guard. """ GUARD_RE = re.compile(( @@ ...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/headerguards.py
Document all endpoints with docstrings
# Copyright 2024-2024 the openage authors. See copying.md for legal info. import subprocess from .cppstyle import filter_file_list from .util import findfiles def find_issues(check_files, dirnames): # Specify the checks to include # 4 checks we focus on checks_to_include = [ 'clang-analyzer-*', ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2024-2024 the openage authors. See copying.md for legal info. +""" +Checks clang-tidy errors on cpp files +""" import subprocess from .cppstyle import filter_file_list @@ -7,6 +10,10 @@ def find_issues(check_files, dirnames): + """ + Invoke clang-tidy to check C++ fil...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/clangtidy.py
Generate consistent documentation across files
# Copyright 2014-2018 the openage authors. See copying.md for legal info. try: from pycodestyle import StyleGuide except ImportError: from pep8 import StyleGuide # these errors will be ignored by pep8 IGNORE_ERRORS = ( "E221", # multiple spaces before operator "E241", # multiple spaces after ',' ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2014-2018 the openage authors. See copying.md for legal info. +""" +Checks PEP8 compliance, with some exceptions. +""" try: from pycodestyle import StyleGuide @@ -17,6 +20,10 @@ def filter_file_list(check_files, dirnames): + """ + Yields all those files in check_f...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/pystyle.py
Create structured documentation for my script
# Copyright 2016-2022 the openage authors. See copying.md for legal info. import re import pathlib import stat from .util import findfiles, SHEBANG SHEBANG_RE = re.compile("^" + SHEBANG) EXTENSIONS_NO_X_BIT = { '.h', '.cpp', '.py', '.pyx', '.pxi', '.cmake', '.h.in', '.cpp.in', '.py.in', '.h.template', '.c...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2016-2022 the openage authors. See copying.md for legal info. +""" +Checks the mode of all files and prevents executable source files. +""" import re import pathlib @@ -22,6 +25,9 @@ def check_mode(filename): + """ + Test if the file has no executable bit set. + ...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/modes.py
Document all public functions with docstrings
# Copyright 2014-2023 the openage authors. See copying.md for legal info. import logging import os SHEBANG = "#!/.*\n(#?\n)?" FILECACHE = {} BADUTF8FILES = set() def log_setup(setting, default=1): levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG, logging.NOTSET) facto...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2014-2023 the openage authors. See copying.md for legal info. +""" +Some utilities. +""" import logging import os @@ -12,6 +15,13 @@ def log_setup(setting, default=1): + """ + Perform setup for the logger. + Run before any logging.log thingy is called. + + if se...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/buildsystem/codecompliance/util.py
Generate missing documentation strings
# Copyright 2019-2023 the openage authors. See copying.md for legal info. from __future__ import annotations import typing from ..converter_object import ConverterObject, ConverterObjectGroup if typing.TYPE_CHECKING: from openage.convert.entity_object.conversion.aoc.genie_effect import GenieEffectObject, \ ...
--- +++ @@ -1,5 +1,8 @@ # Copyright 2019-2023 the openage authors. See copying.md for legal info. +""" +Contains structures and API-like objects for techs from AoC. +""" from __future__ import annotations import typing @@ -16,6 +19,13 @@ class GenieTechObject(ConverterObject): + """ + Technology in AoE2...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/convert/entity_object/conversion/aoc/genie_tech.py
Add standardized docstrings across the file
# Copyright 2019-2023 the openage authors. See copying.md for legal info. # # pylint: disable=too-many-lines,too-many-public-methods,too-many-instance-attributes,consider-iterating-dictionary from __future__ import annotations import typing from enum import Enum from ..converter_object import ConverterObject, Conver...
--- +++ @@ -2,6 +2,9 @@ # # pylint: disable=too-many-lines,too-many-public-methods,too-many-instance-attributes,consider-iterating-dictionary +""" +Contains structures and API-like objects for game entities from AoC. +""" from __future__ import annotations import typing @@ -17,6 +20,9 @@ class GenieUnitObjec...
https://raw.githubusercontent.com/SFTtech/openage/HEAD/openage/convert/entity_object/conversion/aoc/genie_unit.py
Write docstrings describing functionality
from fastapi import Request from fastapi.responses import Response from slowapi import Limiter, _rate_limit_exceeded_handler from slowapi.errors import RateLimitExceeded from slowapi.util import get_remote_address from gitingest.utils.logging_config import get_logger # Initialize logger for this module logger = get_...
--- +++ @@ -1,3 +1,4 @@+"""Utility functions for the server.""" from fastapi import Request from fastapi.responses import Response @@ -15,6 +16,26 @@ async def rate_limit_exception_handler(request: Request, exc: Exception) -> Response: + """Handle rate-limiting errors with a custom exception handler. + + ...
https://raw.githubusercontent.com/coderamp-labs/gitingest/HEAD/src/server/server_utils.py