_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 75 19.8k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q1900 | TableProcessor._build_row | train | def _build_row(self, row, parent, align, border):
""" Given a row of text, build table cells. """
tr = etree.SubElement(parent, 'tr')
tag = 'td'
if parent.tag == 'thead':
tag = 'th'
cells = self._split_row(row, border)
# We use align here rather than ce... | python | {
"resource": ""
} |
def _split_row(self, row, border):
    """Split a row of table text into a list of cell strings.

    Args:
        row: One line of the table source.
        border: True when the row is wrapped in outer ``|`` pipe
            characters, which are stripped before splitting.

    Returns:
        list[str]: The raw (untrimmed) cell contents.
    """
    if border:
        # Strip the optional leading/trailing pipe so the outer border
        # does not produce empty phantom cells.
        if row.startswith('|'):
            row = row[1:]
        if row.endswith('|'):
            row = row[:-1]
    return row.split('|')
"resource": ""
} |
def extendMarkdown(self, md, md_globals):
    """Register a TableProcessor instance with the Markdown block parser.

    Args:
        md: The Markdown instance being extended.
        md_globals: Legacy globals dict required by the old extension
            API signature; unused here.
    """
    # '<hashheader' places table parsing just before hash-style headers.
    md.parser.blockprocessors.add('table',
                                  TableProcessor(md.parser),
                                  '<hashheader')
"resource": ""
} |
q1903 | get_all_static | train | def get_all_static():
"""
Get all the static files directories found by ``STATICFILES_FINDERS``
:return: set of paths (top-level folders only)
"""
static_dirs = set()
for finder in settings.STATICFILES_FINDERS:
finder = finders.get_finder(finder)
if hasattr(finder, 'storages')... | python | {
"resource": ""
} |
q1904 | BaseCompiler.input | train | def input(self, **kwargs):
"""
Specify temporary input file extension.
Browserify requires explicit file extension (".js" or ".json" by default).
https://github.com/substack/node-browserify/issues/1469
"""
if self.infile is None and "{infile}" in self.command:
... | python | {
"resource": ""
} |
q1905 | graph_hash | train | def graph_hash(obj):
'''this hashes all types to a hash without colissions. python's hashing algorithms are not cross type compatable but hashing tuples with the type as the first element seems to do the trick'''
obj_type = type(obj)
try:
# this works for hashables
return hash((obj_type, obj... | python | {
"resource": ""
} |
q1906 | VList.where | train | def where(self, relation, filter_fn):
''' use this to filter VLists, simply provide a filter function and what relation to apply it to '''
assert type(relation).__name__ in {'str','unicode'}, 'where needs the first arg to be a string'
assert callable(filter_fn), 'filter_fn needs to be callable'
... | python | {
"resource": ""
} |
def _where(self, filter_fn):
    """Filter the VList with a predicate over each resolved element.

    Args:
        filter_fn: Callable applied to ``i()`` — note each element is
            *called* to obtain the value tested, not passed directly.

    Returns:
        VList: A new VList containing only the matching elements.
    """
    assert callable(filter_fn), 'filter_fn needs to be callable'
    return VList(i for i in self if filter_fn(i()))
"resource": ""
} |
def _where(self, **kwargs):
    """Filter the VList by key/value equality pairs.

    Each keyword argument ``k=v`` is applied as a ``where(k, ...)``
    equality filter; filters are applied in sequence, so only items
    matching every pair survive.

    Returns:
        VList: The filtered list.
    """
    out = self
    for key, value in kwargs.items():
        # Bind the loop variable as a default argument: if ``where``
        # evaluates its predicate lazily, a plain closure would see only
        # the final loop value (late-binding closure pitfall). With an
        # eager ``where`` this is behaviorally identical.
        out = out.where(key, lambda item, _v=value: item == _v)
    return out
"resource": ""
} |
q1909 | SQLiteGraphDB.find | train | def find(self, target, relation):
''' returns back all elements the target has a relation to '''
query = 'select ob1.code from objects as ob1, objects as ob2, relations where relations.dst=ob1.id and relations.name=? and relations.src=ob2.id and ob2.code=?' # src is id not source :/
for i in sel... | python | {
"resource": ""
} |
q1910 | Sock.parse_buf | train | def parse_buf(self, encoding="unicode"):
"""
Since TCP is a stream-orientated protocol, responses aren't guaranteed
to be complete when they arrive. The buffer stores all the data and
this function splits the data into replies based on the new line
delimiter.
"""
... | python | {
"resource": ""
} |
q1911 | Sock.get_chunks | train | def get_chunks(self, fixed_limit=None, encoding="unicode"):
"""
This is the function which handles retrieving new data chunks. It's
main logic is avoiding a recv call blocking forever and halting
the program flow. To do this, it manages errors and keeps an eye
on the buffer to av... | python | {
"resource": ""
} |
q1912 | Net.validate_node | train | def validate_node(self, node_ip, node_port=None, same_nodes=1):
self.debug_print("Validating: " + node_ip)
# Is this a valid IP?
if not is_ip_valid(node_ip) or node_ip == "0.0.0.0":
self.debug_print("Invalid node ip in validate node")
return 0
# Is this ... | python | {
"resource": ""
} |
q1913 | Net.bootstrap | train | def bootstrap(self):
"""
When the software is first started, it needs to retrieve
a list of nodes to connect to the network to. This function
asks the server for N nodes which consists of at least N
passive nodes and N simultaneous nodes. The simultaneous
nodes are ... | python | {
"resource": ""
} |
q1914 | Net.advertise | train | def advertise(self):
"""
This function tells the rendezvous server that our node is ready to
accept connections from other nodes on the P2P network that run the
bootstrap function. It's only used when net_type == p2p
"""
# Advertise is disabled.
if not se... | python | {
"resource": ""
} |
q1915 | Net.determine_node | train | def determine_node(self):
"""
Determines the type of node based on a combination of forwarding
reachability and NAT type.
"""
# Manually set node_type as simultaneous.
if self.node_type == "simultaneous":
if self.nat_type != "unknown":
... | python | {
"resource": ""
} |
q1916 | Net.start | train | def start(self):
"""
This function determines node and NAT type, saves connectivity details,
and starts any needed servers to be a part of the network. This is
usually the first function called after initialising the Net class.
"""
self.debug_print("Starting netwo... | python | {
"resource": ""
} |
q1917 | RendezvousProtocol.send_remote_port | train | def send_remote_port(self):
"""
Sends the remote port mapped for the connection.
This port is surprisingly often the same as the locally
bound port for an endpoint because a lot of NAT types
preserve the port.
"""
msg = "REMOTE TCP %s" % (str(self.transport... | python | {
"resource": ""
} |
q1918 | RendezvousProtocol.cleanup_candidates | train | def cleanup_candidates(self, node_ip):
"""
Removes old TCP hole punching candidates for a
designated node if a certain amount of time has passed
since they last connected.
"""
if node_ip in self.factory.candidates:
old_candidates = []
for c... | python | {
"resource": ""
} |
q1919 | RendezvousProtocol.propogate_candidates | train | def propogate_candidates(self, node_ip):
"""
Used to progate new candidates to passive simultaneous
nodes.
"""
if node_ip in self.factory.candidates:
old_candidates = []
for candidate in self.factory.candidates[node_ip]:
# Not con... | python | {
"resource": ""
} |
q1920 | RendezvousProtocol.synchronize_simultaneous | train | def synchronize_simultaneous(self, node_ip):
"""
Because adjacent mappings for certain NAT types can
be stolen by other connections, the purpose of this
function is to ensure the last connection by a passive
simultaneous node is recent compared to the time for
a can... | python | {
"resource": ""
} |
q1921 | RendezvousProtocol.connectionLost | train | def connectionLost(self, reason):
"""
Mostly handles clean-up of node + candidate structures.
Avoids memory exhaustion for a large number of connections.
"""
try:
self.connected = False
if debug:
print(self.log_entry("CLOSED =", "no... | python | {
"resource": ""
} |
q1922 | IPgetter.get_external_ip | train | def get_external_ip(self):
"""
This function gets your IP from a random server
"""
random.shuffle(self.server_list)
myip = ''
for server in self.server_list[:3]:
myip = self.fetch(server)
if myip != '':
return myip
... | python | {
"resource": ""
} |
q1923 | IPgetter.fetch | train | def fetch(self, server):
"""
This function gets your IP from a specific server
"""
t = None
socket_default_timeout = socket.getdefaulttimeout()
opener = urllib.build_opener()
opener.addheaders = [('User-agent',
"Mozilla/5.0 (X... | python | {
"resource": ""
} |
q1924 | UNL.connect | train | def connect(self, their_unl, events, force_master=1, hairpin=1,
nonce="0" * 64):
"""
A new thread is spawned because many of the connection techniques
rely on sleep to determine connection outcome or to synchronise hole
punching techniques. If the sleep is in its own... | python | {
"resource": ""
} |
q1925 | SysClock.calculate_clock_skew | train | def calculate_clock_skew(self):
"""
Computer average and standard deviation
using all the data points.
"""
n = self.statx_n(self.data_points)
"""
Required to be able to compute the standard
deviation.
"""
if n < 1:
return Decim... | python | {
"resource": ""
} |
q1926 | RendezvousClient.simultaneous_listen | train | def simultaneous_listen(self):
"""
This function is called by passive simultaneous nodes who
wish to establish themself as such. It sets up a connection
to the Rendezvous Server to monitor for new hole punching requests.
"""
# Close socket.
if self.server... | python | {
"resource": ""
} |
q1927 | RendezvousClient.predict_mappings | train | def predict_mappings(self, mappings):
"""
This function is used to predict the remote ports that a NAT
will map a local connection to. It requires the NAT type to
be determined before use. Current support for preserving and
delta type mapping behaviour.
"""
... | python | {
"resource": ""
} |
q1928 | RendezvousClient.parse_remote_port | train | def parse_remote_port(self, reply):
"""
Parses a remote port from a Rendezvous Server's
response.
"""
remote_port = re.findall("^REMOTE (TCP|UDP) ([0-9]+)$", reply)
if not len(remote_port):
remote_port = 0
else:
remote_port = int... | python | {
"resource": ""
} |
q1929 | get_unused_port | train | def get_unused_port(port=None):
"""Checks if port is already in use."""
if port is None or port < 1024 or port > 65535:
port = random.randint(1024, 65535)
assert(1024 <= port <= 65535)
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('', ... | python | {
"resource": ""
} |
q1930 | get_lan_ip | train | def get_lan_ip(interface="default"):
if sys.version_info < (3, 0, 0):
if type(interface) == str:
interface = unicode(interface)
else:
if type(interface) == bytes:
interface = interface.decode("utf-8")
# Get ID of interface that handles WAN stuff.
default_gateway ... | python | {
"resource": ""
} |
q1931 | get_wan_ip | train | def get_wan_ip(n=0):
"""
That IP module sucks. Occasionally it returns an IP address behind
cloudflare which probably happens when cloudflare tries to proxy your web
request because it thinks you're trying to DoS. It's better if we just run
our own infrastructure.
"""
if n == 2:
try... | python | {
"resource": ""
} |
def initialize(self):
    """Capture current wall/loop times and build the croniter schedule.

    Effectively runs once: any call after ``self.croniter`` is set is a
    no-op, so the reference times stay anchored to the first call.
    """
    if self.croniter is not None:
        return
    self.time = time.time()
    self.datetime = datetime.now(self.tz)
    self.loop_time = self.loop.time()
    self.croniter = croniter(self.spec, start_time=self.datetime)
"resource": ""
} |
def get_next(self):
    """Translate the next cron fire time into event-loop time.

    The croniter yields an absolute wall-clock timestamp; the offset
    from the captured ``self.time`` is applied to ``self.loop_time`` so
    the result is usable with ``loop.call_at``.
    """
    wall_delay = self.croniter.get_next(float) - self.time
    return self.loop_time + wall_delay
"resource": ""
} |
def call_next(self):
    """Arm the next occurrence on the event loop, then run the task.

    Cancels any previously scheduled handle so at most one callback is
    ever pending, schedules itself again at the next cron time, and
    finally fires the wrapped function for the current occurrence.
    """
    if self.handle is not None:
        # Avoid duplicate pending callbacks if call_next is re-entered.
        self.handle.cancel()
    next_time = self.get_next()
    self.handle = self.loop.call_at(next_time, self.call_next)
    self.call_func()
"resource": ""
} |
def call_func(self, *args, **kwargs):
    """Run the cron coroutine, capturing exceptions instead of raising.

    ``return_exceptions=True`` folds any exception into the gathered
    result, which is then forwarded to ``self.set_result`` through the
    done callback.
    """
    # NOTE(review): the ``loop=`` argument to asyncio.gather was
    # deprecated in 3.8 and removed in 3.10 — confirm supported
    # Python versions before upgrading.
    asyncio.gather(
        self.cron(*args, **kwargs),
        loop=self.loop, return_exceptions=True
    ).add_done_callback(self.set_result)
"resource": ""
} |
q1936 | cmd | train | def cmd():
'''Handler for command line invocation'''
# Try to handle any reasonable thing thrown at this.
# Consume '-f' and '-o' as input/output, allow '-' for stdin/stdout
# and treat any subsequent arguments as a space separated string to
# be titlecased (so it still works if people forget quote... | python | {
"resource": ""
} |
q1937 | Checker.add_options | train | def add_options(cls, parser: OptionManager) -> None:
"""
``flake8`` api method to register new plugin options.
See :class:`.Configuration` docs for detailed options reference.
Arguments:
parser: ``flake8`` option parser instance.
"""
parser.add_option(
... | python | {
"resource": ""
} |
q1938 | Checker.run | train | def run(self) -> Generator[Tuple[int, int, str, type], None, None]:
"""
Runs the checker.
``fix_file()`` only mutates the buffer object.
It is the only way to find out if some error happened.
"""
if self.filename != STDIN:
buffer = StringIO()
opti... | python | {
"resource": ""
} |
def unique(g):
    """Yield each distinct value of ``g`` once, in first-seen order.

    Example
    -------
    >>> list(unique(iter([1, 3, 1, 2, 3])))
    [1, 3, 2]
    """
    seen = set()
    for item in g:
        if item in seen:
            continue
        seen.add(item)
        yield item
"resource": ""
} |
def static_get_type_attr(t, name):
    """Look up ``name`` along ``t``'s MRO without invoking descriptors.

    Reads each class ``__dict__`` directly, so properties and other
    descriptors are returned as raw objects rather than being called.

    Raises:
        AttributeError: If no class in the MRO defines ``name``.
    """
    sentinel = object()
    for klass in t.mro():
        value = vars(klass).get(name, sentinel)
        if value is not sentinel:
            return value
    raise AttributeError(name)
"resource": ""
} |
q1941 | _conflicting_defaults | train | def _conflicting_defaults(typename, conflicts):
"""Format an error message for conflicting default implementations.
Parameters
----------
typename : str
Name of the type for which we're producing an error.
conflicts : dict[str -> list[Interface]]
Map from strings to interfaces provi... | python | {
"resource": ""
} |
q1942 | InterfaceMeta._diff_signatures | train | def _diff_signatures(self, type_):
"""
Diff our method signatures against the methods provided by type_.
Parameters
----------
type_ : type
The type to check.
Returns
-------
missing, mistyped, mismatched : list[str], dict[str -> type], dict[s... | python | {
"resource": ""
} |
q1943 | InterfaceMeta.verify | train | def verify(self, type_):
"""
Check whether a type implements ``self``.
Parameters
----------
type_ : type
The type to check.
Raises
------
TypeError
If ``type_`` doesn't conform to our interface.
Returns
-------
... | python | {
"resource": ""
} |
q1944 | InterfaceMeta._invalid_implementation | train | def _invalid_implementation(self, t, missing, mistyped, mismatched):
"""
Make a TypeError explaining why ``t`` doesn't implement our interface.
"""
assert missing or mistyped or mismatched, "Implementation wasn't invalid."
message = "\nclass {C} failed to implement interface {I}... | python | {
"resource": ""
} |
q1945 | Interface.from_class | train | def from_class(cls, existing_class, subset=None, name=None):
"""Create an interface from an existing class.
Parameters
----------
existing_class : type
The type from which to extract an interface.
subset : list[str], optional
List of methods that should b... | python | {
"resource": ""
} |
q1946 | compatible | train | def compatible(impl_sig, iface_sig):
"""
Check whether ``impl_sig`` is compatible with ``iface_sig``.
Parameters
----------
impl_sig : inspect.Signature
The signature of the implementation function.
iface_sig : inspect.Signature
The signature of the interface function.
In g... | python | {
"resource": ""
} |
def step_count(group_idx):
    """Return how many constant-value runs ``group_idx`` contains."""
    n = len(group_idx)
    if n < 1:
        return 0
    # One run to start with; each change of value opens a new run.
    steps = 1
    previous = group_idx[0]
    for pos in range(1, n):
        current = group_idx[pos]
        if current != previous:
            previous = current
            steps += 1
    return steps
"resource": ""
} |
q1948 | step_indices | train | def step_indices(group_idx):
"""Return the edges of areas within group_idx, which are filled with the same value."""
ilen = step_count(group_idx) + 1
indices = np.empty(ilen, np.int64)
indices[0] = 0
indices[-1] = group_idx.size
cmp_pos = 0
ri = 1
for i in range(len(group_idx)):
... | python | {
"resource": ""
} |
q1949 | AggregateOp.callable | train | def callable(cls, nans=False, reverse=False, scalar=False):
""" Compile a jitted function doing the hard part of the job """
_valgetter = cls._valgetter_scalar if scalar else cls._valgetter
valgetter = nb.njit(_valgetter)
outersetter = nb.njit(cls._outersetter)
_cls_inner = nb.n... | python | {
"resource": ""
} |
q1950 | AggregateGeneric.callable | train | def callable(self, nans=False):
"""Compile a jitted function and loop it over the sorted data."""
jitfunc = nb.njit(self.func, nogil=True)
def _loop(sortidx, group_idx, a, ret):
size = len(ret)
group_idx_srt = group_idx[sortidx]
a_srt = a[sortidx]
... | python | {
"resource": ""
} |
q1951 | get_func | train | def get_func(func, aliasing, implementations):
""" Return the key of a found implementation or the func itself """
try:
func_str = aliasing[func]
except KeyError:
if callable(func):
return func
else:
if func_str in implementations:
return func_str
... | python | {
"resource": ""
} |
q1952 | minimum_dtype | train | def minimum_dtype(x, dtype=np.bool_):
"""returns the "most basic" dtype which represents `x` properly, which
provides at least the same value range as the specified dtype."""
def check_type(x, dtype):
try:
converted = dtype.type(x)
except (ValueError, OverflowError):
... | python | {
"resource": ""
} |
q1953 | _array | train | def _array(group_idx, a, size, fill_value, dtype=None):
"""groups a into separate arrays, keeping the order intact."""
if fill_value is not None and not (np.isscalar(fill_value) or
len(fill_value) == 0):
raise ValueError("fill_value must be None, a scalar or an emp... | python | {
"resource": ""
} |
q1954 | _generic_callable | train | def _generic_callable(group_idx, a, size, fill_value, dtype=None,
func=lambda g: g, **kwargs):
"""groups a by inds, and then applies foo to each group in turn, placing
the results in an array."""
groups = _array(group_idx, a, size, ())
ret = np.full(size, fill_value, dtype=dtype or... | python | {
"resource": ""
} |
q1955 | _cumsum | train | def _cumsum(group_idx, a, size, fill_value=None, dtype=None):
"""
N to N aggregate operation of cumsum. Perform cumulative sum for each group.
group_idx = np.array([4, 3, 3, 4, 4, 1, 1, 1, 7, 8, 7, 4, 3, 3, 1, 1])
a = np.array([3, 4, 1, 3, 9, 9, 6, 7, 7, 0, 8, 2, 1, 8, 9, 8])
_cumsum(group_idx, a, ... | python | {
"resource": ""
} |
def _fill_untouched(idx, ret, fill_value):
    """Set every element of ``ret`` not addressed by ``idx`` to ``fill_value``.

    Mutates ``ret`` in place. ``idx`` is assumed to be an integer index
    array marking the positions already written — TODO confirm against
    callers.
    """
    # Start from "everything untouched", then clear the written slots.
    untouched = np.ones_like(ret, dtype=bool)
    untouched[idx] = False
    ret[untouched] = fill_value
"resource": ""
} |
q1957 | _prod | train | def _prod(group_idx, a, size, fill_value, dtype=None):
"""Same as aggregate_numpy.py"""
dtype = minimum_dtype_scalar(fill_value, dtype, a)
ret = np.full(size, fill_value, dtype=dtype)
if fill_value != 1:
ret[group_idx] = 1 # product should start from 1
np.multiply.at(ret, group_idx, a)
... | python | {
"resource": ""
} |
q1958 | c_func | train | def c_func(funcname, reverse=False, nans=False, scalar=False):
""" Fill c_funcs with constructed code from the templates """
varnames = ['group_idx', 'a', 'ret', 'counter']
codebase = c_base_reverse if reverse else c_base
iteration = c_iter_scalar[funcname] if scalar else c_iter[funcname]
if scalar:... | python | {
"resource": ""
} |
q1959 | step_indices | train | def step_indices(group_idx):
""" Get the edges of areas within group_idx, which are filled
with the same value
"""
ilen = step_count(group_idx) + 1
indices = np.empty(ilen, int)
indices[0] = 0
indices[-1] = group_idx.size
inline(c_step_indices, ['group_idx', 'indices'], define_macro... | python | {
"resource": ""
} |
q1960 | RandomProjection.__create_proj_mat | train | def __create_proj_mat(self, size):
"""Create a random projection matrix
[1] D. Achlioptas. Database-friendly random projections: Johnson-Lindenstrauss with binary coins.
[2] P. Li, et al. Very sparse random projections.
http://scikit-learn.org/stable/modules/random_projection.html#spar... | python | {
"resource": ""
} |
q1961 | load_ratings | train | def load_ratings(data_home, size):
"""Load all samples in the dataset.
"""
if size == '100k':
with open(os.path.join(data_home, 'u.data'), encoding='ISO-8859-1') as f:
lines = list(map(lambda l: list(map(int, l.rstrip().split('\t'))), f.readlines()))
elif size == '1m':
with ... | python | {
"resource": ""
} |
q1962 | n_feature_hash | train | def n_feature_hash(feature, dims, seeds):
"""N-hot-encoded feature hashing.
Args:
feature (str): Target feature represented as string.
dims (list of int): Number of dimensions for each hash value.
seeds (list of float): Seed of each hash function (mmh3).
Returns:
numpy 1d a... | python | {
"resource": ""
} |
q1963 | feature_hash | train | def feature_hash(feature, dim, seed=123):
"""Feature hashing.
Args:
feature (str): Target feature represented as string.
dim (int): Number of dimensions for a hash value.
seed (float): Seed of a MurmurHash3 hash function.
Returns:
numpy 1d array: one-hot-encoded feature vec... | python | {
"resource": ""
} |
q1964 | count_true_positive | train | def count_true_positive(truth, recommend):
"""Count number of true positives from given sets of samples.
Args:
truth (numpy 1d array): Set of truth samples.
recommend (numpy 1d array): Ordered set of recommended samples.
Returns:
int: Number of true positives.
"""
tp = 0
... | python | {
"resource": ""
} |
q1965 | RecommenderMixin.initialize | train | def initialize(self, *args):
"""Initialize a recommender by resetting stored users and items.
"""
# number of observed users
self.n_user = 0
# store user data
self.users = {}
# number of observed items
self.n_item = 0
# store item data
s... | python | {
"resource": ""
} |
def register_user(self, user):
    """Record a newly observed user.

    Args:
        user (User): User whose ``index`` keys the per-user state dict;
            a fresh empty ``known_items`` set is created for them.
    """
    fresh_entry = {'known_items': set()}
    self.users[user.index] = fresh_entry
    self.n_user = self.n_user + 1
"resource": ""
} |
q1967 | RecommenderMixin.scores2recos | train | def scores2recos(self, scores, candidates, rev=False):
"""Get recommendation list for a user u_index based on scores.
Args:
scores (numpy array; (n_target_items,)):
Scores for the target items. Smaller score indicates a promising item.
candidates (numpy array; (#... | python | {
"resource": ""
} |
q1968 | Evaluator.fit | train | def fit(self, train_events, test_events, n_epoch=1):
"""Train a model using the first 30% positive events to avoid cold-start.
Evaluation of this batch training is done by using the next 20% positive events.
After the batch SGD training, the models are incrementally updated by using the 20% tes... | python | {
"resource": ""
} |
q1969 | Evaluator.__batch_update | train | def __batch_update(self, train_events, test_events, n_epoch):
"""Batch update called by the fitting method.
Args:
train_events (list of Event): Positive training events.
test_events (list of Event): Test events.
n_epoch (int): Number of epochs for the batch training.... | python | {
"resource": ""
} |
q1970 | Evaluator.__batch_evaluate | train | def __batch_evaluate(self, test_events):
"""Evaluate the current model by using the given test events.
Args:
test_events (list of Event): Current model is evaluated by these events.
Returns:
float: Mean Percentile Rank for the test set.
"""
percentiles ... | python | {
"resource": ""
} |
q1971 | Grapher._scale_x_values | train | def _scale_x_values(self, values, max_width):
'''Scale X values to new width'''
if type(values) == dict:
values = self._scale_x_values_timestamps(values=values, max_width=max_width)
adjusted_values = list(values)
if len(adjusted_values) > max_width:
def get_pos... | python | {
"resource": ""
} |
q1972 | Grapher._scale_x_values_timestamps | train | def _scale_x_values_timestamps(self, values, max_width):
'''Scale X values to new width based on timestamps'''
first_timestamp = float(values[0][0])
last_timestamp = float(values[-1][0])
step_size = (last_timestamp - first_timestamp) / max_width
values_by_column = [[] for i in r... | python | {
"resource": ""
} |
q1973 | Grapher._scale_y_values | train | def _scale_y_values(self, values, new_min, new_max, scale_old_from_zero=True):
'''
Take values and transmute them into a new range
'''
# Scale Y values - Create a scaled list of values to use for the visual graph
scaled_values = []
y_min_value = min(values)
if sca... | python | {
"resource": ""
} |
q1974 | Grapher._assign_ascii_character | train | def _assign_ascii_character(self, y_prev, y, y_next): # noqa for complexity
'''Assign the character to be placed into the graph'''
char = '?'
if y_next > y and y_prev > y:
char = '-'
elif y_next < y and y_prev < y:
char = '-'
e... | python | {
"resource": ""
} |
q1975 | Grapher.asciigraph | train | def asciigraph(self, values=None, max_height=None, max_width=None, label=False):
'''
Accepts a list of y values and returns an ascii graph
Optionally values can also be a dictionary with a key of timestamp, and a value of value. InGraphs returns data in this format for example.
'''
... | python | {
"resource": ""
} |
q1976 | replace | train | def replace(expression: Expression, position: Sequence[int], replacement: Replacement) -> Replacement:
r"""Replaces the subexpression of `expression` at the given `position` with the given `replacement`.
The original `expression` itself is not modified, but a modified copy is returned. If the replacement
i... | python | {
"resource": ""
} |
q1977 | BipartiteGraph.find_matching | train | def find_matching(self) -> Dict[TLeft, TRight]:
"""Finds a matching in the bipartite graph.
This is done using the Hopcroft-Karp algorithm with an implementation from the
`hopcroftkarp` package.
Returns:
A dictionary where each edge of the matching is represented by a key-v... | python | {
"resource": ""
} |
def without_nodes(self, edge: Edge) -> 'BipartiteGraph[TLeft, TRight, TEdgeValue]':
    """Return a copy of the graph with the edge's endpoints removed.

    Every edge touching either endpoint of ``edge`` is dropped.
    """
    left, right = edge[0], edge[1]
    kept = []
    for (n1, n2), value in self._edges.items():
        if n1 == left or n2 == right:
            continue
        kept.append(((n1, n2), value))
    return BipartiteGraph(kept)
"resource": ""
} |
def without_edge(self, edge: Edge) -> 'BipartiteGraph[TLeft, TRight, TEdgeValue]':
    """Return a copy of this bipartite graph lacking exactly the given edge."""
    return BipartiteGraph((other, value) for other, value in self._edges.items() if other != edge)
"resource": ""
} |
def limited_to(self, left: Set[TLeft], right: Set[TRight]) -> 'BipartiteGraph[TLeft, TRight, TEdgeValue]':
    """Return the subgraph induced by the given left/right node sets."""
    kept = {}
    for (n1, n2), value in self._edges.items():
        if n1 in left and n2 in right:
            kept[(n1, n2)] = value
    return BipartiteGraph(kept.items())
"resource": ""
} |
q1981 | is_constant | train | def is_constant(expression):
"""Check if the given expression is constant, i.e. it does not contain Wildcards."""
if isinstance(expression, Wildcard):
return False
if isinstance(expression, Expression):
return expression.is_constant
if isinstance(expression, Operation):
return al... | python | {
"resource": ""
} |
def get_head(expression):
    """Return the given expression's head.

    Non-wildcards are headed by their own type; symbol wildcards by the
    symbol type they constrain; plain wildcards have no head (``None``).
    """
    if not isinstance(expression, Wildcard):
        return type(expression)
    if isinstance(expression, SymbolWildcard):
        return expression.symbol_type
    return None
"resource": ""
} |
q1983 | match_head | train | def match_head(subject, pattern):
"""Checks if the head of subject matches the pattern's head."""
if isinstance(pattern, Pattern):
pattern = pattern.expression
pattern_head = get_head(pattern)
if pattern_head is None:
return True
if issubclass(pattern_head, OneIdentityOperation):
... | python | {
"resource": ""
} |
q1984 | is_anonymous | train | def is_anonymous(expression):
"""Returns True iff the expression does not contain any variables."""
if hasattr(expression, 'variable_name') and expression.variable_name:
return False
if isinstance(expression, Operation):
return all(is_anonymous(o) for o in op_iter(expression))
return Tru... | python | {
"resource": ""
} |
q1985 | contains_variables_from_set | train | def contains_variables_from_set(expression, variables):
"""Returns True iff the expression contains any of the variables from the given set."""
if hasattr(expression, 'variable_name') and expression.variable_name in variables:
return True
if isinstance(expression, Operation):
return any(cont... | python | {
"resource": ""
} |
q1986 | get_variables | train | def get_variables(expression, variables=None):
"""Returns the set of variable names in the given expression."""
if variables is None:
variables = set()
if hasattr(expression, 'variable_name') and expression.variable_name is not None:
variables.add(expression.variable_name)
if isinstance(... | python | {
"resource": ""
} |
q1987 | rename_variables | train | def rename_variables(expression: Expression, renaming: Dict[str, str]) -> Expression:
"""Rename the variables in the expression according to the given dictionary.
Args:
expression:
The expression in which the variables are renamed.
renaming:
The renaming dictionary. Maps... | python | {
"resource": ""
} |
q1988 | fixed_integer_vector_iter | train | def fixed_integer_vector_iter(max_vector: Tuple[int, ...], vector_sum: int) -> Iterator[Tuple[int, ...]]:
"""
Return an iterator over the integer vectors which
- are componentwise less than or equal to *max_vector*, and
- are non-negative, and where
- the sum of their components is exactly *vector_... | python | {
"resource": ""
} |
q1989 | commutative_sequence_variable_partition_iter | train | def commutative_sequence_variable_partition_iter(values: Multiset, variables: List[VariableWithCount]
) -> Iterator[Dict[str, Multiset]]:
"""Yield all possible variable substitutions for given values and variables.
.. note::
The results are not yielded i... | python | {
"resource": ""
} |
q1990 | generator_chain | train | def generator_chain(initial_data: T, *factories: Callable[[T], Iterator[T]]) -> Iterator[T]:
"""Chain multiple generators together by passing results from one to the next.
This helper function allows to create a chain of generator where each generator is constructed by a factory that
gets the data yielded ... | python | {
"resource": ""
} |
q1991 | Substitution.try_add_variable | train | def try_add_variable(self, variable_name: str, replacement: VariableReplacement) -> None:
"""Try to add the variable with its replacement to the substitution.
This considers an existing replacement and will only succeed if the new replacement
can be merged with the old replacement. Merging can ... | python | {
"resource": ""
} |
q1992 | Substitution.union_with_variable | train | def union_with_variable(self, variable: str, replacement: VariableReplacement) -> 'Substitution':
"""Try to create a new substitution with the given variable added.
See :meth:`try_add_variable` for a version of this method that modifies the substitution
in place.
Args:
vari... | python | {
"resource": ""
} |
q1993 | Substitution.extract_substitution | train | def extract_substitution(self, subject: 'expressions.Expression', pattern: 'expressions.Expression') -> bool:
"""Extract the variable substitution for the given pattern and subject.
This assumes that subject and pattern already match when being considered as linear.
Also, they both must be :ter... | python | {
"resource": ""
} |
q1994 | Substitution.union | train | def union(self, *others: 'Substitution') -> 'Substitution':
"""Try to merge the substitutions.
If a variable occurs in multiple substitutions, try to merge the replacements.
See :meth:`union_with_variable` to see how replacements are merged.
Does not modify any of the original substitu... | python | {
"resource": ""
} |
q1995 | Substitution.rename | train | def rename(self, renaming: Dict[str, str]) -> 'Substitution':
"""Return a copy of the substitution with renamed variables.
Example:
Rename the variable *x* to *y*:
>>> subst = Substitution({'x': a})
>>> subst.rename({'x': 'y'})
{'y': Symbol('a')}
... | python | {
"resource": ""
} |
def _get_symbol_wildcard_label(state: '_State', symbol: Symbol) -> Type[Symbol]:
    """Return the symbol-wildcard transition key in *state* matching *symbol*.

    Scans the state's transition keys for a symbol-wildcard type that
    ``symbol`` is an instance of; returns ``None`` when no such key
    exists.
    """
    return next((t for t in state.keys() if is_symbol_wildcard(t) and isinstance(symbol, t)), None)
"resource": ""
} |
q1997 | _term_str | train | def _term_str(term: TermAtom) -> str: # pragma: no cover
"""Return a string representation of a term atom."""
if is_operation(term):
return term.name + '('
elif is_symbol_wildcard(term):
return '*{!s}'.format(term.__name__)
elif isinstance(term, Wildcard):
return '*{!s}{!s}'.for... | python | {
"resource": ""
} |
q1998 | FlatTerm.merged | train | def merged(cls, *flatterms: 'FlatTerm') -> 'FlatTerm':
"""Concatenate the given flatterms to a single flatterm.
Args:
*flatterms:
The flatterms which are concatenated.
Returns:
The concatenated flatterms.
"""
return cls(cls._combined_wild... | python | {
"resource": ""
} |
q1999 | FlatTerm._flatterm_iter | train | def _flatterm_iter(cls, expression: Expression) -> Iterator[TermAtom]:
"""Generator that yields the atoms of the expressions in prefix notation with operation end markers."""
if isinstance(expression, Operation):
yield type(expression)
for operand in op_iter(expression):
... | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.