Unnamed: 0 int64 0 2.93k | code stringlengths 101 62.2k | docs stringlengths 51 10.7k | doc_len int64 4 1.74k | words int64 4 4.82k | lang stringclasses 1
value | prompt stringlengths 320 71.2k |
|---|---|---|---|---|---|---|
1,600 | def find_asteroidal_triple(G):
r
V = set(G.nodes)
if len(V) < 6:
# An asteroidal triple cannot exist in a graph with 5 or less vertices.
return None
component_structure = create_component_structure(G)
E_complement = set(nx.complement(G).edges)
for e in E_complement:
u ... | Find an asteroidal triple in the given graph.
An asteroidal triple is a triple of non-adjacent vertices such that
there exists a path between any two of them which avoids the closed
neighborhood of the third. It checks all independent triples of vertices
and whether they are an asteroidal triple or not... | 253 | 105 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def find_asteroidal_triple(G):
r
V = set(G.nodes)
if len(V) < 6:
# An asteroidal triple cannot exist in a graph with 5 or less vertices.
return None
compone... |
1,601 | def get_leave_entries(employee, leave_type, from_date, to_date):
return frappe.db.sql(
,
{"from_date": from_date, "to_date": to_date, "employee": employee, "leave_type": leave_type},
as_dict=1,
)
@frappe.whitelist() | Returns leave entries between from_date and to_date.
SELECT
employee, leave_type, from_date, to_date, leaves, transaction_name, transaction_type, holiday_list,
is_carry_forward, is_expired
FROM `tabLeave Ledger Entry`
WHERE employee=%(employee)s AND leave_type=%(leave_type)s
AND docstatus=1
AND (leave... | 52 | 19 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_leave_entries(employee, leave_type, from_date, to_date):
return frappe.db.sql(
,
{"from_date": from_date, "to_date": to_date, "employee": employee, "leave_type": leave_type},
... |
1,602 | def addslashes(value):
return value.replace("\\", "\\\\").replace('"', '\\"').replace("'", "\\'")
@register.filter(is_safe=True)
@stringfilter |
Add slashes before quotes. Useful for escaping strings in CSV, for
example. Less useful for escaping JavaScript; use the ``escapejs``
filter instead.
| 22 | 9 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def addslashes(value):
return value.replace("\\", "\\\\").replace('"', '\\"').replace("'", "\\'")
@register.filter(is_safe=True)
@stringfilter
```
###Assistant :
... |
1,603 | def _get_raw_from_config(cls) -> str:
if cls.varname is None:
raise TypeError("varname should not be None")
return os.environ[cls.varname]
|
Read the value from environment variable.
Returns
-------
str
Config raw value.
Raises
------
TypeError
If `varname` is None.
KeyError
If value is absent.
| 24 | 16 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _get_raw_from_config(cls) -> str:
if cls.varname is None:
raise TypeError("varname should not be None")
return os.environ[cls.varname]
```
... |
1,604 | def is_monotonic_decreasing(self):
return SeriesDefault.register(pandas.Series.is_monotonic_decreasing)(self)
|
Return boolean if values in the object are monotonically decreasing.
Returns
-------
bool
| 13 | 4 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def is_monotonic_decreasing(self):
return SeriesDefault.register(pandas.Series.is_monotonic_decreasing)(self)
```
###Assistant :
Return boolean if valu... |
1,605 | def get(self, model, columns, filters):
key = self._make_key(model, filters)
conn = self.cluster.get_local_client_for_key(key)
pipe = conn.pipeline()
for col in columns:
pipe.hget(key, f"i+{col}")
results = pipe.execute()
return {
col: (... |
Fetches buffered values for a model/filter. Passed columns must be integer columns.
| 12 | 41 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get(self, model, columns, filters):
key = self._make_key(model, filters)
conn = self.cluster.get_local_client_for_key(key)
pipe = conn.pipeline()
... |
1,606 | def test_non_ascii_subscription_for_principal(self) -> None:
iago = self.example_user("iago")
self.assert_adding_subscriptions_for_principal(
iago.id, get_realm("zulip"), ["hümbüǵ"], policy_name="Public"
)
|
You can subscribe other people to streams even if they containing
non-ASCII characters.
| 13 | 13 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_non_ascii_subscription_for_principal(self) -> None:
iago = self.example_user("iago")
self.assert_adding_subscriptions_for_principal(
iago.id, ge... |
1,607 | def add_provs(self, reader):
fileids = reader.fileids()
for fileid in fileids:
prov, langfile = os.path.split(fileid)
file_name, file_extension = os.path.splitext(langfile)
if file_extension == ".tab":
lang = file_name.split("-")[-1]
... | Add languages from Multilingual Wordnet to the provenance dictionary | 9 | 54 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def add_provs(self, reader):
fileids = reader.fileids()
for fileid in fileids:
prov, langfile = os.path.split(fileid)
file_name, file_extensi... |
1,608 | def test_thread_with_bundled_aggregations_for_latest(self) -> None:
self._send_relation(RelationTypes.THREAD, "m.room.test")
channel = self._send_relation(RelationTypes.THREAD, "m.room.test")
thread_2 = channel.json_body["event_id"]
self._send_relation(
RelationType... |
Bundled aggregations should get applied to the latest thread event.
| 10 | 19 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_thread_with_bundled_aggregations_for_latest(self) -> None:
self._send_relation(RelationTypes.THREAD, "m.room.test")
channel = self._send_relation(RelationTy... |
1,609 | def from_environment(cls):
return cls.from_file(path=KUBE_CONFIG_DEFAULT_LOCATION)
|
Factory method to produce an instance of this class using the default kube config location
| 15 | 4 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def from_environment(cls):
return cls.from_file(path=KUBE_CONFIG_DEFAULT_LOCATION)
```
###Assistant :
Factory method to produce an instance of this cl... |
1,610 | def test_thumbnail_repeated_thumbnail(self) -> None:
self._test_thumbnail(
"scale", self.test_image.expected_scaled, self.test_image.expected_found
)
if not self.test_image.expected_found:
return
# Fetching again should work, without re-requesting the i... | Test that fetching the same thumbnail works, and deleting the on disk
thumbnail regenerates it.
| 15 | 112 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_thumbnail_repeated_thumbnail(self) -> None:
self._test_thumbnail(
"scale", self.test_image.expected_scaled, self.test_image.expected_found
)
... |
1,611 | def update(self) -> None:
ping_cmd = [
"ping",
"-c",
"1",
"-W",
str(DEFAULT_PING_TIMEOUT),
str(self._host),
]
status = sp.call(ping_cmd, stdout=sp.DEVNULL, stderr=sp.DEVNULL)
self._state = not bool(status)
| Check if device is on and update the state. Only called if assumed state is false. | 16 | 23 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def update(self) -> None:
ping_cmd = [
"ping",
"-c",
"1",
"-W",
str(DEFAULT_PING_TIMEOUT),
str(self._... |
1,612 | def get_transactions(self):
df = self.__transactions[
[
"Date",
"Type",
"Ticker",
"Side",
"Price",
"Quantity",
"Fees",
"Investment",
"Currency",
... | Get formatted transactions
Returns
-------
pd.DataFrame: formatted transactions
| 8 | 33 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_transactions(self):
df = self.__transactions[
[
"Date",
"Type",
"Ticker",
"Side",
... |
1,613 | def test_pick_colors(self) -> None:
used_colors: Set[str] = set()
color_map: Dict[int, str] = {}
recipient_ids = list(range(30))
user_color_map = pick_colors(used_colors, color_map, recipient_ids)
self.assertEqual(
user_color_map,
{
0: "#76... |
If we are assigning colors to a user with 24+ streams, we have to start
re-using old colors. Our algorithm basically uses recipient_id % 24, so
the following code reflects the worse case scenario that our new
streams have recipient ids spaced out by exact multiples of 24. We
d... | 127 | 157 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_pick_colors(self) -> None:
used_colors: Set[str] = set()
color_map: Dict[int, str] = {}
recipient_ids = list(range(30))
user_color_map = pick_colors(... |
1,614 | def test_import(self):
data = (
('name', 'slug', 'status', 'cf_text', 'cf_longtext', 'cf_integer', 'cf_boolean', 'cf_date', 'cf_url', 'cf_json', 'cf_select', 'cf_multiselect'),
('Site 1', 'site-1', 'active', 'ABC', 'Foo', '123', 'True', '2020-01-01', 'http://example.com/1', '{"f... |
Import a Site in CSV format, including a value for each CustomField.
| 12 | 167 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_import(self):
data = (
('name', 'slug', 'status', 'cf_text', 'cf_longtext', 'cf_integer', 'cf_boolean', 'cf_date', 'cf_url', 'cf_json', 'cf_select', 'cf... |
1,615 | def build_pattern():
#bare = set()
for module, replace in list(MAPPING.items()):
for old_attr, new_attr in list(replace.items()):
LOOKUP[(module, old_attr)] = new_attr
#bare.add(module)
#bare.add(old_attr)
#yield % (module, module)
yield % (m... |
# import_name< 'import' (module=%r
# | dotted_as_names< any* module=%r any* >) >
#
import_from< 'from' module_name=%r 'import'
( attr_name=%r | import_as_name< attr_name=%r 'as' any >) >
... | 35 | 37 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def build_pattern():
#bare = set()
for module, replace in list(MAPPING.items()):
for old_attr, new_attr in list(replace.items()):
LOOKUP[(module, old_attr)] = new... |
1,616 | async def async_refresh_sensor(self) -> None:
_LOGGER.debug("Refreshing library sensor for '%s'", self.name)
try:
await self.hass.async_add_executor_job(self._update_state_and_attrs)
self._attr_available = True
except NotFound:
self._attr_available = ... | Update state and attributes for the library sensor. | 8 | 42 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
async def async_refresh_sensor(self) -> None:
_LOGGER.debug("Refreshing library sensor for '%s'", self.name)
try:
await self.hass.async_add_executor_job(... |
1,617 | def __hash__(self):
return hash(self.name) + hash(self.version) + hash(self.source_url)
|
Compute hash in a way which matches the equality test.
| 10 | 8 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def __hash__(self):
return hash(self.name) + hash(self.version) + hash(self.source_url)
```
###Assistant :
Compute hash in a way which matches the equ... |
1,618 | def _min_nodes(self) -> int:
if self._side == "gblock":
return self._config["fc_gblock_min_nodes"]
retval = self._scale_filters(self._config["fc_min_filters"])
retval = int(retval * self._config["fc_dimensions"] ** 2)
return retval
| int: The number of nodes for the first Dense. For non g-block layers this will be the
given minimum filters multiplied by the dimensions squared. For g-block layers, this is the
given value | 33 | 22 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _min_nodes(self) -> int:
if self._side == "gblock":
return self._config["fc_gblock_min_nodes"]
retval = self._scale_filters(self._config["fc_min_filt... |
1,619 | def _get(cls) -> dict:
custom_parameters = super().get()
result = cls.default.copy()
result.update(
{key.replace("-", "_"): value for key, value in custom_parameters.items()}
)
return result
|
Get the resulted command-line options.
Returns
-------
dict
Decoded and verified config value.
| 13 | 22 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _get(cls) -> dict:
custom_parameters = super().get()
result = cls.default.copy()
result.update(
{key.replace("-", "_"): value for key, value ... |
1,620 | def add(self, node, *predecessors):
if self._ready_nodes is not None:
raise ValueError("Nodes cannot be added after a call to prepare()")
# Create the node -> predecessor edges
nodeinfo = self._get_nodeinfo(node)
nodeinfo.npredecessors += len(predecessors)
... | Add a new node and its predecessors to the graph.
Both the *node* and all elements in *predecessors* must be hashable.
If called multiple times with the same node argument, the set of dependencies
will be the union of all dependencies passed in.
It is possible to add a node with no de... | 97 | 47 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def add(self, node, *predecessors):
if self._ready_nodes is not None:
raise ValueError("Nodes cannot be added after a call to prepare()")
# Create the n... |
1,621 | def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, start_date):
end_date = start_date + duration(days=10)
cursor_value = start_date + duration(days=5)
state = {
AdsInsights.cursor_field: cursor_value.date().isoformat(),
"slices": [(cursor... | Stream will use cursor_value from state, but will skip saved slices | 11 | 87 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_stream_slices_with_state_and_slices(self, api, async_manager_mock, start_date):
end_date = start_date + duration(days=10)
cursor_value = start_date + durati... |
1,622 | def _test_readonly_foreignkey_links(self, admin_site):
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.objects.create(name="Book 1"),
)
language = Language.objects.create(iso="_40", name="Test")
obj = ReadOnly... |
ForeignKey readonly fields render as links if the target model is
registered in admin.
| 14 | 92 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _test_readonly_foreignkey_links(self, admin_site):
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.... |
1,623 | def execute():
for project in frappe.get_all("Project", fields=["name", "percent_complete_method"]):
total = frappe.db.count("Task", dict(project=project.name))
if project.percent_complete_method == "Task Completion" and total > 0:
completed = frappe.db.sql(
,
project.name,
)[0][0]
percent_complet... | select count(name) from tabTask where
project=%s and status in ('Cancelled', 'Completed') | 11 | 51 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def execute():
for project in frappe.get_all("Project", fields=["name", "percent_complete_method"]):
total = frappe.db.count("Task", dict(project=project.name))
if project.percent_compl... |
1,624 | def mathematica(s, additional_translations=None):
parser = MathematicaParser(additional_translations)
if additional_translations is not None:
SymPyDeprecationWarning(
feature="additional_translations parameter for the Mathematica parser",
last_supported_version="1.9",
... |
Translate a string containing a Wolfram Mathematica expression to a SymPy
expression.
If the translator is unable to find a suitable SymPy expression, the
``FullForm`` of the Mathematica expression will be output, using SymPy
``Function`` objects as nodes of the syntax tree.
Examples
====... | 180 | 37 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def mathematica(s, additional_translations=None):
parser = MathematicaParser(additional_translations)
if additional_translations is not None:
SymPyDeprecationWarning(
... |
1,625 | def update_company_current_month_sales(company):
current_month_year = formatdate(today(), "MM-yyyy")
results = frappe.db.sql(
.format(
current_month_year=current_month_year, company=frappe.db.escape(company)
),
as_dict=True,
)
monthly_total = results[0]["total"] if len(results) > 0 else 0
frappe.db.set... |
SELECT
SUM(base_grand_total) AS total,
DATE_FORMAT(`posting_date`, '%m-%Y') AS month_year
FROM
`tabSales Invoice`
WHERE
DATE_FORMAT(`posting_date`, '%m-%Y') = '{current_month_year}'
AND docstatus = 1
AND company = {company}
GROUP BY
month_year
| 27 | 28 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def update_company_current_month_sales(company):
current_month_year = formatdate(today(), "MM-yyyy")
results = frappe.db.sql(
.format(
current_month_year=current_month_year, company=... |
1,626 | def metadata_version(self):
# type: () -> Optional[str]
raise NotImplementedError()
| Value of "Metadata-Version:" in the distribution, if available. | 8 | 9 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def metadata_version(self):
# type: () -> Optional[str]
raise NotImplementedError()
```
###Assistant : Value of "Metadata-Version:" in the distribution,... |
1,627 | def call_ca(self, _):
if self.screen_tickers:
self.queue = ca_controller.ComparisonAnalysisController(
self.screen_tickers, self.queue
).menu(custom_path_menu_above="/stocks/")
else:
print("Some tickers must be screened first through one of th... | Call the comparison analysis menu with selected tickers | 8 | 23 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def call_ca(self, _):
if self.screen_tickers:
self.queue = ca_controller.ComparisonAnalysisController(
self.screen_tickers, self.queue
... |
1,628 | def delete_tasks_predictions(project, queryset, **kwargs):
task_ids = queryset.values_list('id', flat=True)
predictions = Prediction.objects.filter(task__id__in=task_ids)
count = predictions.count()
predictions.delete()
queryset.update(updated_at=datetime.now())
return {'processed_items': c... | Delete all predictions by tasks ids
:param project: project instance
:param queryset: filtered tasks db queryset
| 16 | 191 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def delete_tasks_predictions(project, queryset, **kwargs):
task_ids = queryset.values_list('id', flat=True)
predictions = Prediction.objects.filter(task__id__in=task_ids)
co... |
1,629 | def clean_up(self):
if not self._cleaned:
for ref in self.referenced_paths():
self._reference_counter[ref] -= 1
if self._reference_counter[ref] <= 0:
os.remove(ref)
if self._reference_counter[ref] < 0:
... |
Counter of referenced file paths subtract 1. If the counter reach 0, then delete the file.
| 16 | 42 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def clean_up(self):
if not self._cleaned:
for ref in self.referenced_paths():
self._reference_counter[ref] -= 1
if self._referenc... |
1,630 | def load_backend(backend_name):
# This backend was renamed in Django 1.9.
if backend_name == "django.db.backends.postgresql_psycopg2":
backend_name = "django.db.backends.postgresql"
try:
return import_module("%s.base" % backend_name)
except ImportError as e_user:
# The data... |
Return a database backend's "base" module given a fully qualified database
backend name, or raise an error if it doesn't exist.
| 21 | 136 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def load_backend(backend_name):
# This backend was renamed in Django 1.9.
if backend_name == "django.db.backends.postgresql_psycopg2":
backend_name = "django.db.backends... |
1,631 | def explode_superdims(sizes, dims):
strides_to_sizes = {stride: size for size, stride in zip(sizes, strides_for_sizes(sizes))}
dims = list(reversed(dims))
final_dims = []
for size, stride in dims:
target_size = strides_to_sizes[stride]
new_dims = []
while size > target_size:
assert target_s... | Explode superdims to fit a known shape.
The unflattening process might mistakenly generate too few too large dimensions.
For example, ``unflatten_superdims(np.arange(n))`` always returns ``[(n, 1)]``.
This function takes a list of such contiguous super-dimensions and splits them
into smaller dimensions such th... | 47 | 69 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def explode_superdims(sizes, dims):
strides_to_sizes = {stride: size for size, stride in zip(sizes, strides_for_sizes(sizes))}
dims = list(reversed(dims))
final_dims = []
for size... |
1,632 | def path_weight(G, path, weight):
multigraph = G.is_multigraph()
cost = 0
if not nx.is_path(G, path):
raise nx.NetworkXNoPath("path does not exist")
for node, nbr in nx.utils.pairwise(path):
if multigraph:
cost += min(v[weight] for v in G[node][nbr].values())
el... | Returns total cost associated with specified path and weight
Parameters
----------
G : graph
A NetworkX graph.
path: list
A list of node labels which defines the path to traverse
weight: string
A string indicating which edge attribute to use for path cost
Returns
... | 78 | 39 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def path_weight(G, path, weight):
multigraph = G.is_multigraph()
cost = 0
if not nx.is_path(G, path):
raise nx.NetworkXNoPath("path does not exist")
for node, n... |
1,633 | def _run_sql(self, sql, params, raw=True, output=False, latest=False):
toget = 'source_raw' if raw else 'source'
sqlfrom = "history"
if output:
sqlfrom = "history LEFT JOIN output_history USING (session, line)"
toget = "history.%s, output_history.output" % toget
... | Prepares and runs an SQL query for the history database.
Parameters
----------
sql : str
Any filtering expressions to go after SELECT ... FROM ...
params : tuple
Parameters passed to the SQL query (to replace "?")
raw, output : bool
See :meth:... | 57 | 96 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _run_sql(self, sql, params, raw=True, output=False, latest=False):
toget = 'source_raw' if raw else 'source'
sqlfrom = "history"
if output:
s... |
1,634 | def __add__(self, other):
if isinstance(other, PathSpec):
return PathSpec(self.patterns + other.patterns)
else:
return NotImplemented
|
Combines the :attr:`Pathspec.patterns` patterns from two
:class:`PathSpec` instances.
| 8 | 13 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def __add__(self, other):
if isinstance(other, PathSpec):
return PathSpec(self.patterns + other.patterns)
else:
return NotImplemented
... |
1,635 | def address(self):
if use_gcs_for_bootstrap():
return self._gcs_address
return self._redis_address
| Get the address for bootstrapping, e.g. the address to pass to
`ray start` or `ray.int()` to start worker nodes, that has been
converted to ip:port format.
| 26 | 8 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def address(self):
if use_gcs_for_bootstrap():
return self._gcs_address
return self._redis_address
```
###Assistant : Get the address for bo... |
1,636 | def similarity(self, texts=[], data={}, use_gpu=False, batch_size=1):
if use_gpu:
try:
_places = os.environ["CUDA_VISIBLE_DEVICES"]
int(_places[0])
except:
raise RuntimeError(
"Environment Variable CUDA_VISIBLE_... |
Get the sentiment prediction results results with the texts as input
Args:
texts(list): the input texts to be predicted which the first element is text_1(list)
and the second element is text_2(list), such as [['这道题很难'], ['这道题不简单']]
if tex... | 75 | 149 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def similarity(self, texts=[], data={}, use_gpu=False, batch_size=1):
if use_gpu:
try:
_places = os.environ["CUDA_VISIBLE_DEVICES"]
... |
1,637 | def cli_collect_weights(sys_argv):
parser = argparse.ArgumentParser(
description="This script loads a pretrained model " "and uses it collect weights.",
prog="ludwig collect_weights",
usage="%(prog)s [options]",
)
# ----------------
# Model parameters
# ----------------... | Command Line Interface to collecting the weights for the model.
--m: Input model that is necessary to collect to the tensors, this is a
required *option*
--t: Tensors to collect
--od: Output directory of the model, defaults to results
--v: Verbose: Defines the logging level that the user will ... | 52 | 113 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def cli_collect_weights(sys_argv):
parser = argparse.ArgumentParser(
description="This script loads a pretrained model " "and uses it collect weights.",
prog="ludwig... |
1,638 | def get_user_emails_from_group(group):
group_doc = group
if isinstance(group_doc, str):
group_doc = frappe.get_doc("Daily Work Summary Group", group)
emails = get_users_email(group_doc)
return emails
| Returns list of email of enabled users from the given group
:param group: Daily Work Summary Group `name` | 18 | 20 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_user_emails_from_group(group):
group_doc = group
if isinstance(group_doc, str):
group_doc = frappe.get_doc("Daily Work Summary Group", group)
emails = get_users_email(group_d... |
1,639 | def adapt_unknown_value(self, value):
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time... |
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
| 46 | 31 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def adapt_unknown_value(self, value):
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif i... |
1,640 | def test_get_settings_variable_assignment_request_context(self):
request = self.get_request(site=self.other_site)
context = Context({"request": request})
template = Template(
"{% load wagtailsettings_tags %}"
"{% get_settings as wagtail_settings %}"
"... |
Check that assigning the setting to a context variable with
{% get_settings as wagtail_settings %} works.
| 16 | 54 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_get_settings_variable_assignment_request_context(self):
request = self.get_request(site=self.other_site)
context = Context({"request": request})
tem... |
1,641 | def get_dataset(cls, path, engine, storage_options):
if engine == "auto":
# We follow in concordance with pandas
engine_classes = [PyArrowDataset, FastParquetDataset]
error_msgs = ""
for engine_class in engine_classes:
try:
... |
Retrieve Parquet engine specific Dataset implementation.
Parameters
----------
path : str, path object or file-like object
The filepath of the parquet file in local filesystem or hdfs.
engine : str
Parquet library to use (only 'PyArrow' is supported for ... | 57 | 109 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_dataset(cls, path, engine, storage_options):
if engine == "auto":
# We follow in concordance with pandas
engine_classes = [PyArrowDataset, Fa... |
1,642 | def test_causal_lm_model_past_with_attn_mask(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_causal_lm_model_past_with_attn_mask(*config_and_inputs)
| Test the causal LM model with `past_key_values` and `attention_mask` | 9 | 6 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_causal_lm_model_past_with_attn_mask(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_causal_lm_mo... |
1,643 | def _whatsnd(data):
hdr = data[:512]
fakefile = BytesIO(hdr)
for testfn in sndhdr.tests:
res = testfn(hdr, fakefile)
if res is not None:
return _sndhdr_MIMEmap.get(res[0])
return None | Try to identify a sound file type.
sndhdr.what() has a pretty cruddy interface, unfortunately. This is why
we re-do it here. It would be easier to reverse engineer the Unix 'file'
command and use the standard 'magic' file, as shipped with a modern Unix.
| 44 | 25 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _whatsnd(data):
hdr = data[:512]
fakefile = BytesIO(hdr)
for testfn in sndhdr.tests:
res = testfn(hdr, fakefile)
if res is not None:
return _... |
1,644 | async def test_protect_loop_debugger_sleep(caplog):
block_async_io.enable()
with patch(
"homeassistant.util.async_.extract_stack",
return_value=[
Mock(
filename="/home/paulus/homeassistant/.venv/blah/pydevd.py",
lineno="23",
line=... | Test time.sleep injected by the debugger is not reported. | 9 | 37 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
async def test_protect_loop_debugger_sleep(caplog):
block_async_io.enable()
with patch(
"homeassistant.util.async_.extract_stack",
return_value=[
Mo... |
1,645 | def update_worker_pea_args(self):
self.peas_args['peas'] = self._set_peas_args(self.args)
| Update args of all its worker peas based on Pod args. Does not touch head and tail | 17 | 5 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def update_worker_pea_args(self):
self.peas_args['peas'] = self._set_peas_args(self.args)
```
###Assistant : Update args of all its worker peas based on Pod ar... |
1,646 | def prevprime(n):
n = _as_int_ceiling(n)
if n < 3:
raise ValueError("no preceding primes")
if n < 8:
return {3: 2, 4: 3, 5: 3, 6: 5, 7: 5}[n]
if n <= sieve._list[-1]:
l, u = sieve.search(n)
if l == u:
return sieve[l-1]
else:
return sie... | Return the largest prime smaller than n.
Notes
=====
Potential primes are located at 6*j +/- 1. This
property is used during searching.
>>> from sympy import prevprime
>>> [(i, prevprime(i)) for i in range(10, 15)]
[(10, 7), (11, 7), (12, 11), (13, 11), (14, 1... | 67 | 88 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def prevprime(n):
n = _as_int_ceiling(n)
if n < 3:
raise ValueError("no preceding primes")
if n < 8:
return {3: 2, 4: 3, 5: 3, 6: 5, 7: 5}[n]
if n <= sie... |
1,647 | def test_duplicate_statistics_handle_integrity_error(hass_recorder, caplog):
hass = hass_recorder()
wait_recording_done(hass)
period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
external_energy_metadat... | Test the recorder does not blow up if statistics is duplicated. | 11 | 117 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_duplicate_statistics_handle_integrity_error(hass_recorder, caplog):
hass = hass_recorder()
wait_recording_done(hass)
period1 = dt_util.as_utc(dt_util.parse_datetim... |
1,648 | def testRequestResourcesRaceConditionWithResourceDemands(self):
config = copy.deepcopy(MULTI_WORKER_CLUSTER)
config["available_node_types"].update(
{
"empty_node": {
"node_config": {},
"resources": {"CPU": 2, "GPU": 1},
... | Test request_resources() with resource_demands.
Tests when request_resources() is called simultaneously with resource
demands in multiple orders.
| 16 | 130 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def testRequestResourcesRaceConditionWithResourceDemands(self):
config = copy.deepcopy(MULTI_WORKER_CLUSTER)
config["available_node_types"].update(
{
... |
1,649 | def test_sitemap_unpublished_titles(self):
sitemap = CMSSitemap()
locations = []
urlset = sitemap.get_urls()
unpublished_titles = set()
for item in urlset:
locations.append(item['location'])
for page in Page.objects.drafts():
if page.get_p... |
Check that titles attached to unpublished pages are not in the urlset.
As titles are 'published' depending on their attached page, we create a
set of unpublished titles by checking titles attached to the draft and
public version of each page
| 41 | 56 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_sitemap_unpublished_titles(self):
sitemap = CMSSitemap()
locations = []
urlset = sitemap.get_urls()
unpublished_titles = set()
for i... |
1,650 | def mixin_gateway_parser(parser):
gp = add_arg_group(parser, title='Gateway')
_add_host(gp)
_add_proxy(gp)
gp.add_argument(
'--uses',
type=str,
default=None,
# TODO: add Jina Hub Gateway
help=,
)
gp.add_argument(
'--uses-with',
actio... | Add the options for remote expose at the Gateway
:param parser: the parser
The config of the gateway, it could be one of the followings:
* the string literal of an Gateway class name
* a Gateway YAML file (.yml, .yaml, .jaml)
* a docker image (must start with `docker://`)
... | 169 | 160 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def mixin_gateway_parser(parser):
gp = add_arg_group(parser, title='Gateway')
_add_host(gp)
_add_proxy(gp)
gp.add_argument(
'--uses',
type=str,
... |
1,651 | def _format_changelog(self, changelog):
if not changelog:
return changelog
new_changelog = []
for line in changelog.strip().split('\n'):
line = line.strip()
if line[0] == '*':
new_changelog.extend(['', line])
elif line[0] =... | Format the changelog correctly and convert it to a list of strings
| 12 | 50 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _format_changelog(self, changelog):
if not changelog:
return changelog
new_changelog = []
for line in changelog.strip().split('\n'):
... |
1,652 | def _get_time(self) -> float:
# N.B. We could remove this method and always call `self._timer.get_time()` internally,
# but it's handy to have in mocking situations
return self._timer.get_time()
| Get the current wall clock time, via the internal Timer. | 10 | 27 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _get_time(self) -> float:
# N.B. We could remove this method and always call `self._timer.get_time()` internally,
# but it's handy to have in mocking situations
... |
1,653 | def _generate(self, pset, min_, max_, condition, type_=None):
if type_ is None:
type_ = pset.ret
expr = []
height = np.random.randint(min_, max_)
stack = [(0, type_)]
while len(stack) != 0:
depth, type_ = stack.pop()
# We've added a t... | Generate a Tree as a list of lists.
The tree is build from the root to the leaves, and it stop growing when
the condition is fulfilled.
Parameters
----------
pset: PrimitiveSetTyped
Primitive set from which primitives are selected.
min_: int
Mini... | 116 | 131 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _generate(self, pset, min_, max_, condition, type_=None):
if type_ is None:
type_ = pset.ret
expr = []
height = np.random.randint(min_, max_)... |
1,654 | def test_ohe_infrequent_multiple_categories_dtypes():
pd = pytest.importorskip("pandas")
X = pd.DataFrame(
{
"str": ["a", "f", "c", "f", "f", "a", "c", "b", "b"],
"int": [5, 3, 0, 10, 10, 12, 0, 3, 5],
},
columns=["str", "int"],
)
ohe = OneHotEncode... | Test infrequent categories with a pandas dataframe with multiple dtypes. | 10 | 252 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_ohe_infrequent_multiple_categories_dtypes():
pd = pytest.importorskip("pandas")
X = pd.DataFrame(
{
"str": ["a", "f", "c", "f", "f", "a", "c", "b",... |
1,655 | def _set_random_id(self):
if getattr(self, "persistence", False):
raise RuntimeError(
)
if "dash_snapshots" in sys.modules:
raise RuntimeError(
)
if not hasattr(self, "id"):
v = str(uuid.UUID(int=rd.ra... |
Attempting to use an auto-generated ID with the `persistence` prop.
This is prohibited because persistence is tied to component IDs and
auto-generated IDs can easily change.
Please assign an explicit ID to this component.
Att... | 82 | 32 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _set_random_id(self):
if getattr(self, "persistence", False):
raise RuntimeError(
)
if "dash_snapshots" in sys.modules:
... |
1,656 | def final():
head = []
head.append(("layernorm.weight", "norm.weight"))
head.append(("layernorm.bias", "norm.bias"))
head.append(("classifier.weight", "head.weight"))
head.append(("classifier.bias", "head.bias"))
return head
|
Function helps in renaming final classification layer
| 7 | 15 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def final():
head = []
head.append(("layernorm.weight", "norm.weight"))
head.append(("layernorm.bias", "norm.bias"))
head.append(("classifier.weight", "head.weight"))
... |
1,657 | def test_delete_index(self, ds, documents):
ds.write_documents(documents, index="custom_index")
assert ds.get_document_count(index="custom_index") == len(documents)
ds.delete_index(index="custom_index")
assert ds.get_document_count(index="custom_index") == 0
| Contrary to other Document Stores, SQLDocumentStore doesn't raise if the index is empty | 13 | 15 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_delete_index(self, ds, documents):
ds.write_documents(documents, index="custom_index")
assert ds.get_document_count(index="custom_index") == len(documents)
... |
1,658 | def add_holidays(events, start, end, employee, company):
applicable_holiday_list = get_holiday_list_for_employee(employee, company)
if not applicable_holiday_list:
return
for holiday in frappe.db.sql(
,
(applicable_holiday_list, start, end),
as_dict=True,
):
events.append(
{
"doctype": "Holiday",
... | select name, holiday_date, description
from `tabHoliday` where parent=%s and holiday_date between %s and %s | 14 | 44 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def add_holidays(events, start, end, employee, company):
applicable_holiday_list = get_holiday_list_for_employee(employee, company)
if not applicable_holiday_list:
return
for holiday i... |
1,659 | def get_video_input_devices_names() -> List[str]:
# based on https://docs.microsoft.com/ru-ru/windows/win32/directshow/selecting-a-capture-device
names = []
sys_dev_enum = strmif.ICreateDevEnum()
if ole32.CoCreateInstance(uuids.CLSID_SystemDeviceEnum, None, ole32.CLSCTX.CLSCTX_INPROC_SERVER, strmi... |
returns a list of available names of VideoInputDevice's
ole32 should be initialized before use
| 14 | 82 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_video_input_devices_names() -> List[str]:
# based on https://docs.microsoft.com/ru-ru/windows/win32/directshow/selecting-a-capture-device
names = []
sys_dev_enum = ... |
1,660 | def check_for_updates():
version_message = get_update_status()
print(version_message)
|
Check for updates to the current version.
| 7 | 6 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def check_for_updates():
version_message = get_update_status()
print(version_message)
```
###Assistant :
Check for updates to the current version.
|
1,661 | def _get_base_knot_positions(X, n_knots=10, knots="uniform", sample_weight=None):
if knots == "quantile":
percentiles = 100 * np.linspace(
start=0, stop=1, num=n_knots, dtype=np.float64
)
if sample_weight is None:
knots = np.percentil... | Calculate base knot positions.
Base knots such that first knot <= feature <= last knot. For the
B-spline construction with scipy.interpolate.BSpline, 2*degree knots
beyond the base interval are added.
Returns
-------
knots : ndarray of shape (n_knots, n_features), dtype... | 45 | 101 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _get_base_knot_positions(X, n_knots=10, knots="uniform", sample_weight=None):
if knots == "quantile":
percentiles = 100 * np.linspace(
start=... |
1,662 | def _num_tokens(self, data):
if tf_utils.is_sparse(data):
flat_values = data.values
elif tf_utils.is_ragged(data):
flat_values = data.flat_values
else:
flat_values = tf.reshape(data, [-1])
tokens, _, counts = tf.unique_with_counts(flat_values,... | Count the number of tokens in a ragged, sparse or dense tensor. | 12 | 27 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _num_tokens(self, data):
if tf_utils.is_sparse(data):
flat_values = data.values
elif tf_utils.is_ragged(data):
flat_values = data.flat_va... |
1,663 | def no_devicess_fixture():
return json.loads(load_fixture("awair/no_devices.json"))
@pytest.fixture(name="awair_offline", scope="session") | Fixture representing when no devices are found in Awair's cloud API. | 11 | 6 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def no_devicess_fixture():
return json.loads(load_fixture("awair/no_devices.json"))
@pytest.fixture(name="awair_offline", scope="session")
```
###Assistant : Fixture r... |
1,664 | def data():
return pd.array(
[True, False] * 4 + [np.nan] + [True, False] * 44 + [np.nan] + [True, False],
dtype="boolean",
)
@pytest.mark.parametrize(
"values, exp_any, exp_all, exp_any_noskip, exp_all_noskip",
[
([True, pd.NA], True, True, True, pd.NA),
([False, ... | Fixture returning boolean array, with valid and missing values. | 9 | 76 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def data():
return pd.array(
[True, False] * 4 + [np.nan] + [True, False] * 44 + [np.nan] + [True, False],
dtype="boolean",
)
@pytest.mark.parametrize(
"va... |
1,665 | def should_toggle_mask(self) -> bool:
with self._lock:
retval = self._toggle_mask
if retval:
logger.debug("Sending toggle mask")
self._toggle_mask = False
return retval
| Check whether the mask should be toggled and return the value. If ``True`` is returned
then resets :attr:`_toggle_mask` back to ``False``
Returns
-------
bool
``True`` if the mask should be toggled otherwise ``False``. | 33 | 19 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def should_toggle_mask(self) -> bool:
with self._lock:
retval = self._toggle_mask
if retval:
logger.debug("Sending toggle mask")
... |
1,666 | def load_mtpl2(n_samples=100000):
# freMTPL2freq dataset from https://www.openml.org/d/41214
df_freq = fetch_openml(data_id=41214, as_frame=True, parser="pandas").data
df_freq["IDpol"] = df_freq["IDpol"].astype(int)
df_freq.set_index("IDpol", inplace=True)
# freMTPL2sev dataset from https://ww... | Fetch the French Motor Third-Party Liability Claims dataset.
Parameters
----------
n_samples: int, default=100000
number of samples to select (for faster run time). Full dataset has
678013 samples.
| 27 | 57 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def load_mtpl2(n_samples=100000):
# freMTPL2freq dataset from https://www.openml.org/d/41214
df_freq = fetch_openml(data_id=41214, as_frame=True, parser="pandas").data
df_fr... |
1,667 | def _get_driver(self) -> str:
try:
driver = pynvml.nvmlSystemGetDriverVersion().decode("utf-8")
except pynvml.NVMLError as err:
self._log("debug", f"Unable to obtain driver. Original error: {str(err)}")
driver = "No Nvidia driver found"
self._log("deb... | Obtain the Nvidia driver version currently in use.
Returns
-------
str
The current GPU driver version
| 16 | 32 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _get_driver(self) -> str:
try:
driver = pynvml.nvmlSystemGetDriverVersion().decode("utf-8")
except pynvml.NVMLError as err:
self._log("de... |
1,668 | def _load_serializers():
global _serializers
serializers = {}
for format in BUILTIN_SERIALIZERS:
register_serializer(format, BUILTIN_SERIALIZERS[format], serializers)
if hasattr(settings, "SERIALIZATION_MODULES"):
for format in settings.SERIALIZATION_MODULES:
register_se... |
Register built-in and settings-defined serializers. This is done lazily so
that user code has a chance to (e.g.) set up custom settings without
needing to be careful of import order.
| 30 | 29 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _load_serializers():
global _serializers
serializers = {}
for format in BUILTIN_SERIALIZERS:
register_serializer(format, BUILTIN_SERIALIZERS[format], serializers... |
1,669 | def pop(self):
if self.keyorder:
value = self.keys()[0]
self.remove(value)
return value
return None
|
Pops the top element from the sorted keys if it exists. Returns None otherwise.
| 14 | 12 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def pop(self):
if self.keyorder:
value = self.keys()[0]
self.remove(value)
return value
return None
```
###Assistan... |
1,670 | def probs_to_pianoroll_viterbi(frame_probs, onset_probs, alpha=0.5):
n, d = onset_probs.shape
loss_matrix = np.zeros([n, d, 2], dtype=float)
path_matrix = np.zeros([n, d, 2], dtype=bool)
frame_losses = (1 - alpha) * -np.log(np.stack([1 - frame_probs,
frame_p... | Viterbi decoding of frame & onset probabilities to pianoroll.
Args:
frame_probs: A numpy array (num-frames-by-num-pitches) of frame
probabilities.
onset_probs: A numpy array (num-frames-by-num-pitches) of onset
probabilities.
alpha: Relative weight of onset and frame loss, a float between 0 a... | 67 | 167 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def probs_to_pianoroll_viterbi(frame_probs, onset_probs, alpha=0.5):
n, d = onset_probs.shape
loss_matrix = np.zeros([n, d, 2], dtype=float)
path_matrix = np.zeros([n, d, 2], dtype... |
1,671 | def to_native_string(string, encoding="ascii"):
if isinstance(string, builtin_str):
out = string
else:
out = string.decode(encoding)
return out
| Given a string object, regardless of type, returns a representation of
that string in the native string type, encoding and decoding where
necessary. This assumes ASCII unless told otherwise.
| 29 | 15 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def to_native_string(string, encoding="ascii"):
if isinstance(string, builtin_str):
out = string
else:
out = string.decode(encoding)
return out
``... |
1,672 | def mathematica(s, additional_translations=None):
parser = MathematicaParser(additional_translations)
if additional_translations is not None:
sympy_deprecation_warning(
,
deprecated_since_version="1.11",
active_deprecations_target="mathematica-parser-additional-... |
Translate a string containing a Wolfram Mathematica expression to a SymPy
expression.
If the translator is unable to find a suitable SymPy expression, the
``FullForm`` of the Mathematica expression will be output, using SymPy
``Function`` objects as nodes of the syntax tree.
Examples
====... | 203 | 20 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def mathematica(s, additional_translations=None):
parser = MathematicaParser(additional_translations)
if additional_translations is not None:
sympy_deprecation_warning(... |
1,673 | def evaluation(self):
# adding info about the eval tasks
if self.eval_tasks == self.train_tasks:
msg = "For evalution, we used the same training datasets; check the [Datasets Used](#datasets-used) section for more information"
eval_list = ''
else:
msg... |
returns a section with dataset info about the eval tasks if they exist,
information about the validation metric if it exists, and create a table with
the validation metric.
| 29 | 262 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def evaluation(self):
# adding info about the eval tasks
if self.eval_tasks == self.train_tasks:
msg = "For evalution, we used the same training datasets... |
1,674 | def _train_with_recompute(n_steps):
img_dim, n_channels, batch_size = 256, 1, 4
x, y = _get_dummy_data(img_dim, n_channels, batch_size)
# This model is the same model as _get_big_cnn_model but split into 3 parts.
models = _get_split_cnn_model(
img_dim, n_channels, num_partitions=3, blocks_p... | Trains a single large model with gradient checkpointing using tf.recompute_grad. | 10 | 110 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _train_with_recompute(n_steps):
img_dim, n_channels, batch_size = 256, 1, 4
x, y = _get_dummy_data(img_dim, n_channels, batch_size)
# This model is the same model as _ge... |
1,675 | def test_syncer_callback_dead_node_log_error(caplog, ray_start_2_cpus, temp_data_dirs):
caplog.set_level(logging.ERROR, logger="ray.tune.syncer")
tmp_source, tmp_target = temp_data_dirs
syncer_callback = TestSyncerCallback(
sync_period=0,
local_logdir_override=tmp_target,
)
t... | Check that we catch + log errors when trying syncing with a dead remote node. | 15 | 45 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_syncer_callback_dead_node_log_error(caplog, ray_start_2_cpus, temp_data_dirs):
caplog.set_level(logging.ERROR, logger="ray.tune.syncer")
tmp_source, tmp_target = temp_... |
1,676 | def directed_modularity_matrix(G, nodelist=None, weight=None):
import numpy as np
if nodelist is None:
nodelist = list(G)
A = nx.to_scipy_sparse_array(G, nodelist=nodelist, weight=weight, format="csr")
k_in = A.sum(axis=0)
k_out = A.sum(axis=1)
m = k_in.sum()
# Expected adjacen... | Returns the directed modularity matrix of G.
The modularity matrix is the matrix B = A - <A>, where A is the adjacency
matrix and <A> is the expected adjacency matrix, assuming that the graph
is described by the configuration model.
More specifically, the element B_ij of B is defined as
.. math::... | 303 | 44 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def directed_modularity_matrix(G, nodelist=None, weight=None):
import numpy as np
if nodelist is None:
nodelist = list(G)
A = nx.to_scipy_sparse_array(G, nodelist=n... |
1,677 | def project_columns(self, columns):
if columns == self.columns:
return self
return ParquetFunctionWrapper(
self.engine,
self.fs,
self.meta,
columns,
self.index,
None, # Already merged into common_kwargs
... | Return a new ParquetFunctionWrapper object
with a sub-column projection.
| 9 | 24 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def project_columns(self, columns):
if columns == self.columns:
return self
return ParquetFunctionWrapper(
self.engine,
self.fs,
... |
1,678 | def sort_args_by_name(self):
expr = self.expr
if not isinstance(expr, ArrayTensorProduct):
return self
args = expr.args
sorted_data = sorted(enumerate(args), key=lambda x: default_sort_key(x[1]))
pos_sorted, args_sorted = zip(*sorted_data)
reordering_... |
Sort arguments in the tensor product so that their order is lexicographical.
Examples
========
>>> from sympy.tensor.array.expressions.from_matrix_to_array import convert_matrix_to_array
>>> from sympy import MatrixSymbol
>>> from sympy.abc import N
>>> A = Mat... | 81 | 61 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def sort_args_by_name(self):
expr = self.expr
if not isinstance(expr, ArrayTensorProduct):
return self
args = expr.args
sorted_data = sor... |
1,679 | def gather_async(self, batch_ms=0, num_async=1) -> "LocalIterator[T]":
if num_async < 1:
raise ValueError("queue depth must be positive")
if batch_ms < 0:
raise ValueError("batch time must be positive")
# Forward reference to the returned iterator.
loca... | Returns a local iterable for asynchronous iteration.
New items will be fetched from the shards asynchronously as soon as
the previous one is computed. Items arrive in non-deterministic order.
Arguments:
batch_ms (int): Batches items for batch_ms milliseconds
on each... | 101 | 36 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def gather_async(self, batch_ms=0, num_async=1) -> "LocalIterator[T]":
if num_async < 1:
raise ValueError("queue depth must be positive")
if batch_ms < ... |
1,680 | def test_dict_checkpoint_dict(self):
checkpoint = self._prepare_dict_checkpoint()
# Convert into dict checkpoint
data_dict = checkpoint.to_dict()
self.assertIsInstance(data_dict, dict)
# Create from dict
checkpoint = Checkpoint.from_dict(data_dict)
self... | Test conversion from dict to dict checkpoint and back. | 9 | 24 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_dict_checkpoint_dict(self):
checkpoint = self._prepare_dict_checkpoint()
# Convert into dict checkpoint
data_dict = checkpoint.to_dict()
se... |
1,681 | def test_5_model(self):
query =
predict_query =
for cid, char in [(CID_A, 'a'), (CID_B, 'b')]:
self.sql_via_http(
query.format(char, char),
company_id=cid,
expected_resp_type=RESPONSE_TYPE.OK
)
response = se... |
CREATE MODEL mindsdb.model_{}
FROM test_integration_{} (
select * from test_data.home_rentals limit 50
) PREDICT rental_price
USING join_learn_process=true, time_aim=5
select * from mindsdb.model_{} where sqft = 100
| 26 | 29 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_5_model(self):
query =
predict_query =
for cid, char in [(CID_A, 'a'), (CID_B, 'b')]:
self.sql_via_http(
query.format(char, c... |
1,682 | def putpalette(self, data, rawmode="RGB"):
from . import ImagePalette
if self.mode not in ("L", "LA", "P", "PA"):
raise ValueError("illegal image mode")
if isinstance(data, ImagePalette.ImagePalette):
palette = ImagePalette.raw(data.rawmode, data.palette)
... |
Attaches a palette to this image. The image must be a "P", "PA", "L"
or "LA" image.
The palette sequence must contain at most 256 colors, made up of one
integer value for each channel in the raw mode.
For example, if the raw mode is "RGB", then it can contain at most 768
... | 142 | 59 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def putpalette(self, data, rawmode="RGB"):
from . import ImagePalette
if self.mode not in ("L", "LA", "P", "PA"):
raise ValueError("illegal image mode")... |
1,683 | def get_timeout() -> t.Optional[t.Dict[str, t.Any]]:
if not os.path.exists(TIMEOUT_PATH):
return None
data = read_json_file(TIMEOUT_PATH)
data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
return data
| Return details about the currently set timeout, if any, otherwise return None. | 12 | 19 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_timeout() -> t.Optional[t.Dict[str, t.Any]]:
if not os.path.exists(TIMEOUT_PATH):
return None
data = read_json_file(TIMEOUT_PATH)
data['deadline'] = datetim... |
1,684 | def my_mean_squared_error(y_true, y_pred):
return backend.mean(tf.math.squared_difference(y_pred, y_true), axis=-1)
module_my_mean_squared_error = my_mean_squared_error
@test_utils.run_v2_only | Identical to built-in `mean_squared_error`, added here as a custom
func.
| 10 | 11 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def my_mean_squared_error(y_true, y_pred):
return backend.mean(tf.math.squared_difference(y_pred, y_true), axis=-1)
module_my_mean_squared_error = my_mean_squared_error
@test_ut... |
1,685 | def _amd_predict_with_optimized_batchsizes(self, feed, batch_size):
if isinstance(feed, np.ndarray):
feed = [feed]
items = feed[0].shape[0]
done_items = 0
results = []
while done_items < items:
if batch_size < 4: # Not much difference in BS < 4
... | Minimizes the amount of kernels to be compiled when using the ``amd`` backend with
varying batch sizes while trying to keep the batchsize as high as possible.
Parameters
----------
feed: numpy.ndarray or list
The feed to be provided to the model as input. This should be a `... | 67 | 80 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _amd_predict_with_optimized_batchsizes(self, feed, batch_size):
if isinstance(feed, np.ndarray):
feed = [feed]
items = feed[0].shape[0]
done_... |
1,686 | def set_positions(self, posA, posB):
if posA is not None:
self._posA_posB[0] = posA
if posB is not None:
self._posA_posB[1] = posB
self.stale = True
|
Set the start and end positions of the connecting path.
Parameters
----------
posA, posB : None, tuple
(x, y) coordinates of arrow tail and arrow head respectively. If
`None` use current value.
| 32 | 23 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def set_positions(self, posA, posB):
if posA is not None:
self._posA_posB[0] = posA
if posB is not None:
self._posA_posB[1] = posB
se... |
1,687 | def test_orderby_percentile_with_many_fields_multiple_entities_with_missing_data(self):
for tag, value, numbers in (
("transaction", "/foo/", [10, 11, 12]),
("transaction", "/bar/", [4, 5, 6]),
):
for subvalue in numbers:
self.store_performanc... |
Test that ensures when transactions table has null values for some fields (i.e. fields
with a different entity than the entity of the field in the order by), then the table gets
populated accordingly
| 34 | 101 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def test_orderby_percentile_with_many_fields_multiple_entities_with_missing_data(self):
for tag, value, numbers in (
("transaction", "/foo/", [10, 11, 12]),
... |
1,688 | def get_earning_components_max_benefits(employee, date, earning_component):
salary_structure = get_assigned_salary_structure(employee, date)
amount = frappe.db.sql(
,
salary_structure,
earning_component,
)
return amount if amount else 0
|
select amount
from `tabSalary Detail`
where parent = %s and is_flexible_benefit = 1
and salary_component = %s
order by name
| 20 | 21 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_earning_components_max_benefits(employee, date, earning_component):
salary_structure = get_assigned_salary_structure(employee, date)
amount = frappe.db.sql(
,
salary_structure,... |
1,689 | def get_input_mask_at(self, node_index):
inputs = self.get_input_at(node_index)
if isinstance(inputs, list):
return [getattr(x, "_keras_mask", None) for x in inputs]
else:
return getattr(inputs, "_keras_mask", None)
| Retrieves the input mask tensor(s) of a layer at a given node.
Args:
node_index: Integer, index of the node
from which to retrieve the attribute.
E.g. `node_index=0` will correspond to the
first time the layer was called.
Returns:
... | 51 | 22 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_input_mask_at(self, node_index):
inputs = self.get_input_at(node_index)
if isinstance(inputs, list):
return [getattr(x, "_keras_mask", None) for ... |
1,690 | def get_default_grpc_options():
return [
('grpc.max_send_message_length', -1),
('grpc.max_receive_message_length', -1),
]
|
Returns a list of default options used for creating grpc channels.
Documentation is here https://github.com/grpc/grpc/blob/master/include/grpc/impl/codegen/grpc_types.h
:returns: list of tuples defining grpc parameters
| 22 | 9 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_default_grpc_options():
return [
('grpc.max_send_message_length', -1),
('grpc.max_receive_message_length', -1),
]
```
##... |
1,691 | def _cancel_futures_kwargs(self):
if sys.version_info[:2] < (3, 9):
return {}
return dict(cancel_futures=True)
| Shim older Pythons that do not have Executor.shutdown(...cancel_futures=).
Remove this code when support for Python 3.8 is dropped.
| 18 | 11 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _cancel_futures_kwargs(self):
if sys.version_info[:2] < (3, 9):
return {}
return dict(cancel_futures=True)
```
###Assistant : Shim older... |
1,692 | def _estimate_available_parallelism() -> int:
cur_pg = ray.util.get_current_placement_group()
return _estimate_avail_cpus(cur_pg)
| Estimates the available CPU parallelism for this Dataset in the cluster.
If we are currently in a placement group, take that into account. | 23 | 9 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _estimate_available_parallelism() -> int:
cur_pg = ray.util.get_current_placement_group()
return _estimate_avail_cpus(cur_pg)
```
###Assistant : Estimates the a... |
1,693 | def bin(num, max_bits=None):
ceiling = 2 ** (num).bit_length()
if num >= 0:
s = bltns.bin(num + ceiling).replace('1', '0', 1)
else:
s = bltns.bin(~num ^ (ceiling - 1) + ceiling)
sign = s[:3]
digits = s[3:]
if max_bits is not None:
if len(digits) < max_bits:
... |
Like built-in bin(), except negative values are represented in
twos-compliment, and the leading bit always indicates sign
(0=positive, 1=negative).
>>> bin(10)
'0b0 1010'
>>> bin(~10) # ~10 is -11
'0b1 0101'
| 31 | 57 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def bin(num, max_bits=None):
ceiling = 2 ** (num).bit_length()
if num >= 0:
s = bltns.bin(num + ceiling).replace('1', '0', 1)
else:
s = bltns.bin(~num ^ (ce... |
1,694 | def axis0_safe_slice(X, mask, len_mask):
if len_mask != 0:
return X[safe_mask(X, mask), :]
return np.zeros(shape=(0, X.shape[1]))
| Return a mask which is safer to use on X than safe_mask.
This mask is safer than safe_mask since it returns an
empty array, when a sparse matrix is sliced with a boolean mask
with all False, instead of raising an unhelpful error in older
versions of SciPy.
See: https://github.com/scipy/scipy/issue... | 140 | 15 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def axis0_safe_slice(X, mask, len_mask):
if len_mask != 0:
return X[safe_mask(X, mask), :]
return np.zeros(shape=(0, X.shape[1]))
```
###Assistant : Return... |
1,695 | def _most_frequent(array, extra_value, n_repeat):
# Compute the most frequent value in array only
if array.size > 0:
if array.dtype == object:
# scipy.stats.mode is slow with object dtype array.
# Python Counter is more efficient
counter = Counter(array)
... | Compute the most frequent value in a 1d array extended with
[extra_value] * n_repeat, where extra_value is assumed to be not part
of the array. | 25 | 121 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _most_frequent(array, extra_value, n_repeat):
# Compute the most frequent value in array only
if array.size > 0:
if array.dtype == object:
# scipy.stats.... |
1,696 | def _cov(X, shrinkage=None, covariance_estimator=None):
if covariance_estimator is None:
shrinkage = "empirical" if shrinkage is None else shrinkage
if isinstance(shrinkage, str):
if shrinkage == "auto":
sc = StandardScaler() # standardize features
X... | Estimate covariance matrix (using optional covariance_estimator).
Parameters
----------
X : array-like of shape (n_samples, n_features)
Input data.
shrinkage : {'empirical', 'auto'} or float, default=None
Shrinkage parameter, possible values:
- None or 'empirical': no shrinkag... | 126 | 144 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def _cov(X, shrinkage=None, covariance_estimator=None):
if covariance_estimator is None:
shrinkage = "empirical" if shrinkage is None else shrinkage
if isinstance(sh... |
def from_key_val_list(value):
    """Convert ``value`` into an :class:`OrderedDict` when possible.

    Returns ``None`` when given ``None``.  Scalar inputs (``str``,
    ``bytes``, ``bool``, ``int``) cannot represent a sequence of
    2-tuples, so they raise :class:`ValueError`.  Anything else is
    handed to ``OrderedDict`` as-is.
    """
    if value is None:
        return None
    # These scalar types would be silently mis-iterated (or fail) by
    # OrderedDict, so reject them up front.
    scalar_types = (str, bytes, bool, int)
    if isinstance(value, scalar_types):
        raise ValueError("cannot encode objects that are not 2-tuples")
    return OrderedDict(value)
| Take an object and test to see if it can be represented as a
dictionary. Unless it can not be represented as such, return an
OrderedDict, e.g.,
::
>>> from_key_val_list([('key', 'val')])
OrderedDict([('key', 'val')])
>>> from_key_val_list('string')
Traceback (most recent ca... | 56 | 24 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def from_key_val_list(value):
if value is None:
return None
if isinstance(value, (str, bytes, bool, int)):
raise ValueError("cannot encode objects that are not ... |
1,698 | def load_lexer_from_file(filename, lexername="CustomLexer", **options):
try:
# This empty dict will contain the namespace for the exec'd file
custom_namespace = {}
with open(filename, 'rb') as f:
exec(f.read(), custom_namespace)
# Retrieve the class `lexername` from ... | Load a lexer from a file.
This method expects a file located relative to the current working
directory, which contains a Lexer class. By default, it expects the
Lexer to be named CustomLexer; you can specify your own class name
as the second argument to this function.
Users should be very careful w... | 80 | 92 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
try:
# This empty dict will contain the namespace for the exec'd file
custom_namespace = {}
... |
1,699 | def get_price(item_code, price_list, customer_group, company, qty=1):
from erpnext.e_commerce.shopping_cart.cart import get_party
template_item_code = frappe.db.get_value("Item", item_code, "variant_of")
if price_list:
price = frappe.get_all("Item Price", fields=["price_list_rate", "currency"],
filters={"pric... | select C.conversion_factor
from `tabUOM Conversion Detail` C
inner join `tabItem` I on C.parent = I.name and C.uom = I.sales_uom
where I.name = %s | 23 | 214 | Python |
###User : Below is a Python method which does a task. Create a documentation for the below code :
```Python
def get_price(item_code, price_list, customer_group, company, qty=1):
from erpnext.e_commerce.shopping_cart.cart import get_party
template_item_code = frappe.db.get_value("Item", item_cod... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.