Dataset schema — one record per function. Column statistics as reported by the viewer:

| column | type | stats |
|---|---|---|
| partition | string | 3 classes |
| func_name | string | lengths 1–134 |
| docstring | string | lengths 1–46.9k |
| path | string | lengths 4–223 |
| original_string | string | lengths 75–104k |
| code | string | lengths 75–104k |
| docstring_tokens | list | lengths 1–1.97k |
| repo | string | lengths 7–55 |
| language | string | 1 class |
| url | string | lengths 87–315 |
| code_tokens | list | lengths 19–28.4k |
| sha | string | length 40 |

All records below are from the `test` partition, repo apache/airflow, language python, at sha b69c686ad8a0c89b9136bb4b31767257eb7b2597 (each record's url embeds the same sha, so `repo`, `language`, and `sha` are not repeated per record). In every record `original_string` and `code` are identical, so they are shown once. Long cells are truncated with "..." exactly where this dump truncates them.

test | BigQueryBaseCursor.delete_dataset | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1600-L1623
docstring: Delete a dataset of Big query in your project. :param project_id: The name of the project where we have the dataset . :type project_id: str :param dataset_id: The dataset to be delete. :type dataset_id: str :return:
original_string / code (identical): `def delete_dataset(self, project_id, dataset_id):` plus the docstring above, cut at ":re..."
docstring_tokens: ["Delete", "a", "dataset", "of", "Big", "query", "in", "your", "project", ".", ":", "param", "project_id", ":", "The", "name", "of", "the", "project", "where", "we", "have", "the", "dataset", ".", ":", "type", "project_id", ":", "str", ":", "param", "da...
code_tokens: ["def", "delete_dataset", "(", "self", ",", "project_id", ",", "dataset_id", ")", ":", "project_id", "=", "project_id", "if", "project_id", "is", "not", "None", "else", "self", ".", "project_id", "self", ".", "log", ".", "info", "(", "'Deleting from project: %s...

test | BigQueryBaseCursor.get_dataset | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1625-L1655
docstring: Method returns dataset_resource if dataset exist and raised 404 error if dataset does not exist :param dataset_id: The BigQuery Dataset ID :type dataset_id: str :param project_id: The GCP Project ID :type project_id: str :return: dataset_resource .. seealso:...
original_string / code (identical): `def get_dataset(self, dataset_id, project_id=None):` plus the docstring above, cut at ":type pr..."
docstring_tokens: ["Method", "returns", "dataset_resource", "if", "dataset", "exist", "and", "raised", "404", "error", "if", "dataset", "does", "not", "exist"]
code_tokens: ["def", "get_dataset", "(", "self", ",", "dataset_id", ",", "project_id", "=", "None", ")", ":", "if", "not", "dataset_id", "or", "not", "isinstance", "(", "dataset_id", ",", "str", ")", ":", "raise", "ValueError", "(", "\"dataset_id argument must be provided and...

test | BigQueryBaseCursor.get_datasets_list | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1657-L1703
docstring: Method returns full list of BigQuery datasets in the current project .. seealso:: For more information, see: https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list :param project_id: Google Cloud Project for which you try to get all datasets ...
original_string / code (identical): `def get_datasets_list(self, project_id=None):` plus the docstring above, cut at ":param project_id: Google Cloud ..."
docstring_tokens: ["Method", "returns", "full", "list", "of", "BigQuery", "datasets", "in", "the", "current", "project"]
code_tokens: ["def", "get_datasets_list", "(", "self", ",", "project_id", "=", "None", ")", ":", "dataset_project_id", "=", "project_id", "if", "project_id", "else", "self", ".", "project_id", "try", ":", "datasets_list", "=", "self", ".", "service", ".", "datasets", "(", ...

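These three dataset methods are thin wrappers over the BigQuery v2 REST API and live on the hook's cursor. A minimal usage sketch; the connection id, project, and dataset names are invented, and a configured GCP connection is assumed:

```python
from airflow.contrib.hooks.bigquery_hook import BigQueryHook

# Hypothetical connection id; any configured BigQuery connection works.
hook = BigQueryHook(bigquery_conn_id='bigquery_default')
cursor = hook.get_conn().cursor()  # a BigQueryBaseCursor-derived cursor

# Full list of datasets visible in the default project.
for ds in cursor.get_datasets_list():
    print(ds['datasetReference']['datasetId'])

# Fetch one dataset resource (404s if it does not exist) ...
resource = cursor.get_dataset(dataset_id='my_dataset')

# ... and delete another.
cursor.delete_dataset(project_id='my-project', dataset_id='scratch')
```
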
test | BigQueryBaseCursor.insert_all | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1705-L1779
docstring: Method to stream data into BigQuery one record at a time without needing to run a load job .. seealso:: For more information, see: https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll :param project_id: The name of the project where we have the t...
original_string / code (identical): `def insert_all(self, project_id, dataset_id, table_id, rows, ignore_unknown_values=False, skip_invalid_rows=False, fail_on_error=False):` plus the docstring above, cut at ".. seealso:: ..."
docstring_tokens: ["Method", "to", "stream", "data", "into", "BigQuery", "one", "record", "at", "a", "time", "without", "needing", "to", "run", "a", "load", "job"]
code_tokens: ["def", "insert_all", "(", "self", ",", "project_id", ",", "dataset_id", ",", "table_id", ",", "rows", ",", "ignore_unknown_values", "=", "False", ",", "skip_invalid_rows", "=", "False", ",", "fail_on_error", "=", "False", ")", ":", "dataset_project_id", "=", "...

test | BigQueryCursor.execute | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1819-L1830
docstring: Executes a BigQuery query, and returns the job ID. :param operation: The query to execute. :type operation: str :param parameters: Parameters to substitute into the query. :type parameters: dict
original_string / code (identical): `def execute(self, operation, parameters=None):` plus the docstring above; body cut at "sql =..."
docstring_tokens: ["Executes", "a", "BigQuery", "query", "and", "returns", "the", "job", "ID", "."]
code_tokens: ["def", "execute", "(", "self", ",", "operation", ",", "parameters", "=", "None", ")", ":", "sql", "=", "_bind_parameters", "(", "operation", ",", "parameters", ")", "if", "parameters", "else", "operation", "self", ".", "job_id", "=", "self", ".", "run_quer...

test | BigQueryCursor.executemany | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1832-L1843
docstring: Execute a BigQuery query multiple times with different parameters. :param operation: The query to execute. :type operation: str :param seq_of_parameters: List of dictionary parameters to substitute into the query. :type seq_of_parameters: list
original_string / code (identical): `def executemany(self, operation, seq_of_parameters):` plus the docstring above, cut at "que..."
docstring_tokens: ["Execute", "a", "BigQuery", "query", "multiple", "times", "with", "different", "parameters", "."]
code_tokens (complete): ["def", "executemany", "(", "self", ",", "operation", ",", "seq_of_parameters", ")", ":", "for", "parameters", "in", "seq_of_parameters", ":", "self", ".", "execute", "(", "operation", ",", "parameters", ")"]

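execute binds parameters, runs one job, and stores the job id on the cursor; the complete code_tokens above show executemany is literally a loop over execute. The visible _bind_parameters call suggests pyformat-style %(name)s placeholders, which is an assumption here:

```python
cursor.execute(
    "SELECT name FROM my_dataset.my_table WHERE salary > %(floor)s",
    parameters={"floor": 1000},
)

# One query job per parameter dict.
cursor.executemany(
    "SELECT name FROM my_dataset.my_table WHERE salary > %(floor)s",
    seq_of_parameters=[{"floor": 1000}, {"floor": 2000}],
)
```
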
test | BigQueryCursor.next | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1849-L1890
docstring: Helper method for fetchone, which returns the next row from a buffer. If the buffer is empty, attempts to paginate through the result set for the next page, and load it into the buffer.
original_string / code (identical): `def next(self):` plus the docstring above; body cut at "if len(se..."
docstring_tokens: ["Helper", "method", "for", "fetchone", "which", "returns", "the", "next", "row", "from", "a", "buffer", ".", "If", "the", "buffer", "is", "empty", "attempts", "to", "paginate", "through", "the", "result", "set", "for", "the", "next", "page", "and", "load",...
code_tokens: ["def", "next", "(", "self", ")", ":", "if", "not", "self", ".", "job_id", ":", "return", "None", "if", "len", "(", "self", ".", "buffer", ")", "==", "0", ":", "if", "self", ".", "all_pages_loaded", ":", "return", "None", "query_results", "=", "(", ...

test | BigQueryCursor.fetchmany | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1892-L1913
docstring: Fetch the next set of rows of a query result, returning a sequence of sequences (e.g. a list of tuples). An empty sequence is returned when no more rows are available. The number of rows to fetch per call is specified by the parameter. If it is not given, the cursor's arraysize determines the nu...
original_string / code (identical): `def fetchmany(self, size=None):` plus the docstring above, cut at "If it is..."
docstring_tokens: ["Fetch", "the", "next", "set", "of", "rows", "of", "a", "query", "result", "returning", "a", "sequence", "of", "sequences", "(", "e", ".", "g", ".", "a", "list", "of", "tuples", ")", ".", "An", "empty", "sequence", "is", "returned", "when", "no", "mo...
code_tokens: ["def", "fetchmany", "(", "self", ",", "size", "=", "None", ")", ":", "if", "size", "is", "None", ":", "size", "=", "self", ".", "arraysize", "result", "=", "[", "]", "for", "_", "in", "range", "(", "size", ")", ":", "one", "=", "self", ".", "fe...

test | BigQueryCursor.fetchall | airflow/contrib/hooks/bigquery_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/bigquery_hook.py#L1915-L1927
docstring: Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples).
original_string / code (identical): `def fetchall(self):` plus the docstring above; body shown up to "result = [] / while True: / one = self.fetchone() / if one is None: / break / else: ..."
docstring_tokens: ["Fetch", "all", "(", "remaining", ")", "rows", "of", "a", "query", "result", "returning", "them", "as", "a", "sequence", "of", "sequences", "(", "e", ".", "g", ".", "a", "list", "of", "tuples", ")", "."]
code_tokens: ["def", "fetchall", "(", "self", ")", ":", "result", "=", "[", "]", "while", "True", ":", "one", "=", "self", ".", "fetchone", "(", ")", "if", "one", "is", "None", ":", "break", "else", ":", "result", ".", "append", "(", "one", ")", "return", "res...

test | configure_manifest_files | airflow/www/static_config.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/static_config.py#L29-L68
docstring: Loads the manifest file and register the `url_for_asset_` template tag. :param app: :return:
original_string / code (identical): `def configure_manifest_files(app):` plus the docstring above; body shown up to the nested `def parse_manifest_json():`, cut at "os.path.di..."
docstring_tokens: ["Loads", "the", "manifest", "file", "and", "register", "the", "url_for_asset_", "template", "tag", ".", ":", "param", "app", ":", ":", "return", ":"]
code_tokens: ["def", "configure_manifest_files", "(", "app", ")", ":", "def", "parse_manifest_json", "(", ")", ":", "# noinspection PyBroadException", "try", ":", "global", "manifest", "manifest_file", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirna...

test | PostgresToGoogleCloudStorageOperator._query_postgres | airflow/contrib/operators/postgres_to_gcs_operator.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L114-L122
docstring: Queries Postgres and returns a cursor to the results.
original_string / code (identical): `def _query_postgres(self):` plus the docstring above; body shown up to "postgres = PostgresHook(postgres_conn_id=self.postgres_conn_id) / conn = postgres.get_conn() / cursor = conn.cursor() / cursor.execute(self.sql, self.parameters) / return curs..."
docstring_tokens: ["Queries", "Postgres", "and", "returns", "a", "cursor", "to", "the", "results", "."]
code_tokens: ["def", "_query_postgres", "(", "self", ")", ":", "postgres", "=", "PostgresHook", "(", "postgres_conn_id", "=", "self", ".", "postgres_conn_id", ")", "conn", "=", "postgres", ".", "get_conn", "(", ")", "cursor", "=", "conn", ".", "cursor", "(", ")", "curso...

test | PostgresToGoogleCloudStorageOperator._write_local_data_files | airflow/contrib/operators/postgres_to_gcs_operator.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L124-L164
docstring: Takes a cursor, and writes results to a local file. :return: A dictionary where keys are filenames to be used as object names in GCS, and values are file handles to local files that contain the data for the GCS objects.
original_string / code (identical): `def _write_local_data_files(self, cursor):` plus the docstring above, cut after ":return: ..."
docstring_tokens: ["Takes", "a", "cursor", "and", "writes", "results", "to", "a", "local", "file", "."]
code_tokens: ["def", "_write_local_data_files", "(", "self", ",", "cursor", ")", ":", "schema", "=", "list", "(", "map", "(", "lambda", "schema_tuple", ":", "schema_tuple", "[", "0", "]", ",", "cursor", ".", "description", ")", ")", "tmp_file_handles", "=", "{", "}", ...

test | PostgresToGoogleCloudStorageOperator._write_local_schema_file | airflow/contrib/operators/postgres_to_gcs_operator.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L166-L192
docstring: Takes a cursor, and writes the BigQuery schema for the results to a local file system. :return: A dictionary where key is a filename to be used as an object name in GCS, and values are file handles to local files that contains the BigQuery schema fields in .json format.
original_string / code (identical): `def _write_local_schema_file(self, cursor):` plus the docstring above, cut after ":return: ..."
docstring_tokens: ["Takes", "a", "cursor", "and", "writes", "the", "BigQuery", "schema", "for", "the", "results", "to", "a", "local", "file", "system", "."]
code_tokens: ["def", "_write_local_schema_file", "(", "self", ",", "cursor", ")", ":", "schema", "=", "[", "]", "for", "field", "in", "cursor", ".", "description", ":", "# See PEP 249 for details about the description tuple.", "field_name", "=", "field", "[", "0", "]", "field_t...

test | PostgresToGoogleCloudStorageOperator.convert_types | airflow/contrib/operators/postgres_to_gcs_operator.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/postgres_to_gcs_operator.py#L207-L224
docstring: Takes a value from Postgres, and converts it to a value that's safe for JSON/Google Cloud Storage/BigQuery. Dates are converted to UTC seconds. Decimals are converted to floats. Times are converted to seconds.
original_string / code (identical): `def convert_types(cls, value):` plus the docstring above; body cut at "if type(value) in (date..."
docstring_tokens: ["Takes", "a", "value", "from", "Postgres", "and", "converts", "it", "to", "a", "value", "that", "s", "safe", "for", "JSON", "/", "Google", "Cloud", "Storage", "/", "BigQuery", ".", "Dates", "are", "converted", "to", "UTC", "seconds", ".", "Decimals", "ar...
code_tokens: ["def", "convert_types", "(", "cls", ",", "value", ")", ":", "if", "type", "(", "value", ")", "in", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ")", ":", "return", "time", ".", "mktime", "(", "value", ".", "timetuple", "(", ")", ...

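The visible code_tokens confirm the first branch: dates and datetimes go through time.mktime(value.timetuple()). The Decimal and time branches below are inferred from the docstring rather than from the visible tokens, so treat this standalone sketch as an approximation:

```python
import datetime
import time
from decimal import Decimal


def convert_types(value):
    # Confirmed by the tokens above: datetimes/dates -> epoch seconds.
    if type(value) in (datetime.datetime, datetime.date):
        return time.mktime(value.timetuple())
    # Inferred from the docstring: times -> seconds since midnight.
    if isinstance(value, datetime.time):
        return value.hour * 3600 + value.minute * 60 + value.second
    # Inferred from the docstring: Decimals -> floats.
    if isinstance(value, Decimal):
        return float(value)
    return value


print(convert_types(datetime.date(2019, 1, 1)))  # epoch seconds (local TZ)
print(convert_types(Decimal("1.5")))             # 1.5
```
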
test | _make_intermediate_dirs | airflow/contrib/operators/sftp_operator.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/sftp_operator.py#L160-L180
docstring: Create all the intermediate directories in a remote host :param sftp_client: A Paramiko SFTP client. :param remote_directory: Absolute Path of the directory containing the file :return:
original_string / code (identical): `def _make_intermediate_dirs(sftp_client, remote_directory):` plus the docstring above; body cut at "if remote_directory == '/': / s..."
docstring_tokens: ["Create", "all", "the", "intermediate", "directories", "in", "a", "remote", "host"]
code_tokens: ["def", "_make_intermediate_dirs", "(", "sftp_client", ",", "remote_directory", ")", ":", "if", "remote_directory", "==", "'/'", ":", "sftp_client", ".", "chdir", "(", "'/'", ")", "return", "if", "remote_directory", "==", "''", ":", "return", "try", ":", "sftp_...

test | SQSHook.create_queue | airflow/contrib/hooks/aws_sqs_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_sqs_hook.py#L34-L48
docstring: Create queue using connection object :param queue_name: name of the queue. :type queue_name: str :param attributes: additional attributes for the queue (default: None) For details of the attributes parameter see :py:meth:`botocore.client.SQS.create_queue` :type attributes: d...
original_string / code (identical): `def create_queue(self, queue_name, attributes=None):` plus the docstring above, cut at "parameter se..."
docstring_tokens: ["Create", "queue", "using", "connection", "object"]
code_tokens: ["def", "create_queue", "(", "self", ",", "queue_name", ",", "attributes", "=", "None", ")", ":", "return", "self", ".", "get_conn", "(", ")", ".", "create_queue", "(", "QueueName", "=", "queue_name", ",", "Attributes", "=", "attributes", "or", "{", "}", ...

test | SQSHook.send_message | airflow/contrib/hooks/aws_sqs_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_sqs_hook.py#L50-L71
docstring: Send message to the queue :param queue_url: queue url :type queue_url: str :param message_body: the contents of the message :type message_body: str :param delay_seconds: seconds to delay the message :type delay_seconds: int :param message_attributes: additional a...
original_string / code (identical): `def send_message(self, queue_url, message_body, delay_seconds=0, message_attributes=None):` plus the docstring above, cut at ":param delay_seconds..."
docstring_tokens: ["Send", "message", "to", "the", "queue"]
code_tokens: ["def", "send_message", "(", "self", ",", "queue_url", ",", "message_body", ",", "delay_seconds", "=", "0", ",", "message_attributes", "=", "None", ")", ":", "return", "self", ".", "get_conn", "(", ")", ".", "send_message", "(", "QueueUrl", "=", "queue_url", ",", ...

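Both methods pass straight through to the boto3 SQS client, so the return values are the raw botocore dicts referenced in the docstrings. A usage sketch; the connection id, queue name, and attribute values are invented:

```python
from airflow.contrib.hooks.aws_sqs_hook import SQSHook

hook = SQSHook(aws_conn_id='aws_default')
queue = hook.create_queue('my-queue', attributes={'DelaySeconds': '5'})

result = hook.send_message(
    queue_url=queue['QueueUrl'],
    message_body='hello',
    delay_seconds=0,
    message_attributes={
        'source': {'StringValue': 'airflow', 'DataType': 'String'},
    },
)
print(result['MessageId'])
```
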
test | _integrate_plugins | airflow/hooks/__init__.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/__init__.py#L27-L32
docstring: Integrate plugins to the context
original_string / code (identical, complete):

```python
def _integrate_plugins():
    """Integrate plugins to the context"""
    from airflow.plugins_manager import hooks_modules
    for hooks_module in hooks_modules:
        sys.modules[hooks_module.__name__] = hooks_module
        globals()[hooks_module._name] = hooks_module
```

docstring_tokens: ["Integrate", "plugins", "to", "the", "context"]
code_tokens: ["def", "_integrate_plugins", "(", ")", ":", "from", "airflow", ".", "plugins_manager", "import", "hooks_modules", "for", "hooks_module", "in", "hooks_modules", ":", "sys", ".", "modules", "[", "hooks_module", ".", "__name__", "]", "=", "hooks_module", "globals", ...

test | BaseTaskRunner.run_command | airflow/task/task_runner/base_task_runner.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/task/task_runner/base_task_runner.py#L101-L135
docstring: Run the task command. :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` :type run_with: list :param join_args: whether to concatenate the list of command tokens e.g. ``['airflow', 'run']`` vs ``['airflow run']`` :param join_args: bool ...
original_string / code (identical): `def run_command(self, run_with=None, join_args=False):` plus the docstring above, cut at "e.g. ``['airflow', 'run'..."
docstring_tokens: ["Run", "the", "task", "command", "."]
code_tokens: ["def", "run_command", "(", "self", ",", "run_with", "=", "None", ",", "join_args", "=", "False", ")", ":", "run_with", "=", "run_with", "or", "[", "]", "cmd", "=", "[", "\" \"", ".", "join", "(", "self", ".", "_command", ")", "]", "if", "join_args", ...

test | BaseTaskRunner.on_finish | airflow/task/task_runner/base_task_runner.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/task/task_runner/base_task_runner.py#L157-L165
docstring: A callback that should be called when this is done running.
original_string / code (identical): `def on_finish(self):` plus the docstring above; body shown up to "if self._cfg_path and os.path.isfile(self._cfg_path): / if self.run_as_user: / subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True) / else: ..."
docstring_tokens: ["A", "callback", "that", "should", "be", "called", "when", "this", "is", "done", "running", "."]
code_tokens: ["def", "on_finish", "(", "self", ")", ":", "if", "self", ".", "_cfg_path", "and", "os", ".", "path", ".", "isfile", "(", "self", ".", "_cfg_path", ")", ":", "if", "self", ".", "run_as_user", ":", "subprocess", ".", "call", "(", "[", "'sudo'", ",", ...

test | _main | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L488-L501
docstring: Parse options and process commands
original_string / code (identical): `def _main():` plus the docstring above; body shown up to "usage = \"usage: nvd3.py [options]\" / parser = OptionParser(usage=usage, version=(\"python-nvd3 - Charts generator with \" \"nvd3.js and d3.js\")) / parser.add_option..."
docstring_tokens: ["Parse", "options", "and", "process", "commands"]
code_tokens: ["def", "_main", "(", ")", ":", "# Parse arguments", "usage", "=", "\"usage: nvd3.py [options]\"", "parser", "=", "OptionParser", "(", "usage", "=", "usage", ",", "version", "=", "(", "\"python-nvd3 - Charts generator with \"", "\"nvd3.js and d3.js\"", ")", ")", "parse...

test | NVD3Chart.add_serie | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L179-L294
docstring: add serie - Series are list of data that will be plotted y {1, 2, 3, 4, 5} / x {1, 2, 3, 4, 5} **Attributes**: * ``name`` - set Serie name * ``x`` - x-axis data * ``y`` - y-axis data kwargs: * ``shape`` - for scatterChart, you can set diffe...
original_string / code (identical): `def add_serie(self, y, x, name=None, extra=None, **kwargs):` plus the docstring above, cut after "* ``y`` - y-axis data ..."
docstring_tokens: ["add", "serie", "-", "Series", "are", "list", "of", "data", "that", "will", "be", "plotted", "y", "{", "1", "2", "3", "4", "5", "}", "/", "x", "{", "1", "2", "3", "4", "5", "}"]
code_tokens: ["def", "add_serie", "(", "self", ",", "y", ",", "x", ",", "name", "=", "None", ",", "extra", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "name", ":", "name", "=", "\"Serie %d\"", "%", "(", "self", ".", "serie_no", ")", "# For sca...

test | NVD3Chart.buildcontent | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L350-L361
docstring: Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};)
original_string / code (identical): `def buildcontent(self):` plus the docstring above; body cut at "self.buildcontainer() / # if the subclass has a method bui..."
docstring_tokens: ["Build", "HTML", "content", "only", "no", "header", "or", "body", "tags", ".", "To", "be", "useful", "this", "will", "usually", "require", "the", "attribute", "juqery_on_ready", "to", "be", "set", "which", "will", "wrap", "the", "js", "in", "$", "(", "fu...
code_tokens: ["def", "buildcontent", "(", "self", ")", ":", "self", ".", "buildcontainer", "(", ")", "# if the subclass has a method buildjs this method will be", "# called instead of the method defined here", "# when this subclass method is entered it does call", "# the method buildjschart defined he...

test | NVD3Chart.buildhtml | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L363-L371
docstring: Build the HTML page Create the htmlheader with css / js Create html page Add Js code for nvd3
original_string / code (identical, complete):

```python
def buildhtml(self):
    """Build the HTML page
    Create the htmlheader with css / js
    Create html page
    Add Js code for nvd3
    """
    self.buildcontent()
    self.content = self.htmlcontent
    self.htmlcontent = self.template_page_nvd3.render(chart=self)
```

docstring_tokens: ["Build", "the", "HTML", "page", "Create", "the", "htmlheader", "with", "css", "/", "js", "Create", "html", "page", "Add", "Js", "code", "for", "nvd3"]
code_tokens (complete): ["def", "buildhtml", "(", "self", ")", ":", "self", ".", "buildcontent", "(", ")", "self", ".", "content", "=", "self", ".", "htmlcontent", "self", ".", "htmlcontent", "=", "self", ".", "template_page_nvd3", ".", "render", "(", "chart", "=", "self", ")"]

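add_serie accumulates data series, and buildhtml chains buildcontent (container + JS) with the page template to fill self.htmlcontent. A sketch against the upstream python-nvd3 API that this vendored module mirrors; the chart class export and constructor kwargs are assumptions based on upstream, not confirmed by the rows above:

```python
from airflow._vendor.nvd3 import lineChart  # assumed export, as upstream

chart = lineChart(name='demo', width=600, height=400)
chart.add_serie(y=[1, 4, 9, 16], x=[1, 2, 3, 4], name='squares')
chart.buildhtml()  # header + container + JS, per the docstring above
html = chart.htmlcontent
```
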
test | NVD3Chart.buildhtmlheader | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L374-L383
docstring: generate HTML header content
original_string / code (identical): `def buildhtmlheader(self):` plus the docstring above; body cut at "if '_js_initialized' not in globals() or not _js_initialized: / for css in sel..."
docstring_tokens: ["generate", "HTML", "header", "content"]
code_tokens: ["def", "buildhtmlheader", "(", "self", ")", ":", "self", ".", "htmlheader", "=", "''", "# If the JavaScript assets have already been injected, don't bother re-sourcing them.", "global", "_js_initialized", "if", "'_js_initialized'", "not", "in", "globals", "(", ")", "or", ...

test | NVD3Chart.buildcontainer | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L385-L405
docstring: generate HTML div
original_string / code (identical): `def buildcontainer(self):` plus the docstring above; body cut at "self.style += 'width:%s;' %..."
docstring_tokens: ["generate", "HTML", "div"]
code_tokens: ["def", "buildcontainer", "(", "self", ")", ":", "if", "self", ".", "container", ":", "return", "# Create SVG div with style", "if", "self", ".", "width", ":", "if", "self", ".", "width", "[", "-", "1", "]", "!=", "'%'", ":", "self", ".", "style", "+=", ...

test | NVD3Chart.buildjschart | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L407-L417
docstring: generate javascript code for the chart
original_string / code (identical): `def buildjschart(self):` plus the docstring above; body cut at "if self.tooltip_condition_string == '': / self.toolt..."
docstring_tokens: ["generate", "javascript", "code", "for", "the", "chart"]
code_tokens: ["def", "buildjschart", "(", "self", ")", ":", "self", ".", "jschart", "=", "''", "# add custom tooltip string in jschart", "# default condition (if build_custom_tooltip is not called explicitly with date_flag=True)", "if", "self", ".", "tooltip_condition_string", "==", "''", ":...

test | NVD3Chart.create_x_axis | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L419-L448
docstring: Create X-axis
original_string / code (identical): `def create_x_axis(self, name, label=None, format=None, date=False, custom_format=False):` plus the docstring above; body cut at "if format == 'AM_PM': / axis['tickFormat'] = \"function(d) { r..."
docstring_tokens: ["Create", "X", "-", "axis"]
code_tokens: ["def", "create_x_axis", "(", "self", ",", "name", ",", "label", "=", "None", ",", "format", "=", "None", ",", "date", "=", "False", ",", "custom_format", "=", "False", ")", ":", "axis", "=", "{", "}", "if", "custom_format", "and", "format", ":", "axi...

test | NVD3Chart.create_y_axis | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L450-L465
docstring: Create Y-axis
original_string / code (identical): `def create_y_axis(self, name, label=None, format=None, custom_format=False):` plus the docstring above; body cut at "axis['tickFormat'] = \"d3.format(',%s')\" % format / if label: ..."
docstring_tokens: ["Create", "Y", "-", "axis"]
code_tokens: ["def", "create_y_axis", "(", "self", ",", "name", ",", "label", "=", "None", ",", "format", "=", "None", ",", "custom_format", "=", "False", ")", ":", "axis", "=", "{", "}", "if", "custom_format", "and", "format", ":", "axis", "[", "'tickFormat'", "]", ...

test | TemplateMixin.buildcontent | airflow/_vendor/nvd3/NVD3Chart.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/_vendor/nvd3/NVD3Chart.py#L474-L485
docstring: Build HTML content only, no header or body tags. To be useful this will usually require the attribute `juqery_on_ready` to be set which will wrap the js in $(function(){<regular_js>};)
original_string / code (identical): `def buildcontent(self):` plus the docstring above; body cut at "self.buildcontainer() / # if the subclass has a method bui..."
docstring_tokens: ["Build", "HTML", "content", "only", "no", "header", "or", "body", "tags", ".", "To", "be", "useful", "this", "will", "usually", "require", "the", "attribute", "juqery_on_ready", "to", "be", "set", "which", "will", "wrap", "the", "js", "in", "$", "(", "fu...
code_tokens: ["def", "buildcontent", "(", "self", ")", ":", "self", ".", "buildcontainer", "(", ")", "# if the subclass has a method buildjs this method will be", "# called instead of the method defined here", "# when this subclass method is entered it does call", "# the method buildjschart defined he...

test | SqliteHook.get_conn | airflow/hooks/sqlite_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/sqlite_hook.py#L35-L41
docstring: Returns a sqlite connection object
original_string / code (identical, complete):

```python
def get_conn(self):
    """
    Returns a sqlite connection object
    """
    conn = self.get_connection(self.sqlite_conn_id)
    conn = sqlite3.connect(conn.host)
    return conn
```

docstring_tokens: ["Returns", "a", "sqlite", "connection", "object"]
code_tokens (complete): ["def", "get_conn", "(", "self", ")", ":", "conn", "=", "self", ".", "get_connection", "(", "self", ".", "sqlite_conn_id", ")", "conn", "=", "sqlite3", ".", "connect", "(", "conn", ".", "host", ")", "return", "conn"]

test | action_logging | airflow/www/decorators.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L29-L58
docstring: Decorator to log user actions
original_string / code (identical): `def action_logging(f):` plus the docstring above; body cut at "with create_session() as session: / if g.user.is_anonymous: / user = 'anonymous' / else: / user = g.user.username / log =..."
docstring_tokens: ["Decorator", "to", "log", "user", "actions"]
code_tokens: ["def", "action_logging", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "create_session", "(", ")", "as", "session", ":", "if", "g", ".", "user", ...

test | gzipped | airflow/www/decorators.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L61-L94
docstring: Decorator to make a view compressed
original_string / code (identical): `def gzipped(f):` plus the docstring above; body cut at "accept_encoding = request.headers.get('Accept-Encoding', '') / if 'gzip' not in accept_encoding.lower(): ..."
docstring_tokens: ["Decorator", "to", "make", "a", "view", "compressed"]
code_tokens: ["def", "gzipped", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "view_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "@", "after_this_request", "def", "zipper", "(", "response", ")", ":", "accept_encoding", "=...

test | has_dag_access | airflow/www/decorators.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/decorators.py#L97-L127
docstring: Decorator to check whether the user has read / write permission on the dag.
original_string / code (identical): `def has_dag_access(**dag_kwargs):` plus the docstring above; body cut at "has_access = self.appbuilder.sm.has_access / dag_id = request.args.get('da..."
docstring_tokens: ["Decorator", "to", "check", "whether", "the", "user", "has", "read", "/", "write", "permission", "on", "the", "dag", "."]
code_tokens: ["def", "has_dag_access", "(", "*", "*", "dag_kwargs", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "functools", ".", "wraps", "(", "f", ")", "def", "wrapper", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "has_access", ...

test | get_last_dagrun | airflow/models/dag.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dag.py#L64-L75
docstring: Returns the last dag run for a dag, None if there was none. Last dag run can be any type of run eg. scheduled or backfilled. Overridden DagRuns are ignored.
original_string / code (identical): `def get_last_dagrun(dag_id, session, include_externally_triggered=False):` plus the docstring above; body cut at "DR = DagRun / query = session.query(DR).filter(DR.dag_i..."
docstring_tokens: ["Returns", "the", "last", "dag", "run", "for", "a", "dag", "None", "if", "there", "was", "none", ".", "Last", "dag", "run", "can", "be", "any", "type", "of", "run", "eg", ".", "scheduled", "or", "backfilled", ".", "Overridden", "DagRuns", "are", "ignor...
code_tokens: ["def", "get_last_dagrun", "(", "dag_id", ",", "session", ",", "include_externally_triggered", "=", "False", ")", ":", "DR", "=", "DagRun", "query", "=", "session", ".", "query", "(", "DR", ")", ".", "filter", "(", "DR", ".", "dag_id", "==", "dag_id", ")"...

test | DagModel.create_dagrun | airflow/models/dag.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dag.py#L1494-L1526
docstring: Creates a dag run from this dag including the tasks associated with this dag. Returns the dag run. :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date of this dag run :type execution_date: datetime.datetime :...
original_string / code (identical): `def create_dagrun(self, run_id, state, execution_date, start_date=None, external_trigger=False, conf=None, session=None):` plus the docstring above, cut within the parameter list
docstring_tokens: ["Creates", "a", "dag", "run", "from", "this", "dag", "including", "the", "tasks", "associated", "with", "this", "dag", ".", "Returns", "the", "dag", "run", "."]
code_tokens: ["def", "create_dagrun", "(", "self", ",", "run_id", ",", "state", ",", "execution_date", ",", "start_date", "=", "None", ",", "external_trigger", "=", "False", ",", "conf", "=", "None", ",", "session", "=", "None", ")", ":", "return", "self", ".", "get_d...

test | SQSPublishOperator.execute | airflow/contrib/operators/aws_sqs_publish_operator.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/aws_sqs_publish_operator.py#L61-L81
docstring: Publish the message to SQS queue :param context: the context object :type context: dict :return: dict with information about the message sent For details of the returned dict see :py:meth:`botocore.client.SQS.send_message` :rtype: dict
original_string / code (identical): `def execute(self, context):` plus the docstring above, cut at ":rty..."
docstring_tokens: ["Publish", "the", "message", "to", "SQS", "queue"]
code_tokens: ["def", "execute", "(", "self", ",", "context", ")", ":", "hook", "=", "SQSHook", "(", "aws_conn_id", "=", "self", ".", "aws_conn_id", ")", "result", "=", "hook", ".", "send_message", "(", "queue_url", "=", "self", ".", "sqs_queue", ",", "message_body", "...

test | generate_pages | airflow/www/utils.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L79-L185
docstring: Generates the HTML for a paging component using a similar logic to the paging auto-generated by Flask managed views. The paging component defines a number of pages visible in the pager (window) and once the user goes to a page beyond the largest visible, it would scroll to the right the page numbers and kee...
original_string / code (identical): `def generate_pages(current_page, num_of_pages, search=None, showPaused=None, window=7):` plus the docstring above, cut within the docstring
docstring_tokens: ["Generates", "the", "HTML", "for", "a", "paging", "component", "using", "a", "similar", "logic", "to", "the", "paging", "auto", "-", "generated", "by", "Flask", "managed", "views", ".", "The", "paging", "component", "defines", "a", "number", "of", "pages", ...
code_tokens: ["def", "generate_pages", "(", "current_page", ",", "num_of_pages", ",", "search", "=", "None", ",", "showPaused", "=", "None", ",", "window", "=", "7", ")", ":", "void_link", "=", "'javascript:void(0)'", "first_node", "=", "Markup", "(", "\"\"\"<li class=\"pagin...

test | json_response | airflow/www/utils.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L193-L201
docstring: returns a json response from a json serializable python object
original_string / code (identical, complete):

```python
def json_response(obj):
    """
    returns a json response from a json serializable python object
    """
    return Response(
        response=json.dumps(
            obj, indent=4, cls=AirflowJsonEncoder),
        status=200,
        mimetype="application/json")
```

docstring_tokens: ["returns", "a", "json", "response", "from", "a", "json", "serializable", "python", "object"]
code_tokens: ["def", "json_response", "(", "obj", ")", ":", "return", "Response", "(", "response", "=", "json", ".", "dumps", "(", "obj", ",", "indent", "=", "4", ",", "cls", "=", "AirflowJsonEncoder", ")", ",", "status", "=", "200", ",", "mimetype", "=", "\"applica...

test | open_maybe_zipped | airflow/www/utils.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L207-L219
docstring: Opens the given file. If the path contains a folder with a .zip suffix, then the folder is treated as a zip archive, opening the file inside the archive. :return: a file object, as in `open`, or as in `ZipFile.open`.
original_string / code (identical): `def open_maybe_zipped(f, mode='r'):` plus the docstring above; body cut at "_, archive, filename = ZIP_REGEX...."
docstring_tokens: ["Opens", "the", "given", "file", ".", "If", "the", "path", "contains", "a", "folder", "with", "a", ".", "zip", "suffix", "then", "the", "folder", "is", "treated", "as", "a", "zip", "archive", "opening", "the", "file", "inside", "the", "archive", "."]
code_tokens: ["def", "open_maybe_zipped", "(", "f", ",", "mode", "=", "'r'", ")", ":", "_", ",", "archive", ",", "filename", "=", "ZIP_REGEX", ".", "search", "(", "f", ")", ".", "groups", "(", ")", "if", "archive", "and", "zipfile", ".", "is_zipfile", "(", "archiv...

test | make_cache_key | airflow/www/utils.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/www/utils.py#L222-L228
docstring: Used by cache to get a unique key per URL
original_string / code (identical, complete):

```python
def make_cache_key(*args, **kwargs):
    """
    Used by cache to get a unique key per URL
    """
    path = request.path
    args = str(hash(frozenset(request.args.items())))
    return (path + args).encode('ascii', 'ignore')
```

docstring_tokens: ["Used", "by", "cache", "to", "get", "a", "unique", "key", "per", "URL"]
code_tokens: ["def", "make_cache_key", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "path", "=", "request", ".", "path", "args", "=", "str", "(", "hash", "(", "frozenset", "(", "request", ".", "args", ".", "items", "(", ")", ")", ")", ")", "return", "(...

test | CloudVideoIntelligenceHook.get_conn | airflow/contrib/hooks/gcp_video_intelligence_hook.py
url: https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_video_intelligence_hook.py#L41-L49
docstring: Returns Gcp Video Intelligence Service client :rtype: google.cloud.videointelligence_v1.VideoIntelligenceServiceClient
original_string / code (identical): `def get_conn(self):` plus the docstring above; body shown up to "if not self._conn: / self._conn = VideoIntelligenceServiceClient(credentials=self._get_credentials()) / return self...."
docstring_tokens: ["Returns", "Gcp", "Video", "Intelligence", "Service", "client"]
code_tokens (complete): ["def", "get_conn", "(", "self", ")", ":", "if", "not", "self", ".", "_conn", ":", "self", ".", "_conn", "=", "VideoIntelligenceServiceClient", "(", "credentials", "=", "self", ".", "_get_credentials", "(", ")", ")", "return", "self", ".", "_conn"]

test | CloudVideoIntelligenceHook.annotate_video | Performs video annotation.
:param input_uri: Input video location. Currently, only Google Cloud Storage URIs are supported,
which must be specified in the following format: ``gs://bucket-id/object-id``.
:type input_uri: str
:param input_content: The video data bytes.
If ... | airflow/contrib/hooks/gcp_video_intelligence_hook.py | def annotate_video(
self,
input_uri=None,
input_content=None,
features=None,
video_context=None,
output_uri=None,
location=None,
retry=None,
timeout=None,
metadata=None,
):
"""
Performs video annotation.
:param ... | def annotate_video(
self,
input_uri=None,
input_content=None,
features=None,
video_context=None,
output_uri=None,
location=None,
retry=None,
timeout=None,
metadata=None,
):
"""
Performs video annotation.
:param ... | [
"Performs",
"video",
"annotation",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_video_intelligence_hook.py#L51-L105 | [
"def",
"annotate_video",
"(",
"self",
",",
"input_uri",
"=",
"None",
",",
"input_content",
"=",
"None",
",",
"features",
"=",
"None",
",",
"video_context",
"=",
"None",
",",
"output_uri",
"=",
"None",
",",
"location",
"=",
"None",
",",
"retry",
"=",
"Non... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | OpsgenieAlertHook._get_api_key | Get Opsgenie api_key for creating alert | airflow/contrib/hooks/opsgenie_alert_hook.py | def _get_api_key(self):
"""
Get Opsgenie api_key for creating alert
"""
conn = self.get_connection(self.http_conn_id)
api_key = conn.password
if not api_key:
raise AirflowException('Opsgenie API Key is required for this hook, '
... | def _get_api_key(self):
"""
Get Opsgenie api_key for creating alert
"""
conn = self.get_connection(self.http_conn_id)
api_key = conn.password
if not api_key:
raise AirflowException('Opsgenie API Key is required for this hook, '
... | [
"Get",
"Opsgenie",
"api_key",
"for",
"creating",
"alert"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L50-L59 | [
"def",
"_get_api_key",
"(",
"self",
")",
":",
"conn",
"=",
"self",
".",
"get_connection",
"(",
"self",
".",
"http_conn_id",
")",
"api_key",
"=",
"conn",
".",
"password",
"if",
"not",
"api_key",
":",
"raise",
"AirflowException",
"(",
"'Opsgenie API Key is requi... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | OpsgenieAlertHook.get_conn | Override HttpHook get_conn because this hook just needs base_url
and headers, and does not need generic params
:param headers: additional headers to be passed through as a dictionary
:type headers: dict | airflow/contrib/hooks/opsgenie_alert_hook.py | def get_conn(self, headers=None):
"""
Override HttpHook get_conn because this hook just needs base_url
and headers, and does not need generic params
:param headers: additional headers to be passed through as a dictionary
:type headers: dict
"""
conn = self.get_c... | def get_conn(self, headers=None):
"""
Override HttpHook get_conn because this hook just needs base_url
and headers, and does not need generic params
:param headers: additional headers to be passed through as a dictionary
:type headers: dict
"""
conn = self.get_c... | [
"Overwrite",
"HttpHook",
"get_conn",
"because",
"this",
"hook",
"just",
"needs",
"base_url",
"and",
"headers",
"and",
"does",
"not",
"need",
"generic",
"params"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L61-L74 | [
"def",
"get_conn",
"(",
"self",
",",
"headers",
"=",
"None",
")",
":",
"conn",
"=",
"self",
".",
"get_connection",
"(",
"self",
".",
"http_conn_id",
")",
"self",
".",
"base_url",
"=",
"conn",
".",
"host",
"if",
"conn",
".",
"host",
"else",
"'https://ap... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | OpsgenieAlertHook.execute | Execute the Opsgenie Alert call
:param payload: Opsgenie API Create Alert payload values
See https://docs.opsgenie.com/docs/alert-api#section-create-alert
:type payload: dict | airflow/contrib/hooks/opsgenie_alert_hook.py | def execute(self, payload={}):
"""
Execute the Opsgenie Alert call
:param payload: Opsgenie API Create Alert payload values
See https://docs.opsgenie.com/docs/alert-api#section-create-alert
:type payload: dict
"""
api_key = self._get_api_key()
return ... | def execute(self, payload={}):
"""
Execute the Opsgenie Alert call
:param payload: Opsgenie API Create Alert payload values
See https://docs.opsgenie.com/docs/alert-api#section-create-alert
:type payload: dict
"""
api_key = self._get_api_key()
return ... | [
"Execute",
"the",
"Opsgenie",
"Alert",
"call"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/opsgenie_alert_hook.py#L76-L88 | [
"def",
"execute",
"(",
"self",
",",
"payload",
"=",
"{",
"}",
")",
":",
"api_key",
"=",
"self",
".",
"_get_api_key",
"(",
")",
"return",
"self",
".",
"run",
"(",
"endpoint",
"=",
"'v2/alerts'",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"payload",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
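A hedged usage sketch covering the Opsgenie hook records above; the connection id and payload fields are assumptions chosen for illustration ("message" is the one field the create-alert API requires):

from airflow.contrib.hooks.opsgenie_alert_hook import OpsgenieAlertHook

hook = OpsgenieAlertHook("opsgenie_default")  # assumed connection id; its password holds the API key
hook.execute(payload={
    "message": "Example alert from Airflow",  # required by the Opsgenie create-alert API
    "description": "Task xyz failed",         # optional context
    "tags": ["airflow"],
})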
test | BashSensor.poke | Execute the bash command in a temporary directory
which will be cleaned afterwards | airflow/contrib/sensors/bash_sensor.py | def poke(self, context):
"""
Execute the bash command in a temporary directory
which will be cleaned afterwards
"""
bash_command = self.bash_command
self.log.info("Tmp dir root location: \n %s", gettempdir())
with TemporaryDirectory(prefix='airflowtmp') as tmp_dir... | def poke(self, context):
"""
Execute the bash command in a temporary directory
which will be cleaned afterwards
"""
bash_command = self.bash_command
self.log.info("Tmp dir root location: \n %s", gettempdir())
with TemporaryDirectory(prefix='airflowtmp') as tmp_dir... | [
"Execute",
"the",
"bash",
"command",
"in",
"a",
"temporary",
"directory",
"which",
"will",
"be",
"cleaned",
"afterwards"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/bash_sensor.py#L60-L91 | [
"def",
"poke",
"(",
"self",
",",
"context",
")",
":",
"bash_command",
"=",
"self",
".",
"bash_command",
"self",
".",
"log",
".",
"info",
"(",
"\"Tmp dir root location: \\n %s\"",
",",
"gettempdir",
"(",
")",
")",
"with",
"TemporaryDirectory",
"(",
"prefix",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
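A short, hedged example of wiring the sensor above into a DAG; the task id, command, and the dag object are illustrative assumptions:

from airflow.contrib.sensors.bash_sensor import BashSensor

wait_for_flag = BashSensor(
    task_id="wait_for_flag",
    bash_command="test -f /tmp/ready.flag",  # exit code 0 makes poke() return True
    poke_interval=30,                        # re-run the command every 30 seconds
    dag=dag,                                 # assumes a DAG object defined elsewhere
)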
test | OpsgenieAlertOperator._build_opsgenie_payload | Construct the Opsgenie JSON payload. All relevant parameters are combined here
into a valid Opsgenie JSON payload.
:return: Opsgenie payload (dict) to send | airflow/contrib/operators/opsgenie_alert_operator.py | def _build_opsgenie_payload(self):
"""
Construct the Opsgenie JSON payload. All relevant parameters are combined here
into a valid Opsgenie JSON payload.
:return: Opsgenie payload (dict) to send
"""
payload = {}
for key in [
"message", "alias", "descri... | def _build_opsgenie_payload(self):
"""
Construct the Opsgenie JSON payload. All relevant parameters are combined here
into a valid Opsgenie JSON payload.
:return: Opsgenie payload (dict) to send
"""
payload = {}
for key in [
"message", "alias", "descri... | [
"Construct",
"the",
"Opsgenie",
"JSON",
"payload",
".",
"All",
"relevant",
"parameters",
"are",
"combined",
"here",
"to",
"a",
"valid",
"Opsgenie",
"JSON",
"payload",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/opsgenie_alert_operator.py#L107-L124 | [
"def",
"_build_opsgenie_payload",
"(",
"self",
")",
":",
"payload",
"=",
"{",
"}",
"for",
"key",
"in",
"[",
"\"message\"",
",",
"\"alias\"",
",",
"\"description\"",
",",
"\"responders\"",
",",
"\"visibleTo\"",
",",
"\"actions\"",
",",
"\"tags\"",
",",
"\"detai... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | OpsgenieAlertOperator.execute | Call the OpsgenieAlertHook to post message | airflow/contrib/operators/opsgenie_alert_operator.py | def execute(self, context):
"""
Call the OpsgenieAlertHook to post message
"""
self.hook = OpsgenieAlertHook(self.opsgenie_conn_id)
self.hook.execute(self._build_opsgenie_payload()) | def execute(self, context):
"""
Call the OpsgenieAlertHook to post message
"""
self.hook = OpsgenieAlertHook(self.opsgenie_conn_id)
self.hook.execute(self._build_opsgenie_payload()) | [
"Call",
"the",
"OpsgenieAlertHook",
"to",
"post",
"message"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/opsgenie_alert_operator.py#L126-L131 | [
"def",
"execute",
"(",
"self",
",",
"context",
")",
":",
"self",
".",
"hook",
"=",
"OpsgenieAlertHook",
"(",
"self",
".",
"opsgenie_conn_id",
")",
"self",
".",
"hook",
".",
"execute",
"(",
"self",
".",
"_build_opsgenie_payload",
"(",
")",
")"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AWSAthenaHook.get_conn | Check if an AWS connection exists already, or create one and return it
:return: boto3 session | airflow/contrib/hooks/aws_athena_hook.py | def get_conn(self):
"""
Check if an AWS connection exists already, or create one and return it
:return: boto3 session
"""
if not self.conn:
self.conn = self.get_client_type('athena')
return self.conn | def get_conn(self):
"""
Check if an AWS connection exists already, or create one and return it
:return: boto3 session
"""
if not self.conn:
self.conn = self.get_client_type('athena')
return self.conn | [
"check",
"if",
"aws",
"conn",
"exists",
"already",
"or",
"create",
"one",
"and",
"return",
"it"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L43-L51 | [
"def",
"get_conn",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"conn",
":",
"self",
".",
"conn",
"=",
"self",
".",
"get_client_type",
"(",
"'athena'",
")",
"return",
"self",
".",
"conn"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AWSAthenaHook.run_query | Run Presto query on athena with provided config and return submitted query_execution_id
:param query: Presto query to run
:type query: str
:param query_context: Context in which query need to be run
:type query_context: dict
:param result_configuration: Dict with path to store r... | airflow/contrib/hooks/aws_athena_hook.py | def run_query(self, query, query_context, result_configuration, client_request_token=None):
"""
Run Presto query on athena with provided config and return submitted query_execution_id
:param query: Presto query to run
:type query: str
:param query_context: Context in which query... | def run_query(self, query, query_context, result_configuration, client_request_token=None):
"""
Run Presto query on athena with provided config and return submitted query_execution_id
:param query: Presto query to run
:type query: str
:param query_context: Context in which query... | [
"Run",
"Presto",
"query",
"on",
"athena",
"with",
"provided",
"config",
"and",
"return",
"submitted",
"query_execution_id"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L53-L72 | [
"def",
"run_query",
"(",
"self",
",",
"query",
",",
"query_context",
",",
"result_configuration",
",",
"client_request_token",
"=",
"None",
")",
":",
"response",
"=",
"self",
".",
"conn",
".",
"start_query_execution",
"(",
"QueryString",
"=",
"query",
",",
"Cl... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AWSAthenaHook.check_query_status | Fetch the status of submitted athena query. Returns None or one of valid query states.
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:return: str | airflow/contrib/hooks/aws_athena_hook.py | def check_query_status(self, query_execution_id):
"""
Fetch the status of submitted athena query. Returns None or one of valid query states.
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:return: str
"""
response = self.con... | def check_query_status(self, query_execution_id):
"""
Fetch the status of submitted athena query. Returns None or one of valid query states.
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:return: str
"""
response = self.con... | [
"Fetch",
"the",
"status",
"of",
"submitted",
"athena",
"query",
".",
"Returns",
"None",
"or",
"one",
"of",
"valid",
"query",
"states",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L74-L89 | [
"def",
"check_query_status",
"(",
"self",
",",
"query_execution_id",
")",
":",
"response",
"=",
"self",
".",
"conn",
".",
"get_query_execution",
"(",
"QueryExecutionId",
"=",
"query_execution_id",
")",
"state",
"=",
"None",
"try",
":",
"state",
"=",
"response",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AWSAthenaHook.get_query_results | Fetch submitted Athena query results. Returns None if the query is in an intermediate or
failed/cancelled state, else a dict of the query output
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:return: dict | airflow/contrib/hooks/aws_athena_hook.py | def get_query_results(self, query_execution_id):
"""
Fetch submitted Athena query results. Returns None if the query is in an intermediate or
failed/cancelled state, else a dict of the query output
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
... | def get_query_results(self, query_execution_id):
"""
Fetch submitted Athena query results. Returns None if the query is in an intermediate or
failed/cancelled state, else a dict of the query output
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
... | [
"Fetch",
"submitted",
"athena",
"query",
"results",
".",
"returns",
"none",
"if",
"query",
"is",
"in",
"intermediate",
"state",
"or",
"failed",
"/",
"cancelled",
"state",
"else",
"dict",
"of",
"query",
"output"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L91-L107 | [
"def",
"get_query_results",
"(",
"self",
",",
"query_execution_id",
")",
":",
"query_state",
"=",
"self",
".",
"check_query_status",
"(",
"query_execution_id",
")",
"if",
"query_state",
"is",
"None",
":",
"self",
".",
"log",
".",
"error",
"(",
"'Invalid Query st... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AWSAthenaHook.poll_query_status | Poll the status of submitted athena query until query state reaches final state.
Returns one of the final states
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:param max_tries: Number of times to poll for query state before function exits
... | airflow/contrib/hooks/aws_athena_hook.py | def poll_query_status(self, query_execution_id, max_tries=None):
"""
Poll the status of submitted athena query until query state reaches final state.
Returns one of the final states
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:pa... | def poll_query_status(self, query_execution_id, max_tries=None):
"""
Poll the status of submitted athena query until query state reaches final state.
Returns one of the final states
:param query_execution_id: Id of submitted athena query
:type query_execution_id: str
:pa... | [
"Poll",
"the",
"status",
"of",
"submitted",
"athena",
"query",
"until",
"query",
"state",
"reaches",
"final",
"state",
".",
"Returns",
"one",
"of",
"the",
"final",
"states"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_athena_hook.py#L109-L140 | [
"def",
"poll_query_status",
"(",
"self",
",",
"query_execution_id",
",",
"max_tries",
"=",
"None",
")",
":",
"try_number",
"=",
"1",
"final_query_state",
"=",
"None",
"# Query state when query reaches final state or max_tries reached",
"while",
"True",
":",
"query_state",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SFTPHook.get_conn | Returns an SFTP connection object | airflow/contrib/hooks/sftp_hook.py | def get_conn(self):
"""
Returns an SFTP connection object
"""
if self.conn is None:
cnopts = pysftp.CnOpts()
if self.no_host_key_check:
cnopts.hostkeys = None
cnopts.compression = self.compress
conn_params = {
... | def get_conn(self):
"""
Returns an SFTP connection object
"""
if self.conn is None:
cnopts = pysftp.CnOpts()
if self.no_host_key_check:
cnopts.hostkeys = None
cnopts.compression = self.compress
conn_params = {
... | [
"Returns",
"an",
"SFTP",
"connection",
"object"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L92-L115 | [
"def",
"get_conn",
"(",
"self",
")",
":",
"if",
"self",
".",
"conn",
"is",
"None",
":",
"cnopts",
"=",
"pysftp",
".",
"CnOpts",
"(",
")",
"if",
"self",
".",
"no_host_key_check",
":",
"cnopts",
".",
"hostkeys",
"=",
"None",
"cnopts",
".",
"compression",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SFTPHook.describe_directory | Returns a dictionary of {filename: {attributes}} for all files
on the remote system (where the MLSD command is supported).
:param path: full path to the remote directory
:type path: str | airflow/contrib/hooks/sftp_hook.py | def describe_directory(self, path):
"""
Returns a dictionary of {filename: {attributes}} for all files
on the remote system (where the MLSD command is supported).
:param path: full path to the remote directory
:type path: str
"""
conn = self.get_conn()
fli... | def describe_directory(self, path):
"""
Returns a dictionary of {filename: {attributes}} for all files
on the remote system (where the MLSD command is supported).
:param path: full path to the remote directory
:type path: str
"""
conn = self.get_conn()
fli... | [
"Returns",
"a",
"dictionary",
"of",
"{",
"filename",
":",
"{",
"attributes",
"}}",
"for",
"all",
"files",
"on",
"the",
"remote",
"system",
"(",
"where",
"the",
"MLSD",
"command",
"is",
"supported",
")",
".",
":",
"param",
"path",
":",
"full",
"path",
"... | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L126-L143 | [
"def",
"describe_directory",
"(",
"self",
",",
"path",
")",
":",
"conn",
"=",
"self",
".",
"get_conn",
"(",
")",
"flist",
"=",
"conn",
".",
"listdir_attr",
"(",
"path",
")",
"files",
"=",
"{",
"}",
"for",
"f",
"in",
"flist",
":",
"modify",
"=",
"da... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SFTPHook.list_directory | Returns a list of files on the remote system.
:param path: full path to the remote directory to list
:type path: str | airflow/contrib/hooks/sftp_hook.py | def list_directory(self, path):
"""
Returns a list of files on the remote system.
:param path: full path to the remote directory to list
:type path: str
"""
conn = self.get_conn()
files = conn.listdir(path)
return files | def list_directory(self, path):
"""
Returns a list of files on the remote system.
:param path: full path to the remote directory to list
:type path: str
"""
conn = self.get_conn()
files = conn.listdir(path)
return files | [
"Returns",
"a",
"list",
"of",
"files",
"on",
"the",
"remote",
"system",
".",
":",
"param",
"path",
":",
"full",
"path",
"to",
"the",
"remote",
"directory",
"to",
"list",
":",
"type",
"path",
":",
"str"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L145-L153 | [
"def",
"list_directory",
"(",
"self",
",",
"path",
")",
":",
"conn",
"=",
"self",
".",
"get_conn",
"(",
")",
"files",
"=",
"conn",
".",
"listdir",
"(",
"path",
")",
"return",
"files"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SFTPHook.create_directory | Creates a directory on the remote system.
:param path: full path to the remote directory to create
:type path: str
:param mode: int representation of octal mode for directory | airflow/contrib/hooks/sftp_hook.py | def create_directory(self, path, mode=777):
"""
Creates a directory on the remote system.
:param path: full path to the remote directory to create
:type path: str
:param mode: int representation of octal mode for directory
"""
conn = self.get_conn()
conn.m... | def create_directory(self, path, mode=777):
"""
Creates a directory on the remote system.
:param path: full path to the remote directory to create
:type path: str
:param mode: int representation of octal mode for directory
"""
conn = self.get_conn()
conn.m... | [
"Creates",
"a",
"directory",
"on",
"the",
"remote",
"system",
".",
":",
"param",
"path",
":",
"full",
"path",
"to",
"the",
"remote",
"directory",
"to",
"create",
":",
"type",
"path",
":",
"str",
":",
"param",
"mode",
":",
"int",
"representation",
"of",
... | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L155-L163 | [
"def",
"create_directory",
"(",
"self",
",",
"path",
",",
"mode",
"=",
"777",
")",
":",
"conn",
"=",
"self",
".",
"get_conn",
"(",
")",
"conn",
".",
"mkdir",
"(",
"path",
",",
"mode",
")"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SFTPHook.retrieve_file | Transfers the remote file to a local location.
If local_full_path is a string path, the file will be put
at that location
:param remote_full_path: full path to the remote file
:type remote_full_path: str
:param local_full_path: full path to the local file
:type local_full... | airflow/contrib/hooks/sftp_hook.py | def retrieve_file(self, remote_full_path, local_full_path):
"""
Transfers the remote file to a local location.
If local_full_path is a string path, the file will be put
at that location
:param remote_full_path: full path to the remote file
:type remote_full_path: str
... | def retrieve_file(self, remote_full_path, local_full_path):
"""
Transfers the remote file to a local location.
If local_full_path is a string path, the file will be put
at that location
:param remote_full_path: full path to the remote file
:type remote_full_path: str
... | [
"Transfers",
"the",
"remote",
"file",
"to",
"a",
"local",
"location",
".",
"If",
"local_full_path",
"is",
"a",
"string",
"path",
"the",
"file",
"will",
"be",
"put",
"at",
"that",
"location",
":",
"param",
"remote_full_path",
":",
"full",
"path",
"to",
"the... | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L174-L187 | [
"def",
"retrieve_file",
"(",
"self",
",",
"remote_full_path",
",",
"local_full_path",
")",
":",
"conn",
"=",
"self",
".",
"get_conn",
"(",
")",
"self",
".",
"log",
".",
"info",
"(",
"'Retrieving file from FTP: %s'",
",",
"remote_full_path",
")",
"conn",
".",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SFTPHook.store_file | Transfers a local file to the remote location.
If local_full_path is a string path, the file will be read
from that location
:param remote_full_path: full path to the remote file
:type remote_full_path: str
:param local_full_path: full path to the local file
:ty... | airflow/contrib/hooks/sftp_hook.py | def store_file(self, remote_full_path, local_full_path):
"""
Transfers a local file to the remote location.
If local_full_path is a string path, the file will be read
from that location
:param remote_full_path: full path to the remote file
:type remote_full_path... | def store_file(self, remote_full_path, local_full_path):
"""
Transfers a local file to the remote location.
If local_full_path is a string path, the file will be read
from that location
:param remote_full_path: full path to the remote file
:type remote_full_path... | [
"Transfers",
"a",
"local",
"file",
"to",
"the",
"remote",
"location",
".",
"If",
"local_full_path_or_buffer",
"is",
"a",
"string",
"path",
"the",
"file",
"will",
"be",
"read",
"from",
"that",
"location",
":",
"param",
"remote_full_path",
":",
"full",
"path",
... | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sftp_hook.py#L189-L200 | [
"def",
"store_file",
"(",
"self",
",",
"remote_full_path",
",",
"local_full_path",
")",
":",
"conn",
"=",
"self",
".",
"get_conn",
"(",
")",
"conn",
".",
"put",
"(",
"local_full_path",
",",
"remote_full_path",
")"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
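A hedged round-trip sketch for the SFTPHook records above; the connection id and all paths are illustrative assumptions:

from airflow.contrib.hooks.sftp_hook import SFTPHook

hook = SFTPHook(ftp_conn_id="sftp_default")  # assumed connection id
remote_files = hook.list_directory("/upload")  # names only; describe_directory adds attributes
hook.retrieve_file("/upload/data.csv", "/tmp/data.csv")  # remote -> local
hook.store_file("/upload/out.csv", "/tmp/out.csv")       # local -> remote (remote path first)
hook.close_conn()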
test | ZendeskHook.__handle_rate_limit_exception | Sleep for the time specified in the exception. If not specified, wait
for 60 seconds. | airflow/hooks/zendesk_hook.py | def __handle_rate_limit_exception(self, rate_limit_exception):
"""
Sleep for the time specified in the exception. If not specified, wait
for 60 seconds.
"""
retry_after = int(
rate_limit_exception.response.headers.get('Retry-After', 60))
self.log.info(
... | def __handle_rate_limit_exception(self, rate_limit_exception):
"""
Sleep for the time specified in the exception. If not specified, wait
for 60 seconds.
"""
retry_after = int(
rate_limit_exception.response.headers.get('Retry-After', 60))
self.log.info(
... | [
"Sleep",
"for",
"the",
"time",
"specified",
"in",
"the",
"exception",
".",
"If",
"not",
"specified",
"wait",
"for",
"60",
"seconds",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/zendesk_hook.py#L39-L50 | [
"def",
"__handle_rate_limit_exception",
"(",
"self",
",",
"rate_limit_exception",
")",
":",
"retry_after",
"=",
"int",
"(",
"rate_limit_exception",
".",
"response",
".",
"headers",
".",
"get",
"(",
"'Retry-After'",
",",
"60",
")",
")",
"self",
".",
"log",
".",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | ZendeskHook.call | Call Zendesk API and return results
:param path: The Zendesk API to call
:param query: Query parameters
:param get_all_pages: Accumulate results over all pages before
returning. Due to strict rate limiting, this can often timeout.
Waits for recommended period betwe... | airflow/hooks/zendesk_hook.py | def call(self, path, query=None, get_all_pages=True, side_loading=False):
"""
Call Zendesk API and return results
:param path: The Zendesk API to call
:param query: Query parameters
:param get_all_pages: Accumulate results over all pages before
returning. Due to s... | def call(self, path, query=None, get_all_pages=True, side_loading=False):
"""
Call Zendesk API and return results
:param path: The Zendesk API to call
:param query: Query parameters
:param get_all_pages: Accumulate results over all pages before
returning. Due to s... | [
"Call",
"Zendesk",
"API",
"and",
"return",
"results"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/zendesk_hook.py#L52-L112 | [
"def",
"call",
"(",
"self",
",",
"path",
",",
"query",
"=",
"None",
",",
"get_all_pages",
"=",
"True",
",",
"side_loading",
"=",
"False",
")",
":",
"zendesk",
"=",
"self",
".",
"get_conn",
"(",
")",
"first_request_successful",
"=",
"False",
"while",
"not... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AwsGlueCatalogHook.get_partitions | Retrieves the partition values for a table.
:param database_name: The name of the catalog database where the partitions reside.
:type database_name: str
:param table_name: The name of the partitions' table.
:type table_name: str
:param expression: An expression filtering the par... | airflow/contrib/hooks/aws_glue_catalog_hook.py | def get_partitions(self,
database_name,
table_name,
expression='',
page_size=None,
max_items=None):
"""
Retrieves the partition values for a table.
:param database_name: The name o... | def get_partitions(self,
database_name,
table_name,
expression='',
page_size=None,
max_items=None):
"""
Retrieves the partition values for a table.
:param database_name: The name o... | [
"Retrieves",
"the",
"partition",
"values",
"for",
"a",
"table",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L50-L93 | [
"def",
"get_partitions",
"(",
"self",
",",
"database_name",
",",
"table_name",
",",
"expression",
"=",
"''",
",",
"page_size",
"=",
"None",
",",
"max_items",
"=",
"None",
")",
":",
"config",
"=",
"{",
"'PageSize'",
":",
"page_size",
",",
"'MaxItems'",
":",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AwsGlueCatalogHook.check_for_partition | Checks whether a partition exists
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table @partition belongs to
:type table_name: str
:param expression: Expression that matches the partitions to check for
... | airflow/contrib/hooks/aws_glue_catalog_hook.py | def check_for_partition(self, database_name, table_name, expression):
"""
Checks whether a partition exists
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table @partition belongs to
:type t... | def check_for_partition(self, database_name, table_name, expression):
"""
Checks whether a partition exists
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table @partition belongs to
:type t... | [
"Checks",
"whether",
"a",
"partition",
"exists"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L95-L118 | [
"def",
"check_for_partition",
"(",
"self",
",",
"database_name",
",",
"table_name",
",",
"expression",
")",
":",
"partitions",
"=",
"self",
".",
"get_partitions",
"(",
"database_name",
",",
"table_name",
",",
"expression",
",",
"max_items",
"=",
"1",
")",
"if"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AwsGlueCatalogHook.get_table | Get the information of the table
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table
:type table_name: str
:rtype: dict
>>> hook = AwsGlueCatalogHook()
>>> r = hook.get_table('db',... | airflow/contrib/hooks/aws_glue_catalog_hook.py | def get_table(self, database_name, table_name):
"""
Get the information of the table
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table
:type table_name: str
:rtype: dict
... | def get_table(self, database_name, table_name):
"""
Get the information of the table
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table
:type table_name: str
:rtype: dict
... | [
"Get",
"the",
"information",
"of",
"the",
"table"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L120-L137 | [
"def",
"get_table",
"(",
"self",
",",
"database_name",
",",
"table_name",
")",
":",
"result",
"=",
"self",
".",
"get_conn",
"(",
")",
".",
"get_table",
"(",
"DatabaseName",
"=",
"database_name",
",",
"Name",
"=",
"table_name",
")",
"return",
"result",
"[",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | AwsGlueCatalogHook.get_table_location | Get the physical location of the table
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table
:type table_name: str
:return: str | airflow/contrib/hooks/aws_glue_catalog_hook.py | def get_table_location(self, database_name, table_name):
"""
Get the physical location of the table
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table
:type table_name: str
:return... | def get_table_location(self, database_name, table_name):
"""
Get the physical location of the table
:param database_name: Name of hive database (schema) @table belongs to
:type database_name: str
:param table_name: Name of hive table
:type table_name: str
:return... | [
"Get",
"the",
"physical",
"location",
"of",
"the",
"table"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/aws_glue_catalog_hook.py#L139-L152 | [
"def",
"get_table_location",
"(",
"self",
",",
"database_name",
",",
"table_name",
")",
":",
"table",
"=",
"self",
".",
"get_table",
"(",
"database_name",
",",
"table_name",
")",
"return",
"table",
"[",
"'StorageDescriptor'",
"]",
"[",
"'Location'",
"]"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | RedshiftHook.cluster_status | Return status of a cluster
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str | airflow/contrib/hooks/redshift_hook.py | def cluster_status(self, cluster_identifier):
"""
Return status of a cluster
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
"""
conn = self.get_conn()
try:
response = conn.describe_clusters(
Clu... | def cluster_status(self, cluster_identifier):
"""
Return status of a cluster
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
"""
conn = self.get_conn()
try:
response = conn.describe_clusters(
Clu... | [
"Return",
"status",
"of",
"a",
"cluster"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L31-L44 | [
"def",
"cluster_status",
"(",
"self",
",",
"cluster_identifier",
")",
":",
"conn",
"=",
"self",
".",
"get_conn",
"(",
")",
"try",
":",
"response",
"=",
"conn",
".",
"describe_clusters",
"(",
"ClusterIdentifier",
"=",
"cluster_identifier",
")",
"[",
"'Clusters'... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | RedshiftHook.delete_cluster | Delete a cluster and optionally create a snapshot
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param skip_final_cluster_snapshot: determines cluster snapshot creation
:type skip_final_cluster_snapshot: bool
:param final_cluster_snapsho... | airflow/contrib/hooks/redshift_hook.py | def delete_cluster(
self,
cluster_identifier,
skip_final_cluster_snapshot=True,
final_cluster_snapshot_identifier=''):
"""
Delete a cluster and optionally create a snapshot
:param cluster_identifier: unique identifier of a cluster
:type cl... | def delete_cluster(
self,
cluster_identifier,
skip_final_cluster_snapshot=True,
final_cluster_snapshot_identifier=''):
"""
Delete a cluster and optionally create a snapshot
:param cluster_identifier: unique identifier of a cluster
:type cl... | [
"Delete",
"a",
"cluster",
"and",
"optionally",
"create",
"a",
"snapshot"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L46-L66 | [
"def",
"delete_cluster",
"(",
"self",
",",
"cluster_identifier",
",",
"skip_final_cluster_snapshot",
"=",
"True",
",",
"final_cluster_snapshot_identifier",
"=",
"''",
")",
":",
"response",
"=",
"self",
".",
"get_conn",
"(",
")",
".",
"delete_cluster",
"(",
"Cluste... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | RedshiftHook.describe_cluster_snapshots | Gets a list of snapshots for a cluster
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str | airflow/contrib/hooks/redshift_hook.py | def describe_cluster_snapshots(self, cluster_identifier):
"""
Gets a list of snapshots for a cluster
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
"""
response = self.get_conn().describe_cluster_snapshots(
ClusterIden... | def describe_cluster_snapshots(self, cluster_identifier):
"""
Gets a list of snapshots for a cluster
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
"""
response = self.get_conn().describe_cluster_snapshots(
ClusterIden... | [
"Gets",
"a",
"list",
"of",
"snapshots",
"for",
"a",
"cluster"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L68-L83 | [
"def",
"describe_cluster_snapshots",
"(",
"self",
",",
"cluster_identifier",
")",
":",
"response",
"=",
"self",
".",
"get_conn",
"(",
")",
".",
"describe_cluster_snapshots",
"(",
"ClusterIdentifier",
"=",
"cluster_identifier",
")",
"if",
"'Snapshots'",
"not",
"in",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | RedshiftHook.restore_from_cluster_snapshot | Restores a cluster from its snapshot
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str | airflow/contrib/hooks/redshift_hook.py | def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier):
"""
Restores a cluster from its snapshot
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param snapshot_identifier: unique identifier for a snapshot of a cl... | def restore_from_cluster_snapshot(self, cluster_identifier, snapshot_identifier):
"""
Restores a cluster from its snapshot
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str
:param snapshot_identifier: unique identifier for a snapshot of a cl... | [
"Restores",
"a",
"cluster",
"from",
"its",
"snapshot"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L85-L98 | [
"def",
"restore_from_cluster_snapshot",
"(",
"self",
",",
"cluster_identifier",
",",
"snapshot_identifier",
")",
":",
"response",
"=",
"self",
".",
"get_conn",
"(",
")",
".",
"restore_from_cluster_snapshot",
"(",
"ClusterIdentifier",
"=",
"cluster_identifier",
",",
"S... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | RedshiftHook.create_cluster_snapshot | Creates a snapshot of a cluster
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
:param cluster_identifier: unique identifier of a cluster
:type cluster_identifier: str | airflow/contrib/hooks/redshift_hook.py | def create_cluster_snapshot(self, snapshot_identifier, cluster_identifier):
"""
Creates a snapshot of a cluster
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
:param cluster_identifier: unique identifier of a cluster
... | def create_cluster_snapshot(self, snapshot_identifier, cluster_identifier):
"""
Creates a snapshot of a cluster
:param snapshot_identifier: unique identifier for a snapshot of a cluster
:type snapshot_identifier: str
:param cluster_identifier: unique identifier of a cluster
... | [
"Creates",
"a",
"snapshot",
"of",
"a",
"cluster"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/redshift_hook.py#L100-L113 | [
"def",
"create_cluster_snapshot",
"(",
"self",
",",
"snapshot_identifier",
",",
"cluster_identifier",
")",
":",
"response",
"=",
"self",
".",
"get_conn",
"(",
")",
".",
"create_cluster_snapshot",
"(",
"SnapshotIdentifier",
"=",
"snapshot_identifier",
",",
"ClusterIden... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SlackAPIOperator.execute | SlackAPIOperator calls will not fail even if the call is unsuccessful.
It should not prevent a DAG from completing in success | airflow/operators/slack_operator.py | def execute(self, **kwargs):
"""
SlackAPIOperator calls will not fail even if the call is unsuccessful.
It should not prevent a DAG from completing in success
"""
if not self.api_params:
self.construct_api_call_params()
slack = SlackHook(token=self.token, ... | def execute(self, **kwargs):
"""
SlackAPIOperator calls will not fail even if the call is unsuccessful.
It should not prevent a DAG from completing in success
"""
if not self.api_params:
self.construct_api_call_params()
slack = SlackHook(token=self.token, ... | [
"SlackAPIOperator",
"calls",
"will",
"not",
"fail",
"even",
"if",
"the",
"call",
"is",
"not",
"unsuccessful",
".",
"It",
"should",
"not",
"prevent",
"a",
"DAG",
"from",
"completing",
"in",
"success"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/slack_operator.py#L79-L87 | [
"def",
"execute",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"api_params",
":",
"self",
".",
"construct_api_call_params",
"(",
")",
"slack",
"=",
"SlackHook",
"(",
"token",
"=",
"self",
".",
"token",
",",
"slack_conn_id",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | PodGenerator.add_volume | Args:
volume (Volume): | airflow/contrib/kubernetes/pod_generator.py | def add_volume(self, volume):
"""
Args:
volume (Volume):
"""
self._add_volume(name=volume.name, configs=volume.configs) | def add_volume(self, volume):
"""
Args:
volume (Volume):
"""
self._add_volume(name=volume.name, configs=volume.configs) | [
"Args",
":",
"volume",
"(",
"Volume",
")",
":"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/kubernetes/pod_generator.py#L67-L73 | [
"def",
"add_volume",
"(",
"self",
",",
"volume",
")",
":",
"self",
".",
"_add_volume",
"(",
"name",
"=",
"volume",
".",
"name",
",",
"configs",
"=",
"volume",
".",
"configs",
")"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | PodGenerator.add_mount | Args:
volume_mount (VolumeMount): | airflow/contrib/kubernetes/pod_generator.py | def add_mount(self,
volume_mount):
"""
Args:
volume_mount (VolumeMount):
"""
self._add_mount(
name=volume_mount.name,
mount_path=volume_mount.mount_path,
sub_path=volume_mount.sub_path,
read_only=volume_mount.r... | def add_mount(self,
volume_mount):
"""
Args:
volume_mount (VolumeMount):
"""
self._add_mount(
name=volume_mount.name,
mount_path=volume_mount.mount_path,
sub_path=volume_mount.sub_path,
read_only=volume_mount.r... | [
"Args",
":",
"volume_mount",
"(",
"VolumeMount",
")",
":"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/kubernetes/pod_generator.py#L124-L135 | [
"def",
"add_mount",
"(",
"self",
",",
"volume_mount",
")",
":",
"self",
".",
"_add_mount",
"(",
"name",
"=",
"volume_mount",
".",
"name",
",",
"mount_path",
"=",
"volume_mount",
".",
"mount_path",
",",
"sub_path",
"=",
"volume_mount",
".",
"sub_path",
",",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
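A hedged sketch for the PodGenerator records above; the volume name, configs dict, and mount path are illustrative assumptions:

from airflow.contrib.kubernetes.pod_generator import PodGenerator
from airflow.contrib.kubernetes.volume import Volume
from airflow.contrib.kubernetes.volume_mount import VolumeMount

generator = PodGenerator()
generator.add_volume(Volume(name="shared-data", configs={"emptyDir": {}}))  # assumed emptyDir volume
generator.add_mount(VolumeMount(name="shared-data", mount_path="/shared",
                                sub_path=None, read_only=False))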
test | EmrHook.create_job_flow | Creates a job flow using the config from the EMR connection.
Keys of the connection's JSON extra hash may hold the arguments of the boto3
run_job_flow method.
Overrides for this config may be passed as the job_flow_overrides. | airflow/contrib/hooks/emr_hook.py | def create_job_flow(self, job_flow_overrides):
"""
Creates a job flow using the config from the EMR connection.
Keys of the connection's JSON extra hash may hold the arguments of the boto3
run_job_flow method.
Overrides for this config may be passed as the job_flow_overrides.
"""
... | def create_job_flow(self, job_flow_overrides):
"""
Creates a job flow using the config from the EMR connection.
Keys of the json extra hash may have the arguments of the boto3
run_job_flow method.
Overrides for this config may be passed as the job_flow_overrides.
"""
... | [
"Creates",
"a",
"job",
"flow",
"using",
"the",
"config",
"from",
"the",
"EMR",
"connection",
".",
"Keys",
"of",
"the",
"json",
"extra",
"hash",
"may",
"have",
"the",
"arguments",
"of",
"the",
"boto3",
"run_job_flow",
"method",
".",
"Overrides",
"for",
"thi... | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/emr_hook.py#L39-L57 | [
"def",
"create_job_flow",
"(",
"self",
",",
"job_flow_overrides",
")",
":",
"if",
"not",
"self",
".",
"emr_conn_id",
":",
"raise",
"AirflowException",
"(",
"'emr_conn_id must be present to use create_job_flow'",
")",
"emr_conn",
"=",
"self",
".",
"get_connection",
"("... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | HdfsSensor.filter_for_filesize | Will test the filepath result and test if its size is at least self.filesize
:param result: a list of dicts returned by Snakebite ls
:param size: the file size in MB a file should be at least to trigger True
:return: (bool) depending on the matching criteria | airflow/sensors/hdfs_sensor.py | def filter_for_filesize(result, size=None):
"""
Will test the filepath result and test if its size is at least self.filesize
:param result: a list of dicts returned by Snakebite ls
:param size: the file size in MB a file should be at least to trigger True
:return: (bool) dependi... | def filter_for_filesize(result, size=None):
"""
Will test the filepath result and test if its size is at least self.filesize
:param result: a list of dicts returned by Snakebite ls
:param size: the file size in MB a file should be at least to trigger True
:return: (bool) dependi... | [
"Will",
"test",
"the",
"filepath",
"result",
"and",
"test",
"if",
"its",
"size",
"is",
"at",
"least",
"self",
".",
"filesize"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/sensors/hdfs_sensor.py#L59-L76 | [
"def",
"filter_for_filesize",
"(",
"result",
",",
"size",
"=",
"None",
")",
":",
"if",
"size",
":",
"log",
"=",
"LoggingMixin",
"(",
")",
".",
"log",
"log",
".",
"debug",
"(",
"'Filtering for file size >= %s in files: %s'",
",",
"size",
",",
"map",
"(",
"l... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | HdfsSensor.filter_for_ignored_ext | Will filter the result, if instructed to do so, removing entries that match the ignore criteria
:param result: list of dicts returned by Snakebite ls
:type result: list[dict]
:param ignored_ext: list of ignored extensions
:type ignored_ext: list
:param ignore_copying: whether files still being copied should be ignored
:type ... | airflow/sensors/hdfs_sensor.py | def filter_for_ignored_ext(result, ignored_ext, ignore_copying):
"""
Will filter the result, if instructed to do so, removing entries that match the ignore criteria
:param result: list of dicts returned by Snakebite ls
:type result: list[dict]
:param ignored_ext: list of ignored extensions
:t... | def filter_for_ignored_ext(result, ignored_ext, ignore_copying):
"""
Will filter the result, if instructed to do so, removing entries that match the ignore criteria
:param result: list of dicts returned by Snakebite ls
:type result: list[dict]
:param ignored_ext: list of ignored extensions
:t... | [
"Will",
"filter",
"if",
"instructed",
"to",
"do",
"so",
"the",
"result",
"to",
"remove",
"matching",
"criteria"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/sensors/hdfs_sensor.py#L79-L102 | [
"def",
"filter_for_ignored_ext",
"(",
"result",
",",
"ignored_ext",
",",
"ignore_copying",
")",
":",
"if",
"ignore_copying",
":",
"log",
"=",
"LoggingMixin",
"(",
")",
".",
"log",
"regex_builder",
"=",
"r\"^.*\\.(%s$)$\"",
"%",
"'$|'",
".",
"join",
"(",
"ignor... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | MongoToS3Operator.execute | Executed by task_instance at runtime | airflow/contrib/operators/mongo_to_s3.py | def execute(self, context):
"""
Executed by task_instance at runtime
"""
s3_conn = S3Hook(self.s3_conn_id)
# Grab collection and execute query according to whether or not it is a pipeline
if self.is_pipeline:
results = MongoHook(self.mongo_conn_id).aggregate(... | def execute(self, context):
"""
Executed by task_instance at runtime
"""
s3_conn = S3Hook(self.s3_conn_id)
# Grab collection and execute query according to whether or not it is a pipeline
if self.is_pipeline:
results = MongoHook(self.mongo_conn_id).aggregate(... | [
"Executed",
"by",
"task_instance",
"at",
"runtime"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mongo_to_s3.py#L71-L103 | [
"def",
"execute",
"(",
"self",
",",
"context",
")",
":",
"s3_conn",
"=",
"S3Hook",
"(",
"self",
".",
"s3_conn_id",
")",
"# Grab collection and execute query according to whether or not it is a pipeline",
"if",
"self",
".",
"is_pipeline",
":",
"results",
"=",
"MongoHoo... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | MongoToS3Operator._stringify | Takes an iterable (pymongo Cursor or Array) containing dictionaries and
returns a stringified version using python join | airflow/contrib/operators/mongo_to_s3.py | def _stringify(iterable, joinable='\n'):
"""
Takes an iterable (pymongo Cursor or Array) containing dictionaries and
returns a stringified version using python join
"""
return joinable.join(
[json.dumps(doc, default=json_util.default) for doc in iterable]
) | def _stringify(iterable, joinable='\n'):
"""
Takes an iterable (pymongo Cursor or Array) containing dictionaries and
returns a stringified version using python join
"""
return joinable.join(
[json.dumps(doc, default=json_util.default) for doc in iterable]
) | [
"Takes",
"an",
"iterable",
"(",
"pymongo",
"Cursor",
"or",
"Array",
")",
"containing",
"dictionaries",
"and",
"returns",
"a",
"stringified",
"version",
"using",
"python",
"join"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/mongo_to_s3.py#L106-L113 | [
"def",
"_stringify",
"(",
"iterable",
",",
"joinable",
"=",
"'\\n'",
")",
":",
"return",
"joinable",
".",
"join",
"(",
"[",
"json",
".",
"dumps",
"(",
"doc",
",",
"default",
"=",
"json_util",
".",
"default",
")",
"for",
"doc",
"in",
"iterable",
"]",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
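A hedged illustration of the static helper above using hand-written documents (real calls receive a pymongo cursor, and bson must be installed for json_util):

from airflow.contrib.operators.mongo_to_s3 import MongoToS3Operator

docs = [{"_id": 1, "name": "a"}, {"_id": 2, "name": "b"}]
print(MongoToS3Operator._stringify(docs))  # two JSON documents joined by a newline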
test | get_pool | Get pool by a given name. | airflow/api/common/experimental/pool.py | def get_pool(name, session=None):
"""Get pool by a given name."""
if not (name and name.strip()):
raise AirflowBadRequest("Pool name shouldn't be empty")
pool = session.query(Pool).filter_by(pool=name).first()
if pool is None:
raise PoolNotFound("Pool '%s' doesn't exist" % name)
re... | def get_pool(name, session=None):
"""Get pool by a given name."""
if not (name and name.strip()):
raise AirflowBadRequest("Pool name shouldn't be empty")
pool = session.query(Pool).filter_by(pool=name).first()
if pool is None:
raise PoolNotFound("Pool '%s' doesn't exist" % name)
re... | [
"Get",
"pool",
"by",
"a",
"given",
"name",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L26-L35 | [
"def",
"get_pool",
"(",
"name",
",",
"session",
"=",
"None",
")",
":",
"if",
"not",
"(",
"name",
"and",
"name",
".",
"strip",
"(",
")",
")",
":",
"raise",
"AirflowBadRequest",
"(",
"\"Pool name shouldn't be empty\"",
")",
"pool",
"=",
"session",
".",
"qu... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | create_pool | Create a pool with the given parameters. | airflow/api/common/experimental/pool.py | def create_pool(name, slots, description, session=None):
"""Create a pool with the given parameters."""
if not (name and name.strip()):
raise AirflowBadRequest("Pool name shouldn't be empty")
try:
slots = int(slots)
except ValueError:
raise AirflowBadRequest("Bad value for `slots`... | def create_pool(name, slots, description, session=None):
"""Create a pool with a given parameters."""
if not (name and name.strip()):
raise AirflowBadRequest("Pool name shouldn't be empty")
try:
slots = int(slots)
except ValueError:
raise AirflowBadRequest("Bad value for `slots`... | [
"Create",
"a",
"pool",
"with",
"a",
"given",
"parameters",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L45-L66 | [
"def",
"create_pool",
"(",
"name",
",",
"slots",
",",
"description",
",",
"session",
"=",
"None",
")",
":",
"if",
"not",
"(",
"name",
"and",
"name",
".",
"strip",
"(",
")",
")",
":",
"raise",
"AirflowBadRequest",
"(",
"\"Pool name shouldn't be empty\"",
")... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | delete_pool | Delete pool by a given name. | airflow/api/common/experimental/pool.py | def delete_pool(name, session=None):
"""Delete pool by a given name."""
if not (name and name.strip()):
raise AirflowBadRequest("Pool name shouldn't be empty")
pool = session.query(Pool).filter_by(pool=name).first()
if pool is None:
raise PoolNotFound("Pool '%s' doesn't exist" % name)
... | def delete_pool(name, session=None):
"""Delete pool by a given name."""
if not (name and name.strip()):
raise AirflowBadRequest("Pool name shouldn't be empty")
pool = session.query(Pool).filter_by(pool=name).first()
if pool is None:
raise PoolNotFound("Pool '%s' doesn't exist" % name)
... | [
"Delete",
"pool",
"by",
"a",
"given",
"name",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/api/common/experimental/pool.py#L70-L82 | [
"def",
"delete_pool",
"(",
"name",
",",
"session",
"=",
"None",
")",
":",
"if",
"not",
"(",
"name",
"and",
"name",
".",
"strip",
"(",
")",
")",
":",
"raise",
"AirflowBadRequest",
"(",
"\"Pool name shouldn't be empty\"",
")",
"pool",
"=",
"session",
".",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
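A hedged sketch chaining the three experimental pool helpers above; the pool name and slot count are assumptions (each function commits through its @provide_session session):

from airflow.api.common.experimental import pool as pool_api

pool_api.create_pool(name="example_pool", slots=4, description="demo pool")
print(pool_api.get_pool(name="example_pool").slots)  # -> 4
pool_api.delete_pool(name="example_pool")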
test | GKEClusterHook._dict_to_proto | Converts a python dictionary to the proto supplied
:param py_dict: The dictionary to convert
:type py_dict: dict
:param proto: The proto object to merge with dictionary
:type proto: protobuf
:return: A parsed python dictionary in provided proto format
:raises:
... | airflow/contrib/hooks/gcp_container_hook.py | def _dict_to_proto(py_dict, proto):
"""
Converts a python dictionary to the proto supplied
:param py_dict: The dictionary to convert
:type py_dict: dict
:param proto: The proto object to merge with dictionary
:type proto: protobuf
:return: A parsed python diction... | def _dict_to_proto(py_dict, proto):
"""
Converts a python dictionary to the proto supplied
:param py_dict: The dictionary to convert
:type py_dict: dict
:param proto: The proto object to merge with dictionary
:type proto: protobuf
:return: A parsed python diction... | [
"Converts",
"a",
"python",
"dictionary",
"to",
"the",
"proto",
"supplied"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L57-L70 | [
"def",
"_dict_to_proto",
"(",
"py_dict",
",",
"proto",
")",
":",
"dict_json_str",
"=",
"json",
".",
"dumps",
"(",
"py_dict",
")",
"return",
"json_format",
".",
"Parse",
"(",
"dict_json_str",
",",
"proto",
")"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
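A hedged sketch of what the helper above does internally, written directly against json_format; the Cluster message and field names are assumptions:

import json

from google.cloud.container_v1.types import Cluster
from google.protobuf import json_format

py_dict = {"name": "example-cluster", "initial_node_count": 1}  # hypothetical cluster spec
cluster_proto = json_format.Parse(json.dumps(py_dict), Cluster())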
test | GKEClusterHook.wait_for_operation | Given an operation, continuously fetches the status from Google Cloud until either
completion or an error occurs
:param operation: The Operation to wait for
:type operation: google.cloud.container_V1.gapic.enums.Operation
:param project_id: Google Cloud Platform project ID
:t... | airflow/contrib/hooks/gcp_container_hook.py | def wait_for_operation(self, operation, project_id=None):
"""
Given an operation, continuously fetches the status from Google Cloud until either
completion or an error occurs
:param operation: The Operation to wait for
:type operation: google.cloud.container_V1.gapic.enums.Op... | def wait_for_operation(self, operation, project_id=None):
"""
Given an operation, continuously fetches the status from Google Cloud until either
completion or an error occurs
:param operation: The Operation to wait for
:type operation: google.cloud.container_V1.gapic.enums.Op... | [
"Given",
"an",
"operation",
"continuously",
"fetches",
"the",
"status",
"from",
"Google",
"Cloud",
"until",
"either",
"completion",
"or",
"an",
"error",
"occurring"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L72-L94 | [
"def",
"wait_for_operation",
"(",
"self",
",",
"operation",
",",
"project_id",
"=",
"None",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Waiting for OPERATION_NAME %s\"",
",",
"operation",
".",
"name",
")",
"time",
".",
"sleep",
"(",
"OPERATIONAL_POLL_IN... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | GKEClusterHook.get_operation | Fetches the operation from Google Cloud
:param operation_name: Name of operation to fetch
:type operation_name: str
:param project_id: Google Cloud Platform project ID
:type project_id: str
:return: The new, updated operation from Google Cloud | airflow/contrib/hooks/gcp_container_hook.py | def get_operation(self, operation_name, project_id=None):
"""
Fetches the operation from Google Cloud
:param operation_name: Name of operation to fetch
:type operation_name: str
:param project_id: Google Cloud Platform project ID
:type project_id: str
:return: Th... | def get_operation(self, operation_name, project_id=None):
"""
Fetches the operation from Google Cloud
:param operation_name: Name of operation to fetch
:type operation_name: str
:param project_id: Google Cloud Platform project ID
:type project_id: str
:return: Th... | [
"Fetches",
"the",
"operation",
"from",
"Google",
"Cloud"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L96-L108 | [
"def",
"get_operation",
"(",
"self",
",",
"operation_name",
",",
"project_id",
"=",
"None",
")",
":",
"return",
"self",
".",
"get_client",
"(",
")",
".",
"get_operation",
"(",
"project_id",
"=",
"project_id",
"or",
"self",
".",
"project_id",
",",
"zone",
"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | GKEClusterHook._append_label | Append labels to provided Cluster Protobuf
Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current
airflow version string follows semantic versioning spec: x.y.z).
:param cluster_proto: The proto to append resource_label airflow
version to
:type cluster_proto: go... | airflow/contrib/hooks/gcp_container_hook.py | def _append_label(cluster_proto, key, val):
"""
Append labels to provided Cluster Protobuf
Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current
airflow version string follows semantic versioning spec: x.y.z).
:param cluster_proto: The proto to append resource_labe... | def _append_label(cluster_proto, key, val):
"""
Append labels to provided Cluster Protobuf
Labels must fit the regex ``[a-z]([-a-z0-9]*[a-z0-9])?`` (current
airflow version string follows semantic versioning spec: x.y.z).
:param cluster_proto: The proto to append resource_labe... | [
"Append",
"labels",
"to",
"provided",
"Cluster",
"Protobuf"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L111-L129 | [
"def",
"_append_label",
"(",
"cluster_proto",
",",
"key",
",",
"val",
")",
":",
"val",
"=",
"val",
".",
"replace",
"(",
"'.'",
",",
"'-'",
")",
".",
"replace",
"(",
"'+'",
",",
"'-'",
")",
"cluster_proto",
".",
"resource_labels",
".",
"update",
"(",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
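The sanitization visible in the code tokens, sketched standalone; the version string below is a hypothetical example:

```python
def append_label(cluster_proto, key, val):
    # '.' and '+' are legal in version strings but not in the GKE label
    # regex [a-z]([-a-z0-9]*[a-z0-9])?, so both are mapped to '-'.
    val = val.replace('.', '-').replace('+', '-')
    cluster_proto.resource_labels.update({key: val})
    return cluster_proto

# e.g. a hypothetical value "v1.10.2+build" becomes the label value "v1-10-2-build"
```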
test | GKEClusterHook.delete_cluster | Deletes the cluster, including the Kubernetes endpoint and all
worker nodes. Firewalls and routes that were configured during
cluster creation are also deleted. Other Google Compute Engine
resources that might be in use by the cluster (e.g. load balancer
resources) will not be deleted if... | airflow/contrib/hooks/gcp_container_hook.py | def delete_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT):
"""
Deletes the cluster, including the Kubernetes endpoint and all
worker nodes. Firewalls and routes that were configured during
cluster creation are also deleted. Other Google Compute Engine
resour... | def delete_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT):
"""
Deletes the cluster, including the Kubernetes endpoint and all
worker nodes. Firewalls and routes that were configured during
cluster creation are also deleted. Other Google Compute Engine
resour... | [
"Deletes",
"the",
"cluster",
"including",
"the",
"Kubernetes",
"endpoint",
"and",
"all",
"worker",
"nodes",
".",
"Firewalls",
"and",
"routes",
"that",
"were",
"configured",
"during",
"cluster",
"creation",
"are",
"also",
"deleted",
".",
"Other",
"Google",
"Compu... | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L131-L168 | [
"def",
"delete_cluster",
"(",
"self",
",",
"name",
",",
"project_id",
"=",
"None",
",",
"retry",
"=",
"DEFAULT",
",",
"timeout",
"=",
"DEFAULT",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Deleting (project_id=%s, zone=%s, cluster_id=%s)\"",
",",
"self"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | GKEClusterHook.create_cluster | Creates a cluster, consisting of the specified number and type of Google Compute
Engine instances.
:param cluster: A Cluster protobuf or dict. If dict is provided, it must
be of the same form as the protobuf message
:class:`google.cloud.container_v1.types.Cluster`
:type ... | airflow/contrib/hooks/gcp_container_hook.py | def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAULT):
"""
Creates a cluster, consisting of the specified number and type of Google Compute
Engine instances.
:param cluster: A Cluster protobuf or dict. If dict is provided, it must
be of the same ... | def create_cluster(self, cluster, project_id=None, retry=DEFAULT, timeout=DEFAULT):
"""
Creates a cluster, consisting of the specified number and type of Google Compute
Engine instances.
:param cluster: A Cluster protobuf or dict. If dict is provided, it must
be of the same ... | [
"Creates",
"a",
"cluster",
"consisting",
"of",
"the",
"specified",
"number",
"and",
"type",
"of",
"Google",
"Compute",
"Engine",
"instances",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L170-L219 | [
"def",
"create_cluster",
"(",
"self",
",",
"cluster",
",",
"project_id",
"=",
"None",
",",
"retry",
"=",
"DEFAULT",
",",
"timeout",
"=",
"DEFAULT",
")",
":",
"if",
"isinstance",
"(",
"cluster",
",",
"dict",
")",
":",
"cluster_proto",
"=",
"Cluster",
"(",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | GKEClusterHook.get_cluster | Gets details of specified cluster
:param name: The name of the cluster to retrieve
:type name: str
:param project_id: Google Cloud Platform project ID
:type project_id: str
:param retry: A retry object used to retry requests. If None is specified,
requests will not b... | airflow/contrib/hooks/gcp_container_hook.py | def get_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT):
"""
Gets details of specified cluster
:param name: The name of the cluster to retrieve
:type name: str
:param project_id: Google Cloud Platform project ID
:type project_id: str
:param r... | def get_cluster(self, name, project_id=None, retry=DEFAULT, timeout=DEFAULT):
"""
Gets details of specified cluster
:param name: The name of the cluster to retrieve
:type name: str
:param project_id: Google Cloud Platform project ID
:type project_id: str
:param r... | [
"Gets",
"details",
"of",
"specified",
"cluster"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_container_hook.py#L221-L247 | [
"def",
"get_cluster",
"(",
"self",
",",
"name",
",",
"project_id",
"=",
"None",
",",
"retry",
"=",
"DEFAULT",
",",
"timeout",
"=",
"DEFAULT",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Fetching cluster (project_id=%s, zone=%s, cluster_name=%s)\"",
",",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
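Assumed usage; the returned message is a `Cluster` proto, so fields such as `endpoint` and `status` can be read directly:

```python
cluster = hook.get_cluster(name='example-cluster', project_id='my-project')
print(cluster.endpoint, cluster.status)
```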
test | DiscordWebhookHook._get_webhook_endpoint | Given a Discord http_conn_id, return the default webhook endpoint or override if a
webhook_endpoint is manually supplied.
:param http_conn_id: The provided connection ID
:param webhook_endpoint: The manually provided webhook endpoint
:return: Webhook endpoint (str) to use | airflow/contrib/hooks/discord_webhook_hook.py | def _get_webhook_endpoint(self, http_conn_id, webhook_endpoint):
"""
Given a Discord http_conn_id, return the default webhook endpoint or override if a
webhook_endpoint is manually supplied.
:param http_conn_id: The provided connection ID
:param webhook_endpoint: The manually pr... | def _get_webhook_endpoint(self, http_conn_id, webhook_endpoint):
"""
Given a Discord http_conn_id, return the default webhook endpoint or override if a
webhook_endpoint is manually supplied.
:param http_conn_id: The provided connection ID
:param webhook_endpoint: The manually pr... | [
"Given",
"a",
"Discord",
"http_conn_id",
"return",
"the",
"default",
"webhook",
"endpoint",
"or",
"override",
"if",
"a",
"webhook_endpoint",
"is",
"manually",
"supplied",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L76-L100 | [
"def",
"_get_webhook_endpoint",
"(",
"self",
",",
"http_conn_id",
",",
"webhook_endpoint",
")",
":",
"if",
"webhook_endpoint",
":",
"endpoint",
"=",
"webhook_endpoint",
"elif",
"http_conn_id",
":",
"conn",
"=",
"self",
".",
"get_connection",
"(",
"http_conn_id",
"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | DiscordWebhookHook._build_discord_payload | Construct the Discord JSON payload. All relevant parameters are combined here
into a valid Discord JSON payload.
:return: Discord payload (str) to send | airflow/contrib/hooks/discord_webhook_hook.py | def _build_discord_payload(self):
"""
Construct the Discord JSON payload. All relevant parameters are combined here
into a valid Discord JSON payload.
:return: Discord payload (str) to send
"""
payload = {}
if self.username:
payload['username'] = self.... | def _build_discord_payload(self):
"""
Construct the Discord JSON payload. All relevant parameters are combined here
into a valid Discord JSON payload.
:return: Discord payload (str) to send
"""
payload = {}
if self.username:
payload['username'] = self.... | [
"Construct",
"the",
"Discord",
"JSON",
"payload",
".",
"All",
"relevant",
"parameters",
"are",
"combined",
"here",
"to",
"a",
"valid",
"Discord",
"JSON",
"payload",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L102-L124 | [
"def",
"_build_discord_payload",
"(",
"self",
")",
":",
"payload",
"=",
"{",
"}",
"if",
"self",
".",
"username",
":",
"payload",
"[",
"'username'",
"]",
"=",
"self",
".",
"username",
"if",
"self",
".",
"avatar_url",
":",
"payload",
"[",
"'avatar_url'",
"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | DiscordWebhookHook.execute | Execute the Discord webhook call | airflow/contrib/hooks/discord_webhook_hook.py | def execute(self):
"""
Execute the Discord webhook call
"""
proxies = {}
if self.proxy:
# we only need https proxy for Discord
proxies = {'https': self.proxy}
discord_payload = self._build_discord_payload()
self.run(endpoint=self.webhook_... | def execute(self):
"""
Execute the Discord webhook call
"""
proxies = {}
if self.proxy:
# we only need https proxy for Discord
proxies = {'https': self.proxy}
discord_payload = self._build_discord_payload()
self.run(endpoint=self.webhook_... | [
"Execute",
"the",
"Discord",
"webhook",
"call"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/discord_webhook_hook.py#L126-L140 | [
"def",
"execute",
"(",
"self",
")",
":",
"proxies",
"=",
"{",
"}",
"if",
"self",
".",
"proxy",
":",
"# we only need https proxy for Discord",
"proxies",
"=",
"{",
"'https'",
":",
"self",
".",
"proxy",
"}",
"discord_payload",
"=",
"self",
".",
"_build_discord... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | GoogleCloudKMSHook.encrypt | Encrypts a plaintext message using Google Cloud KMS.
:param key_name: The Resource Name for the key (or key version)
to be used for encryption. Of the form
``projects/*/locations/*/keyRings/*/cryptoKeys/**``
:type key_name: str
:param plaintext: ... | airflow/contrib/hooks/gcp_kms_hook.py | def encrypt(self, key_name, plaintext, authenticated_data=None):
"""
Encrypts a plaintext message using Google Cloud KMS.
:param key_name: The Resource Name for the key (or key version)
to be used for encryption. Of the form
``projects/*/location... | def encrypt(self, key_name, plaintext, authenticated_data=None):
"""
Encrypts a plaintext message using Google Cloud KMS.
:param key_name: The Resource Name for the key (or key version)
to be used for encryption. Of the form
``projects/*/location... | [
"Encrypts",
"a",
"plaintext",
"message",
"using",
"Google",
"Cloud",
"KMS",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_kms_hook.py#L58-L83 | [
"def",
"encrypt",
"(",
"self",
",",
"key_name",
",",
"plaintext",
",",
"authenticated_data",
"=",
"None",
")",
":",
"keys",
"=",
"self",
".",
"get_conn",
"(",
")",
".",
"projects",
"(",
")",
".",
"locations",
"(",
")",
".",
"keyRings",
"(",
")",
".",... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SqoopHook.Popen | Remote Popen
:param cmd: command to remotely execute
:param kwargs: extra arguments to Popen (see subprocess.Popen)
:return: handle to subprocess | airflow/contrib/hooks/sqoop_hook.py | def Popen(self, cmd, **kwargs):
"""
Remote Popen
:param cmd: command to remotely execute
:param kwargs: extra arguments to Popen (see subprocess.Popen)
:return: handle to subprocess
"""
masked_cmd = ' '.join(self.cmd_mask_password(cmd))
self.log.info("Exe... | def Popen(self, cmd, **kwargs):
"""
Remote Popen
:param cmd: command to remotely execute
:param kwargs: extra arguments to Popen (see subprocess.Popen)
:return: handle to subprocess
"""
masked_cmd = ' '.join(self.cmd_mask_password(cmd))
self.log.info("Exe... | [
"Remote",
"Popen"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L92-L116 | [
"def",
"Popen",
"(",
"self",
",",
"cmd",
",",
"*",
"*",
"kwargs",
")",
":",
"masked_cmd",
"=",
"' '",
".",
"join",
"(",
"self",
".",
"cmd_mask_password",
"(",
"cmd",
")",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Executing command: {}\"",
".",
"f... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SqoopHook.import_table | Imports table from remote location to target dir. Arguments are
copies of direct sqoop command line arguments
:param table: Table to read
:param target_dir: HDFS destination dir
:param append: Append data to an existing dataset in HDFS
:param file_type: "avro", "sequence", "text... | airflow/contrib/hooks/sqoop_hook.py | def import_table(self, table, target_dir=None, append=False, file_type="text",
columns=None, split_by=None, where=None, direct=False,
driver=None, extra_import_options=None):
"""
Imports table from remote location to target dir. Arguments are
copies of d... | def import_table(self, table, target_dir=None, append=False, file_type="text",
columns=None, split_by=None, where=None, direct=False,
driver=None, extra_import_options=None):
"""
Imports table from remote location to target dir. Arguments are
copies of d... | [
"Imports",
"table",
"from",
"remote",
"location",
"to",
"target",
"dir",
".",
"Arguments",
"are",
"copies",
"of",
"direct",
"sqoop",
"command",
"line",
"arguments"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L202-L233 | [
"def",
"import_table",
"(",
"self",
",",
"table",
",",
"target_dir",
"=",
"None",
",",
"append",
"=",
"False",
",",
"file_type",
"=",
"\"text\"",
",",
"columns",
"=",
"None",
",",
"split_by",
"=",
"None",
",",
"where",
"=",
"None",
",",
"direct",
"=",
... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
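Assumed usage; the connection ID, table, and paths are placeholders:

```python
hook = SqoopHook(conn_id='sqoop_default', num_mappers=4)
hook.import_table(
    table='orders',
    target_dir='/user/hive/warehouse/orders',
    file_type='parquet',
    split_by='order_id',
    where="created_at >= '2019-01-01'",
)
```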
test | SqoopHook.import_query | Imports a specific query from the rdbms to hdfs
:param query: Free format query to run
:param target_dir: HDFS destination dir
:param append: Append data to an existing dataset in HDFS
:param file_type: "avro", "sequence", "text" or "parquet"
Imports data to hdfs into the sp... | airflow/contrib/hooks/sqoop_hook.py | def import_query(self, query, target_dir, append=False, file_type="text",
split_by=None, direct=None, driver=None, extra_import_options=None):
"""
Imports a specific query from the rdbms to hdfs
:param query: Free format query to run
:param target_dir: HDFS destinat... | def import_query(self, query, target_dir, append=False, file_type="text",
split_by=None, direct=None, driver=None, extra_import_options=None):
"""
Imports a specific query from the rdbms to hdfs
:param query: Free format query to run
:param target_dir: HDFS destinat... | [
"Imports",
"a",
"specific",
"query",
"from",
"the",
"rdbms",
"to",
"hdfs"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L235-L256 | [
"def",
"import_query",
"(",
"self",
",",
"query",
",",
"target_dir",
",",
"append",
"=",
"False",
",",
"file_type",
"=",
"\"text\"",
",",
"split_by",
"=",
"None",
",",
"direct",
"=",
"None",
",",
"driver",
"=",
"None",
",",
"extra_import_options",
"=",
"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | SqoopHook.export_table | Exports Hive table to remote location. Arguments are copies of direct
sqoop command line arguments
:param table: Table remote destination
:param export_dir: Hive table to export
:param input_null_string: The string to be interpreted as null for
string columns
:param ... | airflow/contrib/hooks/sqoop_hook.py | def export_table(self, table, export_dir, input_null_string,
input_null_non_string, staging_table,
clear_staging_table, enclosed_by,
escaped_by, input_fields_terminated_by,
input_lines_terminated_by,
input_optionall... | def export_table(self, table, export_dir, input_null_string,
input_null_non_string, staging_table,
clear_staging_table, enclosed_by,
escaped_by, input_fields_terminated_by,
input_lines_terminated_by,
input_optionall... | [
"Exports",
"Hive",
"table",
"to",
"remote",
"location",
".",
"Arguments",
"are",
"copies",
"of",
"direct",
"sqoop",
"command",
"line",
"Arguments"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/sqoop_hook.py#L314-L355 | [
"def",
"export_table",
"(",
"self",
",",
"table",
",",
"export_dir",
",",
"input_null_string",
",",
"input_null_non_string",
",",
"staging_table",
",",
"clear_staging_table",
",",
"enclosed_by",
",",
"escaped_by",
",",
"input_fields_terminated_by",
",",
"input_lines_ter... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | GCPTextToSpeechHook.get_conn | Retrieves connection to Cloud Text to Speech.
:return: Google Cloud Text to Speech client object.
:rtype: google.cloud.texttospeech_v1.TextToSpeechClient | airflow/contrib/hooks/gcp_text_to_speech_hook.py | def get_conn(self):
"""
Retrieves connection to Cloud Text to Speech.
:return: Google Cloud Text to Speech client object.
:rtype: google.cloud.texttospeech_v1.TextToSpeechClient
"""
if not self._client:
self._client = TextToSpeechClient(credentials=self._get_... | def get_conn(self):
"""
Retrieves connection to Cloud Text to Speech.
:return: Google Cloud Text to Speech client object.
:rtype: google.cloud.texttospeech_v1.TextToSpeechClient
"""
if not self._client:
self._client = TextToSpeechClient(credentials=self._get_... | [
"Retrieves",
"connection",
"to",
"Cloud",
"Text",
"to",
"Speech",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_text_to_speech_hook.py#L42-L51 | [
"def",
"get_conn",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_client",
":",
"self",
".",
"_client",
"=",
"TextToSpeechClient",
"(",
"credentials",
"=",
"self",
".",
"_get_credentials",
"(",
")",
")",
"return",
"self",
".",
"_client"
] | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
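The memoization pattern above, sketched standalone; credential handling is elided since the hook supplies it:

```python
from google.cloud.texttospeech_v1 import TextToSpeechClient


class LazyTextToSpeech:
    def __init__(self):
        self._client = None

    def get_conn(self):
        # Build the client once and reuse it on every later call.
        if not self._client:
            self._client = TextToSpeechClient()
        return self._client
```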
test | GCPTextToSpeechHook.synthesize_speech | Synthesizes text input
:param input_data: text input to be synthesized. See more:
https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttospeech_v1.types.SynthesisInput
:type input_data: dict or google.cloud.texttospeech_v1.types.Synthesis... | airflow/contrib/hooks/gcp_text_to_speech_hook.py | def synthesize_speech(self, input_data, voice, audio_config, retry=None, timeout=None):
"""
Synthesizes text input
:param input_data: text input to be synthesized. See more:
https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttos... | def synthesize_speech(self, input_data, voice, audio_config, retry=None, timeout=None):
"""
Synthesizes text input
:param input_data: text input to be synthesized. See more:
https://googleapis.github.io/google-cloud-python/latest/texttospeech/gapic/v1/types.html#google.cloud.texttos... | [
"Synthesizes",
"text",
"input"
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_text_to_speech_hook.py#L53-L80 | [
"def",
"synthesize_speech",
"(",
"self",
",",
"input_data",
",",
"voice",
",",
"audio_config",
",",
"retry",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"get_conn",
"(",
")",
"self",
".",
"log",
".",
"info",
"(",
"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
test | S3TaskHandler.close | Close and upload local log file to remote storage S3. | airflow/utils/log/s3_task_handler.py | def close(self):
"""
Close and upload local log file to remote storage S3.
"""
# When the application exits, the system shuts down all handlers by
# calling their close method. Here we check whether the logger is already
# closed to prevent uploading the log to remote storage multiple
# ... | def close(self):
"""
Close and upload local log file to remote storage S3.
"""
# When the application exits, the system shuts down all handlers by
# calling their close method. Here we check whether the logger is already
# closed to prevent uploading the log to remote storage multiple
# ... | [
"Close",
"and",
"upload",
"local",
"log",
"file",
"to",
"remote",
"storage",
"S3",
"."
] | apache/airflow | python | https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/s3_task_handler.py#L62-L87 | [
"def",
"close",
"(",
"self",
")",
":",
"# When application exit, system shuts down all handlers by",
"# calling close method. Here we check if logger is already",
"# closed to prevent uploading the log to remote storage multiple",
"# times when `logging.shutdown` is called.",
"if",
"self",
"... | b69c686ad8a0c89b9136bb4b31767257eb7b2597 |
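A standalone sketch of the double-close guard described in the comments: `logging.shutdown()` calls `close()` on every handler, so a flag keeps the upload from running twice (the upload itself is elided):

```python
import logging


class UploadOnCloseHandler(logging.Handler):
    def __init__(self):
        super().__init__()
        self.closed_once = False

    def emit(self, record):
        pass  # writing to the local log file is elided in this sketch

    def close(self):
        if not self.closed_once:
            self.closed_once = True
            # flush the local log file and upload it to remote storage here
        super().close()
```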