partition
stringclasses
3 values
func_name
stringlengths
1
134
docstring
stringlengths
1
46.9k
path
stringlengths
4
223
original_string
stringlengths
75
104k
code
stringlengths
75
104k
docstring_tokens
listlengths
1
1.97k
repo
stringlengths
7
55
language
stringclasses
1 value
url
stringlengths
87
315
code_tokens
listlengths
19
28.4k
sha
stringlengths
40
40
test
AzureContainerInstanceHook.get_messages
Get the messages of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A list of the event messages :rtype: list[str]
airflow/contrib/hooks/azure_container_instance_hook.py
def get_messages(self, resource_group, name): """ Get the messages of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A list of the event messa...
def get_messages(self, resource_group, name): """ Get the messages of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :return: A list of the event messa...
[ "Get", "the", "messages", "of", "a", "container", "group" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_container_instance_hook.py#L118-L131
[ "def", "get_messages", "(", "self", ",", "resource_group", ",", "name", ")", ":", "instance_view", "=", "self", ".", "_get_instance_view", "(", "resource_group", ",", "name", ")", "return", "[", "event", ".", "message", "for", "event", "in", "instance_view", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureContainerInstanceHook.get_logs
Get the tail from logs of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param tail: the size of the tail :type tail: int :return: A list of log messa...
airflow/contrib/hooks/azure_container_instance_hook.py
def get_logs(self, resource_group, name, tail=1000): """ Get the tail from logs of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param tail: the size...
def get_logs(self, resource_group, name, tail=1000): """ Get the tail from logs of a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str :param tail: the size...
[ "Get", "the", "tail", "from", "logs", "of", "a", "container", "group" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_container_instance_hook.py#L133-L147
[ "def", "get_logs", "(", "self", ",", "resource_group", ",", "name", ",", "tail", "=", "1000", ")", ":", "logs", "=", "self", ".", "connection", ".", "container", ".", "list_logs", "(", "resource_group", ",", "name", ",", "name", ",", "tail", "=", "tail...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureContainerInstanceHook.delete
Delete a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str
airflow/contrib/hooks/azure_container_instance_hook.py
def delete(self, resource_group, name): """ Delete a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str """ self.connection.container_groups.delete(r...
def delete(self, resource_group, name): """ Delete a container group :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str """ self.connection.container_groups.delete(r...
[ "Delete", "a", "container", "group" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_container_instance_hook.py#L149-L158
[ "def", "delete", "(", "self", ",", "resource_group", ",", "name", ")", ":", "self", ".", "connection", ".", "container_groups", ".", "delete", "(", "resource_group", ",", "name", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
AzureContainerInstanceHook.exists
Test if a container group exists :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str
airflow/contrib/hooks/azure_container_instance_hook.py
def exists(self, resource_group, name): """ Test if a container group exists :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str """ for container in self.connection....
def exists(self, resource_group, name): """ Test if a container group exists :param resource_group: the name of the resource group :type resource_group: str :param name: the name of the container group :type name: str """ for container in self.connection....
[ "Test", "if", "a", "container", "group", "exists" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/azure_container_instance_hook.py#L160-L172
[ "def", "exists", "(", "self", ",", "resource_group", ",", "name", ")", ":", "for", "container", "in", "self", ".", "connection", ".", "container_groups", ".", "list_by_resource_group", "(", "resource_group", ")", ":", "if", "container", ".", "name", "==", "n...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
apply_defaults
Function decorator that Looks for an argument named "default_args", and fills the unspecified arguments from it. Since python2.* isn't clear about which arguments are missing when calling a function, and that this can be quite confusing with multi-level inheritance and argument defaults, this decorator...
airflow/utils/decorators.py
def apply_defaults(func): """ Function decorator that Looks for an argument named "default_args", and fills the unspecified arguments from it. Since python2.* isn't clear about which arguments are missing when calling a function, and that this can be quite confusing with multi-level inheritance...
def apply_defaults(func): """ Function decorator that Looks for an argument named "default_args", and fills the unspecified arguments from it. Since python2.* isn't clear about which arguments are missing when calling a function, and that this can be quite confusing with multi-level inheritance...
[ "Function", "decorator", "that", "Looks", "for", "an", "argument", "named", "default_args", "and", "fills", "the", "unspecified", "arguments", "from", "it", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/decorators.py#L38-L100
[ "def", "apply_defaults", "(", "func", ")", ":", "# Cache inspect.signature for the wrapper closure to avoid calling it", "# at every decorated invocation. This is separate sig_cache created", "# per decoration, i.e. each function decorated using apply_defaults will", "# have a different sig_cache....
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
HiveToDruidTransfer.construct_ingest_query
Builds an ingest query for an HDFS TSV load. :param static_path: The path on hdfs where the data is :type static_path: str :param columns: List of all the columns that are available :type columns: list
airflow/operators/hive_to_druid.py
def construct_ingest_query(self, static_path, columns): """ Builds an ingest query for an HDFS TSV load. :param static_path: The path on hdfs where the data is :type static_path: str :param columns: List of all the columns that are available :type columns: list "...
def construct_ingest_query(self, static_path, columns): """ Builds an ingest query for an HDFS TSV load. :param static_path: The path on hdfs where the data is :type static_path: str :param columns: List of all the columns that are available :type columns: list "...
[ "Builds", "an", "ingest", "query", "for", "an", "HDFS", "TSV", "load", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/operators/hive_to_druid.py#L157-L244
[ "def", "construct_ingest_query", "(", "self", ",", "static_path", ",", "columns", ")", ":", "# backward compatibility for num_shards,", "# but target_partition_size is the default setting", "# and overwrites the num_shards", "num_shards", "=", "self", ".", "num_shards", "target_p...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
ImapAttachmentToS3Operator.execute
This function executes the transfer from the email server (via imap) into s3. :param context: The context while executing. :type context: dict
airflow/contrib/operators/imap_attachment_to_s3_operator.py
def execute(self, context): """ This function executes the transfer from the email server (via imap) into s3. :param context: The context while executing. :type context: dict """ self.log.info( 'Transferring mail attachment %s from mail server via imap to s3 ...
def execute(self, context): """ This function executes the transfer from the email server (via imap) into s3. :param context: The context while executing. :type context: dict """ self.log.info( 'Transferring mail attachment %s from mail server via imap to s3 ...
[ "This", "function", "executes", "the", "transfer", "from", "the", "email", "server", "(", "via", "imap", ")", "into", "s3", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/imap_attachment_to_s3_operator.py#L67-L88
[ "def", "execute", "(", "self", ",", "context", ")", ":", "self", ".", "log", ".", "info", "(", "'Transferring mail attachment %s from mail server via imap to s3 key %s...'", ",", "self", ".", "imap_attachment_name", ",", "self", ".", "s3_key", ")", "with", "ImapHook...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
RedisPubSubSensor.poke
Check for message on subscribed channels and write to xcom the message with key ``message`` An example of message ``{'type': 'message', 'pattern': None, 'channel': b'test', 'data': b'hello'}`` :param context: the context object :type context: dict :return: ``True`` if message (with typ...
airflow/contrib/sensors/redis_pub_sub_sensor.py
def poke(self, context): """ Check for message on subscribed channels and write to xcom the message with key ``message`` An example of message ``{'type': 'message', 'pattern': None, 'channel': b'test', 'data': b'hello'}`` :param context: the context object :type context: dict ...
def poke(self, context): """ Check for message on subscribed channels and write to xcom the message with key ``message`` An example of message ``{'type': 'message', 'pattern': None, 'channel': b'test', 'data': b'hello'}`` :param context: the context object :type context: dict ...
[ "Check", "for", "message", "on", "subscribed", "channels", "and", "write", "to", "xcom", "the", "message", "with", "key", "message" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/redis_pub_sub_sensor.py#L50-L73
[ "def", "poke", "(", "self", ",", "context", ")", ":", "self", ".", "log", ".", "info", "(", "'RedisPubSubSensor checking for message on channels: %s'", ",", "self", ".", "channels", ")", "message", "=", "self", ".", "pubsub", ".", "get_message", "(", ")", "s...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.refresh_from_db
Reloads the current dagrun from the database :param session: database session
airflow/models/dagrun.py
def refresh_from_db(self, session=None): """ Reloads the current dagrun from the database :param session: database session """ DR = DagRun exec_date = func.cast(self.execution_date, DateTime) dr = session.query(DR).filter( DR.dag_id == self.dag_id, ...
def refresh_from_db(self, session=None): """ Reloads the current dagrun from the database :param session: database session """ DR = DagRun exec_date = func.cast(self.execution_date, DateTime) dr = session.query(DR).filter( DR.dag_id == self.dag_id, ...
[ "Reloads", "the", "current", "dagrun", "from", "the", "database", ":", "param", "session", ":", "database", "session" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L95-L111
[ "def", "refresh_from_db", "(", "self", ",", "session", "=", "None", ")", ":", "DR", "=", "DagRun", "exec_date", "=", "func", ".", "cast", "(", "self", ".", "execution_date", ",", "DateTime", ")", "dr", "=", "session", ".", "query", "(", "DR", ")", "....
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.find
Returns a set of dag runs for the given search criteria. :param dag_id: the dag_id to find dag runs for :type dag_id: int, list :param run_id: defines the the run id for this dag run :type run_id: str :param execution_date: the execution date :type execution_date: dateti...
airflow/models/dagrun.py
def find(dag_id=None, run_id=None, execution_date=None, state=None, external_trigger=None, no_backfills=False, session=None): """ Returns a set of dag runs for the given search criteria. :param dag_id: the dag_id to find dag runs for :type dag_id: int, list ...
def find(dag_id=None, run_id=None, execution_date=None, state=None, external_trigger=None, no_backfills=False, session=None): """ Returns a set of dag runs for the given search criteria. :param dag_id: the dag_id to find dag runs for :type dag_id: int, list ...
[ "Returns", "a", "set", "of", "dag", "runs", "for", "the", "given", "search", "criteria", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L115-L160
[ "def", "find", "(", "dag_id", "=", "None", ",", "run_id", "=", "None", ",", "execution_date", "=", "None", ",", "state", "=", "None", ",", "external_trigger", "=", "None", ",", "no_backfills", "=", "False", ",", "session", "=", "None", ")", ":", "DR", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.get_task_instances
Returns the task instances for this dag run
airflow/models/dagrun.py
def get_task_instances(self, state=None, session=None): """ Returns the task instances for this dag run """ from airflow.models.taskinstance import TaskInstance # Avoid circular import tis = session.query(TaskInstance).filter( TaskInstance.dag_id == self.dag_id, ...
def get_task_instances(self, state=None, session=None): """ Returns the task instances for this dag run """ from airflow.models.taskinstance import TaskInstance # Avoid circular import tis = session.query(TaskInstance).filter( TaskInstance.dag_id == self.dag_id, ...
[ "Returns", "the", "task", "instances", "for", "this", "dag", "run" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L163-L188
[ "def", "get_task_instances", "(", "self", ",", "state", "=", "None", ",", "session", "=", "None", ")", ":", "from", "airflow", ".", "models", ".", "taskinstance", "import", "TaskInstance", "# Avoid circular import", "tis", "=", "session", ".", "query", "(", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.get_task_instance
Returns the task instance specified by task_id for this dag run :param task_id: the task id
airflow/models/dagrun.py
def get_task_instance(self, task_id, session=None): """ Returns the task instance specified by task_id for this dag run :param task_id: the task id """ from airflow.models.taskinstance import TaskInstance # Avoid circular import TI = TaskInstance ti = session.q...
def get_task_instance(self, task_id, session=None): """ Returns the task instance specified by task_id for this dag run :param task_id: the task id """ from airflow.models.taskinstance import TaskInstance # Avoid circular import TI = TaskInstance ti = session.q...
[ "Returns", "the", "task", "instance", "specified", "by", "task_id", "for", "this", "dag", "run" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L191-L206
[ "def", "get_task_instance", "(", "self", ",", "task_id", ",", "session", "=", "None", ")", ":", "from", "airflow", ".", "models", ".", "taskinstance", "import", "TaskInstance", "# Avoid circular import", "TI", "=", "TaskInstance", "ti", "=", "session", ".", "q...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.get_previous_dagrun
The previous DagRun, if there is one
airflow/models/dagrun.py
def get_previous_dagrun(self, session=None): """The previous DagRun, if there is one""" return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date < self.execution_date ).order_by( DagRun.execution_date.desc() ).first()
def get_previous_dagrun(self, session=None): """The previous DagRun, if there is one""" return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date < self.execution_date ).order_by( DagRun.execution_date.desc() ).first()
[ "The", "previous", "DagRun", "if", "there", "is", "one" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L221-L229
[ "def", "get_previous_dagrun", "(", "self", ",", "session", "=", "None", ")", ":", "return", "session", ".", "query", "(", "DagRun", ")", ".", "filter", "(", "DagRun", ".", "dag_id", "==", "self", ".", "dag_id", ",", "DagRun", ".", "execution_date", "<", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.get_previous_scheduled_dagrun
The previous, SCHEDULED DagRun, if there is one
airflow/models/dagrun.py
def get_previous_scheduled_dagrun(self, session=None): """The previous, SCHEDULED DagRun, if there is one""" dag = self.get_dag() return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date == dag.previous_schedule(self.execution_date) ...
def get_previous_scheduled_dagrun(self, session=None): """The previous, SCHEDULED DagRun, if there is one""" dag = self.get_dag() return session.query(DagRun).filter( DagRun.dag_id == self.dag_id, DagRun.execution_date == dag.previous_schedule(self.execution_date) ...
[ "The", "previous", "SCHEDULED", "DagRun", "if", "there", "is", "one" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L232-L239
[ "def", "get_previous_scheduled_dagrun", "(", "self", ",", "session", "=", "None", ")", ":", "dag", "=", "self", ".", "get_dag", "(", ")", "return", "session", ".", "query", "(", "DagRun", ")", ".", "filter", "(", "DagRun", ".", "dag_id", "==", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.update_state
Determines the overall state of the DagRun based on the state of its TaskInstances. :return: State
airflow/models/dagrun.py
def update_state(self, session=None): """ Determines the overall state of the DagRun based on the state of its TaskInstances. :return: State """ dag = self.get_dag() tis = self.get_task_instances(session=session) self.log.debug("Updating state for %s co...
def update_state(self, session=None): """ Determines the overall state of the DagRun based on the state of its TaskInstances. :return: State """ dag = self.get_dag() tis = self.get_task_instances(session=session) self.log.debug("Updating state for %s co...
[ "Determines", "the", "overall", "state", "of", "the", "DagRun", "based", "on", "the", "state", "of", "its", "TaskInstances", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L242-L329
[ "def", "update_state", "(", "self", ",", "session", "=", "None", ")", ":", "dag", "=", "self", ".", "get_dag", "(", ")", "tis", "=", "self", ".", "get_task_instances", "(", "session", "=", "session", ")", "self", ".", "log", ".", "debug", "(", "\"Upd...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.verify_integrity
Verifies the DagRun by checking for removed tasks or tasks that are not in the database yet. It will set state to removed or add the task if required.
airflow/models/dagrun.py
def verify_integrity(self, session=None): """ Verifies the DagRun by checking for removed tasks or tasks that are not in the database yet. It will set state to removed or add the task if required. """ from airflow.models.taskinstance import TaskInstance # Avoid circular import ...
def verify_integrity(self, session=None): """ Verifies the DagRun by checking for removed tasks or tasks that are not in the database yet. It will set state to removed or add the task if required. """ from airflow.models.taskinstance import TaskInstance # Avoid circular import ...
[ "Verifies", "the", "DagRun", "by", "checking", "for", "removed", "tasks", "or", "tasks", "that", "are", "not", "in", "the", "database", "yet", ".", "It", "will", "set", "state", "to", "removed", "or", "add", "the", "task", "if", "required", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L342-L389
[ "def", "verify_integrity", "(", "self", ",", "session", "=", "None", ")", ":", "from", "airflow", ".", "models", ".", "taskinstance", "import", "TaskInstance", "# Avoid circular import", "dag", "=", "self", ".", "get_dag", "(", ")", "tis", "=", "self", ".", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagRun.get_run
:param dag_id: DAG ID :type dag_id: unicode :param execution_date: execution date :type execution_date: datetime :return: DagRun corresponding to the given dag_id and execution date if one exists. None otherwise. :rtype: airflow.models.DagRun
airflow/models/dagrun.py
def get_run(session, dag_id, execution_date): """ :param dag_id: DAG ID :type dag_id: unicode :param execution_date: execution date :type execution_date: datetime :return: DagRun corresponding to the given dag_id and execution date if one exists. None otherwis...
def get_run(session, dag_id, execution_date): """ :param dag_id: DAG ID :type dag_id: unicode :param execution_date: execution date :type execution_date: datetime :return: DagRun corresponding to the given dag_id and execution date if one exists. None otherwis...
[ ":", "param", "dag_id", ":", "DAG", "ID", ":", "type", "dag_id", ":", "unicode", ":", "param", "execution_date", ":", "execution", "date", ":", "type", "execution_date", ":", "datetime", ":", "return", ":", "DagRun", "corresponding", "to", "the", "given", ...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagrun.py#L392-L407
[ "def", "get_run", "(", "session", ",", "dag_id", ",", "execution_date", ")", ":", "qry", "=", "session", ".", "query", "(", "DagRun", ")", ".", "filter", "(", "DagRun", ".", "dag_id", "==", "dag_id", ",", "DagRun", ".", "external_trigger", "==", "False",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
jenkins_request_with_headers
We need to get the headers in addition to the body answer to get the location from them This function uses jenkins_request method from python-jenkins library with just the return call changed :param jenkins_server: The server to query :param req: The request to execute :return: Dict containing ...
airflow/contrib/operators/jenkins_job_trigger_operator.py
def jenkins_request_with_headers(jenkins_server, req): """ We need to get the headers in addition to the body answer to get the location from them This function uses jenkins_request method from python-jenkins library with just the return call changed :param jenkins_server: The server to query ...
def jenkins_request_with_headers(jenkins_server, req): """ We need to get the headers in addition to the body answer to get the location from them This function uses jenkins_request method from python-jenkins library with just the return call changed :param jenkins_server: The server to query ...
[ "We", "need", "to", "get", "the", "headers", "in", "addition", "to", "the", "body", "answer", "to", "get", "the", "location", "from", "them", "This", "function", "uses", "jenkins_request", "method", "from", "python", "-", "jenkins", "library", "with", "just"...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/jenkins_job_trigger_operator.py#L34-L79
[ "def", "jenkins_request_with_headers", "(", "jenkins_server", ",", "req", ")", ":", "try", ":", "response", "=", "jenkins_server", ".", "jenkins_request", "(", "req", ")", "response_body", "=", "response", ".", "content", "response_headers", "=", "response", ".", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
JenkinsJobTriggerOperator.build_job
This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. :param jenkins_server: The jenkins server where the job...
airflow/contrib/operators/jenkins_job_trigger_operator.py
def build_job(self, jenkins_server): """ This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. ...
def build_job(self, jenkins_server): """ This function makes an API call to Jenkins to trigger a build for 'job_name' It returned a dict with 2 keys : body and headers. headers contains also a dict-like object which can be queried to get the location to poll in the queue. ...
[ "This", "function", "makes", "an", "API", "call", "to", "Jenkins", "to", "trigger", "a", "build", "for", "job_name", "It", "returned", "a", "dict", "with", "2", "keys", ":", "body", "and", "headers", ".", "headers", "contains", "also", "a", "dict", "-", ...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/jenkins_job_trigger_operator.py#L124-L147
[ "def", "build_job", "(", "self", ",", "jenkins_server", ")", ":", "# Warning if the parameter is too long, the URL can be longer than", "# the maximum allowed size", "if", "self", ".", "parameters", "and", "isinstance", "(", "self", ".", "parameters", ",", "six", ".", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
JenkinsJobTriggerOperator.poll_job_in_queue
This method poll the jenkins queue until the job is executed. When we trigger a job through an API call, the job is first put in the queue without having a build number assigned. Thus we have to wait the job exit the queue to know its build number. To do so, we have to add /api/json (or ...
airflow/contrib/operators/jenkins_job_trigger_operator.py
def poll_job_in_queue(self, location, jenkins_server): """ This method poll the jenkins queue until the job is executed. When we trigger a job through an API call, the job is first put in the queue without having a build number assigned. Thus we have to wait the job exit the queu...
def poll_job_in_queue(self, location, jenkins_server): """ This method poll the jenkins queue until the job is executed. When we trigger a job through an API call, the job is first put in the queue without having a build number assigned. Thus we have to wait the job exit the queu...
[ "This", "method", "poll", "the", "jenkins", "queue", "until", "the", "job", "is", "executed", ".", "When", "we", "trigger", "a", "job", "through", "an", "API", "call", "the", "job", "is", "first", "put", "in", "the", "queue", "without", "having", "a", ...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/jenkins_job_trigger_operator.py#L149-L183
[ "def", "poll_job_in_queue", "(", "self", ",", "location", ",", "jenkins_server", ")", ":", "try_count", "=", "0", "location", "=", "location", "+", "'/api/json'", "# TODO Use get_queue_info instead", "# once it will be available in python-jenkins (v > 0.4.15)", "self", ".",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
context_to_airflow_vars
Given a context, this function provides a dictionary of values that can be used to externally reconstruct relations between dags, dag_runs, tasks and task_instances. Default to abc.def.ghi format and can be made to ABC_DEF_GHI format if in_env_var_format is set to True. :param context: The context for ...
airflow/utils/operator_helpers.py
def context_to_airflow_vars(context, in_env_var_format=False): """ Given a context, this function provides a dictionary of values that can be used to externally reconstruct relations between dags, dag_runs, tasks and task_instances. Default to abc.def.ghi format and can be made to ABC_DEF_GHI format if ...
def context_to_airflow_vars(context, in_env_var_format=False): """ Given a context, this function provides a dictionary of values that can be used to externally reconstruct relations between dags, dag_runs, tasks and task_instances. Default to abc.def.ghi format and can be made to ABC_DEF_GHI format if ...
[ "Given", "a", "context", "this", "function", "provides", "a", "dictionary", "of", "values", "that", "can", "be", "used", "to", "externally", "reconstruct", "relations", "between", "dags", "dag_runs", "tasks", "and", "task_instances", ".", "Default", "to", "abc",...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/operator_helpers.py#L33-L66
[ "def", "context_to_airflow_vars", "(", "context", ",", "in_env_var_format", "=", "False", ")", ":", "params", "=", "dict", "(", ")", "if", "in_env_var_format", ":", "name_format", "=", "'env_var_format'", "else", ":", "name_format", "=", "'default'", "task_instanc...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
on_pre_execution
Calls callbacks before execution. Note that any exception from callback will be logged but won't be propagated. :param kwargs: :return: None
airflow/utils/cli_action_loggers.py
def on_pre_execution(**kwargs): """ Calls callbacks before execution. Note that any exception from callback will be logged but won't be propagated. :param kwargs: :return: None """ logging.debug("Calling callbacks: %s", __pre_exec_callbacks) for cb in __pre_exec_callbacks: try: ...
def on_pre_execution(**kwargs): """ Calls callbacks before execution. Note that any exception from callback will be logged but won't be propagated. :param kwargs: :return: None """ logging.debug("Calling callbacks: %s", __pre_exec_callbacks) for cb in __pre_exec_callbacks: try: ...
[ "Calls", "callbacks", "before", "execution", ".", "Note", "that", "any", "exception", "from", "callback", "will", "be", "logged", "but", "won", "t", "be", "propagated", ".", ":", "param", "kwargs", ":", ":", "return", ":", "None" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/cli_action_loggers.py#L57-L69
[ "def", "on_pre_execution", "(", "*", "*", "kwargs", ")", ":", "logging", ".", "debug", "(", "\"Calling callbacks: %s\"", ",", "__pre_exec_callbacks", ")", "for", "cb", "in", "__pre_exec_callbacks", ":", "try", ":", "cb", "(", "*", "*", "kwargs", ")", "except...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
on_post_execution
Calls callbacks after execution. As it's being called after execution, it can capture status of execution, duration, etc. Note that any exception from callback will be logged but won't be propagated. :param kwargs: :return: None
airflow/utils/cli_action_loggers.py
def on_post_execution(**kwargs): """ Calls callbacks after execution. As it's being called after execution, it can capture status of execution, duration, etc. Note that any exception from callback will be logged but won't be propagated. :param kwargs: :return: None """ logging.debug(...
def on_post_execution(**kwargs): """ Calls callbacks after execution. As it's being called after execution, it can capture status of execution, duration, etc. Note that any exception from callback will be logged but won't be propagated. :param kwargs: :return: None """ logging.debug(...
[ "Calls", "callbacks", "after", "execution", ".", "As", "it", "s", "being", "called", "after", "execution", "it", "can", "capture", "status", "of", "execution", "duration", "etc", ".", "Note", "that", "any", "exception", "from", "callback", "will", "be", "log...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/cli_action_loggers.py#L72-L86
[ "def", "on_post_execution", "(", "*", "*", "kwargs", ")", ":", "logging", ".", "debug", "(", "\"Calling callbacks: %s\"", ",", "__post_exec_callbacks", ")", "for", "cb", "in", "__post_exec_callbacks", ":", "try", ":", "cb", "(", "*", "*", "kwargs", ")", "exc...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
conditionally_trigger
This function decides whether or not to Trigger the remote DAG
airflow/example_dags/example_trigger_controller_dag.py
def conditionally_trigger(context, dag_run_obj): """This function decides whether or not to Trigger the remote DAG""" c_p = context['params']['condition_param'] print("Controller DAG : conditionally_trigger = {}".format(c_p)) if context['params']['condition_param']: dag_run_obj.payload = {'messa...
def conditionally_trigger(context, dag_run_obj): """This function decides whether or not to Trigger the remote DAG""" c_p = context['params']['condition_param'] print("Controller DAG : conditionally_trigger = {}".format(c_p)) if context['params']['condition_param']: dag_run_obj.payload = {'messa...
[ "This", "function", "decides", "whether", "or", "not", "to", "Trigger", "the", "remote", "DAG" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/example_dags/example_trigger_controller_dag.py#L45-L52
[ "def", "conditionally_trigger", "(", "context", ",", "dag_run_obj", ")", ":", "c_p", "=", "context", "[", "'params'", "]", "[", "'condition_param'", "]", "print", "(", "\"Controller DAG : conditionally_trigger = {}\"", ".", "format", "(", "c_p", ")", ")", "if", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatadogHook.send_metric
Sends a single datapoint metric to DataDog :param metric_name: The name of the metric :type metric_name: str :param datapoint: A single integer or float related to the metric :type datapoint: int or float :param tags: A list of tags associated with the metric :type tags:...
airflow/contrib/hooks/datadog_hook.py
def send_metric(self, metric_name, datapoint, tags=None, type_=None, interval=None): """ Sends a single datapoint metric to DataDog :param metric_name: The name of the metric :type metric_name: str :param datapoint: A single integer or float related to the metric :type d...
def send_metric(self, metric_name, datapoint, tags=None, type_=None, interval=None): """ Sends a single datapoint metric to DataDog :param metric_name: The name of the metric :type metric_name: str :param datapoint: A single integer or float related to the metric :type d...
[ "Sends", "a", "single", "datapoint", "metric", "to", "DataDog" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datadog_hook.py#L62-L86
[ "def", "send_metric", "(", "self", ",", "metric_name", ",", "datapoint", ",", "tags", "=", "None", ",", "type_", "=", "None", ",", "interval", "=", "None", ")", ":", "response", "=", "api", ".", "Metric", ".", "send", "(", "metric", "=", "metric_name",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatadogHook.query_metric
Queries datadog for a specific metric, potentially with some function applied to it and returns the results. :param query: The datadog query to execute (see datadog docs) :type query: str :param from_seconds_ago: How many seconds ago to start querying for. :type from_seconds_ago...
airflow/contrib/hooks/datadog_hook.py
def query_metric(self, query, from_seconds_ago, to_seconds_ago): """ Queries datadog for a specific metric, potentially with some function applied to it and returns the results. :param query: The datadog query to execute (se...
def query_metric(self, query, from_seconds_ago, to_seconds_ago): """ Queries datadog for a specific metric, potentially with some function applied to it and returns the results. :param query: The datadog query to execute (se...
[ "Queries", "datadog", "for", "a", "specific", "metric", "potentially", "with", "some", "function", "applied", "to", "it", "and", "returns", "the", "results", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datadog_hook.py#L88-L111
[ "def", "query_metric", "(", "self", ",", "query", ",", "from_seconds_ago", ",", "to_seconds_ago", ")", ":", "now", "=", "int", "(", "time", ".", "time", "(", ")", ")", "response", "=", "api", ".", "Metric", ".", "query", "(", "start", "=", "now", "-"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DatadogHook.post_event
Posts an event to datadog (processing finished, potentially alerts, other issues) Think about this as a means to maintain persistence of alerts, rather than alerting itself. :param title: The title of the event :type title: str :param text: The body of the event (more informatio...
airflow/contrib/hooks/datadog_hook.py
def post_event(self, title, text, aggregation_key=None, alert_type=None, date_happened=None, handle=None, priority=None, related_event_id=None, tags=None, device_name=None): """ Posts an event to datadog (processing finished, potentially alerts, other issues) Think about this ...
def post_event(self, title, text, aggregation_key=None, alert_type=None, date_happened=None, handle=None, priority=None, related_event_id=None, tags=None, device_name=None): """ Posts an event to datadog (processing finished, potentially alerts, other issues) Think about this ...
[ "Posts", "an", "event", "to", "datadog", "(", "processing", "finished", "potentially", "alerts", "other", "issues", ")", "Think", "about", "this", "as", "a", "means", "to", "maintain", "persistence", "of", "alerts", "rather", "than", "alerting", "itself", "." ...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/datadog_hook.py#L113-L158
[ "def", "post_event", "(", "self", ",", "title", ",", "text", ",", "aggregation_key", "=", "None", ",", "alert_type", "=", "None", ",", "date_happened", "=", "None", ",", "handle", "=", "None", ",", "priority", "=", "None", ",", "related_event_id", "=", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SlackWebhookHook._get_token
Given either a manually set token or a conn_id, return the webhook_token to use :param token: The manually provided token :type token: str :param http_conn_id: The conn_id provided :type http_conn_id: str :return: webhook_token (str) to use
airflow/contrib/hooks/slack_webhook_hook.py
def _get_token(self, token, http_conn_id): """ Given either a manually set token or a conn_id, return the webhook_token to use :param token: The manually provided token :type token: str :param http_conn_id: The conn_id provided :type http_conn_id: str :return: web...
def _get_token(self, token, http_conn_id): """ Given either a manually set token or a conn_id, return the webhook_token to use :param token: The manually provided token :type token: str :param http_conn_id: The conn_id provided :type http_conn_id: str :return: web...
[ "Given", "either", "a", "manually", "set", "token", "or", "a", "conn_id", "return", "the", "webhook_token", "to", "use", ":", "param", "token", ":", "The", "manually", "provided", "token", ":", "type", "token", ":", "str", ":", "param", "http_conn_id", ":"...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/slack_webhook_hook.py#L80-L97
[ "def", "_get_token", "(", "self", ",", "token", ",", "http_conn_id", ")", ":", "if", "token", ":", "return", "token", "elif", "http_conn_id", ":", "conn", "=", "self", ".", "get_connection", "(", "http_conn_id", ")", "extra", "=", "conn", ".", "extra_dejso...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SlackWebhookHook._build_slack_message
Construct the Slack message. All relevant parameters are combined here to a valid Slack json message :return: Slack message (str) to send
airflow/contrib/hooks/slack_webhook_hook.py
def _build_slack_message(self): """ Construct the Slack message. All relevant parameters are combined here to a valid Slack json message :return: Slack message (str) to send """ cmd = {} if self.channel: cmd['channel'] = self.channel if self.u...
def _build_slack_message(self): """ Construct the Slack message. All relevant parameters are combined here to a valid Slack json message :return: Slack message (str) to send """ cmd = {} if self.channel: cmd['channel'] = self.channel if self.u...
[ "Construct", "the", "Slack", "message", ".", "All", "relevant", "parameters", "are", "combined", "here", "to", "a", "valid", "Slack", "json", "message", ":", "return", ":", "Slack", "message", "(", "str", ")", "to", "send" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/slack_webhook_hook.py#L99-L119
[ "def", "_build_slack_message", "(", "self", ")", ":", "cmd", "=", "{", "}", "if", "self", ".", "channel", ":", "cmd", "[", "'channel'", "]", "=", "self", ".", "channel", "if", "self", ".", "username", ":", "cmd", "[", "'username'", "]", "=", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SlackWebhookHook.execute
Remote Popen (actually execute the slack webhook call)
airflow/contrib/hooks/slack_webhook_hook.py
def execute(self): """ Remote Popen (actually execute the slack webhook call) """ proxies = {} if self.proxy: # we only need https proxy for Slack, as the endpoint is https proxies = {'https': self.proxy} slack_message = self._build_slack_message(...
def execute(self): """ Remote Popen (actually execute the slack webhook call) """ proxies = {} if self.proxy: # we only need https proxy for Slack, as the endpoint is https proxies = {'https': self.proxy} slack_message = self._build_slack_message(...
[ "Remote", "Popen", "(", "actually", "execute", "the", "slack", "webhook", "call", ")" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/slack_webhook_hook.py#L121-L134
[ "def", "execute", "(", "self", ")", ":", "proxies", "=", "{", "}", "if", "self", ".", "proxy", ":", "# we only need https proxy for Slack, as the endpoint is https", "proxies", "=", "{", "'https'", ":", "self", ".", "proxy", "}", "slack_message", "=", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagBag.get_dag
Gets the DAG out of the dictionary, and refreshes it if expired
airflow/models/dagbag.py
def get_dag(self, dag_id): """ Gets the DAG out of the dictionary, and refreshes it if expired """ from airflow.models.dag import DagModel # Avoid circular import # If asking for a known subdag, we want to refresh the parent root_dag_id = dag_id if dag_id in sel...
def get_dag(self, dag_id): """ Gets the DAG out of the dictionary, and refreshes it if expired """ from airflow.models.dag import DagModel # Avoid circular import # If asking for a known subdag, we want to refresh the parent root_dag_id = dag_id if dag_id in sel...
[ "Gets", "the", "DAG", "out", "of", "the", "dictionary", "and", "refreshes", "it", "if", "expired" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L112-L143
[ "def", "get_dag", "(", "self", ",", "dag_id", ")", ":", "from", "airflow", ".", "models", ".", "dag", "import", "DagModel", "# Avoid circular import", "# If asking for a known subdag, we want to refresh the parent", "root_dag_id", "=", "dag_id", "if", "dag_id", "in", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagBag.process_file
Given a path to a python module or zip file, this method imports the module and looks for dag objects within it.
airflow/models/dagbag.py
def process_file(self, filepath, only_if_updated=True, safe_mode=True): """ Given a path to a python module or zip file, this method imports the module and look for dag objects within it. """ from airflow.models.dag import DAG # Avoid circular import found_dags = [] ...
def process_file(self, filepath, only_if_updated=True, safe_mode=True): """ Given a path to a python module or zip file, this method imports the module and look for dag objects within it. """ from airflow.models.dag import DAG # Avoid circular import found_dags = [] ...
[ "Given", "a", "path", "to", "a", "python", "module", "or", "zip", "file", "this", "method", "imports", "the", "module", "and", "look", "for", "dag", "objects", "within", "it", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L145-L271
[ "def", "process_file", "(", "self", ",", "filepath", ",", "only_if_updated", "=", "True", ",", "safe_mode", "=", "True", ")", ":", "from", "airflow", ".", "models", ".", "dag", "import", "DAG", "# Avoid circular import", "found_dags", "=", "[", "]", "# if th...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagBag.kill_zombies
Fail given zombie tasks, which are tasks that haven't had a heartbeat for too long, in the current DagBag. :param zombies: zombie task instances to kill. :type zombies: airflow.utils.dag_processing.SimpleTaskInstance :param session: DB session. :type session: sqlalchemy.orm.sess...
airflow/models/dagbag.py
def kill_zombies(self, zombies, session=None): """ Fail given zombie tasks, which are tasks that haven't had a heartbeat for too long, in the current DagBag. :param zombies: zombie task instances to kill. :type zombies: airflow.utils.dag_processing.SimpleTaskInstance :pa...
def kill_zombies(self, zombies, session=None): """ Fail given zombie tasks, which are tasks that haven't had a heartbeat for too long, in the current DagBag. :param zombies: zombie task instances to kill. :type zombies: airflow.utils.dag_processing.SimpleTaskInstance :pa...
[ "Fail", "given", "zombie", "tasks", "which", "are", "tasks", "that", "haven", "t", "had", "a", "heartbeat", "for", "too", "long", "in", "the", "current", "DagBag", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L274-L303
[ "def", "kill_zombies", "(", "self", ",", "zombies", ",", "session", "=", "None", ")", ":", "from", "airflow", ".", "models", ".", "taskinstance", "import", "TaskInstance", "# Avoid circular import", "for", "zombie", "in", "zombies", ":", "if", "zombie", ".", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagBag.bag_dag
Adds the DAG into the bag, recurses into sub dags. Throws AirflowDagCycleException if a cycle is detected in this dag or its subdags
airflow/models/dagbag.py
def bag_dag(self, dag, parent_dag, root_dag): """ Adds the DAG into the bag, recurses into sub dags. Throws AirflowDagCycleException if a cycle is detected in this dag or its subdags """ dag.test_cycle() # throws if a task cycle is found dag.resolve_template_files() ...
def bag_dag(self, dag, parent_dag, root_dag): """ Adds the DAG into the bag, recurses into sub dags. Throws AirflowDagCycleException if a cycle is detected in this dag or its subdags """ dag.test_cycle() # throws if a task cycle is found dag.resolve_template_files() ...
[ "Adds", "the", "DAG", "into", "the", "bag", "recurses", "into", "sub", "dags", ".", "Throws", "AirflowDagCycleException", "if", "a", "cycle", "is", "detected", "in", "this", "dag", "or", "its", "subdags" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L305-L339
[ "def", "bag_dag", "(", "self", ",", "dag", ",", "parent_dag", ",", "root_dag", ")", ":", "dag", ".", "test_cycle", "(", ")", "# throws if a task cycle is found", "dag", ".", "resolve_template_files", "(", ")", "dag", ".", "last_loaded", "=", "timezone", ".", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagBag.collect_dags
Given a file path or a folder, this method looks for python modules, imports them and adds them to the dagbag collection. Note that if a ``.airflowignore`` file is found while processing the directory, it will behave much like a ``.gitignore``, ignoring files that match any of the regex...
airflow/models/dagbag.py
def collect_dags( self, dag_folder=None, only_if_updated=True, include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES'), safe_mode=configuration.conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE')): """ Given a file path or a fold...
def collect_dags( self, dag_folder=None, only_if_updated=True, include_examples=configuration.conf.getboolean('core', 'LOAD_EXAMPLES'), safe_mode=configuration.conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE')): """ Given a file path or a fold...
[ "Given", "a", "file", "path", "or", "a", "folder", "this", "method", "looks", "for", "python", "modules", "imports", "them", "and", "adds", "them", "to", "the", "dagbag", "collection", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L341-L396
[ "def", "collect_dags", "(", "self", ",", "dag_folder", "=", "None", ",", "only_if_updated", "=", "True", ",", "include_examples", "=", "configuration", ".", "conf", ".", "getboolean", "(", "'core'", ",", "'LOAD_EXAMPLES'", ")", ",", "safe_mode", "=", "configur...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
DagBag.dagbag_report
Prints a report around DagBag loading stats
airflow/models/dagbag.py
def dagbag_report(self): """Prints a report around DagBag loading stats""" report = textwrap.dedent("""\n ------------------------------------------------------------------- DagBag loading stats for {dag_folder} ------------------------------------------------------------------- ...
def dagbag_report(self): """Prints a report around DagBag loading stats""" report = textwrap.dedent("""\n ------------------------------------------------------------------- DagBag loading stats for {dag_folder} ------------------------------------------------------------------- ...
[ "Prints", "a", "report", "around", "DagBag", "loading", "stats" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/dagbag.py#L398-L416
[ "def", "dagbag_report", "(", "self", ")", ":", "report", "=", "textwrap", ".", "dedent", "(", "\"\"\"\\n\n -------------------------------------------------------------------\n DagBag loading stats for {dag_folder}\n ------------------------------------------------------...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
SparkJDBCOperator.execute
Call the SparkSubmitHook to run the provided spark job
airflow/contrib/operators/spark_jdbc_operator.py
def execute(self, context): """ Call the SparkSubmitHook to run the provided spark job """ self._hook = SparkJDBCHook( spark_app_name=self._spark_app_name, spark_conn_id=self._spark_conn_id, spark_conf=self._spark_conf, spark_py_files=self....
def execute(self, context): """ Call the SparkSubmitHook to run the provided spark job """ self._hook = SparkJDBCHook( spark_app_name=self._spark_app_name, spark_conn_id=self._spark_conn_id, spark_conf=self._spark_conf, spark_py_files=self....
[ "Call", "the", "SparkSubmitHook", "to", "run", "the", "provided", "spark", "job" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/spark_jdbc_operator.py#L177-L211
[ "def", "execute", "(", "self", ",", "context", ")", ":", "self", ".", "_hook", "=", "SparkJDBCHook", "(", "spark_app_name", "=", "self", ".", "_spark_app_name", ",", "spark_conn_id", "=", "self", ".", "_spark_conn_id", ",", "spark_conf", "=", "self", ".", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
ds_add
Add or subtract days from a YYYY-MM-DD :param ds: anchor date in ``YYYY-MM-DD`` format to add to :type ds: str :param days: number of days to add to the ds, you can use negative values :type days: int >>> ds_add('2015-01-01', 5) '2015-01-06' >>> ds_add('2015-01-06', -5) '2015-01-01'
airflow/macros/__init__.py
def ds_add(ds, days): """ Add or subtract days from a YYYY-MM-DD :param ds: anchor date in ``YYYY-MM-DD`` format to add to :type ds: str :param days: number of days to add to the ds, you can use negative values :type days: int >>> ds_add('2015-01-01', 5) '2015-01-06' >>> ds_add('20...
def ds_add(ds, days): """ Add or subtract days from a YYYY-MM-DD :param ds: anchor date in ``YYYY-MM-DD`` format to add to :type ds: str :param days: number of days to add to the ds, you can use negative values :type days: int >>> ds_add('2015-01-01', 5) '2015-01-06' >>> ds_add('20...
[ "Add", "or", "subtract", "days", "from", "a", "YYYY", "-", "MM", "-", "DD" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/macros/__init__.py#L28-L46
[ "def", "ds_add", "(", "ds", ",", "days", ")", ":", "ds", "=", "datetime", ".", "strptime", "(", "ds", ",", "'%Y-%m-%d'", ")", "if", "days", ":", "ds", "=", "ds", "+", "timedelta", "(", "days", ")", "return", "ds", ".", "isoformat", "(", ")", "[",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
ds_format
Takes an input string and outputs another string as specified in the output format :param ds: input string which contains a date :type ds: str :param input_format: input string format. E.g. %Y-%m-%d :type input_format: str :param output_format: output string format E.g. %Y-%m-%d :type outp...
airflow/macros/__init__.py
def ds_format(ds, input_format, output_format): """ Takes an input string and outputs another string as specified in the output format :param ds: input string which contains a date :type ds: str :param input_format: input string format. E.g. %Y-%m-%d :type input_format: str :param outpu...
def ds_format(ds, input_format, output_format): """ Takes an input string and outputs another string as specified in the output format :param ds: input string which contains a date :type ds: str :param input_format: input string format. E.g. %Y-%m-%d :type input_format: str :param outpu...
[ "Takes", "an", "input", "string", "and", "outputs", "another", "string", "as", "specified", "in", "the", "output", "format" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/macros/__init__.py#L49-L66
[ "def", "ds_format", "(", "ds", ",", "input_format", ",", "output_format", ")", ":", "return", "datetime", ".", "strptime", "(", "ds", ",", "input_format", ")", ".", "strftime", "(", "output_format", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
_integrate_plugins
Integrate plugins to the context
airflow/macros/__init__.py
def _integrate_plugins(): """Integrate plugins to the context""" import sys from airflow.plugins_manager import macros_modules for macros_module in macros_modules: sys.modules[macros_module.__name__] = macros_module globals()[macros_module._name] = macros_module
def _integrate_plugins(): """Integrate plugins to the context""" import sys from airflow.plugins_manager import macros_modules for macros_module in macros_modules: sys.modules[macros_module.__name__] = macros_module globals()[macros_module._name] = macros_module
[ "Integrate", "plugins", "to", "the", "context" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/macros/__init__.py#L69-L75
[ "def", "_integrate_plugins", "(", ")", ":", "import", "sys", "from", "airflow", ".", "plugins_manager", "import", "macros_modules", "for", "macros_module", "in", "macros_modules", ":", "sys", ".", "modules", "[", "macros_module", ".", "__name__", "]", "=", "macr...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
HdfsSensorRegex.poke
poke matching files in a directory with self.regex :return: Bool depending on the search criteria
airflow/contrib/sensors/hdfs_sensor.py
def poke(self, context): """ poke matching files in a directory with self.regex :return: Bool depending on the search criteria """ sb = self.hook(self.hdfs_conn_id).get_conn() self.log.info( 'Poking for %s to be a directory with files matching %s', self.filep...
def poke(self, context): """ poke matching files in a directory with self.regex :return: Bool depending on the search criteria """ sb = self.hook(self.hdfs_conn_id).get_conn() self.log.info( 'Poking for %s to be a directory with files matching %s', self.filep...
[ "poke", "matching", "files", "in", "a", "directory", "with", "self", ".", "regex" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/hdfs_sensor.py#L30-L46
[ "def", "poke", "(", "self", ",", "context", ")", ":", "sb", "=", "self", ".", "hook", "(", "self", ".", "hdfs_conn_id", ")", ".", "get_conn", "(", ")", "self", ".", "log", ".", "info", "(", "'Poking for %s to be a directory with files matching %s'", ",", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
HdfsSensorFolder.poke
poke for a non empty directory :return: Bool depending on the search criteria
airflow/contrib/sensors/hdfs_sensor.py
def poke(self, context): """ poke for a non empty directory :return: Bool depending on the search criteria """ sb = self.hook(self.hdfs_conn_id).get_conn() result = [f for f in sb.ls([self.filepath], include_toplevel=True)] result = self.filter_for_ignored_ext(re...
def poke(self, context): """ poke for a non empty directory :return: Bool depending on the search criteria """ sb = self.hook(self.hdfs_conn_id).get_conn() result = [f for f in sb.ls([self.filepath], include_toplevel=True)] result = self.filter_for_ignored_ext(re...
[ "poke", "for", "a", "non", "empty", "directory" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/sensors/hdfs_sensor.py#L57-L74
[ "def", "poke", "(", "self", ",", "context", ")", ":", "sb", "=", "self", ".", "hook", "(", "self", ".", "hdfs_conn_id", ")", ".", "get_conn", "(", ")", "result", "=", "[", "f", "for", "f", "in", "sb", ".", "ls", "(", "[", "self", ".", "filepath...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
clear_task_instances
Clears a set of task instances, but makes sure the running ones get killed. :param tis: a list of task instances :param session: current session :param activate_dag_runs: flag to check for active dag run :param dag: DAG object
airflow/models/taskinstance.py
def clear_task_instances(tis, session, activate_dag_runs=True, dag=None, ): """ Clears a set of task instances, but makes sure the running ones get killed. :param tis: a list of task instances :param...
def clear_task_instances(tis, session, activate_dag_runs=True, dag=None, ): """ Clears a set of task instances, but makes sure the running ones get killed. :param tis: a list of task instances :param...
[ "Clears", "a", "set", "of", "task", "instances", "but", "makes", "sure", "the", "running", "ones", "get", "killed", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L59-L107
[ "def", "clear_task_instances", "(", "tis", ",", "session", ",", "activate_dag_runs", "=", "True", ",", "dag", "=", "None", ",", ")", ":", "job_ids", "=", "[", "]", "for", "ti", "in", "tis", ":", "if", "ti", ".", "state", "==", "State", ".", "RUNNING"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.try_number
Return the try number that this task number will be when it is actually run. If the TI is currently running, this will match the column in the database, in all other cases this will be incremented
airflow/models/taskinstance.py
def try_number(self): """ Return the try number that this task number will be when it is actually run. If the TI is currently running, this will match the column in the databse, in all othercases this will be incremenetd """ # This is designed so that task logs e...
def try_number(self): """ Return the try number that this task number will be when it is actually run. If the TI is currently running, this will match the column in the databse, in all othercases this will be incremenetd """ # This is designed so that task logs e...
[ "Return", "the", "try", "number", "that", "this", "task", "number", "will", "be", "when", "it", "is", "actually", "run", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L197-L208
[ "def", "try_number", "(", "self", ")", ":", "# This is designed so that task logs end up in the right file.", "if", "self", ".", "state", "==", "State", ".", "RUNNING", ":", "return", "self", ".", "_try_number", "return", "self", ".", "_try_number", "+", "1" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.command
Returns a command that can be executed anywhere where airflow is installed. This command is part of the message sent to executors by the orchestrator.
airflow/models/taskinstance.py
def command( self, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, local=False, pickle_id=None, raw=False, job_id=None, ...
def command( self, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, local=False, pickle_id=None, raw=False, job_id=None, ...
[ "Returns", "a", "command", "that", "can", "be", "executed", "anywhere", "where", "airflow", "is", "installed", ".", "This", "command", "is", "part", "of", "the", "message", "sent", "to", "executors", "by", "the", "orchestrator", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L218-L247
[ "def", "command", "(", "self", ",", "mark_success", "=", "False", ",", "ignore_all_deps", "=", "False", ",", "ignore_depends_on_past", "=", "False", ",", "ignore_task_deps", "=", "False", ",", "ignore_ti_state", "=", "False", ",", "local", "=", "False", ",", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.command_as_list
Returns a command that can be executed anywhere where airflow is installed. This command is part of the message sent to executors by the orchestrator.
airflow/models/taskinstance.py
def command_as_list( self, mark_success=False, ignore_all_deps=False, ignore_task_deps=False, ignore_depends_on_past=False, ignore_ti_state=False, local=False, pickle_id=None, raw=False, job_id=None, ...
def command_as_list( self, mark_success=False, ignore_all_deps=False, ignore_task_deps=False, ignore_depends_on_past=False, ignore_ti_state=False, local=False, pickle_id=None, raw=False, job_id=None, ...
[ "Returns", "a", "command", "that", "can", "be", "executed", "anywhere", "where", "airflow", "is", "installed", ".", "This", "command", "is", "part", "of", "the", "message", "sent", "to", "executors", "by", "the", "orchestrator", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L249-L292
[ "def", "command_as_list", "(", "self", ",", "mark_success", "=", "False", ",", "ignore_all_deps", "=", "False", ",", "ignore_task_deps", "=", "False", ",", "ignore_depends_on_past", "=", "False", ",", "ignore_ti_state", "=", "False", ",", "local", "=", "False", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.generate_command
Generates the shell command required to execute this task instance. :param dag_id: DAG ID :type dag_id: unicode :param task_id: Task ID :type task_id: unicode :param execution_date: Execution date for the task :type execution_date: datetime :param mark_success: W...
airflow/models/taskinstance.py
def generate_command(dag_id, task_id, execution_date, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ...
def generate_command(dag_id, task_id, execution_date, mark_success=False, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ...
[ "Generates", "the", "shell", "command", "required", "to", "execute", "this", "task", "instance", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L295-L361
[ "def", "generate_command", "(", "dag_id", ",", "task_id", ",", "execution_date", ",", "mark_success", "=", "False", ",", "ignore_all_deps", "=", "False", ",", "ignore_depends_on_past", "=", "False", ",", "ignore_task_deps", "=", "False", ",", "ignore_ti_state", "=...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.current_state
Get the very latest state from the database, if a session is passed, we use and looking up the state becomes part of the session, otherwise a new session is used.
airflow/models/taskinstance.py
def current_state(self, session=None): """ Get the very latest state from the database, if a session is passed, we use and looking up the state becomes part of the session, otherwise a new session is used. """ TI = TaskInstance ti = session.query(TI).filter( ...
def current_state(self, session=None): """ Get the very latest state from the database, if a session is passed, we use and looking up the state becomes part of the session, otherwise a new session is used. """ TI = TaskInstance ti = session.query(TI).filter( ...
[ "Get", "the", "very", "latest", "state", "from", "the", "database", "if", "a", "session", "is", "passed", "we", "use", "and", "looking", "up", "the", "state", "becomes", "part", "of", "the", "session", "otherwise", "a", "new", "session", "is", "used", "....
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L395-L411
[ "def", "current_state", "(", "self", ",", "session", "=", "None", ")", ":", "TI", "=", "TaskInstance", "ti", "=", "session", ".", "query", "(", "TI", ")", ".", "filter", "(", "TI", ".", "dag_id", "==", "self", ".", "dag_id", ",", "TI", ".", "task_i...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.error
Forces the task instance's state to FAILED in the database.
airflow/models/taskinstance.py
def error(self, session=None): """ Forces the task instance's state to FAILED in the database. """ self.log.error("Recording the task instance as FAILED") self.state = State.FAILED session.merge(self) session.commit()
def error(self, session=None): """ Forces the task instance's state to FAILED in the database. """ self.log.error("Recording the task instance as FAILED") self.state = State.FAILED session.merge(self) session.commit()
[ "Forces", "the", "task", "instance", "s", "state", "to", "FAILED", "in", "the", "database", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L414-L421
[ "def", "error", "(", "self", ",", "session", "=", "None", ")", ":", "self", ".", "log", ".", "error", "(", "\"Recording the task instance as FAILED\"", ")", "self", ".", "state", "=", "State", ".", "FAILED", "session", ".", "merge", "(", "self", ")", "se...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.refresh_from_db
Refreshes the task instance from the database based on the primary key :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the session is committed.
airflow/models/taskinstance.py
def refresh_from_db(self, session=None, lock_for_update=False): """ Refreshes the task instance from the database based on the primary key :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the se...
def refresh_from_db(self, session=None, lock_for_update=False): """ Refreshes the task instance from the database based on the primary key :param lock_for_update: if True, indicates that the database should lock the TaskInstance (issuing a FOR UPDATE clause) until the se...
[ "Refreshes", "the", "task", "instance", "from", "the", "database", "based", "on", "the", "primary", "key" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L424-L455
[ "def", "refresh_from_db", "(", "self", ",", "session", "=", "None", ",", "lock_for_update", "=", "False", ")", ":", "TI", "=", "TaskInstance", "qry", "=", "session", ".", "query", "(", "TI", ")", ".", "filter", "(", "TI", ".", "dag_id", "==", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.clear_xcom_data
Clears all XCom data from the database for the task instance
airflow/models/taskinstance.py
def clear_xcom_data(self, session=None): """ Clears all XCom data from the database for the task instance """ session.query(XCom).filter( XCom.dag_id == self.dag_id, XCom.task_id == self.task_id, XCom.execution_date == self.execution_date ).del...
def clear_xcom_data(self, session=None): """ Clears all XCom data from the database for the task instance """ session.query(XCom).filter( XCom.dag_id == self.dag_id, XCom.task_id == self.task_id, XCom.execution_date == self.execution_date ).del...
[ "Clears", "all", "XCom", "data", "from", "the", "database", "for", "the", "task", "instance" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L458-L467
[ "def", "clear_xcom_data", "(", "self", ",", "session", "=", "None", ")", ":", "session", ".", "query", "(", "XCom", ")", ".", "filter", "(", "XCom", ".", "dag_id", "==", "self", ".", "dag_id", ",", "XCom", ".", "task_id", "==", "self", ".", "task_id"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.key
Returns a tuple that identifies the task instance uniquely
airflow/models/taskinstance.py
def key(self): """ Returns a tuple that identifies the task instance uniquely """ return self.dag_id, self.task_id, self.execution_date, self.try_number
def key(self): """ Returns a tuple that identifies the task instance uniquely """ return self.dag_id, self.task_id, self.execution_date, self.try_number
[ "Returns", "a", "tuple", "that", "identifies", "the", "task", "instance", "uniquely" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L470-L474
[ "def", "key", "(", "self", ")", ":", "return", "self", ".", "dag_id", ",", "self", ".", "task_id", ",", "self", ".", "execution_date", ",", "self", ".", "try_number" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.are_dependents_done
Checks whether the dependents of this task instance have all succeeded. This is meant to be used by wait_for_downstream. This is useful when you do not want to start processing the next schedule of a task until the dependents are done. For instance, if the task DROPs and recreates a tab...
airflow/models/taskinstance.py
def are_dependents_done(self, session=None): """ Checks whether the dependents of this task instance have all succeeded. This is meant to be used by wait_for_downstream. This is useful when you do not want to start processing the next schedule of a task until the dependents are ...
def are_dependents_done(self, session=None): """ Checks whether the dependents of this task instance have all succeeded. This is meant to be used by wait_for_downstream. This is useful when you do not want to start processing the next schedule of a task until the dependents are ...
[ "Checks", "whether", "the", "dependents", "of", "this", "task", "instance", "have", "all", "succeeded", ".", "This", "is", "meant", "to", "be", "used", "by", "wait_for_downstream", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L494-L515
[ "def", "are_dependents_done", "(", "self", ",", "session", "=", "None", ")", ":", "task", "=", "self", ".", "task", "if", "not", "task", ".", "downstream_task_ids", ":", "return", "True", "ti", "=", "session", ".", "query", "(", "func", ".", "count", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.are_dependencies_met
Returns whether or not all the conditions are met for this task instance to be run given the context for the dependencies (e.g. a task instance being force run from the UI will ignore some dependencies). :param dep_context: The execution context that determines the dependencies that ...
airflow/models/taskinstance.py
def are_dependencies_met( self, dep_context=None, session=None, verbose=False): """ Returns whether or not all the conditions are met for this task instance to be run given the context for the dependencies (e.g. a task instance being force run from...
def are_dependencies_met( self, dep_context=None, session=None, verbose=False): """ Returns whether or not all the conditions are met for this task instance to be run given the context for the dependencies (e.g. a task instance being force run from...
[ "Returns", "whether", "or", "not", "all", "the", "conditions", "are", "met", "for", "this", "task", "instance", "to", "be", "run", "given", "the", "context", "for", "the", "dependencies", "(", "e", ".", "g", ".", "a", "task", "instance", "being", "force"...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L550-L586
[ "def", "are_dependencies_met", "(", "self", ",", "dep_context", "=", "None", ",", "session", "=", "None", ",", "verbose", "=", "False", ")", ":", "dep_context", "=", "dep_context", "or", "DepContext", "(", ")", "failed", "=", "False", "verbose_aware_logger", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.next_retry_datetime
Get datetime of the next retry if the task instance fails. For exponential backoff, retry_delay is used as base and will be converted to seconds.
airflow/models/taskinstance.py
def next_retry_datetime(self): """ Get datetime of the next retry if the task instance fails. For exponential backoff, retry_delay is used as base and will be converted to seconds. """ delay = self.task.retry_delay if self.task.retry_exponential_backoff: min_b...
def next_retry_datetime(self): """ Get datetime of the next retry if the task instance fails. For exponential backoff, retry_delay is used as base and will be converted to seconds. """ delay = self.task.retry_delay if self.task.retry_exponential_backoff: min_b...
[ "Get", "datetime", "of", "the", "next", "retry", "if", "the", "task", "instance", "fails", ".", "For", "exponential", "backoff", "retry_delay", "is", "used", "as", "base", "and", "will", "be", "converted", "to", "seconds", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L614-L642
[ "def", "next_retry_datetime", "(", "self", ")", ":", "delay", "=", "self", ".", "task", ".", "retry_delay", "if", "self", ".", "task", ".", "retry_exponential_backoff", ":", "min_backoff", "=", "int", "(", "delay", ".", "total_seconds", "(", ")", "*", "(",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.ready_for_retry
Checks on whether the task instance is in the right state and timeframe to be retried.
airflow/models/taskinstance.py
def ready_for_retry(self): """ Checks on whether the task instance is in the right state and timeframe to be retried. """ return (self.state == State.UP_FOR_RETRY and self.next_retry_datetime() < timezone.utcnow())
def ready_for_retry(self): """ Checks on whether the task instance is in the right state and timeframe to be retried. """ return (self.state == State.UP_FOR_RETRY and self.next_retry_datetime() < timezone.utcnow())
[ "Checks", "on", "whether", "the", "task", "instance", "is", "in", "the", "right", "state", "and", "timeframe", "to", "be", "retried", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L644-L650
[ "def", "ready_for_retry", "(", "self", ")", ":", "return", "(", "self", ".", "state", "==", "State", ".", "UP_FOR_RETRY", "and", "self", ".", "next_retry_datetime", "(", ")", "<", "timezone", ".", "utcnow", "(", ")", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.pool_full
Returns a boolean as to whether the slot pool has room for this task to run
airflow/models/taskinstance.py
def pool_full(self, session): """ Returns a boolean as to whether the slot pool has room for this task to run """ if not self.task.pool: return False pool = ( session .query(Pool) .filter(Pool.pool == self.task.pool) ...
def pool_full(self, session): """ Returns a boolean as to whether the slot pool has room for this task to run """ if not self.task.pool: return False pool = ( session .query(Pool) .filter(Pool.pool == self.task.pool) ...
[ "Returns", "a", "boolean", "as", "to", "whether", "the", "slot", "pool", "has", "room", "for", "this", "task", "to", "run" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L653-L671
[ "def", "pool_full", "(", "self", ",", "session", ")", ":", "if", "not", "self", ".", "task", ".", "pool", ":", "return", "False", "pool", "=", "(", "session", ".", "query", "(", "Pool", ")", ".", "filter", "(", "Pool", ".", "pool", "==", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.get_dagrun
Returns the DagRun for this TaskInstance :param session: :return: DagRun
airflow/models/taskinstance.py
def get_dagrun(self, session): """ Returns the DagRun for this TaskInstance :param session: :return: DagRun """ from airflow.models.dagrun import DagRun # Avoid circular import dr = session.query(DagRun).filter( DagRun.dag_id == self.dag_id, ...
def get_dagrun(self, session): """ Returns the DagRun for this TaskInstance :param session: :return: DagRun """ from airflow.models.dagrun import DagRun # Avoid circular import dr = session.query(DagRun).filter( DagRun.dag_id == self.dag_id, ...
[ "Returns", "the", "DagRun", "for", "this", "TaskInstance" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L674-L687
[ "def", "get_dagrun", "(", "self", ",", "session", ")", ":", "from", "airflow", ".", "models", ".", "dagrun", "import", "DagRun", "# Avoid circular import", "dr", "=", "session", ".", "query", "(", "DagRun", ")", ".", "filter", "(", "DagRun", ".", "dag_id",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance._check_and_change_state_before_execution
Checks dependencies and then sets state to RUNNING if they are met. Returns True if and only if state is set to RUNNING, which implies that task should be executed, in preparation for _run_raw_task :param verbose: whether to turn on more verbose logging :type verbose: bool :para...
airflow/models/taskinstance.py
def _check_and_change_state_before_execution( self, verbose=True, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, mark_success=False, test_mode=False, job_id=No...
def _check_and_change_state_before_execution( self, verbose=True, ignore_all_deps=False, ignore_depends_on_past=False, ignore_task_deps=False, ignore_ti_state=False, mark_success=False, test_mode=False, job_id=No...
[ "Checks", "dependencies", "and", "then", "sets", "state", "to", "RUNNING", "if", "they", "are", "met", ".", "Returns", "True", "if", "and", "only", "if", "state", "is", "set", "to", "RUNNING", "which", "implies", "that", "task", "should", "be", "executed",...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L690-L822
[ "def", "_check_and_change_state_before_execution", "(", "self", ",", "verbose", "=", "True", ",", "ignore_all_deps", "=", "False", ",", "ignore_depends_on_past", "=", "False", ",", "ignore_task_deps", "=", "False", ",", "ignore_ti_state", "=", "False", ",", "mark_su...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance._run_raw_task
Immediately runs the task (without checking or changing db state before execution) and then sets the appropriate final state after completion and runs any post-execute callbacks. Meant to be called only after another function changes the state to running. :param mark_success: Don't run ...
airflow/models/taskinstance.py
def _run_raw_task( self, mark_success=False, test_mode=False, job_id=None, pool=None, session=None): """ Immediately runs the task (without checking or changing db state before execution) and then sets the appropriate final ...
def _run_raw_task( self, mark_success=False, test_mode=False, job_id=None, pool=None, session=None): """ Immediately runs the task (without checking or changing db state before execution) and then sets the appropriate final ...
[ "Immediately", "runs", "the", "task", "(", "without", "checking", "or", "changing", "db", "state", "before", "execution", ")", "and", "then", "sets", "the", "appropriate", "final", "state", "after", "completion", "and", "runs", "any", "post", "-", "execute", ...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L825-L943
[ "def", "_run_raw_task", "(", "self", ",", "mark_success", "=", "False", ",", "test_mode", "=", "False", ",", "job_id", "=", "None", ",", "pool", "=", "None", ",", "session", "=", "None", ")", ":", "task", "=", "self", ".", "task", "self", ".", "pool"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.xcom_push
Make an XCom available for tasks to pull. :param key: A key for the XCom :type key: str :param value: A value for the XCom. The value is pickled and stored in the database. :type value: any pickleable object :param execution_date: if provided, the XCom will not be vi...
airflow/models/taskinstance.py
def xcom_push( self, key, value, execution_date=None): """ Make an XCom available for tasks to pull. :param key: A key for the XCom :type key: str :param value: A value for the XCom. The value is pickled and stored in t...
def xcom_push( self, key, value, execution_date=None): """ Make an XCom available for tasks to pull. :param key: A key for the XCom :type key: str :param value: A value for the XCom. The value is pickled and stored in t...
[ "Make", "an", "XCom", "available", "for", "tasks", "to", "pull", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L1271-L1301
[ "def", "xcom_push", "(", "self", ",", "key", ",", "value", ",", "execution_date", "=", "None", ")", ":", "if", "execution_date", "and", "execution_date", "<", "self", ".", "execution_date", ":", "raise", "ValueError", "(", "'execution_date can not be in the past (...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.xcom_pull
Pull XComs that optionally meet certain criteria. The default value for `key` limits the search to XComs that were returned by other tasks (as opposed to those that were pushed manually). To remove this filter, pass key=None (or any desired value). If a single task_id string is provide...
airflow/models/taskinstance.py
def xcom_pull( self, task_ids=None, dag_id=None, key=XCOM_RETURN_KEY, include_prior_dates=False): """ Pull XComs that optionally meet certain criteria. The default value for `key` limits the search to XComs that were returned b...
def xcom_pull( self, task_ids=None, dag_id=None, key=XCOM_RETURN_KEY, include_prior_dates=False): """ Pull XComs that optionally meet certain criteria. The default value for `key` limits the search to XComs that were returned b...
[ "Pull", "XComs", "that", "optionally", "meet", "certain", "criteria", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L1303-L1352
[ "def", "xcom_pull", "(", "self", ",", "task_ids", "=", "None", ",", "dag_id", "=", "None", ",", "key", "=", "XCOM_RETURN_KEY", ",", "include_prior_dates", "=", "False", ")", ":", "if", "dag_id", "is", "None", ":", "dag_id", "=", "self", ".", "dag_id", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
TaskInstance.init_run_context
Sets the log context.
airflow/models/taskinstance.py
def init_run_context(self, raw=False): """ Sets the log context. """ self.raw = raw self._set_context(self)
def init_run_context(self, raw=False): """ Sets the log context. """ self.raw = raw self._set_context(self)
[ "Sets", "the", "log", "context", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/models/taskinstance.py#L1363-L1368
[ "def", "init_run_context", "(", "self", ",", "raw", "=", "False", ")", ":", "self", ".", "raw", "=", "raw", "self", ".", "_set_context", "(", "self", ")" ]
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
WasbTaskHandler.close
Close and upload local log file to remote storage Wasb.
airflow/utils/log/wasb_task_handler.py
def close(self): """ Close and upload local log file to remote storage Wasb. """ # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple ...
def close(self): """ Close and upload local log file to remote storage Wasb. """ # When application exit, system shuts down all handlers by # calling close method. Here we check if logger is already # closed to prevent uploading the log to remote storage multiple ...
[ "Close", "and", "upload", "local", "log", "file", "to", "remote", "storage", "Wasb", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L68-L95
[ "def", "close", "(", "self", ")", ":", "# When application exit, system shuts down all handlers by", "# calling close method. Here we check if logger is already", "# closed to prevent uploading the log to remote storage multiple", "# times when `logging.shutdown` is called.", "if", "self", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
WasbTaskHandler._read
Read logs of given task instance and try_number from Wasb remote storage. If failed, read the log from task instance host machine. :param ti: task instance object :param try_number: task instance try_number to read logs from :param metadata: log metadata, can be ...
airflow/utils/log/wasb_task_handler.py
def _read(self, ti, try_number, metadata=None): """ Read logs of given task instance and try_number from Wasb remote storage. If failed, read the log from task instance host machine. :param ti: task instance object :param try_number: task instance try_number to read logs from ...
def _read(self, ti, try_number, metadata=None): """ Read logs of given task instance and try_number from Wasb remote storage. If failed, read the log from task instance host machine. :param ti: task instance object :param try_number: task instance try_number to read logs from ...
[ "Read", "logs", "of", "given", "task", "instance", "and", "try_number", "from", "Wasb", "remote", "storage", ".", "If", "failed", "read", "the", "log", "from", "task", "instance", "host", "machine", ".", ":", "param", "ti", ":", "task", "instance", "object...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L97-L121
[ "def", "_read", "(", "self", ",", "ti", ",", "try_number", ",", "metadata", "=", "None", ")", ":", "# Explicitly getting log relative path is necessary as the given", "# task instance might be different than task instance passed in", "# in set_context method.", "log_relative_path",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
WasbTaskHandler.wasb_log_exists
Check if remote_log_location exists in remote storage :param remote_log_location: log's location in remote storage :return: True if location exists else False
airflow/utils/log/wasb_task_handler.py
def wasb_log_exists(self, remote_log_location): """ Check if remote_log_location exists in remote storage :param remote_log_location: log's location in remote storage :return: True if location exists else False """ try: return self.hook.check_for_blob(self.was...
def wasb_log_exists(self, remote_log_location): """ Check if remote_log_location exists in remote storage :param remote_log_location: log's location in remote storage :return: True if location exists else False """ try: return self.hook.check_for_blob(self.was...
[ "Check", "if", "remote_log_location", "exists", "in", "remote", "storage", ":", "param", "remote_log_location", ":", "log", "s", "location", "in", "remote", "storage", ":", "return", ":", "True", "if", "location", "exists", "else", "False" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L123-L133
[ "def", "wasb_log_exists", "(", "self", ",", "remote_log_location", ")", ":", "try", ":", "return", "self", ".", "hook", ".", "check_for_blob", "(", "self", ".", "wasb_container", ",", "remote_log_location", ")", "except", "Exception", ":", "pass", "return", "F...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
WasbTaskHandler.wasb_read
Returns the log found at the remote_log_location. Returns '' if no logs are found or there is an error. :param remote_log_location: the log's location in remote storage :type remote_log_location: str (path) :param return_error: if True, returns a string error message if an er...
airflow/utils/log/wasb_task_handler.py
def wasb_read(self, remote_log_location, return_error=False): """ Returns the log found at the remote_log_location. Returns '' if no logs are found or there is an error. :param remote_log_location: the log's location in remote storage :type remote_log_location: str (path) ...
def wasb_read(self, remote_log_location, return_error=False): """ Returns the log found at the remote_log_location. Returns '' if no logs are found or there is an error. :param remote_log_location: the log's location in remote storage :type remote_log_location: str (path) ...
[ "Returns", "the", "log", "found", "at", "the", "remote_log_location", ".", "Returns", "if", "no", "logs", "are", "found", "or", "there", "is", "an", "error", ".", ":", "param", "remote_log_location", ":", "the", "log", "s", "location", "in", "remote", "sto...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L135-L152
[ "def", "wasb_read", "(", "self", ",", "remote_log_location", ",", "return_error", "=", "False", ")", ":", "try", ":", "return", "self", ".", "hook", ".", "read_file", "(", "self", ".", "wasb_container", ",", "remote_log_location", ")", "except", "AzureHttpErro...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
WasbTaskHandler.wasb_write
Writes the log to the remote_log_location. Fails silently if no hook was created. :param log: the log to write to the remote_log_location :type log: str :param remote_log_location: the log's location in remote storage :type remote_log_location: str (path) :param append: i...
airflow/utils/log/wasb_task_handler.py
def wasb_write(self, log, remote_log_location, append=True): """ Writes the log to the remote_log_location. Fails silently if no hook was created. :param log: the log to write to the remote_log_location :type log: str :param remote_log_location: the log's location in remo...
def wasb_write(self, log, remote_log_location, append=True): """ Writes the log to the remote_log_location. Fails silently if no hook was created. :param log: the log to write to the remote_log_location :type log: str :param remote_log_location: the log's location in remo...
[ "Writes", "the", "log", "to", "the", "remote_log_location", ".", "Fails", "silently", "if", "no", "hook", "was", "created", ".", ":", "param", "log", ":", "the", "log", "to", "write", "to", "the", "remote_log_location", ":", "type", "log", ":", "str", ":...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/wasb_task_handler.py#L154-L178
[ "def", "wasb_write", "(", "self", ",", "log", ",", "remote_log_location", ",", "append", "=", "True", ")", ":", "if", "append", "and", "self", ".", "wasb_log_exists", "(", "remote_log_location", ")", ":", "old_log", "=", "self", ".", "wasb_read", "(", "rem...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.get_conn
Retrieves connection to Google Compute Engine. :return: Google Compute Engine services object :rtype: dict
airflow/contrib/hooks/gcp_compute_hook.py
def get_conn(self): """ Retrieves connection to Google Compute Engine. :return: Google Compute Engine services object :rtype: dict """ if not self._conn: http_authorized = self._authorize() self._conn = build('compute', self.api_version, ...
def get_conn(self): """ Retrieves connection to Google Compute Engine. :return: Google Compute Engine services object :rtype: dict """ if not self._conn: http_authorized = self._authorize() self._conn = build('compute', self.api_version, ...
[ "Retrieves", "connection", "to", "Google", "Compute", "Engine", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L55-L66
[ "def", "get_conn", "(", "self", ")", ":", "if", "not", "self", ".", "_conn", ":", "http_authorized", "=", "self", ".", "_authorize", "(", ")", "self", ".", "_conn", "=", "build", "(", "'compute'", ",", "self", ".", "api_version", ",", "http", "=", "h...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.start_instance
Starts an existing instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists :type zone: str :param resource_id: Name of the Compute Engine instance resource ...
airflow/contrib/hooks/gcp_compute_hook.py
def start_instance(self, zone, resource_id, project_id=None): """ Starts an existing instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists :type zone: ...
def start_instance(self, zone, resource_id, project_id=None): """ Starts an existing instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists :type zone: ...
[ "Starts", "an", "existing", "instance", "defined", "by", "project_id", "zone", "and", "resource_id", ".", "Must", "be", "called", "with", "keyword", "arguments", "rather", "than", "positional", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L69-L97
[ "def", "start_instance", "(", "self", ",", "zone", ",", "resource_id", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "instances", "(", ")", ".", "start", "(", "project", "=", "project_id", ",", "zone...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.set_machine_type
Sets machine type of an instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists. :type zone: str :param resource_id: Name of the Compute Engine instance resource...
airflow/contrib/hooks/gcp_compute_hook.py
def set_machine_type(self, zone, resource_id, body, project_id=None): """ Sets machine type of an instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists. ...
def set_machine_type(self, zone, resource_id, body, project_id=None): """ Sets machine type of an instance defined by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the instance exists. ...
[ "Sets", "machine", "type", "of", "an", "instance", "defined", "by", "project_id", "zone", "and", "resource_id", ".", "Must", "be", "called", "with", "keyword", "arguments", "rather", "than", "positional", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L131-L159
[ "def", "set_machine_type", "(", "self", ",", "zone", ",", "resource_id", ",", "body", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "_execute_set_machine_type", "(", "zone", ",", "resource_id", ",", "body", ",", "project_id", ")", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.get_instance_template
Retrieves instance template by project_id and resource_id. Must be called with keyword arguments rather than positional. :param resource_id: Name of the instance template :type resource_id: str :param project_id: Optional, Google Cloud Platform project ID where the Compute E...
airflow/contrib/hooks/gcp_compute_hook.py
def get_instance_template(self, resource_id, project_id=None): """ Retrieves instance template by project_id and resource_id. Must be called with keyword arguments rather than positional. :param resource_id: Name of the instance template :type resource_id: str :param pro...
def get_instance_template(self, resource_id, project_id=None): """ Retrieves instance template by project_id and resource_id. Must be called with keyword arguments rather than positional. :param resource_id: Name of the instance template :type resource_id: str :param pro...
[ "Retrieves", "instance", "template", "by", "project_id", "and", "resource_id", ".", "Must", "be", "called", "with", "keyword", "arguments", "rather", "than", "positional", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L167-L186
[ "def", "get_instance_template", "(", "self", ",", "resource_id", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "instanceTemplates", "(", ")", ".", "get", "(", "project", "=", "project_id", ",", "instance...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.insert_instance_template
Inserts instance template using body specified Must be called with keyword arguments rather than positional. :param body: Instance template representation as object according to https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates :type body: dict :param...
airflow/contrib/hooks/gcp_compute_hook.py
def insert_instance_template(self, body, request_id=None, project_id=None): """ Inserts instance template using body specified Must be called with keyword arguments rather than positional. :param body: Instance template representation as object according to https://cloud.goo...
def insert_instance_template(self, body, request_id=None, project_id=None): """ Inserts instance template using body specified Must be called with keyword arguments rather than positional. :param body: Instance template representation as object according to https://cloud.goo...
[ "Inserts", "instance", "template", "using", "body", "specified", "Must", "be", "called", "with", "keyword", "arguments", "rather", "than", "positional", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L189-L220
[ "def", "insert_instance_template", "(", "self", ",", "body", ",", "request_id", "=", "None", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "instanceTemplates", "(", ")", ".", "insert", "(", "project", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.get_instance_group_manager
Retrieves Instance Group Manager by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the Instance Group Manager exists :type zone: str :param resource_id: Name of the Instance Group Manager ...
airflow/contrib/hooks/gcp_compute_hook.py
def get_instance_group_manager(self, zone, resource_id, project_id=None): """ Retrieves Instance Group Manager by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the Instance Group Manager exis...
def get_instance_group_manager(self, zone, resource_id, project_id=None): """ Retrieves Instance Group Manager by project_id, zone and resource_id. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the Instance Group Manager exis...
[ "Retrieves", "Instance", "Group", "Manager", "by", "project_id", "zone", "and", "resource_id", ".", "Must", "be", "called", "with", "keyword", "arguments", "rather", "than", "positional", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L223-L245
[ "def", "get_instance_group_manager", "(", "self", ",", "zone", ",", "resource_id", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "instanceGroupManagers", "(", ")", ".", "get", "(", "project", "=", "proje...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook.patch_instance_group_manager
Patches Instance Group Manager with the specified body. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Platform zone where the Instance Group Manager exists :type zone: str :param resource_id: Name of the Instance Group Manager :type reso...
airflow/contrib/hooks/gcp_compute_hook.py
def patch_instance_group_manager(self, zone, resource_id, body, request_id=None, project_id=None): """ Patches Instance Group Manager with the specified body. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Pla...
def patch_instance_group_manager(self, zone, resource_id, body, request_id=None, project_id=None): """ Patches Instance Group Manager with the specified body. Must be called with keyword arguments rather than positional. :param zone: Google Cloud Pla...
[ "Patches", "Instance", "Group", "Manager", "with", "the", "specified", "body", ".", "Must", "be", "called", "with", "keyword", "arguments", "rather", "than", "positional", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L248-L288
[ "def", "patch_instance_group_manager", "(", "self", ",", "zone", ",", "resource_id", ",", "body", ",", "request_id", "=", "None", ",", "project_id", "=", "None", ")", ":", "response", "=", "self", ".", "get_conn", "(", ")", ".", "instanceGroupManagers", "(",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
GceHook._wait_for_operation_to_complete
Waits for the named operation to complete - checks status of the async call. :param operation_name: name of the operation :type operation_name: str :param zone: optional region of the request (might be None for global operations) :type zone: str :return: None
airflow/contrib/hooks/gcp_compute_hook.py
def _wait_for_operation_to_complete(self, project_id, operation_name, zone=None): """ Waits for the named operation to complete - checks status of the async call. :param operation_name: name of the operation :type operation_name: str :param zone: optional region of the request (...
def _wait_for_operation_to_complete(self, project_id, operation_name, zone=None): """ Waits for the named operation to complete - checks status of the async call. :param operation_name: name of the operation :type operation_name: str :param zone: optional region of the request (...
[ "Waits", "for", "the", "named", "operation", "to", "complete", "-", "checks", "status", "of", "the", "async", "call", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/hooks/gcp_compute_hook.py#L290-L320
[ "def", "_wait_for_operation_to_complete", "(", "self", ",", "project_id", ",", "operation_name", ",", "zone", "=", "None", ")", ":", "service", "=", "self", ".", "get_conn", "(", ")", "while", "True", ":", "if", "zone", "is", "None", ":", "# noinspection PyT...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.check_for_bucket
Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str
airflow/hooks/S3_hook.py
def check_for_bucket(self, bucket_name): """ Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str """ try: self.get_conn().head_bucket(Bucket=bucket_name) return True except ClientError as e: ...
def check_for_bucket(self, bucket_name): """ Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str """ try: self.get_conn().head_bucket(Bucket=bucket_name) return True except ClientError as e: ...
[ "Check", "if", "bucket_name", "exists", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L48-L60
[ "def", "check_for_bucket", "(", "self", ",", "bucket_name", ")", ":", "try", ":", "self", ".", "get_conn", "(", ")", ".", "head_bucket", "(", "Bucket", "=", "bucket_name", ")", "return", "True", "except", "ClientError", "as", "e", ":", "self", ".", "log"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.create_bucket
Creates an Amazon S3 bucket. :param bucket_name: The name of the bucket :type bucket_name: str :param region_name: The name of the aws region in which to create the bucket. :type region_name: str
airflow/hooks/S3_hook.py
def create_bucket(self, bucket_name, region_name=None): """ Creates an Amazon S3 bucket. :param bucket_name: The name of the bucket :type bucket_name: str :param region_name: The name of the aws region in which to create the bucket. :type region_name: str """ ...
def create_bucket(self, bucket_name, region_name=None): """ Creates an Amazon S3 bucket. :param bucket_name: The name of the bucket :type bucket_name: str :param region_name: The name of the aws region in which to create the bucket. :type region_name: str """ ...
[ "Creates", "an", "Amazon", "S3", "bucket", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L72-L90
[ "def", "create_bucket", "(", "self", ",", "bucket_name", ",", "region_name", "=", "None", ")", ":", "s3_conn", "=", "self", ".", "get_conn", "(", ")", "if", "not", "region_name", ":", "region_name", "=", "s3_conn", ".", "meta", ".", "region_name", "if", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.check_for_prefix
Checks that a prefix exists in a bucket :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: str :param delimiter: the delimiter marks key hierarchy. :type delimiter: str
airflow/hooks/S3_hook.py
def check_for_prefix(self, bucket_name, prefix, delimiter): """ Checks that a prefix exists in a bucket :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: str :param delimiter: the delimiter marks key hiera...
def check_for_prefix(self, bucket_name, prefix, delimiter): """ Checks that a prefix exists in a bucket :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: str :param delimiter: the delimiter marks key hiera...
[ "Checks", "that", "a", "prefix", "exists", "in", "a", "bucket" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L92-L107
[ "def", "check_for_prefix", "(", "self", ",", "bucket_name", ",", "prefix", ",", "delimiter", ")", ":", "prefix", "=", "prefix", "+", "delimiter", "if", "prefix", "[", "-", "1", "]", "!=", "delimiter", "else", "prefix", "prefix_split", "=", "re", ".", "sp...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.list_prefixes
Lists prefixes in a bucket under prefix :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: str :param delimiter: the delimiter marks key hierarchy. :type delimiter: str :param page_size: pagination size ...
airflow/hooks/S3_hook.py
def list_prefixes(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): """ Lists prefixes in a bucket under prefix :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: st...
def list_prefixes(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): """ Lists prefixes in a bucket under prefix :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: st...
[ "Lists", "prefixes", "in", "a", "bucket", "under", "prefix" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L109-L145
[ "def", "list_prefixes", "(", "self", ",", "bucket_name", ",", "prefix", "=", "''", ",", "delimiter", "=", "''", ",", "page_size", "=", "None", ",", "max_items", "=", "None", ")", ":", "config", "=", "{", "'PageSize'", ":", "page_size", ",", "'MaxItems'",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.list_keys
Lists keys in a bucket under prefix and not containing delimiter :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix :type prefix: str :param delimiter: the delimiter marks key hierarchy. :type delimiter: str :param page_...
airflow/hooks/S3_hook.py
def list_keys(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): """ Lists keys in a bucket under prefix and not containing delimiter :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix ...
def list_keys(self, bucket_name, prefix='', delimiter='', page_size=None, max_items=None): """ Lists keys in a bucket under prefix and not containing delimiter :param bucket_name: the name of the bucket :type bucket_name: str :param prefix: a key prefix ...
[ "Lists", "keys", "in", "a", "bucket", "under", "prefix", "and", "not", "containing", "delimiter" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L147-L183
[ "def", "list_keys", "(", "self", ",", "bucket_name", ",", "prefix", "=", "''", ",", "delimiter", "=", "''", ",", "page_size", "=", "None", ",", "max_items", "=", "None", ")", ":", "config", "=", "{", "'PageSize'", ":", "page_size", ",", "'MaxItems'", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.check_for_key
Checks if a key exists in a bucket :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str
airflow/hooks/S3_hook.py
def check_for_key(self, key, bucket_name=None): """ Checks if a key exists in a bucket :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str """ if not bucke...
def check_for_key(self, key, bucket_name=None): """ Checks if a key exists in a bucket :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str """ if not bucke...
[ "Checks", "if", "a", "key", "exists", "in", "a", "bucket" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L185-L202
[ "def", "check_for_key", "(", "self", ",", "key", ",", "bucket_name", "=", "None", ")", ":", "if", "not", "bucket_name", ":", "(", "bucket_name", ",", "key", ")", "=", "self", ".", "parse_s3_url", "(", "key", ")", "try", ":", "self", ".", "get_conn", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.get_key
Returns a boto3.s3.Object :param key: the path to the key :type key: str :param bucket_name: the name of the bucket :type bucket_name: str
airflow/hooks/S3_hook.py
def get_key(self, key, bucket_name=None): """ Returns a boto3.s3.Object :param key: the path to the key :type key: str :param bucket_name: the name of the bucket :type bucket_name: str """ if not bucket_name: (bucket_name, key) = self.parse_s3...
def get_key(self, key, bucket_name=None): """ Returns a boto3.s3.Object :param key: the path to the key :type key: str :param bucket_name: the name of the bucket :type bucket_name: str """ if not bucket_name: (bucket_name, key) = self.parse_s3...
[ "Returns", "a", "boto3", ".", "s3", ".", "Object" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L204-L218
[ "def", "get_key", "(", "self", ",", "key", ",", "bucket_name", "=", "None", ")", ":", "if", "not", "bucket_name", ":", "(", "bucket_name", ",", "key", ")", "=", "self", ".", "parse_s3_url", "(", "key", ")", "obj", "=", "self", ".", "get_resource_type",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.read_key
Reads a key from S3 :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str
airflow/hooks/S3_hook.py
def read_key(self, key, bucket_name=None): """ Reads a key from S3 :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str """ obj = self.get_key(key, bucket_...
def read_key(self, key, bucket_name=None): """ Reads a key from S3 :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str """ obj = self.get_key(key, bucket_...
[ "Reads", "a", "key", "from", "S3" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L220-L231
[ "def", "read_key", "(", "self", ",", "key", ",", "bucket_name", "=", "None", ")", ":", "obj", "=", "self", ".", "get_key", "(", "key", ",", "bucket_name", ")", "return", "obj", ".", "get", "(", ")", "[", "'Body'", "]", ".", "read", "(", ")", ".",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.select_key
Reads a key with S3 Select. :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which the file is stored :type bucket_name: str :param expression: S3 Select expression :type expression: str :param expression_typ...
airflow/hooks/S3_hook.py
def select_key(self, key, bucket_name=None, expression='SELECT * FROM S3Object', expression_type='SQL', input_serialization=None, output_serialization=None): """ Reads a key with S3 Select. :param key: S3 key that will ...
def select_key(self, key, bucket_name=None, expression='SELECT * FROM S3Object', expression_type='SQL', input_serialization=None, output_serialization=None): """ Reads a key with S3 Select. :param key: S3 key that will ...
[ "Reads", "a", "key", "with", "S3", "Select", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L233-L277
[ "def", "select_key", "(", "self", ",", "key", ",", "bucket_name", "=", "None", ",", "expression", "=", "'SELECT * FROM S3Object'", ",", "expression_type", "=", "'SQL'", ",", "input_serialization", "=", "None", ",", "output_serialization", "=", "None", ")", ":", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.check_for_wildcard_key
Checks that a key matching a wildcard expression exists in a bucket :param wildcard_key: the path to the key :type wildcard_key: str :param bucket_name: the name of the bucket :type bucket_name: str :param delimiter: the delimiter marks key hierarchy :type delimiter: str
airflow/hooks/S3_hook.py
def check_for_wildcard_key(self, wildcard_key, bucket_name=None, delimiter=''): """ Checks that a key matching a wildcard expression exists in a bucket :param wildcard_key: the path to the key :type wildcard_key: str :param bucket_name: the name of...
def check_for_wildcard_key(self, wildcard_key, bucket_name=None, delimiter=''): """ Checks that a key matching a wildcard expression exists in a bucket :param wildcard_key: the path to the key :type wildcard_key: str :param bucket_name: the name of...
[ "Checks", "that", "a", "key", "matching", "a", "wildcard", "expression", "exists", "in", "a", "bucket" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L279-L293
[ "def", "check_for_wildcard_key", "(", "self", ",", "wildcard_key", ",", "bucket_name", "=", "None", ",", "delimiter", "=", "''", ")", ":", "return", "self", ".", "get_wildcard_key", "(", "wildcard_key", "=", "wildcard_key", ",", "bucket_name", "=", "bucket_name"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.get_wildcard_key
Returns a boto3.s3.Object object matching the wildcard expression :param wildcard_key: the path to the key :type wildcard_key: str :param bucket_name: the name of the bucket :type bucket_name: str :param delimiter: the delimiter marks key hierarchy :type delimiter: str
airflow/hooks/S3_hook.py
def get_wildcard_key(self, wildcard_key, bucket_name=None, delimiter=''): """ Returns a boto3.s3.Object object matching the wildcard expression :param wildcard_key: the path to the key :type wildcard_key: str :param bucket_name: the name of the bucket :type bucket_name: ...
def get_wildcard_key(self, wildcard_key, bucket_name=None, delimiter=''): """ Returns a boto3.s3.Object object matching the wildcard expression :param wildcard_key: the path to the key :type wildcard_key: str :param bucket_name: the name of the bucket :type bucket_name: ...
[ "Returns", "a", "boto3", ".", "s3", ".", "Object", "object", "matching", "the", "wildcard", "expression" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L295-L314
[ "def", "get_wildcard_key", "(", "self", ",", "wildcard_key", ",", "bucket_name", "=", "None", ",", "delimiter", "=", "''", ")", ":", "if", "not", "bucket_name", ":", "(", "bucket_name", ",", "wildcard_key", ")", "=", "self", ".", "parse_s3_url", "(", "wild...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.load_file
Loads a local file to S3 :param filename: name of the file to load. :type filename: str :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which to store the file :type bucket_name: str :param replace: A flag t...
airflow/hooks/S3_hook.py
def load_file(self, filename, key, bucket_name=None, replace=False, encrypt=False): """ Loads a local file to S3 :param filename: name of the file to load. :type filename: str :param key: S...
def load_file(self, filename, key, bucket_name=None, replace=False, encrypt=False): """ Loads a local file to S3 :param filename: name of the file to load. :type filename: str :param key: S...
[ "Loads", "a", "local", "file", "to", "S3" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L316-L350
[ "def", "load_file", "(", "self", ",", "filename", ",", "key", ",", "bucket_name", "=", "None", ",", "replace", "=", "False", ",", "encrypt", "=", "False", ")", ":", "if", "not", "bucket_name", ":", "(", "bucket_name", ",", "key", ")", "=", "self", "....
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.load_string
Loads a string to S3 This is provided as a convenience to drop a string in S3. It uses the boto infrastructure to ship a file to s3. :param string_data: str to set as content for the key. :type string_data: str :param key: S3 key that will point to the file :type key: s...
airflow/hooks/S3_hook.py
def load_string(self, string_data, key, bucket_name=None, replace=False, encrypt=False, encoding='utf-8'): """ Loads a string to S3 This is provided as a convenience to drop a...
def load_string(self, string_data, key, bucket_name=None, replace=False, encrypt=False, encoding='utf-8'): """ Loads a string to S3 This is provided as a convenience to drop a...
[ "Loads", "a", "string", "to", "S3" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L352-L382
[ "def", "load_string", "(", "self", ",", "string_data", ",", "key", ",", "bucket_name", "=", "None", ",", "replace", "=", "False", ",", "encrypt", "=", "False", ",", "encoding", "=", "'utf-8'", ")", ":", "self", ".", "load_bytes", "(", "string_data", ".",...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.load_bytes
Loads bytes to S3 This is provided as a convenience to drop a string in S3. It uses the boto infrastructure to ship a file to s3. :param bytes_data: bytes to set as content for the key. :type bytes_data: bytes :param key: S3 key that will point to the file :type key: st...
airflow/hooks/S3_hook.py
def load_bytes(self, bytes_data, key, bucket_name=None, replace=False, encrypt=False): """ Loads bytes to S3 This is provided as a convenience to drop a string in S3. It uses the boto infrastr...
def load_bytes(self, bytes_data, key, bucket_name=None, replace=False, encrypt=False): """ Loads bytes to S3 This is provided as a convenience to drop a string in S3. It uses the boto infrastr...
[ "Loads", "bytes", "to", "S3" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L384-L422
[ "def", "load_bytes", "(", "self", ",", "bytes_data", ",", "key", ",", "bucket_name", "=", "None", ",", "replace", "=", "False", ",", "encrypt", "=", "False", ")", ":", "if", "not", "bucket_name", ":", "(", "bucket_name", ",", "key", ")", "=", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.load_file_obj
Loads a file object to S3 :param file_obj: The file-like object to set as the content for the S3 key. :type file_obj: file-like object :param key: S3 key that will point to the file :type key: str :param bucket_name: Name of the bucket in which to store the file :type bu...
airflow/hooks/S3_hook.py
def load_file_obj(self, file_obj, key, bucket_name=None, replace=False, encrypt=False): """ Loads a file object to S3 :param file_obj: The file-like object to set as the content for the...
def load_file_obj(self, file_obj, key, bucket_name=None, replace=False, encrypt=False): """ Loads a file object to S3 :param file_obj: The file-like object to set as the content for the...
[ "Loads", "a", "file", "object", "to", "S3" ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L424-L457
[ "def", "load_file_obj", "(", "self", ",", "file_obj", ",", "key", ",", "bucket_name", "=", "None", ",", "replace", "=", "False", ",", "encrypt", "=", "False", ")", ":", "if", "not", "bucket_name", ":", "(", "bucket_name", ",", "key", ")", "=", "self", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.copy_object
Creates a copy of an object that is already stored in S3. Note: the S3 connection used here needs to have access to both source and destination bucket/key. :param source_bucket_key: The key of the source object. It can be either full s3:// style url or relative path from root leve...
airflow/hooks/S3_hook.py
def copy_object(self, source_bucket_key, dest_bucket_key, source_bucket_name=None, dest_bucket_name=None, source_version_id=None): """ Creates a copy of an object that is already stored in S3. No...
def copy_object(self, source_bucket_key, dest_bucket_key, source_bucket_name=None, dest_bucket_name=None, source_version_id=None): """ Creates a copy of an object that is already stored in S3. No...
[ "Creates", "a", "copy", "of", "an", "object", "that", "is", "already", "stored", "in", "S3", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L459-L518
[ "def", "copy_object", "(", "self", ",", "source_bucket_key", ",", "dest_bucket_key", ",", "source_bucket_name", "=", "None", ",", "dest_bucket_name", "=", "None", ",", "source_version_id", "=", "None", ")", ":", "if", "dest_bucket_name", "is", "None", ":", "dest...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
S3Hook.delete_objects
:param bucket: Name of the bucket in which you are going to delete object(s) :type bucket: str :param keys: The key(s) to delete from S3 bucket. When ``keys`` is a string, it's supposed to be the key name of the single object to delete. When ``keys`` is a list, it's...
airflow/hooks/S3_hook.py
def delete_objects(self, bucket, keys): """ :param bucket: Name of the bucket in which you are going to delete object(s) :type bucket: str :param keys: The key(s) to delete from S3 bucket. When ``keys`` is a string, it's supposed...
def delete_objects(self, bucket, keys): """ :param bucket: Name of the bucket in which you are going to delete object(s) :type bucket: str :param keys: The key(s) to delete from S3 bucket. When ``keys`` is a string, it's supposed...
[ ":", "param", "bucket", ":", "Name", "of", "the", "bucket", "in", "which", "you", "are", "going", "to", "delete", "object", "(", "s", ")", ":", "type", "bucket", ":", "str", ":", "param", "keys", ":", "The", "key", "(", "s", ")", "to", "delete", ...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/hooks/S3_hook.py#L520-L543
[ "def", "delete_objects", "(", "self", ",", "bucket", ",", "keys", ")", ":", "if", "isinstance", "(", "keys", ",", "list", ")", ":", "keys", "=", "keys", "else", ":", "keys", "=", "[", "keys", "]", "delete_dict", "=", "{", "\"Objects\"", ":", "[", "...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CassandraToGoogleCloudStorageOperator._query_cassandra
Queries cassandra and returns a cursor to the results.
airflow/contrib/operators/cassandra_to_gcs.py
def _query_cassandra(self): """ Queries cassandra and returns a cursor to the results. """ self.hook = CassandraHook(cassandra_conn_id=self.cassandra_conn_id) session = self.hook.get_conn() cursor = session.execute(self.cql) return cursor
def _query_cassandra(self): """ Queries cassandra and returns a cursor to the results. """ self.hook = CassandraHook(cassandra_conn_id=self.cassandra_conn_id) session = self.hook.get_conn() cursor = session.execute(self.cql) return cursor
[ "Queries", "cassandra", "and", "returns", "a", "cursor", "to", "the", "results", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L147-L154
[ "def", "_query_cassandra", "(", "self", ")", ":", "self", ".", "hook", "=", "CassandraHook", "(", "cassandra_conn_id", "=", "self", ".", "cassandra_conn_id", ")", "session", "=", "self", ".", "hook", ".", "get_conn", "(", ")", "cursor", "=", "session", "."...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CassandraToGoogleCloudStorageOperator._write_local_data_files
Takes a cursor, and writes results to a local file. :return: A dictionary where keys are filenames to be used as object names in GCS, and values are file handles to local files that contain the data for the GCS objects.
airflow/contrib/operators/cassandra_to_gcs.py
def _write_local_data_files(self, cursor): """ Takes a cursor, and writes results to a local file. :return: A dictionary where keys are filenames to be used as object names in GCS, and values are file handles to local files that contain the data for the GCS objects. ...
def _write_local_data_files(self, cursor): """ Takes a cursor, and writes results to a local file. :return: A dictionary where keys are filenames to be used as object names in GCS, and values are file handles to local files that contain the data for the GCS objects. ...
[ "Takes", "a", "cursor", "and", "writes", "results", "to", "a", "local", "file", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L156-L180
[ "def", "_write_local_data_files", "(", "self", ",", "cursor", ")", ":", "file_no", "=", "0", "tmp_file_handle", "=", "NamedTemporaryFile", "(", "delete", "=", "True", ")", "tmp_file_handles", "=", "{", "self", ".", "filename", ".", "format", "(", "file_no", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CassandraToGoogleCloudStorageOperator._write_local_schema_file
Takes a cursor, and writes the BigQuery schema for the results to a local file system. :return: A dictionary where key is a filename to be used as an object name in GCS, and values are file handles to local files that contains the BigQuery schema fields in .json format.
airflow/contrib/operators/cassandra_to_gcs.py
def _write_local_schema_file(self, cursor): """ Takes a cursor, and writes the BigQuery schema for the results to a local file system. :return: A dictionary where key is a filename to be used as an object name in GCS, and values are file handles to local files that ...
def _write_local_schema_file(self, cursor): """ Takes a cursor, and writes the BigQuery schema for the results to a local file system. :return: A dictionary where key is a filename to be used as an object name in GCS, and values are file handles to local files that ...
[ "Takes", "a", "cursor", "and", "writes", "the", "BigQuery", "schema", "for", "the", "results", "to", "a", "local", "file", "system", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L182-L199
[ "def", "_write_local_schema_file", "(", "self", ",", "cursor", ")", ":", "schema", "=", "[", "]", "tmp_schema_file_handle", "=", "NamedTemporaryFile", "(", "delete", "=", "True", ")", "for", "name", ",", "type", "in", "zip", "(", "cursor", ".", "column_names...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CassandraToGoogleCloudStorageOperator.convert_user_type
Converts a user type to RECORD that contains n fields, where n is the number of attributes. Each element in the user type class will be converted to its corresponding data type in BQ.
airflow/contrib/operators/cassandra_to_gcs.py
def convert_user_type(cls, name, value): """ Converts a user type to RECORD that contains n fields, where n is the number of attributes. Each element in the user type class will be converted to its corresponding data type in BQ. """ names = value._fields values = ...
def convert_user_type(cls, name, value): """ Converts a user type to RECORD that contains n fields, where n is the number of attributes. Each element in the user type class will be converted to its corresponding data type in BQ. """ names = value._fields values = ...
[ "Converts", "a", "user", "type", "to", "RECORD", "that", "contains", "n", "fields", "where", "n", "is", "the", "number", "of", "attributes", ".", "Each", "element", "in", "the", "user", "type", "class", "will", "be", "converted", "to", "its", "correspondin...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L247-L255
[ "def", "convert_user_type", "(", "cls", ",", "name", ",", "value", ")", ":", "names", "=", "value", ".", "_fields", "values", "=", "[", "cls", ".", "convert_value", "(", "name", ",", "getattr", "(", "value", ",", "name", ")", ")", "for", "name", "in"...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CassandraToGoogleCloudStorageOperator.convert_tuple_type
Converts a tuple to RECORD that contains n fields, each will be converted to its corresponding data type in bq and will be named 'field_<index>', where index is determined by the order of the tuple elements defined in cassandra.
airflow/contrib/operators/cassandra_to_gcs.py
def convert_tuple_type(cls, name, value): """ Converts a tuple to RECORD that contains n fields, each will be converted to its corresponding data type in bq and will be named 'field_<index>', where index is determined by the order of the tuple elements defined in cassandra. """ ...
def convert_tuple_type(cls, name, value): """ Converts a tuple to RECORD that contains n fields, each will be converted to its corresponding data type in bq and will be named 'field_<index>', where index is determined by the order of the tuple elements defined in cassandra. """ ...
[ "Converts", "a", "tuple", "to", "RECORD", "that", "contains", "n", "fields", "each", "will", "be", "converted", "to", "its", "corresponding", "data", "type", "in", "bq", "and", "will", "be", "named", "field_<index", ">", "where", "index", "is", "determined",...
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L258-L266
[ "def", "convert_tuple_type", "(", "cls", ",", "name", ",", "value", ")", ":", "names", "=", "[", "'field_'", "+", "str", "(", "i", ")", "for", "i", "in", "range", "(", "len", "(", "value", ")", ")", "]", "values", "=", "[", "cls", ".", "convert_v...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
CassandraToGoogleCloudStorageOperator.convert_map_type
Converts a map to a repeated RECORD that contains two fields: 'key' and 'value', each will be converted to its corresponding data type in BQ.
airflow/contrib/operators/cassandra_to_gcs.py
def convert_map_type(cls, name, value): """ Converts a map to a repeated RECORD that contains two fields: 'key' and 'value', each will be converted to its corresponding data type in BQ. """ converted_map = [] for k, v in zip(value.keys(), value.values()): conv...
def convert_map_type(cls, name, value): """ Converts a map to a repeated RECORD that contains two fields: 'key' and 'value', each will be converted to its corresponding data type in BQ. """ converted_map = [] for k, v in zip(value.keys(), value.values()): conv...
[ "Converts", "a", "map", "to", "a", "repeated", "RECORD", "that", "contains", "two", "fields", ":", "key", "and", "value", "each", "will", "be", "converted", "to", "its", "corresponding", "data", "type", "in", "BQ", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/operators/cassandra_to_gcs.py#L269-L280
[ "def", "convert_map_type", "(", "cls", ",", "name", ",", "value", ")", ":", "converted_map", "=", "[", "]", "for", "k", ",", "v", "in", "zip", "(", "value", ".", "keys", "(", ")", ",", "value", ".", "values", "(", ")", ")", ":", "converted_map", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597
test
send_email
Send an email with html content using sendgrid. To use this plugin: 0. include sendgrid subpackage as part of your Airflow installation, e.g., pip install 'apache-airflow[sendgrid]' 1. update [email] backend in airflow.cfg, i.e., [email] email_backend = airflow.contrib.utils.sendgrid.send_email...
airflow/contrib/utils/sendgrid.py
def send_email(to, subject, html_content, files=None, dryrun=False, cc=None, bcc=None, mime_subtype='mixed', sandbox_mode=False, **kwargs): """ Send an email with html content using sendgrid. To use this plugin: 0. include sendgrid subpackage as part of your Airflow installation, e.g., ...
def send_email(to, subject, html_content, files=None, dryrun=False, cc=None, bcc=None, mime_subtype='mixed', sandbox_mode=False, **kwargs): """ Send an email with html content using sendgrid. To use this plugin: 0. include sendgrid subpackage as part of your Airflow installation, e.g., ...
[ "Send", "an", "email", "with", "html", "content", "using", "sendgrid", "." ]
apache/airflow
python
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/contrib/utils/sendgrid.py#L33-L102
[ "def", "send_email", "(", "to", ",", "subject", ",", "html_content", ",", "files", "=", "None", ",", "dryrun", "=", "False", ",", "cc", "=", "None", ",", "bcc", "=", "None", ",", "mime_subtype", "=", "'mixed'", ",", "sandbox_mode", "=", "False", ",", ...
b69c686ad8a0c89b9136bb4b31767257eb7b2597