repo_name
stringclasses
4 values
method_name
stringlengths
3
72
method_code
stringlengths
87
3.59k
method_summary
stringlengths
12
196
original_method_code
stringlengths
129
8.98k
method_path
stringlengths
15
136
apache/airflow
BackfillJob._set_unfinished_dag_runs_to_failed
def _set_unfinished_dag_runs_to_failed(self, dag_runs, session=None): for dag_run in dag_runs: dag_run.update_state() if dag_run.state not in State.finished(): dag_run.set_state(State.FAILED) session.merge(dag_run)
Go through the dag_runs and update the state based on the task_instance state. Then set DAG runs that are not finished to failed.
def _set_unfinished_dag_runs_to_failed(self, dag_runs, session=None): """ Go through the dag_runs and update the state based on the task_instance state. Then set DAG runs that are not finished to failed. :param dag_runs: DAG runs :param session: session :return: None ...
airflow/jobs.py
apache/airflow
BackfillJob._execute
def _execute(self, session=None): ti_status = BackfillJob._DagRunTaskStatus() start_date = self.bf_start_date run_dates = self.dag.get_run_dates(start_date=start_date, end_date=self.bf_end_date) if self.run_backwards: task...
Initializes all components required to run a dag for a specified date range and calls helper method to execute the tasks.
def _execute(self, session=None): """ Initializes all components required to run a dag for a specified date range and calls helper method to execute the tasks. """ ti_status = BackfillJob._DagRunTaskStatus() start_date = self.bf_start_date # Get intervals betwee...
airflow/jobs.py
apache/airflow
LocalTaskJob.heartbeat_callback
def heartbeat_callback(self, session=None): if self.terminating: self.task_runner.terminate() return self.task_instance.refresh_from_db() ti = self.task_instance fqdn = get_hostname() same_hostname = fqdn == ti.hostname same_process ...
Self destruct task if state has been moved away from running externally
def heartbeat_callback(self, session=None): """Self destruct task if state has been moved away from running externally""" if self.terminating: # ensure termination if processes are created later self.task_runner.terminate() return self.task_instance.refresh_...
airflow/jobs.py
apache/airflow
CloudSpannerHook._get_client
def _get_client(self, project_id): if not self._client: self._client = Client(project=project_id, credentials=self._get_credentials()) return self._client
Provides a client for interacting with the Cloud Spanner API.
def _get_client(self, project_id): """ Provides a client for interacting with the Cloud Spanner API. :param project_id: The ID of the GCP project. :type project_id: str :return: google.cloud.spanner_v1.client.Client :rtype: object """ if not self._client...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.get_instance
def get_instance(self, instance_id, project_id=None): instance = self._get_client(project_id=project_id).instance(instance_id=instance_id) if not instance.exists(): return None return instance
Gets information about a particular instance.
def get_instance(self, instance_id, project_id=None): """ Gets information about a particular instance. :param project_id: Optional, The ID of the GCP project that owns the Cloud Spanner database. If set to None or missing, the default project_id from the GCP connection is used. ...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook._apply_to_instance
def _apply_to_instance(self, project_id, instance_id, configuration_name, node_count, display_name, func): instance = self._get_client(project_id=project_id).instance( instance_id=instance_id, configuration_name=configuration_name, node_count=node_coun...
Invokes a method on a given instance by applying a specified Callable.
def _apply_to_instance(self, project_id, instance_id, configuration_name, node_count, display_name, func): """ Invokes a method on a given instance by applying a specified Callable. :param project_id: The ID of the GCP project that owns the Cloud Spanner ...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.create_instance
def create_instance(self, instance_id, configuration_name, node_count, display_name, project_id=None): self._apply_to_instance(project_id, instance_id, configuration_name, node_count, display_name, lambda x: x.create())
Creates a new Cloud Spanner instance.
def create_instance(self, instance_id, configuration_name, node_count, display_name, project_id=None): """ Creates a new Cloud Spanner instance. :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :param configuration_name: Th...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.update_instance
def update_instance(self, instance_id, configuration_name, node_count, display_name, project_id=None): return self._apply_to_instance(project_id, instance_id, configuration_name, node_count, display_name, lambda x: x.update())
Updates an existing Cloud Spanner instance.
def update_instance(self, instance_id, configuration_name, node_count, display_name, project_id=None): """ Updates an existing Cloud Spanner instance. :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :param configuration_na...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.delete_instance
def delete_instance(self, instance_id, project_id=None): instance = self._get_client(project_id=project_id).instance(instance_id) try: instance.delete() return except GoogleAPICallError as e: self.log.error('An error occurred: %s. Exiting.', e.message) ...
Deletes an existing Cloud Spanner instance.
def delete_instance(self, instance_id, project_id=None): """ Deletes an existing Cloud Spanner instance. :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :param project_id: Optional, the ID of the GCP project that owns the Cloud Spanner ...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.get_database
def get_database(self, instance_id, database_id, project_id=None): instance = self._get_client(project_id=project_id).instance( instance_id=instance_id) if not instance.exists(): raise AirflowException("The instance {} does not exist in project {} !". ...
Retrieves a database in Cloud Spanner. If the database does not exist in the specified instance, it returns None.
def get_database(self, instance_id, database_id, project_id=None): """ Retrieves a database in Cloud Spanner. If the database does not exist in the specified instance, it returns None. :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :para...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.create_database
def create_database(self, instance_id, database_id, ddl_statements, project_id=None): instance = self._get_client(project_id=project_id).instance( instance_id=instance_id) if not instance.exists(): raise AirflowException("The instance {} does not exist in project {} !". ...
Creates a new database in Cloud Spanner.
def create_database(self, instance_id, database_id, ddl_statements, project_id=None): """ Creates a new database in Cloud Spanner. :type project_id: str :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :param database_id: The ID of the dat...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.update_database
def update_database(self, instance_id, database_id, ddl_statements, project_id=None, operation_id=None): instance = self._get_client(project_id=project_id).instance( instance_id=instance_id) if not instance.exists(): raise AirflowEx...
Updates DDL of a database in Cloud Spanner.
def update_database(self, instance_id, database_id, ddl_statements, project_id=None, operation_id=None): """ Updates DDL of a database in Cloud Spanner. :type project_id: str :param instance_id: The ID of the Cloud Spanner instance. ...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
CloudSpannerHook.delete_database
def delete_database(self, instance_id, database_id, project_id=None): instance = self._get_client(project_id=project_id).\ instance(instance_id=instance_id) if not instance.exists(): raise AirflowException("The instance {} does not exist in project {} !". ...
Drops a database in Cloud Spanner.
def delete_database(self, instance_id, database_id, project_id=None): """ Drops a database in Cloud Spanner. :type project_id: str :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :param database_id: The ID of the database in Cloud Spanner...
airflow/contrib/hooks/gcp_spanner_hook.py
apache/airflow
ImapAttachmentSensor.poke
def poke(self, context): self.log.info('Poking for %s', self.attachment_name) with ImapHook(imap_conn_id=self.conn_id) as imap_hook: return imap_hook.has_mail_attachment( name=self.attachment_name, mail_folder=self.mail_folder, check_regex=sel...
Pokes for a mail attachment on the mail server.
def poke(self, context): """ Pokes for a mail attachment on the mail server. :param context: The context that is being provided when poking. :type context: dict :return: True if attachment with the given name is present and False if not. :rtype: bool """ ...
airflow/contrib/sensors/imap_attachment_sensor.py
apache/airflow
prepare_additional_parameters
def prepare_additional_parameters(additional_properties, language_hints, web_detection_params): if language_hints is None and web_detection_params is None: return additional_properties if additional_properties is None: return {} merged_additional_parameters = deepcopy(additional_properties...
Creates additional_properties parameter based on language_hints, web_detection_params and additional_properties parameters specified by the user
def prepare_additional_parameters(additional_properties, language_hints, web_detection_params): """ Creates additional_properties parameter based on language_hints, web_detection_params and additional_properties parameters specified by the user """ if language_hints is None and web_detection_params ...
airflow/contrib/operators/gcp_vision_operator.py
apache/airflow
CassandraHook.table_exists
def table_exists(self, table): keyspace = self.keyspace if '.' in table: keyspace, table = table.split('.', 1) cluster_metadata = self.get_conn().cluster.metadata return (keyspace in cluster_metadata.keyspaces and table in cluster_metadata.keyspaces[keyspace]....
Checks if a table exists in Cassandra
def table_exists(self, table): """ Checks if a table exists in Cassandra :param table: Target Cassandra table. Use dot notation to target a specific keyspace. :type table: str """ keyspace = self.keyspace if '.' in table: keyspac...
airflow/contrib/hooks/cassandra_hook.py
apache/airflow
CassandraHook.record_exists
def record_exists(self, table, keys): keyspace = self.keyspace if '.' in table: keyspace, table = table.split('.', 1) ks = " AND ".join("{}=%({})s".format(key, key) for key in keys.keys()) cql = "SELECT * FROM {keyspace}.{table} WHERE {keys}".format( keyspace=keys...
Checks if a record exists in Cassandra
def record_exists(self, table, keys): """ Checks if a record exists in Cassandra :param table: Target Cassandra table. Use dot notation to target a specific keyspace. :type table: str :param keys: The keys and their values to check the existence. :t...
airflow/contrib/hooks/cassandra_hook.py
apache/airflow
SparkSubmitHook._build_track_driver_status_command
def _build_track_driver_status_command(self): connection_cmd = self._get_spark_binary_path() connection_cmd += ["--master", self._connection['master']] if self._driver_id: connection_cmd += ["--status", self._driver_id] else: raise AirflowExcep...
Construct the command to poll the driver status.
def _build_track_driver_status_command(self): """ Construct the command to poll the driver status. :return: full command to be executed """ connection_cmd = self._get_spark_binary_path() # The url ot the spark master connection_cmd += ["--master", self._connecti...
airflow/contrib/hooks/spark_submit_hook.py
apache/airflow
SparkSubmitHook.submit
def submit(self, application="", **kwargs): spark_submit_cmd = self._build_spark_submit_command(application) if hasattr(self, '_env'): env = os.environ.copy() env.update(self._env) kwargs["env"] = env self._submit_sp = subprocess.Popen(spark_submit_cmd, ...
Remote Popen to execute the spark-submit job
def submit(self, application="", **kwargs): """ Remote Popen to execute the spark-submit job :param application: Submitted application, jar or py file :type application: str :param kwargs: extra arguments to Popen (see subprocess.Popen) """ spark_submit_cmd = sel...
airflow/contrib/hooks/spark_submit_hook.py
apache/airflow
SparkSubmitHook._process_spark_submit_log
def _process_spark_submit_log(self, itr): for line in itr: line = line.strip() if self._is_yarn and self._connection['deploy_mode'] == 'cluster': match = re.search('(application[0-9_]+)', line) if match: ...
Processes the log files and extracts useful information out of it. If the deploy-mode is 'client', log the output of the submit command as those are the output logs of the Spark worker directly.
def _process_spark_submit_log(self, itr): """ Processes the log files and extracts useful information out of it. If the deploy-mode is 'client', log the output of the submit command as those are the output logs of the Spark worker directly. Remark: If the driver needs to be tra...
airflow/contrib/hooks/spark_submit_hook.py
apache/airflow
SparkSubmitHook._process_spark_status_log
def _process_spark_status_log(self, itr): for line in itr: line = line.strip() if "driverState" in line: self._driver_status = line.split(' : ')[1] \ .replace(',', '').replace('\"', '').strip() self.log.debug("spark ...
parses the logs of the spark driver status query process
def _process_spark_status_log(self, itr): """ parses the logs of the spark driver status query process :param itr: An iterator which iterates over the input of the subprocess """ # Consume the iterator for line in itr: line = line.strip() # Check...
airflow/contrib/hooks/spark_submit_hook.py
apache/airflow
get_task_runner
def get_task_runner(local_task_job): if _TASK_RUNNER == "StandardTaskRunner": return StandardTaskRunner(local_task_job) elif _TASK_RUNNER == "CgroupTaskRunner": from airflow.contrib.task_runner.cgroup_task_runner import CgroupTaskRunner return CgroupTaskRunner(local_task_job) else: ...
Get the task runner that can be used to run the given job.
def get_task_runner(local_task_job): """ Get the task runner that can be used to run the given job. :param local_task_job: The LocalTaskJob associated with the TaskInstance that needs to be executed. :type local_task_job: airflow.jobs.LocalTaskJob :return: The task runner to use to run the ...
airflow/task/task_runner/__init__.py
apache/airflow
AWSBatchOperator._wait_for_task_ended
def _wait_for_task_ended(self): try: waiter = self.client.get_waiter('job_execution_complete') waiter.config.max_attempts = sys.maxsize waiter.wait(jobs=[self.jobId]) except ValueError: retry = True retries = 0 while...
Try to use a waiter from the below pull request
def _wait_for_task_ended(self): """ Try to use a waiter from the below pull request * https://github.com/boto/botocore/pull/1307 If the waiter is not available apply a exponential backoff * docs.aws.amazon.com/general/latest/gr/api-retries.html """ try:...
airflow/contrib/operators/awsbatch_operator.py
apache/airflow
MySqlToGoogleCloudStorageOperator._query_mysql
def _query_mysql(self): mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) conn = mysql.get_conn() cursor = conn.cursor() cursor.execute(self.sql) return cursor
Queries mysql and returns a cursor to the results.
def _query_mysql(self): """ Queries mysql and returns a cursor to the results. """ mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) conn = mysql.get_conn() cursor = conn.cursor() cursor.execute(self.sql) return cursor
airflow/contrib/operators/mysql_to_gcs.py
apache/airflow
MySqlToGoogleCloudStorageOperator._configure_csv_file
def _configure_csv_file(self, file_handle, schema): csv_writer = csv.writer(file_handle, encoding='utf-8', delimiter=self.field_delimiter) csv_writer.writerow(schema) return csv_writer
Configure a csv writer with the file_handle and write schema as headers for the new file.
def _configure_csv_file(self, file_handle, schema): """Configure a csv writer with the file_handle and write schema as headers for the new file. """ csv_writer = csv.writer(file_handle, encoding='utf-8', delimiter=self.field_delimiter) csv_writer.w...
airflow/contrib/operators/mysql_to_gcs.py
apache/airflow
MySqlToGoogleCloudStorageOperator._write_local_schema_file
def _write_local_schema_file(self, cursor): schema_str = None schema_file_mime_type = 'application/json' tmp_schema_file_handle = NamedTemporaryFile(delete=True) if self.schema is not None and isinstance(self.schema, string_types): schema_str = self.schema.encode('utf-8') ...
Takes a cursor, and writes the BigQuery schema in .json format for the results to a local file system.
def _write_local_schema_file(self, cursor): """ Takes a cursor, and writes the BigQuery schema in .json format for the results to a local file system. :return: A dictionary where key is a filename to be used as an object name in GCS, and values are file handles to local file...
airflow/contrib/operators/mysql_to_gcs.py
apache/airflow
MySqlToGoogleCloudStorageOperator._get_col_type_dict
def _get_col_type_dict(self): schema = [] if isinstance(self.schema, string_types): schema = json.loads(self.schema) elif isinstance(self.schema, list): schema = self.schema elif self.schema is not None: self.log.warn('Using default schema due to unexp...
Return a dict of column name and column type based on self.schema if not None.
def _get_col_type_dict(self): """ Return a dict of column name and column type based on self.schema if not None. """ schema = [] if isinstance(self.schema, string_types): schema = json.loads(self.schema) elif isinstance(self.schema, list): schema =...
airflow/contrib/operators/mysql_to_gcs.py
apache/airflow
MySqlToGoogleCloudStorageOperator.type_map
def type_map(cls, mysql_type): d = { FIELD_TYPE.INT24: 'INTEGER', FIELD_TYPE.TINY: 'INTEGER', FIELD_TYPE.BIT: 'INTEGER', FIELD_TYPE.DATETIME: 'TIMESTAMP', FIELD_TYPE.DATE: 'TIMESTAMP', FIELD_TYPE.DECIMAL: 'FLOAT', FIELD_TYPE.NEW...
Helper function that maps from MySQL fields to BigQuery fields. Used when a schema_filename is set.
def type_map(cls, mysql_type): """ Helper function that maps from MySQL fields to BigQuery fields. Used when a schema_filename is set. """ d = { FIELD_TYPE.INT24: 'INTEGER', FIELD_TYPE.TINY: 'INTEGER', FIELD_TYPE.BIT: 'INTEGER', FIE...
airflow/contrib/operators/mysql_to_gcs.py
apache/airflow
SqoopOperator.execute
def execute(self, context): self.hook = SqoopHook( conn_id=self.conn_id, verbose=self.verbose, num_mappers=self.num_mappers, hcatalog_database=self.hcatalog_database, hcatalog_table=self.hcatalog_table, properties=self.properties ) ...
Execute sqoop job
def execute(self, context): """ Execute sqoop job """ self.hook = SqoopHook( conn_id=self.conn_id, verbose=self.verbose, num_mappers=self.num_mappers, hcatalog_database=self.hcatalog_database, hcatalog_table=self.hcatalog_table,...
airflow/contrib/operators/sqoop_operator.py
apache/airflow
apply_lineage
def apply_lineage(func): backend = _get_backend() @wraps(func) def wrapper(self, context, *args, **kwargs): self.log.debug("Backend: %s, Lineage called with inlets: %s, outlets: %s", backend, self.inlets, self.outlets) ret_val = func(self, context, *args, **kwargs) ...
Saves the lineage to XCom and if configured to do so sends it to the backend.
def apply_lineage(func): """ Saves the lineage to XCom and if configured to do so sends it to the backend. """ backend = _get_backend() @wraps(func) def wrapper(self, context, *args, **kwargs): self.log.debug("Backend: %s, Lineage called with inlets: %s, outlets: %s", ...
airflow/lineage/__init__.py
apache/airflow
date_range
def date_range(start_date, end_date=None, num=None, delta=None): if not delta: return [] if end_date and start_date > end_date: raise Exception("Wait. start_date needs to be before end_date") if end_date and num: raise Exception("Wait. Either specify end_date OR num") if not end_...
Get a set of dates as a list based on a start, end and delta, delta can be something that can be added to `datetime.datetime` or a cron expression as a `str`
def date_range(start_date, end_date=None, num=None, delta=None): """ Get a set of dates as a list based on a start, end and delta, delta can be something that can be added to `datetime.datetime` or a cron expression as a `str` :Example:: date_range(datetime(2016, 1, 1), datetime(2016, 1, 3...
airflow/utils/dates.py
apache/airflow
scale_time_units
def scale_time_units(time_seconds_arr, unit): if unit == 'minutes': return list(map(lambda x: x * 1.0 / 60, time_seconds_arr)) elif unit == 'hours': return list(map(lambda x: x * 1.0 / (60 * 60), time_seconds_arr)) elif unit == 'days': return list(map(lambda x: x * 1.0 / (24 * 60 * 6...
Convert an array of time durations in seconds to the specified time unit.
def scale_time_units(time_seconds_arr, unit): """ Convert an array of time durations in seconds to the specified time unit. """ if unit == 'minutes': return list(map(lambda x: x * 1.0 / 60, time_seconds_arr)) elif unit == 'hours': return list(map(lambda x: x * 1.0 / (60 * 60), time_s...
airflow/utils/dates.py
apache/airflow
days_ago
def days_ago(n, hour=0, minute=0, second=0, microsecond=0): today = timezone.utcnow().replace( hour=hour, minute=minute, second=second, microsecond=microsecond) return today - timedelta(days=n)
Get a datetime object representing `n` days ago. By default the time is set to midnight.
def days_ago(n, hour=0, minute=0, second=0, microsecond=0): """ Get a datetime object representing `n` days ago. By default the time is set to midnight. """ today = timezone.utcnow().replace( hour=hour, minute=minute, second=second, microsecond=microsecond) return...
airflow/utils/dates.py
apache/airflow
AirflowSecurityManager.init_role
def init_role(self, role_name, role_vms, role_perms): pvms = self.get_session.query(sqla_models.PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_menu] role = self.find_role(role_name) if not role: role = self.add_role(role_name) if len(role.p...
Initialize the role with the permissions and related view-menus.
def init_role(self, role_name, role_vms, role_perms): """ Initialize the role with the permissions and related view-menus. :param role_name: :param role_vms: :param role_perms: :return: """ pvms = self.get_session.query(sqla_models.PermissionView).all() ...
airflow/www/security.py
apache/airflow
AirflowSecurityManager.delete_role
def delete_role(self, role_name): session = self.get_session role = session.query(sqla_models.Role)\ .filter(sqla_models.Role.name == role_name)\ .first() if role: self.log.info("Deleting role '%s'", role_name) session.delete(ro...
Delete the given Role
def delete_role(self, role_name): """Delete the given Role :param role_name: the name of a role in the ab_role table """ session = self.get_session role = session.query(sqla_models.Role)\ .filter(sqla_models.Role.name == role_name)\ .f...
airflow/www/security.py
apache/airflow
AirflowSecurityManager.get_user_roles
def get_user_roles(self, user=None): if user is None: user = g.user if user.is_anonymous: public_role = appbuilder.config.get('AUTH_ROLE_PUBLIC') return [appbuilder.security_manager.find_role(public_role)] \ if public_role else [] return user.r...
Get all the roles associated with the user.
def get_user_roles(self, user=None): """ Get all the roles associated with the user. :param user: the ab_user in FAB model. :return: a list of roles associated with the user. """ if user is None: user = g.user if user.is_anonymous: public_...
airflow/www/security.py
apache/airflow
AirflowSecurityManager._has_role
def _has_role(self, role_name_or_list): if not isinstance(role_name_or_list, list): role_name_or_list = [role_name_or_list] return any( [r.name in role_name_or_list for r in self.get_user_roles()])
Whether the user has this role name
def _has_role(self, role_name_or_list): """ Whether the user has this role name """ if not isinstance(role_name_or_list, list): role_name_or_list = [role_name_or_list] return any( [r.name in role_name_or_list for r in self.get_user_roles()])
airflow/www/security.py
apache/airflow
AirflowSecurityManager._has_perm
def _has_perm(self, permission_name, view_menu_name): if hasattr(self, 'perms'): if (permission_name, view_menu_name) in self.perms: return True self._get_and_cache_perms() return (permission_name, view_menu_name) in self.perms
Whether the user has this perm
def _has_perm(self, permission_name, view_menu_name): """ Whether the user has this perm """ if hasattr(self, 'perms'): if (permission_name, view_menu_name) in self.perms: return True # rebuild the permissions set self._get_and_cache_perms() ...
airflow/www/security.py
apache/airflow
AirflowSecurityManager.clean_perms
def clean_perms(self): self.log.debug('Cleaning faulty perms') sesh = self.get_session pvms = ( sesh.query(sqla_models.PermissionView) .filter(or_( sqla_models.PermissionView.permission == None, sqla_models.PermissionView.view_menu == Non...
FAB leaves faulty permissions that need to be cleaned up
def clean_perms(self): """ FAB leaves faulty permissions that need to be cleaned up """ self.log.debug('Cleaning faulty perms') sesh = self.get_session pvms = ( sesh.query(sqla_models.PermissionView) .filter(or_( sqla_models.Permiss...
airflow/www/security.py
apache/airflow
AirflowSecurityManager._merge_perm
def _merge_perm(self, permission_name, view_menu_name): permission = self.find_permission(permission_name) view_menu = self.find_view_menu(view_menu_name) pv = None if permission and view_menu: pv = self.get_session.query(self.permissionview_model).filter_by( ...
Add the new permission , view_menu to ab_permission_view_role if not exists. It will add the related entry to ab_permission and ab_view_menu two meta tables as well.
def _merge_perm(self, permission_name, view_menu_name): """ Add the new permission , view_menu to ab_permission_view_role if not exists. It will add the related entry to ab_permission and ab_view_menu two meta tables as well. :param permission_name: Name of the permission. ...
airflow/www/security.py
apache/airflow
AirflowSecurityManager.update_admin_perm_view
def update_admin_perm_view(self): pvms = self.get_session.query(sqla_models.PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_menu] admin = self.find_role('Admin') admin.permissions = list(set(admin.permissions) | set(pvms)) self.get_session.commit()
Admin should have all the permission-views. Add the missing ones to the table for admin.
def update_admin_perm_view(self): """ Admin should have all the permission-views. Add the missing ones to the table for admin. :return: None. """ pvms = self.get_session.query(sqla_models.PermissionView).all() pvms = [p for p in pvms if p.permission and p.view_me...
airflow/www/security.py
apache/airflow
AirflowSecurityManager._sync_dag_view_permissions
def _sync_dag_view_permissions(self, dag_id, access_control): def _get_or_create_dag_permission(perm_name): dag_perm = self.find_permission_view_menu(perm_name, dag_id) if not dag_perm: self.log.info( "Creating new permission '%s' on view '%s'", ...
Set the access policy on the given DAG's ViewModel.
def _sync_dag_view_permissions(self, dag_id, access_control): """Set the access policy on the given DAG's ViewModel. :param dag_id: the ID of the DAG whose permissions should be updated :type dag_id: string :param access_control: a dict where each key is a rolename and each ...
airflow/www/security.py
apache/airflow
AirflowSecurityManager.create_perm_vm_for_all_dag
def create_perm_vm_for_all_dag(self): for dag_vm in self.DAG_VMS: for perm in self.DAG_PERMS: self._merge_perm(permission_name=perm, view_menu_name=dag_vm)
Create perm-vm if not exist and insert into FAB security model for all-dags.
def create_perm_vm_for_all_dag(self): """ Create perm-vm if not exist and insert into FAB security model for all-dags. """ # create perm for global logical dag for dag_vm in self.DAG_VMS: for perm in self.DAG_PERMS: self._merge_perm(permission_name=per...
airflow/www/security.py
apache/airflow
get_fernet
def get_fernet(): global _fernet log = LoggingMixin().log if _fernet: return _fernet try: from cryptography.fernet import Fernet, MultiFernet, InvalidToken global InvalidFernetToken InvalidFernetToken = InvalidToken except BuiltinImportError: log.warning( ...
Deferred load of Fernet key. This function could fail either because Cryptography is not installed or because the Fernet key is invalid.
def get_fernet(): """ Deferred load of Fernet key. This function could fail either because Cryptography is not installed or because the Fernet key is invalid. :return: Fernet object :raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet """ global _fern...
airflow/models/crypto.py
apache/airflow
AwsGlueCatalogPartitionSensor.poke
def poke(self, context): if '.' in self.table_name: self.database_name, self.table_name = self.table_name.split('.') self.log.info( 'Poking for table %s. %s, expression %s', self.database_name, self.table_name, self.expression ) return self.get_hook().check_for_p...
Checks for existence of the partition in the AWS Glue Catalog table
def poke(self, context): """ Checks for existence of the partition in the AWS Glue Catalog table """ if '.' in self.table_name: self.database_name, self.table_name = self.table_name.split('.') self.log.info( 'Poking for table %s. %s, expression %s', self.d...
airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py
apache/airflow
AwsGlueCatalogPartitionSensor.get_hook
def get_hook(self): if not hasattr(self, 'hook'): from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook self.hook = AwsGlueCatalogHook( aws_conn_id=self.aws_conn_id, region_name=self.region_name) return self.hook
Gets the AwsGlueCatalogHook
def get_hook(self): """ Gets the AwsGlueCatalogHook """ if not hasattr(self, 'hook'): from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook self.hook = AwsGlueCatalogHook( aws_conn_id=self.aws_conn_id, region_na...
airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py
apache/airflow
SQSSensor.poke
def poke(self, context): sqs_hook = SQSHook(aws_conn_id=self.aws_conn_id) sqs_conn = sqs_hook.get_conn() self.log.info('SQSSensor checking for message on queue: %s', self.sqs_queue) messages = sqs_conn.receive_message(QueueUrl=self.sqs_queue, ...
Check for message on subscribed queue and write to xcom the message with key ``messages``
def poke(self, context): """ Check for message on subscribed queue and write to xcom the message with key ``messages`` :param context: the context object :type context: dict :return: ``True`` if message is available or ``False`` """ sqs_hook = SQSHook(aws_conn_i...
airflow/contrib/sensors/aws_sqs_sensor.py
apache/airflow
WebHDFSHook.get_conn
def get_conn(self): connections = self.get_connections(self.webhdfs_conn_id) for connection in connections: try: self.log.debug('Trying namenode %s', connection.host) client = self._get_client(connection) client.status('/') sel...
Establishes a connection depending on the security mode set via config or environment variable.
def get_conn(self): """ Establishes a connection depending on the security mode set via config or environment variable. :return: a hdfscli InsecureClient or KerberosClient object. :rtype: hdfs.InsecureClient or hdfs.ext.kerberos.KerberosClient """ connections = self.get_...
airflow/hooks/webhdfs_hook.py
apache/airflow
WebHDFSHook.check_for_path
def check_for_path(self, hdfs_path): conn = self.get_conn() status = conn.status(hdfs_path, strict=False) return bool(status)
Check for the existence of a path in HDFS by querying FileStatus.
def check_for_path(self, hdfs_path): """ Check for the existence of a path in HDFS by querying FileStatus. :param hdfs_path: The path to check. :type hdfs_path: str :return: True if the path exists and False if not. :rtype: bool """ conn = self.get_conn()...
airflow/hooks/webhdfs_hook.py
apache/airflow
WebHDFSHook.load_file
def load_file(self, source, destination, overwrite=True, parallelism=1, **kwargs):
    r"""
    Uploads a file to HDFS.

    Fix: the original assigned the connection to ``rconn`` but then called
    ``conn.upload(...)``, raising NameError on every invocation.

    :param source: Local path to file or folder. If a folder, all files
        inside it will be uploaded.
    :param destination: Target HDFS path. If it already exists and is a
        directory, files will be uploaded inside.
    :param overwrite: Overwrite any existing file or directory.
    :param parallelism: Number of threads to use for parallelization
        (forwarded to hdfscli as ``n_threads``).
    :param \**kwargs: Keyword arguments forwarded to
        :meth:`hdfs.client.Client.upload`.
    """
    conn = self.get_conn()
    conn.upload(hdfs_path=destination,
                local_path=source,
                overwrite=overwrite,
                n_threads=parallelism,
                **kwargs)
    self.log.debug("Uploaded file %s to %s", source, destination)
r""" Uploads a file to HDFS.
def load_file(self, source, destination, overwrite=True, parallelism=1, **kwargs): r""" Uploads a file to HDFS. :param source: Local path to file or folder. If it's a folder, all the files inside of it will be uploaded. .. note:: This implies that folders empty of files ...
airflow/hooks/webhdfs_hook.py
apache/airflow
PinotDbApiHook.get_conn
def get_conn(self): conn = self.get_connection(self.pinot_broker_conn_id) pinot_broker_conn = connect( host=conn.host, port=conn.port, path=conn.extra_dejson.get('endpoint', '/pql'), scheme=conn.extra_dejson.get('schema', 'http') ) self.log...
Establish a connection to pinot broker through pinot dbapi.
def get_conn(self): """ Establish a connection to pinot broker through pinot dbqpi. """ conn = self.get_connection(self.pinot_broker_conn_id) pinot_broker_conn = connect( host=conn.host, port=conn.port, path=conn.extra_dejson.get('endpoint', '/...
airflow/contrib/hooks/pinot_hook.py
apache/airflow
PinotDbApiHook.get_uri
def get_uri(self): conn = self.get_connection(getattr(self, self.conn_name_attr)) host = conn.host if conn.port is not None: host += ':{port}'.format(port=conn.port) conn_type = 'http' if not conn.conn_type else conn.conn_type endpoint = conn.extra_dejson.get('endpoin...
Get the connection uri for pinot broker, e.g.:
def get_uri(self): """ Get the connection uri for pinot broker. e.g: http://localhost:9000/pql """ conn = self.get_connection(getattr(self, self.conn_name_attr)) host = conn.host if conn.port is not None: host += ':{port}'.format(port=conn.port) ...
airflow/contrib/hooks/pinot_hook.py
apache/airflow
TransferJobPreprocessor._convert_date_to_dict
def _convert_date_to_dict(field_date):
    """
    Convert native python ``datetime.date`` object
    to a format supported by the API
    """
    converted = {}
    converted[DAY] = field_date.day
    converted[MONTH] = field_date.month
    converted[YEAR] = field_date.year
    return converted
Convert native python ``datetime.date`` object to a format supported by the API
def _convert_date_to_dict(field_date): """ Convert native python ``datetime.date`` object to a format supported by the API """ return {DAY: field_date.day, MONTH: field_date.month, YEAR: field_date.year}
airflow/contrib/operators/gcp_transfer_operator.py
apache/airflow
TransferJobPreprocessor._convert_time_to_dict
def _convert_time_to_dict(time):
    """
    Convert native python ``datetime.time`` object
    to a format supported by the API
    """
    converted = {}
    converted[HOURS] = time.hour
    converted[MINUTES] = time.minute
    converted[SECONDS] = time.second
    return converted
Convert native python ``datetime.time`` object to a format supported by the API
def _convert_time_to_dict(time): """ Convert native python ``datetime.time`` object to a format supported by the API """ return {HOURS: time.hour, MINUTES: time.minute, SECONDS: time.second}
airflow/contrib/operators/gcp_transfer_operator.py
apache/airflow
DbApiHook.get_pandas_df
def get_pandas_df(self, sql, parameters=None):
    """
    Execute the sql and return the result set as a pandas DataFrame.

    :param sql: the sql statement to be executed (str) or a list of
        sql statements to execute
    :param parameters: the parameters to render the SQL query with
    """
    import pandas.io.sql as psql

    connection = self.get_conn()
    # Guarantee the DBAPI connection is closed even if read_sql raises.
    with closing(connection):
        return psql.read_sql(sql, con=connection, params=parameters)
Executes the sql and returns a pandas dataframe
def get_pandas_df(self, sql, parameters=None): """ Executes the sql and returns a pandas dataframe :param sql: the sql statement to be executed (str) or a list of sql statements to execute :type sql: str or list :param parameters: The parameters to render the SQL que...
airflow/hooks/dbapi_hook.py
apache/airflow
DbApiHook.run
def run(self, sql, autocommit=False, parameters=None): if isinstance(sql, basestring): sql = [sql] with closing(self.get_conn()) as conn: if self.supports_autocommit: self.set_autocommit(conn, autocommit) with closing(conn.cursor()) as cur: ...
Runs a command or a list of commands. Pass a list of sql statements to the sql parameter to get them to execute sequentially
def run(self, sql, autocommit=False, parameters=None): """ Runs a command or a list of commands. Pass a list of sql statements to the sql parameter to get them to execute sequentially :param sql: the sql statement to be executed (str) or a list of sql statements to e...
airflow/hooks/dbapi_hook.py
apache/airflow
DbApiHook.set_autocommit
def set_autocommit(self, conn, autocommit): if not self.supports_autocommit and autocommit: self.log.warn( ("%s connection doesn't support " "autocommit but autocommit activated."), getattr(self, self.conn_name_attr)) conn.autocommit = autocom...
Sets the autocommit flag on the connection
def set_autocommit(self, conn, autocommit): """ Sets the autocommit flag on the connection """ if not self.supports_autocommit and autocommit: self.log.warn( ("%s connection doesn't support " "autocommit but autocommit activated."), ...
airflow/hooks/dbapi_hook.py
apache/airflow
DbApiHook.insert_rows
def insert_rows(self, table, rows, target_fields=None, commit_every=1000, replace=False): if target_fields: target_fields = ", ".join(target_fields) target_fields = "({})".format(target_fields) else: target_fields = '' i = 0 with cl...
A generic way to insert a set of tuples into a table, a new transaction is created every commit_every rows
def insert_rows(self, table, rows, target_fields=None, commit_every=1000, replace=False): """ A generic way to insert a set of tuples into a table, a new transaction is created every commit_every rows :param table: Name of the target table :type table: str ...
airflow/hooks/dbapi_hook.py
apache/airflow
Airflow.health
def health(self, session=None): BJ = jobs.BaseJob payload = {} scheduler_health_check_threshold = timedelta(seconds=conf.getint('scheduler', 'scheduler_health_check_threshold' ...
An endpoint helping check the health status of the Airflow instance, including metadatabase and scheduler.
def health(self, session=None): """ An endpoint helping check the health status of the Airflow instance, including metadatabase and scheduler. """ BJ = jobs.BaseJob payload = {} scheduler_health_check_threshold = timedelta(seconds=conf.getint('scheduler', ...
airflow/www/views.py
apache/airflow
Airflow.extra_links
def extra_links(self): dag_id = request.args.get('dag_id') task_id = request.args.get('task_id') execution_date = request.args.get('execution_date') link_name = request.args.get('link_name') dttm = airflow.utils.timezone.parse(execution_date) dag = dagbag.get_dag(dag_id) ...
A restful endpoint that returns external links for a given Operator It queries the operator that sent the request for the links it wishes to provide for a given external link name.
def extra_links(self): """ A restful endpoint that returns external links for a given Operator It queries the operator that sent the request for the links it wishes to provide for a given external link name. API: GET Args: dag_id: The id of the dag containing the task i...
airflow/www/views.py
apache/airflow
CloudantHook.get_conn
def get_conn(self):
    """
    Opens a connection to the cloudant service and closes it automatically
    if used as context manager.

    :return: a cloudant session object
    """
    connection = self.get_connection(self.cloudant_conn_id)
    self._validate_connection(connection)
    return cloudant(user=connection.login,
                    passwd=connection.password,
                    account=connection.host)
Opens a connection to the cloudant service and closes it automatically if used as context manager.
def get_conn(self): """ Opens a connection to the cloudant service and closes it automatically if used as context manager. .. note:: In the connection form: - 'host' equals the 'Account' (optional) - 'login' equals the 'Username (or API Key)' (required) ...
airflow/contrib/hooks/cloudant_hook.py
apache/airflow
SlackWebhookOperator.execute
def execute(self, context): self.hook = SlackWebhookHook( self.http_conn_id, self.webhook_token, self.message, self.attachments, self.channel, self.username, self.icon_emoji, self.link_names, self.proxy ...
Call the SlackWebhookHook to post the provided Slack message
def execute(self, context): """ Call the SlackWebhookHook to post the provided Slack message """ self.hook = SlackWebhookHook( self.http_conn_id, self.webhook_token, self.message, self.attachments, self.channel, self...
airflow/contrib/operators/slack_webhook_operator.py
apache/airflow
GoogleCloudBaseHook.catch_http_exception
def catch_http_exception(func): @functools.wraps(func) def wrapper_decorator(self, *args, **kwargs): try: return func(self, *args, **kwargs) except GoogleAPICallError as e: if isinstance(e, AlreadyExists): raise e ...
Function decorator that intercepts HTTP Errors and raises AirflowException with more informative message.
def catch_http_exception(func): """ Function decorator that intercepts HTTP Errors and raises AirflowException with more informative message. """ @functools.wraps(func) def wrapper_decorator(self, *args, **kwargs): try: return func(self, *args...
airflow/contrib/hooks/gcp_api_base_hook.py
apache/airflow
State.unfinished
def unfinished(cls):
    """
    A list of states indicating that a task either has not completed
    a run or has not even started.
    """
    return [
        getattr(cls, name)
        for name in ('NONE', 'SCHEDULED', 'QUEUED', 'RUNNING',
                     'SHUTDOWN', 'UP_FOR_RETRY', 'UP_FOR_RESCHEDULE')
    ]
A list of states indicating that a task either has not completed a run or has not even started.
def unfinished(cls): """ A list of states indicating that a task either has not completed a run or has not even started. """ return [ cls.NONE, cls.SCHEDULED, cls.QUEUED, cls.RUNNING, cls.SHUTDOWN, cls.UP_FOR...
airflow/utils/state.py
apache/airflow
SparkSqlHook._prepare_command
def _prepare_command(self, cmd): connection_cmd = ["spark-sql"] if self._conf: for conf_el in self._conf.split(","): connection_cmd += ["--conf", conf_el] if self._total_executor_cores: connection_cmd += ["--total-executor-cores", str(self._total_executor_...
Construct the spark-sql command to execute. Verbose output is enabled as default.
def _prepare_command(self, cmd): """ Construct the spark-sql command to execute. Verbose output is enabled as default. :param cmd: command to append to the spark-sql command :type cmd: str :return: full command to be executed """ connection_cmd = ["spark-...
airflow/contrib/hooks/spark_sql_hook.py
Azure/azure-sdk-for-python
Message.schedule
def schedule(self, schedule_time): if not self.properties.message_id: self.properties.message_id = str(uuid.uuid4()) if not self.message.annotations: self.message.annotations = {} self.message.annotations[types.AMQPSymbol(self._x_OPT_SCHEDULED_ENQUEUE_TIME)] = schedule_ti...
Add a specific enqueue time to the message.
def schedule(self, schedule_time): """Add a specific enqueue time to the message. :param schedule_time: The scheduled time to enqueue the message. :type schedule_time: ~datetime.datetime """ if not self.properties.message_id: self.properties.message_id = str(uuid.uui...
azure-servicebus/azure/servicebus/common/message.py
Azure/azure-sdk-for-python
VpnSitesConfigurationOperations.download
def download( self, resource_group_name, virtual_wan_name, vpn_sites=None, output_blob_sas_url=None, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._download_initial( resource_group_name=resource_group_name, virtual_wan_name=virtual_wan_n...
Gives the sas-url to download the configurations for vpn-sites in a resource group.
def download( self, resource_group_name, virtual_wan_name, vpn_sites=None, output_blob_sas_url=None, custom_headers=None, raw=False, polling=True, **operation_config): """Gives the sas-url to download the configurations for vpn-sites in a resource group. :param resource_group_name: ...
azure-mgmt-network/azure/mgmt/network/v2018_04_01/operations/vpn_sites_configuration_operations.py
Azure/azure-sdk-for-python
guess_service_info_from_path
def guess_service_info_from_path(spec_path): spec_path = spec_path.lower() spec_path = spec_path[spec_path.index("specification"):] split_spec_path = spec_path.split("/") rp_name = split_spec_path[1] is_arm = split_spec_path[2] == "resource-manager" return { "rp_name": rp_name, ...
Guess Python Autorest options based on the spec path. Expected
def guess_service_info_from_path(spec_path): """Guess Python Autorest options based on the spec path. Expected path: specification/compute/resource-manager/readme.md """ spec_path = spec_path.lower() spec_path = spec_path[spec_path.index("specification"):] # Might raise and it's ok split_sp...
scripts/build_sdk.py
Azure/azure-sdk-for-python
PowerShellOperations.update_command
def update_command( self, resource_group_name, node_name, session, pssession, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._update_command_initial( resource_group_name=resource_group_name, node_name=node_name, session=sessio...
Updates a running PowerShell command with more data.
def update_command( self, resource_group_name, node_name, session, pssession, custom_headers=None, raw=False, polling=True, **operation_config): """Updates a running PowerShell command with more data. :param resource_group_name: The resource group name uniquely identifies the resou...
azure-mgmt-servermanager/azure/mgmt/servermanager/operations/power_shell_operations.py
Azure/azure-sdk-for-python
ApplicationDefinitionsOperations.delete_by_id
def delete_by_id( self, application_definition_id, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._delete_by_id_initial( application_definition_id=application_definition_id, custom_headers=custom_headers, raw=True, ...
Deletes the managed application definition.
def delete_by_id( self, application_definition_id, custom_headers=None, raw=False, polling=True, **operation_config): """Deletes the managed application definition. :param application_definition_id: The fully qualified ID of the managed application definition, including the managed...
azure-mgmt-resource/azure/mgmt/resource/managedapplications/operations/application_definitions_operations.py
Azure/azure-sdk-for-python
ApplicationDefinitionsOperations.create_or_update_by_id
def create_or_update_by_id( self, application_definition_id, parameters, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._create_or_update_by_id_initial( application_definition_id=application_definition_id, parameters=parameters, ...
Creates a new managed application definition.
def create_or_update_by_id( self, application_definition_id, parameters, custom_headers=None, raw=False, polling=True, **operation_config): """Creates a new managed application definition. :param application_definition_id: The fully qualified ID of the managed application definitio...
azure-mgmt-resource/azure/mgmt/resource/managedapplications/operations/application_definitions_operations.py
Azure/azure-sdk-for-python
_HTTPClient.get_uri
def get_uri(self, request):
    ''' Return the target uri for the request.'''
    # A per-request protocol override wins over the client default.
    scheme = (request.protocol_override or self.protocol).lower()
    if scheme == 'http':
        port = HTTP_PORT
    else:
        port = HTTPS_PORT
    return '{0}://{1}:{2}{3}'.format(scheme, request.host, port, request.path)
Return the target uri for the request.
def get_uri(self, request): ''' Return the target uri for the request.''' protocol = request.protocol_override \ if request.protocol_override else self.protocol protocol = protocol.lower() port = HTTP_PORT if protocol == 'http' else HTTPS_PORT return protocol + '://' ...
azure-servicebus/azure/servicebus/control_client/_http/httpclient.py
Azure/azure-sdk-for-python
_HTTPClient.get_connection
def get_connection(self, request): protocol = request.protocol_override \ if request.protocol_override else self.protocol protocol = protocol.lower() target_host = request.host connection = _RequestsConnection( target_host, protocol, self.request_session...
Create connection for the request.
def get_connection(self, request): ''' Create connection for the request. ''' protocol = request.protocol_override \ if request.protocol_override else self.protocol protocol = protocol.lower() target_host = request.host # target_port = HTTP_PORT if protocol == 'http' ...
azure-servicebus/azure/servicebus/control_client/_http/httpclient.py
Azure/azure-sdk-for-python
_HTTPClient.perform_request
def perform_request(self, request): connection = self.get_connection(request) try: connection.putrequest(request.method, request.path) self.send_request_headers(connection, request.headers) self.send_request_body(connection, request.body) if DEBUG_REQUES...
Sends request to cloud service server and return the response.
def perform_request(self, request): ''' Sends request to cloud service server and return the response. ''' connection = self.get_connection(request) try: connection.putrequest(request.method, request.path) self.send_request_headers(connection, request.headers) ...
azure-servicebus/azure/servicebus/control_client/_http/httpclient.py
Azure/azure-sdk-for-python
ClustersOperations.execute_script_actions
def execute_script_actions( self, resource_group_name, cluster_name, persist_on_success, script_actions=None, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._execute_script_actions_initial( resource_group_name=resource_group_name, cluster...
Executes script actions on the specified HDInsight cluster.
def execute_script_actions( self, resource_group_name, cluster_name, persist_on_success, script_actions=None, custom_headers=None, raw=False, polling=True, **operation_config): """Executes script actions on the specified HDInsight cluster. :param resource_group_name: The name of the resourc...
azure-mgmt-hdinsight/azure/mgmt/hdinsight/operations/clusters_operations.py
Azure/azure-sdk-for-python
FrontDoorManagementClient.check_front_door_name_availability
def check_front_door_name_availability( self, name, type, custom_headers=None, raw=False, **operation_config): check_front_door_name_availability_input = models.CheckNameAvailabilityInput(name=name, type=type) api_version = "2018-08-01" url = self.check_front_door_name_ava...
Check the availability of a Front Door resource name.
def check_front_door_name_availability( self, name, type, custom_headers=None, raw=False, **operation_config): """Check the availability of a Front Door resource name. :param name: The resource name to validate. :type name: str :param type: The type of the resource whose nam...
azure-mgmt-frontdoor/azure/mgmt/frontdoor/front_door_management_client.py
Azure/azure-sdk-for-python
VaultsOperations.purge_deleted
def purge_deleted( self, vault_name, location, custom_headers=None, raw=False, polling=True, **operation_config): raw_result = self._purge_deleted_initial( vault_name=vault_name, location=location, custom_headers=custom_headers, raw=True, *...
Permanently deletes the specified vault. aka Purges the deleted Azure key vault.
def purge_deleted( self, vault_name, location, custom_headers=None, raw=False, polling=True, **operation_config): """Permanently deletes the specified vault. aka Purges the deleted Azure key vault. :param vault_name: The name of the soft-deleted vault. :type vault_name: str ...
azure-mgmt-keyvault/azure/mgmt/keyvault/v2016_10_01/operations/vaults_operations.py
Azure/azure-sdk-for-python
HttpChallenge._validate_request_uri
def _validate_request_uri(self, uri): if not uri: raise ValueError('request_uri cannot be empty') uri = parse.urlparse(uri) if not uri.netloc: raise ValueError('request_uri must be an absolute URI') if uri.scheme.lower() not in ['http', 'https']: rai...
Extracts the host authority from the given URI.
def _validate_request_uri(self, uri): """ Extracts the host authority from the given URI. """ if not uri: raise ValueError('request_uri cannot be empty') uri = parse.urlparse(uri) if not uri.netloc: raise ValueError('request_uri must be an absolute URI') ...
azure-keyvault/azure/keyvault/http_challenge.py
Azure/azure-sdk-for-python
get_cli_profile
def get_cli_profile(): try: from azure.cli.core._profile import Profile from azure.cli.core._session import ACCOUNT from azure.cli.core._environment import get_config_dir except ImportError: raise ImportError("You need to install 'azure-cli-core' to load CLI credentials") a...
Return a CLI profile class.
def get_cli_profile(): """Return a CLI profile class. .. versionadded:: 1.1.6 :return: A CLI Profile :rtype: azure.cli.core._profile.Profile :raises: ImportError if azure-cli-core package is not available """ try: from azure.cli.core._profile import Profile from azure.cli....
azure-common/azure/common/credentials.py
Azure/azure-sdk-for-python
get_azure_cli_credentials
def get_azure_cli_credentials(resource=None, with_tenant=False):
    """Return Credentials and default SubscriptionID of current loaded
    profile of the CLI ("az login" credentials).

    :param resource: The alternative resource for credentials if not ARM (GraphRBac, etc.)
    :param with_tenant: If True, return a three-tuple that also includes the tenant ID.
    :return: (credentials, subscription_id) or (credentials, subscription_id, tenant_id)
    """
    profile = get_cli_profile()
    credentials, subscription_id, tenant_id = profile.get_login_credentials(resource=resource)
    if not with_tenant:
        return credentials, subscription_id
    return credentials, subscription_id, tenant_id
Return Credentials and default SubscriptionID of current loaded profile of the CLI. Credentials will be the "az login"
def get_azure_cli_credentials(resource=None, with_tenant=False): """Return Credentials and default SubscriptionID of current loaded profile of the CLI. Credentials will be the "az login" command: https://docs.microsoft.com/cli/azure/authenticate-azure-cli Default subscription ID is either the only one...
azure-common/azure/common/credentials.py
Azure/azure-sdk-for-python
PredictionOperations.resolve
def resolve( self, app_id, query, timezone_offset=None, verbose=None, staging=None, spell_check=None, bing_spell_check_subscription_key=None, log=None, custom_headers=None, raw=False, **operation_config): url = self.resolve.metadata['url'] path_format_arguments = { 'Endp...
Gets predictions for a given utterance, in the form of intents and entities. The current maximum query size is 500 characters.
def resolve( self, app_id, query, timezone_offset=None, verbose=None, staging=None, spell_check=None, bing_spell_check_subscription_key=None, log=None, custom_headers=None, raw=False, **operation_config): """Gets predictions for a given utterance, in the form of intents and entities. The cur...
azure-cognitiveservices-language-luis/azure/cognitiveservices/language/luis/runtime/operations/prediction_operations.py
Azure/azure-sdk-for-python
MixedRealityClient.check_name_availability_local
def check_name_availability_local( self, location, name, type, custom_headers=None, raw=False, **operation_config): check_name_availability = models.CheckNameAvailabilityRequest(name=name, type=type) url = self.check_name_availability_local.metadata['url'] path_format_argum...
Check Name Availability for global uniqueness.
def check_name_availability_local( self, location, name, type, custom_headers=None, raw=False, **operation_config): """Check Name Availability for global uniqueness. :param location: The location in which uniqueness will be verified. :type location: str :param name: Resource...
azure-mgmt-mixedreality/azure/mgmt/mixedreality/mixed_reality_client.py
Azure/azure-sdk-for-python
_WinHttpRequest.open
def open(self, method, url):
    '''
    Opens the request.

    method: the request VERB 'GET', 'POST', etc.
    url: the url to connect
    '''
    # WinHttp expects a VARIANT bool for the async flag; False = synchronous.
    async_flag = VARIANT.create_bool_false()
    _WinHttpRequest._Open(self, BSTR(method), BSTR(url), async_flag)
Opens the request.
def open(self, method, url): ''' Opens the request. method: the request VERB 'GET', 'POST', etc. url: the url to connect ''' flag = VARIANT.create_bool_false() _method = BSTR(method) _url = BSTR(url) _WinHttpRequest._Open(s...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.set_timeout
def set_timeout(self, timeout_in_seconds):
    ''' Sets up the timeout for the request. '''
    # WinHttp takes milliseconds; apply the same value to resolve,
    # connect, send and receive timeouts.
    ms = int(timeout_in_seconds * 1000)
    _WinHttpRequest._SetTimeouts(self, 0, ms, ms, ms)
Sets up the timeout for the request.
def set_timeout(self, timeout_in_seconds): ''' Sets up the timeout for the request. ''' timeout_in_ms = int(timeout_in_seconds * 1000) _WinHttpRequest._SetTimeouts( self, 0, timeout_in_ms, timeout_in_ms, timeout_in_ms)
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.set_request_header
def set_request_header(self, name, value):
    ''' Sets the request header. '''
    _WinHttpRequest._SetRequestHeader(self, BSTR(name), BSTR(value))
Sets the request header.
def set_request_header(self, name, value): ''' Sets the request header. ''' _name = BSTR(name) _value = BSTR(value) _WinHttpRequest._SetRequestHeader(self, _name, _value)
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.get_all_response_headers
def get_all_response_headers(self):
    ''' Gets back all response headers. '''
    bstr_headers = c_void_p()
    _WinHttpRequest._GetAllResponseHeaders(self, byref(bstr_headers))
    # Reinterpret the raw BSTR pointer as a wide-character string so the
    # header text can be read from Python.
    bstr_headers = ctypes.cast(bstr_headers, c_wchar_p)
    headers = bstr_headers.value
    # The BSTR was allocated by WinHttp; free it explicitly to avoid a leak.
    _SysFreeString(bstr_headers)
    return headers
Gets back all response headers.
def get_all_response_headers(self): ''' Gets back all response headers. ''' bstr_headers = c_void_p() _WinHttpRequest._GetAllResponseHeaders(self, byref(bstr_headers)) bstr_headers = ctypes.cast(bstr_headers, c_wchar_p) headers = bstr_headers.value _SysFreeString(bstr_he...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.send
def send(self, request=None):
    ''' Sends the request body. '''
    if request is not None:
        # Sends request body as SAFEArray.
        _WinHttpRequest._Send(self, VARIANT.create_safearray_from_str(request))
    else:
        # Sends VT_EMPTY if it is GET, HEAD request.
        _WinHttpRequest._Send(self, VARIANT.create_empty())
Sends the request body.
def send(self, request=None): ''' Sends the request body. ''' # Sends VT_EMPTY if it is GET, HEAD request. if request is None: var_empty = VARIANT.create_empty() _WinHttpRequest._Send(self, var_empty) else: # Sends request body as SAFEArray. _request...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.status
def status(self):
    ''' Gets status of response. '''
    status_code = c_long()
    _WinHttpRequest._Status(self, byref(status_code))
    return int(status_code.value)
Gets status of response.
def status(self): ''' Gets status of response. ''' status = c_long() _WinHttpRequest._Status(self, byref(status)) return int(status.value)
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.status_text
def status_text(self):
    ''' Gets status text of response. '''
    bstr_status_text = c_void_p()
    _WinHttpRequest._StatusText(self, byref(bstr_status_text))
    # View the returned BSTR pointer as a wide-character string.
    bstr_status_text = ctypes.cast(bstr_status_text, c_wchar_p)
    status_text = bstr_status_text.value
    # Release the WinHttp-allocated BSTR to avoid a leak.
    _SysFreeString(bstr_status_text)
    return status_text
Gets status text of response.
def status_text(self): ''' Gets status text of response. ''' bstr_status_text = c_void_p() _WinHttpRequest._StatusText(self, byref(bstr_status_text)) bstr_status_text = ctypes.cast(bstr_status_text, c_wchar_p) status_text = bstr_status_text.value _SysFreeString(bstr_stat...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.response_body
def response_body(self):
    '''
    Gets response body as a SAFEARRAY and converts the SAFEARRAY to str.
    '''
    var_respbody = VARIANT()
    _WinHttpRequest._ResponseBody(self, byref(var_respbody))
    if var_respbody.is_safearray_of_bytes():
        respbody = var_respbody.str_from_safearray()
        return respbody
    else:
        # Non-byte payloads (e.g. empty responses) are reported as ''.
        return ''
Gets response body as a SAFEARRAY and converts the SAFEARRAY to str.
def response_body(self): ''' Gets response body as a SAFEARRAY and converts the SAFEARRAY to str. ''' var_respbody = VARIANT() _WinHttpRequest._ResponseBody(self, byref(var_respbody)) if var_respbody.is_safearray_of_bytes(): respbody = var_respbody.str_from_sa...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_WinHttpRequest.set_client_certificate
def set_client_certificate(self, certificate):
    ''' Sets client certificate for the request. '''
    _WinHttpRequest._SetClientCertificate(self, BSTR(certificate))
Sets client certificate for the request.
def set_client_certificate(self, certificate): '''Sets client certificate for the request. ''' _certificate = BSTR(certificate) _WinHttpRequest._SetClientCertificate(self, _certificate)
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_HTTPConnection.putrequest
def putrequest(self, method, uri): protocol = unicode(self.protocol + '://') url = protocol + self.host + unicode(uri) self._httprequest.set_timeout(self.timeout) self._httprequest.open(unicode(method), url) if self.cert_file is not None: self._httprequest.s...
Connects to host and sends the request.
def putrequest(self, method, uri): ''' Connects to host and sends the request. ''' protocol = unicode(self.protocol + '://') url = protocol + self.host + unicode(uri) self._httprequest.set_timeout(self.timeout) self._httprequest.open(unicode(method), url) # sets certifi...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_HTTPConnection.putheader
def putheader(self, name, value):
    ''' Sends the headers of request. '''
    header_name, header_value = name, value
    if sys.version_info < (3,):
        # Python 2: coerce to unicode before handing off to WinHttp.
        header_name = str(header_name).decode('utf-8')
        header_value = str(header_value).decode('utf-8')
    self._httprequest.set_request_header(header_name, header_value)
Sends the headers of request.
def putheader(self, name, value): ''' Sends the headers of request. ''' if sys.version_info < (3,): name = str(name).decode('utf-8') value = str(value).decode('utf-8') self._httprequest.set_request_header(name, value)
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_HTTPConnection.send
def send(self, request_body):
    ''' Sends request body. '''
    if request_body:
        self._httprequest.send(request_body)
    else:
        # Empty/None body: WinHttp wrapper sends VT_EMPTY.
        self._httprequest.send()
Sends request body.
def send(self, request_body): ''' Sends request body. ''' if not request_body: self._httprequest.send() else: self._httprequest.send(request_body)
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_HTTPConnection.getresponse
def getresponse(self): status = self._httprequest.status() status_text = self._httprequest.status_text() resp_headers = self._httprequest.get_all_response_headers() fixed_headers = [] for resp_header in resp_headers.split('\n'): if (resp_header.startswith('\t') or\ ...
Gets the response and generates the _Response object
def getresponse(self): ''' Gets the response and generates the _Response object''' status = self._httprequest.status() status_text = self._httprequest.status_text() resp_headers = self._httprequest.get_all_response_headers() fixed_headers = [] for resp_header in resp_hea...
azure-servicemanagement-legacy/azure/servicemanagement/_http/winhttp.py
Azure/azure-sdk-for-python
_get_readable_id
def _get_readable_id(id_name, id_prefix_to_skip): pos = id_name.find('//') if pos != -1: pos += 2 if id_prefix_to_skip: pos = id_name.find(id_prefix_to_skip, pos) if pos != -1: pos += len(id_prefix_to_skip) pos = id_name.find('/', pos) ...
simplified an id to be more friendly for us people
def _get_readable_id(id_name, id_prefix_to_skip): """simplified an id to be more friendly for us people""" # id_name is in the form 'https://namespace.host.suffix/name' # where name may contain a forward slash! pos = id_name.find('//') if pos != -1: pos += 2 if id_prefix_to_skip: ...
azure-servicemanagement-legacy/azure/servicemanagement/_common_serialization.py
Azure/azure-sdk-for-python
_get_serialization_name
def _get_serialization_name(element_name): known = _KNOWN_SERIALIZATION_XFORMS.get(element_name) if known is not None: return known if element_name.startswith('x_ms_'): return element_name.replace('_', '-') if element_name.endswith('_id'): element_name = element_name.replace('_i...
converts a Python name into a serializable name
def _get_serialization_name(element_name): """converts a Python name into a serializable name""" known = _KNOWN_SERIALIZATION_XFORMS.get(element_name) if known is not None: return known if element_name.startswith('x_ms_'): return element_name.replace('_', '-') if element_name.endswi...
azure-servicemanagement-legacy/azure/servicemanagement/_common_serialization.py
Azure/azure-sdk-for-python
FaceOperations.verify_face_to_person
def verify_face_to_person( self, face_id, person_id, person_group_id=None, large_person_group_id=None, custom_headers=None, raw=False, **operation_config): body = models.VerifyFaceToPersonRequest(face_id=face_id, person_group_id=person_group_id, large_person_group_id=large_person_group_id, person_id...
Verify whether two faces belong to a same person. Compares a face Id with a Person Id.
def verify_face_to_person( self, face_id, person_id, person_group_id=None, large_person_group_id=None, custom_headers=None, raw=False, **operation_config): """Verify whether two faces belong to a same person. Compares a face Id with a Person Id. :param face_id: FaceId of the face, c...
azure-cognitiveservices-vision-face/azure/cognitiveservices/vision/face/operations/face_operations.py
Azure/azure-sdk-for-python
_MinidomXmlToObject.get_entry_properties_from_node
def get_entry_properties_from_node(entry, include_id, id_prefix_to_skip=None, use_title_as_id=False): properties = {} etag = entry.getAttributeNS(METADATA_NS, 'etag') if etag: properties['etag'] = etag for updated in _MinidomXmlToObject.get_child_nodes(entry, 'updated'): ...
get properties from entry xml
def get_entry_properties_from_node(entry, include_id, id_prefix_to_skip=None, use_title_as_id=False): ''' get properties from entry xml ''' properties = {} etag = entry.getAttributeNS(METADATA_NS, 'etag') if etag: properties['etag'] = etag for updated in _MinidomXmlT...
azure-servicemanagement-legacy/azure/servicemanagement/_serialization.py
Azure/azure-sdk-for-python
_MinidomXmlToObject.get_children_from_path
def get_children_from_path(node, *path): cur = node for index, child in enumerate(path): if isinstance(child, _strtype): next = _MinidomXmlToObject.get_child_nodes(cur, child) else: next = _MinidomXmlToObject._get_child_nodesNS(cur, *child) ...
descends through a hierarchy of nodes returning the list of children at the inner most level. Only returns children who share a common parent, not cousins.
def get_children_from_path(node, *path): '''descends through a hierarchy of nodes returning the list of children at the inner most level. Only returns children who share a common parent, not cousins.''' cur = node for index, child in enumerate(path): if isinstance(ch...
azure-servicemanagement-legacy/azure/servicemanagement/_serialization.py