Columns: INSTRUCTION (string, lengths 1 to 8.43k); RESPONSE (string, lengths 75 to 104k).
Set the access policy on the given DAG's ViewModel.
def _sync_dag_view_permissions(self, dag_id, access_control): """Set the access policy on the given DAG's ViewModel. :param dag_id: the ID of the DAG whose permissions should be updated :type dag_id: string :param access_control: a dict where each key is a rolename and each ...
Create perm-vm if it does not exist and insert it into the FAB security model for all-dags.
def create_perm_vm_for_all_dag(self): """ Create perm-vm if not exist and insert into FAB security model for all-dags. """ # create perm for global logical dag for dag_vm in self.DAG_VMS: for perm in self.DAG_PERMS: self._merge_perm(permission_name=per...
Deferred load of Fernet key.
def get_fernet(): """ Deferred load of Fernet key. This function could fail either because Cryptography is not installed or because the Fernet key is invalid. :return: Fernet object :raises: airflow.exceptions.AirflowException if there's a problem trying to load Fernet """ global _fern...
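A minimal sketch of a Fernet round trip with the cryptography package, independent of how Airflow loads the key; the key and payload below are generated on the spot rather than read from configuration.

from cryptography.fernet import Fernet

key = Fernet.generate_key()          # Airflow would read this from its config instead
fernet = Fernet(key)

token = fernet.encrypt(b"my-connection-password")    # illustrative payload
assert fernet.decrypt(token) == b"my-connection-password"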
Checks for existence of the partition in the AWS Glue Catalog table
def poke(self, context): """ Checks for existence of the partition in the AWS Glue Catalog table """ if '.' in self.table_name: self.database_name, self.table_name = self.table_name.split('.') self.log.info( 'Poking for table %s. %s, expression %s', self.d...
Gets the AwsGlueCatalogHook
def get_hook(self): """ Gets the AwsGlueCatalogHook """ if not hasattr(self, 'hook'): from airflow.contrib.hooks.aws_glue_catalog_hook import AwsGlueCatalogHook self.hook = AwsGlueCatalogHook( aws_conn_id=self.aws_conn_id, region_na...
Check for a message on the subscribed queue and write the message to XCom with the key ``messages``
def poke(self, context): """ Check for message on subscribed queue and write to xcom the message with key ``messages`` :param context: the context object :type context: dict :return: ``True`` if message is available or ``False`` """ sqs_hook = SQSHook(aws_conn_i...
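A hedged sketch of the underlying boto3 call such a sensor relies on; the region and queue URL are placeholders, not values from the source.

import boto3

sqs = boto3.client("sqs", region_name="us-east-1")   # placeholder region
response = sqs.receive_message(
    QueueUrl="https://sqs.us-east-1.amazonaws.com/123456789012/my-queue",  # placeholder
    MaxNumberOfMessages=1,
    WaitTimeSeconds=1,
)
messages = response.get("Messages", [])
if messages:
    # the sensor would push this payload to XCom under the ``messages`` key
    # and then delete the message from the queue
    print(messages[0]["Body"])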
Returns a path for a temporary file including a full copy of the configuration settings.
def tmp_configuration_copy(chmod=0o600): """ Returns a path for a temporary file including a full copy of the configuration settings. :return: a path to a temporary file """ cfg_dict = conf.as_dict(display_sensitive=True, raw=True) temp_fd, cfg_path = mkstemp() with os.fdopen(temp_fd, '...
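A minimal sketch of the mkstemp/fdopen pattern the helper builds on; the configuration dict and the JSON serialization are stand-ins for the real conf.as_dict output.

import json
import os
from tempfile import mkstemp

cfg_dict = {"core": {"parallelism": "32"}}   # stand-in for conf.as_dict(...)

temp_fd, cfg_path = mkstemp()
with os.fdopen(temp_fd, "w") as temp_file:
    json.dump(cfg_dict, temp_file)
os.chmod(cfg_path, 0o600)                    # mirrors the chmod argument
print(cfg_path)                              # the path returned to the caller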
Returns a snakebite HDFSClient object.
def get_conn(self): """ Returns a snakebite HDFSClient object. """ # When using HAClient, proxy_user must be the same, so is ok to always # take the first. effective_user = self.proxy_user autoconfig = self.autoconfig use_sasl = configuration.conf.get('cor...
Establishes a connection depending on the security mode set via config or environment variable.
def get_conn(self): """ Establishes a connection depending on the security mode set via config or environment variable. :return: a hdfscli InsecureClient or KerberosClient object. :rtype: hdfs.InsecureClient or hdfs.ext.kerberos.KerberosClient """ connections = self.get_...
Check for the existence of a path in HDFS by querying FileStatus.
def check_for_path(self, hdfs_path): """ Check for the existence of a path in HDFS by querying FileStatus. :param hdfs_path: The path to check. :type hdfs_path: str :return: True if the path exists and False if not. :rtype: bool """ conn = self.get_conn()...
Uploads a file to HDFS.
def load_file(self, source, destination, overwrite=True, parallelism=1, **kwargs): r""" Uploads a file to HDFS. :param source: Local path to file or folder. If it's a folder, all the files inside of it will be uploaded. .. note:: This implies that folders empty of files ...
Establish a connection to the Pinot broker through the Pinot dbapi.
def get_conn(self): """ Establish a connection to pinot broker through pinot dbqpi. """ conn = self.get_connection(self.pinot_broker_conn_id) pinot_broker_conn = connect( host=conn.host, port=conn.port, path=conn.extra_dejson.get('endpoint', '/...
Get the connection uri for pinot broker.
def get_uri(self): """ Get the connection uri for pinot broker. e.g: http://localhost:9000/pql """ conn = self.get_connection(getattr(self, self.conn_name_attr)) host = conn.host if conn.port is not None: host += ':{port}'.format(port=conn.port) ...
Executes the sql and returns a set of records.
def get_records(self, sql): """ Executes the sql and returns a set of records. :param sql: the sql statement to be executed (str) or a list of sql statements to execute :type sql: str """ with self.get_conn() as cur: cur.execute(sql) r...
Executes the sql and returns the first resulting row.
def get_first(self, sql): """ Executes the sql and returns the first resulting row. :param sql: the sql statement to be executed (str) or a list of sql statements to execute :type sql: str or list """ with self.get_conn() as cur: cur.execute(sql) ...
Truncate a string. Parameters: string (str): string for modification; max_length (int): output string length; word_boundary (bool); save_order (bool): if True, the word order of the output string matches the input string; separator (str): separator between words.
def smart_truncate(string, max_length=0, word_boundary=False, separator=' ', save_order=False): """ Truncate a string. :param string (str): string for modification :param max_length (int): output string length :param word_boundary (bool): :param save_order (bool): if True then word order of outp...
Make a slug from the given text. Parameters: text (str): initial text; entities (bool); decimal (bool); hexadecimal (bool); max_length (int): output string length; word_boundary (bool); save_order (bool): if True and max_length > 0, return whole words in the ...
def slugify(text, entities=True, decimal=True, hexadecimal=True, max_length=0, word_boundary=False, separator=DEFAULT_SEPARATOR, save_order=False, stopwords=(), regex_pattern=None, lowercase=True, replacements=()): """ Make a slug from the given text. :param text (str): initial text ...
Store an XCom value. TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be removed in Airflow 2.0.
def set( cls, key, value, execution_date, task_id, dag_id, session=None): """ Store an XCom value. TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be removed in Airflow 2.0. ...
Retrieve an XCom value, optionally meeting certain criteria. TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be removed in Airflow 2.0.
def get_one(cls, execution_date, key=None, task_id=None, dag_id=None, include_prior_dates=False, session=None): """ Retrieve an XCom value, optionally meeting certain criteria. TODO: "pickling" has be...
Retrieve an XCom value, optionally meeting certain criteria. TODO: "pickling" has been deprecated and JSON is preferred. "pickling" will be removed in Airflow 2.0.
def get_many(cls, execution_date, key=None, task_ids=None, dag_ids=None, include_prior_dates=False, limit=100, session=None): """ Retrieve an XCom value, optionally meeting certain crit...
Convert a native Python datetime.date object to a format supported by the API
def _convert_date_to_dict(field_date): """ Convert native python ``datetime.date`` object to a format supported by the API """ return {DAY: field_date.day, MONTH: field_date.month, YEAR: field_date.year}
Convert a native Python datetime.time object to a format supported by the API
def _convert_time_to_dict(time): """ Convert native python ``datetime.time`` object to a format supported by the API """ return {HOURS: time.hour, MINUTES: time.minute, SECONDS: time.second}
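A hedged sketch of both converters together, assuming the module-level constants are the plain field names the API expects (the exact key strings are an assumption, not taken from the source).

import datetime

DAY, MONTH, YEAR = "day", "month", "year"                # assumed key names
HOURS, MINUTES, SECONDS = "hours", "minutes", "seconds"  # assumed key names

field_date = datetime.date(2019, 7, 4)
time_of_day = datetime.time(13, 45, 30)

date_dict = {DAY: field_date.day, MONTH: field_date.month, YEAR: field_date.year}
time_dict = {HOURS: time_of_day.hour, MINUTES: time_of_day.minute, SECONDS: time_of_day.second}
print(date_dict)   # {'day': 4, 'month': 7, 'year': 2019}
print(time_dict)   # {'hours': 13, 'minutes': 45, 'seconds': 30}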
Returns a Redis connection.
def get_conn(self): """ Returns a Redis connection. """ conn = self.get_connection(self.redis_conn_id) self.host = conn.host self.port = conn.port self.password = None if str(conn.password).lower() in ['none', 'false', ''] else conn.password self.db = conn...
Returns an Oracle connection object. Optional parameters allow using a custom DSN connection (instead of a server alias from tnsnames.ora). The dsn (data source name) is the TNS entry (from the Oracle names server or the tnsnames.ora file) or a string like the one returned by makedsn().
def get_conn(self): """ Returns a oracle connection object Optional parameters for using a custom DSN connection (instead of using a server alias from tnsnames.ora) The dsn (data source name) is the TNS entry (from the Oracle names server or tnsnames.ora file) or ...
A generic way to insert a set of tuples into a table; the whole set of inserts is treated as one transaction. Changes from the standard DbApiHook implementation:
def insert_rows(self, table, rows, target_fields=None, commit_every=1000): """ A generic way to insert a set of tuples into a table, the whole set of inserts is treated as one transaction Changes from standard DbApiHook implementation: - Oracle SQL queries in cx_Oracle can not b...
A performant bulk insert for cx_Oracle that uses prepared statements via executemany(). For best performance, pass in rows as an iterator.
def bulk_insert_rows(self, table, rows, target_fields=None, commit_every=5000): """ A performant bulk insert for cx_Oracle that uses prepared statements via `executemany()`. For best performance, pass in `rows` as an iterator. :param table: target Oracle table, use dot notation ...
Returns a connection object
def get_conn(self): """Returns a connection object """ db = self.get_connection(getattr(self, self.conn_name_attr)) return self.connector.connect( host=db.host, port=db.port, username=db.login, schema=db.schema)
Executes the sql and returns a pandas dataframe
def get_pandas_df(self, sql, parameters=None): """ Executes the sql and returns a pandas dataframe :param sql: the sql statement to be executed (str) or a list of sql statements to execute :type sql: str or list :param parameters: The parameters to render the SQL que...
Executes the sql and returns a set of records.
def get_records(self, sql, parameters=None): """ Executes the sql and returns a set of records. :param sql: the sql statement to be executed (str) or a list of sql statements to execute :type sql: str or list :param parameters: The parameters to render the SQL query ...
Executes the sql and returns the first resulting row.
def get_first(self, sql, parameters=None): """ Executes the sql and returns the first resulting row. :param sql: the sql statement to be executed (str) or a list of sql statements to execute :type sql: str or list :param parameters: The parameters to render the SQL q...
Runs a command or a list of commands. Pass a list of sql statements to the sql parameter to get them to execute sequentially
def run(self, sql, autocommit=False, parameters=None): """ Runs a command or a list of commands. Pass a list of sql statements to the sql parameter to get them to execute sequentially :param sql: the sql statement to be executed (str) or a list of sql statements to e...
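A minimal sketch of the "string or list of statements" convention over the standard DB-API, using sqlite3 as a stand-in for the hook's real connection.

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()

sql = [
    "CREATE TABLE t (id INTEGER, name TEXT)",
    "INSERT INTO t VALUES (1, 'a')",
]
for statement in (sql if isinstance(sql, list) else [sql]):
    cur.execute(statement)        # executed sequentially, as in run()
conn.commit()

cur.execute("SELECT * FROM t")
records = cur.fetchall()          # analogous to get_records
cur.execute("SELECT * FROM t")
first = cur.fetchone()            # analogous to get_first
print(records, first)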
Sets the autocommit flag on the connection
def set_autocommit(self, conn, autocommit): """ Sets the autocommit flag on the connection """ if not self.supports_autocommit and autocommit: self.log.warn( ("%s connection doesn't support " "autocommit but autocommit activated."), ...
A generic way to insert a set of tuples into a table; a new transaction is created every commit_every rows.
def insert_rows(self, table, rows, target_fields=None, commit_every=1000, replace=False): """ A generic way to insert a set of tuples into a table, a new transaction is created every commit_every rows :param table: Name of the target table :type table: str ...
Returns the SQL literal of the cell as a string.
def _serialize_cell(cell, conn=None): """ Returns the SQL literal of the cell as a string. :param cell: The cell to insert into the table :type cell: object :param conn: The database connection :type conn: connection object :return: The serialized cell :r...
An endpoint helping check the health status of the Airflow instance, including metadatabase and scheduler.
def health(self, session=None): """ An endpoint helping check the health status of the Airflow instance, including metadatabase and scheduler. """ BJ = jobs.BaseJob payload = {} scheduler_health_check_threshold = timedelta(seconds=conf.getint('scheduler', ...
A restful endpoint that returns external links for a given Operator
def extra_links(self): """ A restful endpoint that returns external links for a given Operator It queries the operator that sent the request for the links it wishes to provide for a given external link name. API: GET Args: dag_id: The id of the dag containing the task i...
Default filters for model
def get_query(self): """ Default filters for model """ return ( super().get_query() .filter(or_(models.DagModel.is_active, models.DagModel.is_paused)) .filter(~models.DagModel.is_subdag) )
Default filters for model
def get_count_query(self): """ Default filters for model """ return ( super().get_count_query() .filter(models.DagModel.is_active) .filter(~models.DagModel.is_subdag) )
Opens a connection to the cloudant service and closes it automatically if used as context manager.
def get_conn(self): """ Opens a connection to the cloudant service and closes it automatically if used as context manager. .. note:: In the connection form: - 'host' equals the 'Account' (optional) - 'login' equals the 'Username (or API Key)' (required) ...
Call the SlackWebhookHook to post the provided Slack message
def execute(self, context): """ Call the SlackWebhookHook to post the provided Slack message """ self.hook = SlackWebhookHook( self.http_conn_id, self.webhook_token, self.message, self.attachments, self.channel, self...
Returns the Credentials object for Google API
def _get_credentials(self): """ Returns the Credentials object for Google API """ key_path = self._get_field('key_path', False) keyfile_dict = self._get_field('keyfile_dict', False) scope = self._get_field('scope', None) if scope: scopes = [s.strip() f...
Returns an authorized HTTP object to be used to build a Google cloud service hook connection.
def _authorize(self): """ Returns an authorized HTTP object to be used to build a Google cloud service hook connection. """ credentials = self._get_credentials() http = httplib2.Http() authed_http = google_auth_httplib2.AuthorizedHttp( credentials, htt...
Fetches a field from extras and returns it. This is some Airflow magic. The google_cloud_platform hook type adds custom UI elements to the hook page, which allow admins to specify service_account, key_path, etc. They get formatted as shown below.
def _get_field(self, f, default=None): """ Fetches a field from extras, and returns it. This is some Airflow magic. The google_cloud_platform hook type adds custom UI elements to the hook page, which allow admins to specify service_account, key_path, etc. They get formatted as sh...
Function decorator that intercepts HTTP Errors and raises AirflowException with more informative message.
def catch_http_exception(func): """ Function decorator that intercepts HTTP Errors and raises AirflowException with more informative message. """ @functools.wraps(func) def wrapper_decorator(self, *args, **kwargs): try: return func(self, *args...
Decorator that provides fallback for Google Cloud Platform project id. If the project is None it will be replaced with the project_id from the service account the Hook is authenticated with. Project id can be specified either via project_id kwarg or via first parameter in positional args.
def fallback_to_default_project_id(func): """ Decorator that provides fallback for Google Cloud Platform project id. If the project is None it will be replaced with the project_id from the service account the Hook is authenticated with. Project id can be specified either via proj...
A list of states indicating that a task either has not completed a run or has not even started.
def unfinished(cls): """ A list of states indicating that a task either has not completed a run or has not even started. """ return [ cls.NONE, cls.SCHEDULED, cls.QUEUED, cls.RUNNING, cls.SHUTDOWN, cls.UP_FOR...
Delete a DAG by dag_id. Parameters: dag_id (str): the dag_id of the DAG to delete; keep_records_in_log (bool): whether to keep records of the given dag_id in the Log table in the backend database (for reasons like auditing); the default value is True.
def delete_dag(dag_id, keep_records_in_log=True, session=None): """ :param dag_id: the dag_id of the DAG to delete :type dag_id: str :param keep_records_in_log: whether keep records of the given dag_id in the Log table in the backend database (for reasons like auditing). The default valu...
Construct the spark-sql command to execute. Verbose output is enabled by default.
def _prepare_command(self, cmd): """ Construct the spark-sql command to execute. Verbose output is enabled as default. :param cmd: command to append to the spark-sql command :type cmd: str :return: full command to be executed """ connection_cmd = ["spark-...
Remote Popen (actually execute the Spark-sql query)
def run_query(self, cmd="", **kwargs): """ Remote Popen (actually execute the Spark-sql query) :param cmd: command to remotely execute :param kwargs: extra arguments to Popen (see subprocess.Popen) """ spark_sql_cmd = self._prepare_command(cmd) self._sp = subproc...
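A hedged sketch of what the Popen launch might look like; the command and flags are illustrative, not the hook's exact arguments.

import subprocess

connection_cmd = ["spark-sql", "-e", "SELECT 1", "--verbose"]   # illustrative

sp = subprocess.Popen(connection_cmd,
                      stdout=subprocess.PIPE,
                      stderr=subprocess.STDOUT)
for line in iter(sp.stdout.readline, b""):
    print(line.decode().rstrip())       # stream the query output and logs
sp.wait()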
VGG 11-layer model (configuration "A") with batch normalization
def vgg11_bn(pretrained=False, **kwargs): """VGG 11-layer model (configuration "A") with batch normalization Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ if pretrained: kwargs['init_weights'] = False model = VGG(make_layers(cfg['A'], batch_norm=True)...
VGG 13-layer model (configuration "B")
def vgg13(pretrained=False, **kwargs): """VGG 13-layer model (configuration "B") Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ if pretrained: kwargs['init_weights'] = False model = VGG(make_layers(cfg['B']), **kwargs) if pretrained: model....
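A usage sketch for these constructors via torchvision (the pretrained=True/False API shown in the snippets); the input tensor is a random stand-in for a preprocessed ImageNet image.

import torch
from torchvision import models

model = models.vgg11_bn(pretrained=True)   # downloads ImageNet weights
model.eval()

x = torch.randn(1, 3, 224, 224)            # stand-in for a normalized image
with torch.no_grad():
    logits = model(x)
print(logits.shape)                        # torch.Size([1, 1000])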
AlexNet model architecture from the "One weird trick..." <https://arxiv.org/abs/1404.5997> paper.
def alexnet(pretrained=False, **kwargs): r"""AlexNet model architecture from the `"One weird trick..." <https://arxiv.org/abs/1404.5997>`_ paper. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = AlexNet(**kwargs) if pretrained: model.load_sta...
Densenet-121 model from "Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>
def densenet121(pretrained=False, **kwargs): r"""Densenet-121 model from `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = DenseNet(num_init_features=64, growth_rate=32, ...
Convert a PIL Image or numpy.ndarray to tensor.
def to_tensor(pic): """Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor. See ``ToTensor`` for more details. Args: pic (PIL Image or numpy.ndarray): Image to be converted to tensor. Returns: Tensor: Converted image. """ if not(_is_pil_image(pic) or _is_numpy_image(pic)): ...
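A short usage sketch for to_tensor; the image path is a placeholder.

from PIL import Image
from torchvision.transforms import functional as F

img = Image.open("example.jpg").convert("RGB")   # placeholder path
tensor = F.to_tensor(img)
print(tensor.shape)          # (C, H, W), float values scaled to [0.0, 1.0]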
Convert a tensor or an ndarray to PIL Image.
def to_pil_image(pic, mode=None): """Convert a tensor or an ndarray to PIL Image. See :class:`~torchvision.transforms.ToPILImage` for more details. Args: pic (Tensor or numpy.ndarray): Image to be converted to PIL Image. mode (`PIL.Image mode`_): color space and pixel depth of input data (...
Normalize a tensor image with mean and standard deviation.
def normalize(tensor, mean, std, inplace=False): """Normalize a tensor image with mean and standard deviation. .. note:: This transform acts out of place by default, i.e., it does not mutate the input tensor. See :class:`~torchvision.transforms.Normalize` for more details. Args: tens...
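A usage sketch for normalize with commonly used ImageNet statistics; the input tensor is a random stand-in.

import torch
from torchvision.transforms import functional as F

tensor = torch.rand(3, 224, 224)        # stand-in image tensor in [0, 1]
mean = [0.485, 0.456, 0.406]            # commonly used ImageNet statistics
std = [0.229, 0.224, 0.225]

normalized = F.normalize(tensor, mean, std)    # out of place by default
F.normalize(tensor, mean, std, inplace=True)   # mutates ``tensor`` instead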
Resize the input PIL Image to the given size.
def resize(img, size, interpolation=Image.BILINEAR): r"""Resize the input PIL Image to the given size. Args: img (PIL Image): Image to be resized. size (sequence or int): Desired output size. If size is a sequence like (h, w), the output size will be matched to this. If size is an i...
Pad the given PIL Image on all sides with specified padding mode and fill value.
def pad(img, padding, fill=0, padding_mode='constant'): r"""Pad the given PIL Image on all sides with specified padding mode and fill value. Args: img (PIL Image): Image to be padded. padding (int or tuple): Padding on each border. If a single int is provided this is used to pad all...
Crop the given PIL Image.
def crop(img, i, j, h, w): """Crop the given PIL Image. Args: img (PIL Image): Image to be cropped. i (int): i in (i,j) i.e coordinates of the upper left corner. j (int): j in (i,j) i.e coordinates of the upper left corner. h (int): Height of the cropped image. w (int): ...
Crop the given PIL Image and resize it to desired size.
def resized_crop(img, i, j, h, w, size, interpolation=Image.BILINEAR): """Crop the given PIL Image and resize it to desired size. Notably used in :class:`~torchvision.transforms.RandomResizedCrop`. Args: img (PIL Image): Image to be cropped. i (int): i in (i,j) i.e coordinates of the upper...
Horizontally flip the given PIL Image.
def hflip(img): """Horizontally flip the given PIL Image. Args: img (PIL Image): Image to be flipped. Returns: PIL Image: Horizontally flipped image. """ if not _is_pil_image(img): raise TypeError('img should be PIL Image. Got {}'.format(type(img))) return img.transpos...
Helper function to get the coefficients (a, b, c, d, e, f, g, h) for the perspective transforms.
def _get_perspective_coeffs(startpoints, endpoints): """Helper function to get the coefficients (a, b, c, d, e, f, g, h) for the perspective transforms. In Perspective Transform each pixel (x, y) in the original image gets transformed as, (x, y) -> ( (ax + by + c) / (gx + hy + 1), (dx + ey + f) / (gx + hy ...
Perform perspective transform of the given PIL Image.
def perspective(img, startpoints, endpoints, interpolation=Image.BICUBIC): """Perform perspective transform of the given PIL Image. Args: img (PIL Image): Image to be transformed. coeffs (tuple) : 8-tuple (a, b, c, d, e, f, g, h) which contains the coefficients. for ...
Vertically flip the given PIL Image.
def vflip(img): """Vertically flip the given PIL Image. Args: img (PIL Image): Image to be flipped. Returns: PIL Image: Vertically flipped image. """ if not _is_pil_image(img): raise TypeError('img should be PIL Image. Got {}'.format(type(img))) return img.transpose(I...
Crop the given PIL Image into four corners and the central crop.
def five_crop(img, size): """Crop the given PIL Image into four corners and the central crop. .. Note:: This transform returns a tuple of images and there may be a mismatch in the number of inputs and targets your ``Dataset`` returns. Args: size (sequence or int): Desired output siz...
Crop the given PIL Image into four corners and the central crop plus the flipped version of these (horizontal flipping is used by default).
def ten_crop(img, size, vertical_flip=False): r"""Crop the given PIL Image into four corners and the central crop plus the flipped version of these (horizontal flipping is used by default). .. Note:: This transform returns a tuple of images and there may be a mismatch in the number of i...
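A usage sketch for ten_crop, showing the tuple-of-crops return value being stacked into a batch; the image path is a placeholder and the image is assumed to be at least 224x224.

import torch
from PIL import Image
from torchvision.transforms import functional as F

img = Image.open("example.jpg").convert("RGB")   # placeholder path
crops = F.ten_crop(img, 224)                     # tuple of 10 PIL Images
batch = torch.stack([F.to_tensor(c) for c in crops])
print(batch.shape)                               # (10, 3, 224, 224)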
Adjust brightness of an Image.
def adjust_brightness(img, brightness_factor): """Adjust brightness of an Image. Args: img (PIL Image): PIL Image to be adjusted. brightness_factor (float): How much to adjust the brightness. Can be any non negative number. 0 gives a black image, 1 gives the original im...
Adjust contrast of an Image.
def adjust_contrast(img, contrast_factor): """Adjust contrast of an Image. Args: img (PIL Image): PIL Image to be adjusted. contrast_factor (float): How much to adjust the contrast. Can be any non negative number. 0 gives a solid gray image, 1 gives the original image wh...
Adjust color saturation of an image.
def adjust_saturation(img, saturation_factor): """Adjust color saturation of an image. Args: img (PIL Image): PIL Image to be adjusted. saturation_factor (float): How much to adjust the saturation. 0 will give a black and white image, 1 will give the original image while ...
Adjust hue of an image.
def adjust_hue(img, hue_factor): """Adjust hue of an image. The image hue is adjusted by converting the image to HSV and cyclically shifting the intensities in the hue channel (H). The image is then converted back to original image mode. `hue_factor` is the amount of shift in H channel and must be...
Perform gamma correction on an image.
def adjust_gamma(img, gamma, gain=1): r"""Perform gamma correction on an image. Also known as Power Law Transform. Intensities in RGB mode are adjusted based on the following equation: .. math:: I_{\text{out}} = 255 \times \text{gain} \times \left(\frac{I_{\text{in}}}{255}\right)^{\gamma} ...
Rotate the image by angle.
def rotate(img, angle, resample=False, expand=False, center=None): """Rotate the image by angle. Args: img (PIL Image): PIL Image to be rotated. angle (float or int): In degrees, counter clockwise order. resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BI...
Apply affine transformation on the image, keeping the image center invariant.
def affine(img, angle, translate, scale, shear, resample=0, fillcolor=None): """Apply affine transformation on the image keeping image center invariant Args: img (PIL Image): PIL Image to be rotated. angle (float or int): rotation angle in degrees between -180 and 180, clockwise direction. ...
Convert an image to its grayscale version.
def to_grayscale(img, num_output_channels=1): """Convert image to grayscale version of image. Args: img (PIL Image): Image to be converted to grayscale. Returns: PIL Image: Grayscale version of the image. if num_output_channels = 1 : returned image is single channel ...
Make a grid of images.
def make_grid(tensor, nrow=8, padding=2, normalize=False, range=None, scale_each=False, pad_value=0): """Make a grid of images. Args: tensor (Tensor or list): 4D mini-batch Tensor of shape (B x C x H x W) or a list of images all of the same size. nrow (int, optional): ...
Save a given Tensor into an image file.
def save_image(tensor, filename, nrow=8, padding=2, normalize=False, range=None, scale_each=False, pad_value=0): """Save a given Tensor into an image file. Args: tensor (Tensor or list): Image to be saved. If given a mini-batch tensor, saves the tensor as a grid of images by ...
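A usage sketch for make_grid and save_image on a random mini-batch; the output filename is a placeholder.

import torch
from torchvision.utils import make_grid, save_image

batch = torch.rand(16, 3, 64, 64)               # stand-in mini-batch of images
grid = make_grid(batch, nrow=8, padding=2)      # one (3, H, W) tensor
print(grid.shape)
save_image(batch, "grid.png", nrow=8)           # placeholder output path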
Finds the class folders in a dataset.
def _find_classes(self, dir): """ Finds the class folders in a dataset. Args: dir (string): Root directory path. Returns: tuple: (classes, class_to_idx) where classes are relative to (dir), and class_to_idx is a dictionary. Ensures: No class...
Return a Tensor containing the patches
def read_image_file(data_dir, image_ext, n): """Return a Tensor containing the patches """ def PIL2array(_img): """Convert PIL image type to numpy 2D array """ return np.array(_img.getdata(), dtype=np.uint8).reshape(64, 64) def find_files(_data_dir, _image_ext): """Retu...
Return a Tensor containing the list of labels. Read the file and keep only the ID of the 3D point.
def read_info_file(data_dir, info_file): """Return a Tensor containing the list of labels Read the file and keep only the ID of the 3D point. """ labels = [] with open(os.path.join(data_dir, info_file), 'r') as f: labels = [int(line.split()[0]) for line in f] return torch.LongTensor(l...
Return a Tensor containing the ground truth matches. Read the file and keep only the 3D point ID. Matches are represented with a 1, non-matches with a 0.
def read_matches_files(data_dir, matches_file): """Return a Tensor containing the ground truth matches Read the file and keep only 3D point ID. Matches are represented with a 1, non matches with a 0. """ matches = [] with open(os.path.join(data_dir, matches_file), 'r') as f: for li...
1x1 convolution
def conv1x1(in_planes, out_planes, stride=1): """1x1 convolution""" return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
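The function above is already complete; a quick check of what a 1x1 convolution does to an activation map (channel projection with unchanged spatial size).

import torch
import torch.nn as nn

def conv1x1(in_planes, out_planes, stride=1):
    """1x1 convolution (as defined above)."""
    return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)

x = torch.randn(1, 64, 56, 56)
y = conv1x1(64, 128)(x)
print(y.shape)        # torch.Size([1, 128, 56, 56]): channels change, spatial size does not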
Computes the accuracy over the k top predictions for the specified values of k
def accuracy(output, target, topk=(1,)): """Computes the accuracy over the k top predictions for the specified values of k""" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(t...
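A hedged sketch of the top-k computation on random data, following the topk/eq pattern visible in the snippet; the shapes and k values are illustrative.

import torch

output = torch.randn(8, 10)                  # logits: 8 samples, 10 classes
target = torch.randint(0, 10, (8,))

_, pred = output.topk(5, dim=1)              # indices of the 5 largest logits
correct = pred.eq(target.view(-1, 1))        # (8, 5) boolean matrix

top1 = correct[:, :1].any(dim=1).float().mean().item() * 100
top5 = correct.any(dim=1).float().mean().item() * 100
print(top1, top5)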
This function disables printing when not in master process
def setup_for_distributed(is_master): """ This function disables printing when not in master process """ import builtins as __builtin__ builtin_print = __builtin__.print def print(*args, **kwargs): force = kwargs.pop('force', False) if is_master or force: builtin_pri...
Warning: does not synchronize the deque!
def synchronize_between_processes(self): """ Warning: does not synchronize the deque! """ if not is_dist_avail_and_initialized(): return t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda') dist.barrier() dist.all_reduce(t) ...
SqueezeNet 1.1 model from the official SqueezeNet repo <https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>. SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters than SqueezeNet 1.0, without sacrificing accuracy.
def squeezenet1_1(pretrained=False, **kwargs): r"""SqueezeNet 1.1 model from the `official SqueezeNet repo <https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_. SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters than SqueezeNet 1.0, without sacrificing accuracy. ...
Python 2 support for os.makedirs(.., exist_ok=True)
def makedir_exist_ok(dirpath): """ Python2 support for os.makedirs(.., exist_ok=True) """ try: os.makedirs(dirpath) except OSError as e: if e.errno == errno.EEXIST: pass else: raise
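On Python 3 the same behaviour is available directly from the standard library, which is presumably why the helper exists only for Python 2 compatibility:

import os

os.makedirs("some/output/dir", exist_ok=True)   # placeholder path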
Download a file from a url and place it in root.
def download_url(url, root, filename=None, md5=None): """Download a file from a url and place it in root. Args: url (str): URL to download file from root (str): Directory to place downloaded file in filename (str, optional): Name to save the file under. If None, use the basename of the ...
List all directories at a given root
def list_dir(root, prefix=False): """List all directories at a given root Args: root (str): Path to directory whose folders need to be listed prefix (bool, optional): If true, prepends the path to each result, otherwise only returns the name of the directories found """ root...
List all files ending with a suffix at a given root
def list_files(root, suffix, prefix=False): """List all files ending with a suffix at a given root Args: root (str): Path to directory whose folders need to be listed suffix (str or tuple): Suffix of the files to match, e.g. '.png' or ('.jpg', '.png'). It uses the Python "str.endswi...
Download a Google Drive file and place it in root.
def download_file_from_google_drive(file_id, root, filename=None, md5=None): """Download a Google Drive file from and place it in root. Args: file_id (str): id of file to be downloaded root (str): Directory to place downloaded file in filename (str, optional): Name to save the file und...
Get parameters for crop for a random crop.
def get_params(img, output_size): """Get parameters for ``crop`` for a random crop. Args: img (PIL Image): Image to be cropped. output_size (tuple): Expected output size of the crop. Returns: tuple: params (i, j, h, w) to be passed to ``crop`` for random cro...
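A usage sketch combining get_params with the functional crop shown earlier; the image path is a placeholder and the image is assumed to be at least 224x224.

from PIL import Image
from torchvision import transforms
from torchvision.transforms import functional as F

img = Image.open("example.jpg").convert("RGB")               # placeholder path
i, j, h, w = transforms.RandomCrop.get_params(img, output_size=(224, 224))
patch = F.crop(img, i, j, h, w)
print(patch.size)                                            # (224, 224)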
Get parameters for perspective for a random perspective transform.
def get_params(width, height, distortion_scale): """Get parameters for ``perspective`` for a random perspective transform. Args: width : width of the image. height : height of the image. Returns: List containing [top-left, top-right, bottom-right, bottom-lef...
Get parameters for crop for a random sized crop.
def get_params(img, scale, ratio): """Get parameters for ``crop`` for a random sized crop. Args: img (PIL Image): Image to be cropped. scale (tuple): range of size of the origin size cropped ratio (tuple): range of aspect ratio of the origin aspect ratio cropped ...
Get a randomized transform to be applied on image.
def get_params(brightness, contrast, saturation, hue): """Get a randomized transform to be applied on image. Arguments are same as that of __init__. Returns: Transform which randomly adjusts brightness, contrast and saturation in a random order. """ tran...
Get parameters for affine transformation
def get_params(degrees, translate, scale_ranges, shears, img_size): """Get parameters for affine transformation Returns: sequence: params to be passed to the affine transformation """ angle = random.uniform(degrees[0], degrees[1]) if translate is not None: ...
Inception v3 model architecture from "Rethinking the Inception Architecture for Computer Vision" <http://arxiv.org/abs/1512.00567>.
def inception_v3(pretrained=False, **kwargs): r"""Inception v3 model architecture from `"Rethinking the Inception Architecture for Computer Vision" <http://arxiv.org/abs/1512.00567>`_. .. note:: **Important**: In contrast to the other models the inception_v3 expects tensors with a size of N...
Download and extract the tarball and download each individual photo.
def download(self): """Download and extract the tarball, and download each individual photo.""" import tarfile if self._check_integrity(): print('Files already downloaded and verified') return download_url(self.url, self.root, self.filename, self.md5_checksum) ...
GoogLeNet (Inception v1) model architecture from "Going Deeper with Convolutions" <http://arxiv.org/abs/1409.4842>.
def googlenet(pretrained=False, **kwargs): r"""GoogLeNet (Inception v1) model architecture from `"Going Deeper with Convolutions" <http://arxiv.org/abs/1409.4842>`_. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet aux_logits (bool): If True, adds two auxiliary bran...
Download the MNIST data if it doesn't exist in processed_folder already.
def download(self): """Download the MNIST data if it doesn't exist in processed_folder already.""" if self._check_exists(): return makedir_exist_ok(self.raw_folder) makedir_exist_ok(self.processed_folder) # download files for url in self.urls: f...
Download the EMNIST data if it doesn't exist in processed_folder already.
def download(self): """Download the EMNIST data if it doesn't exist in processed_folder already.""" import shutil import zipfile if self._check_exists(): return makedir_exist_ok(self.raw_folder) makedir_exist_ok(self.processed_folder) # download fil...