project_name
string
class_name
string
class_modifiers
string
class_implements
int64
class_extends
int64
function_name
string
function_body
string
cyclomatic_complexity
int64
NLOC
int64
num_parameter
int64
num_token
int64
num_variable
int64
start_line
int64
end_line
int64
function_index
int64
function_params
string
function_variable
string
function_return_type
string
function_body_line_type
string
function_num_functions
int64
function_num_lines
int64
outgoing_function_count
int64
outgoing_function_names
string
incoming_function_count
int64
incoming_function_names
string
lexical_representation
string
aws-deadline_deadline-cloud
public
public
0
0
_manifest_diff.process_output
def process_output(status: FileStatus, path: str, output_diff: ManifestDiff):if status == FileStatus.MODIFIED:output_diff.modified.append(path)elif status == FileStatus.NEW:output_diff.new.append(path)elif status == FileStatus.DELETED:output_diff.deleted.append(path)
4
7
3
60
0
255
261
255
null
[]
None
null
0
0
0
null
0
null
The function (_manifest_diff.process_output) is defined within the public class called public. The function starts at line 255 and ends at 261. It contains 7 lines of code and has a cyclomatic complexity of 4. It takes 3 parameters and does not return any value.
aws-deadline_deadline-cloud
public
public
0
0
_manifest_diff
def _manifest_diff(manifest: str,root: str,include: Optional[List[str]] = None,exclude: Optional[List[str]] = None,include_exclude_config: Optional[str] = None,force_rehash=False,logger: ClickLogger = ClickLogger(False),) -> ManifestDiff:"""BETA API - This API is still evolving but will be made public in the near future.API to diff a manifest root with a previously snapshotted manifest.:param manifest: Manifest file path to compare against.:param root: Root directory to generate the manifest fileset.:param include: Include glob to look for files to add to the manifest.:param exclude: Exclude glob to exclude files from the manifest.:param include_exclude_config: Config JSON or file containeing input and exclude config.:param logger: Click Logger instance to print to CLI as text or JSON.:returns: ManifestDiff object containing all new changed, deleted files."""# Find all files matching our regexinput_files = _glob_files(root=root, include=include, exclude=exclude, include_exclude_config=include_exclude_config)input_paths = [Path(p) for p in input_files]# Placeholder Asset Managerasset_manager = S3AssetManager()# parse the given manifest to compare against.local_manifest_object: BaseAssetManifestwith open(manifest) as input_file:manifest_data_str = input_file.read()local_manifest_object = decode_manifest(manifest_data_str)output: ManifestDiff = ManifestDiff()# Helper function to update output datastructure.def process_output(status: FileStatus, path: str, output_diff: ManifestDiff):if status == FileStatus.MODIFIED:output_diff.modified.append(path)elif status == FileStatus.NEW:output_diff.new.append(path)elif status == FileStatus.DELETED:output_diff.deleted.append(path)if force_rehash:# hash and create manifest of local directorycache_config = config_file.get_cache_directory()with HashCache(cache_config) as hash_cache:directory_manifest_object = asset_manager._create_manifest_file(input_paths=input_paths, root_path=root, hash_cache=hash_cache)# Hash based compare 
manifests.differences: List[Tuple[FileStatus, BaseManifestPath]] = compare_manifest(reference_manifest=local_manifest_object, compare_manifest=directory_manifest_object)# Map to output datastructure.for item in differences:process_output(item[0], item[1].path, output)else:# File based comparisons.fast_diff: List[Tuple[str, FileStatus]] = _fast_file_list_to_manifest_diff(root=root, current_files=input_files, diff_manifest=local_manifest_object, logger=logger)for fast_diff_item in fast_diff:process_output(fast_diff_item[1], fast_diff_item[0], output)return output
5
38
7
263
7
216
287
216
manifest,root,include,exclude,include_exclude_config,force_rehash,logger
['input_files', 'manifest_data_str', 'local_manifest_object', 'input_paths', 'directory_manifest_object', 'cache_config', 'asset_manager']
ManifestDiff
{"AnnAssign": 4, "Assign": 7, "Expr": 6, "For": 2, "If": 4, "Return": 1, "With": 2}
18
72
18
["ClickLogger", "_glob_files", "Path", "S3AssetManager", "open", "input_file.read", "decode_manifest", "ManifestDiff", "output_diff.modified.append", "output_diff.new.append", "output_diff.deleted.append", "config_file.get_cache_directory", "HashCache", "asset_manager._create_manifest_file", "compare_manifest", "process_output", "_fast_file_list_to_manifest_diff", "process_output"]
5
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.client.cli._groups.manifest_group_py.manifest_diff", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_deleted_file", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_modified_file_size", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_new_files", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_no_change"]
The function (_manifest_diff) defined within the public class called public.The function start at line 216 and ends at 287. It contains 38 lines of code and it has a cyclomatic complexity of 5. It takes 7 parameters, represented as [216.0] and does not return any value. It declares 18.0 functions, It has 18.0 functions called inside which are ["ClickLogger", "_glob_files", "Path", "S3AssetManager", "open", "input_file.read", "decode_manifest", "ManifestDiff", "output_diff.modified.append", "output_diff.new.append", "output_diff.deleted.append", "config_file.get_cache_directory", "HashCache", "asset_manager._create_manifest_file", "compare_manifest", "process_output", "_fast_file_list_to_manifest_diff", "process_output"], It has 5.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.client.cli._groups.manifest_group_py.manifest_diff", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_deleted_file", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_modified_file_size", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_new_files", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_diff_py.TestDiffAPI.test_diff_no_change"].
aws-deadline_deadline-cloud
public
public
0
0
_manifest_upload
def _manifest_upload(manifest_file: str,s3_bucket_name: str,s3_cas_prefix: str,boto_session: boto3.Session,s3_key_prefix: Optional[str] = None,logger: ClickLogger = ClickLogger(False),):"""BETA API - This API is still evolving but will be made public in the near future.API to upload a job attachment manifest to the Content Addressable Storage. Manifests will beuploaded to s3://{s3_bucket_name}/{cas_prefix}/Manifests/{s3_key_prefix}/{manifest_file_name} as per the Deadline CAS folder structure.manifest_file: File Path to the manifest file for upload.s3_bucket_name: S3 bucket name.boto_session: S3 Content Addressable Storage prefix.s3_key_prefix: [Optional] S3 prefix path to the Content Addressable Storge.boto_session: Boto3 session.logger: Click Logger instance to print to CLI as text or JSON."""# S3 metadata# Upload settings:s3_metadata: Dict[str, Any] = {"Metadata": {}}s3_metadata["Metadata"]["file-system-location-name"] = manifest_file# Always upload the manifest file to case root /Manifest with the original file name.manifest_path: str = "/".join([s3_cas_prefix, S3_MANIFEST_FOLDER_NAME, s3_key_prefix, Path(manifest_file).name]if s3_key_prefixelse [s3_cas_prefix, S3_MANIFEST_FOLDER_NAME, Path(manifest_file).name])# S3 uploader.upload = S3AssetUploader(session=boto_session)manifest_file = str(_get_long_path_compatible_path(manifest_file))with open(manifest_file) as manifest:upload.upload_bytes_to_s3(bytes=BytesIO(manifest.read().encode("utf-8")),bucket=s3_bucket_name,key=manifest_path,progress_handler=logger.echo,extra_args=s3_metadata,)
2
25
6
167
2
290
334
290
manifest_file,s3_bucket_name,s3_cas_prefix,boto_session,s3_key_prefix,logger
['upload', 'manifest_file']
None
{"AnnAssign": 2, "Assign": 3, "Expr": 2, "With": 1}
12
45
12
["ClickLogger", "join", "Path", "Path", "S3AssetUploader", "str", "_get_long_path_compatible_path", "open", "upload.upload_bytes_to_s3", "BytesIO", "encode", "manifest.read"]
3
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.client.cli._groups.manifest_group_py.manifest_upload", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_upload_py.TestManifestUpload.test_upload", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_upload_py.TestManifestUpload.test_upload_with_prefix"]
The function (_manifest_upload) defined within the public class called public.The function start at line 290 and ends at 334. It contains 25 lines of code and it has a cyclomatic complexity of 2. It takes 6 parameters, represented as [290.0] and does not return any value. It declares 12.0 functions, It has 12.0 functions called inside which are ["ClickLogger", "join", "Path", "Path", "S3AssetUploader", "str", "_get_long_path_compatible_path", "open", "upload.upload_bytes_to_s3", "BytesIO", "encode", "manifest.read"], It has 3.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.client.cli._groups.manifest_group_py.manifest_upload", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_upload_py.TestManifestUpload.test_upload", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_upload_py.TestManifestUpload.test_upload_with_prefix"].
aws-deadline_deadline-cloud
public
public
0
0
_manifest_download.add_manifest_by_root
def add_manifest_by_root(manifests_by_root: Dict[str, list], root: str, manifest: BaseAssetManifest):if root not in manifests_by_root:manifests_by_root[root] = []manifests_by_root[root].append(manifest)
2
6
3
42
0
398
403
398
null
[]
None
null
0
0
0
null
0
null
The function (_manifest_download.add_manifest_by_root) is defined within the public class called public. The function starts at line 398 and ends at 403. It contains 6 lines of code and has a cyclomatic complexity of 2. It takes 3 parameters and does not return any value.
aws-deadline_deadline-cloud
public
public
0
0
_manifest_download
def _manifest_download(download_dir: str,farm_id: str,queue_id: str,job_id: str,boto3_session: boto3.Session,step_id: Optional[str] = None,asset_type: AssetType = AssetType.ALL,logger: ClickLogger = ClickLogger(False),) -> ManifestDownloadResponse:"""BETA API - This API is still evolving but will be made public in the near future.API to download the Job Attachment manifest for a Job, and optionally dependencies for Step.download_dir: Download directory.farm_id: The Deadline Farm to download from.queue_id: The Deadline Queue to download from.job_id: Job Id to download.boto_session: Boto3 session.step_id: Optional[str]: Optional, download manifest for a stepasset_type: Which asset manifests should be downloaded for given job (& optionally step), options are Input, Output, All. Default behaviour is All.logger: Click Logger instance to print to CLI as text or JSON.return ManifestDownloadResponse Downloaded Manifest data. Contains source S3 key and local download path."""# Deadline Client and get the Queue to download.deadline = boto3_session.client("deadline", config=get_default_client_config())queue: dict = deadline.get_queue(farmId=farm_id,queueId=queue_id,)# assume queue role - session permissionsqueue_role_session: boto3.Session = _get_queue_user_boto3_session(deadline=deadline,base_session=boto3_session,farm_id=farm_id,queue_id=queue_id,queue_display_name=queue["displayName"],)# Queue's Job Attachment settings.queue_s3_settings = JobAttachmentS3Settings(**queue["jobAttachmentSettings"])# Get S3 prefixs3_prefix: Path = Path(queue_s3_settings.rootPrefix, S3_MANIFEST_FOLDER_NAME)# Capture a list of success download files for JSON output.successful_downloads: List[ManifestDownload] = []# Utility function to build up manifests by root.manifests_by_root: Dict[str, List[BaseAssetManifest]] = dict()# Set the values of download input & output as per selected asset types in the api requestdownload_input: bool = (True if asset_type is None or asset_type in (AssetType.INPUT, 
AssetType.ALL) else False)download_output: bool = (True if asset_type is None or asset_type in (AssetType.OUTPUT, AssetType.ALL) else False)def add_manifest_by_root(manifests_by_root: Dict[str, list], root: str, manifest: BaseAssetManifest):if root not in manifests_by_root:manifests_by_root[root] = []manifests_by_root[root].append(manifest)# Get the job from deadline apijob: dict = deadline.get_job(farmId=farm_id, queueId=queue_id, jobId=job_id)# If input manifests need to be downloadedif download_input:logger.echo(f"Downloading input manifests for job: {job_id}")# Get input_manifest_paths from Deadline GetJob APIattachments: dict = job.get("attachments", {})input_manifest_paths: List[Tuple[str, str]] = [(manifest.get("inputManifestPath", ""), manifest["rootPath"])for manifest in attachments.get("manifests", [])]# Download each input_manifest_pathfor input_manifest_path, root_path in input_manifest_paths:asset_manifest: BaseAssetManifest = get_manifest_from_s3(manifest_key=(s3_prefix / input_manifest_path).as_posix(),s3_bucket=queue_s3_settings.s3BucketName,session=queue_role_session,)if asset_manifest is not None:logger.echo(f"Found input manifest for root: {root_path}")add_manifest_by_root(manifests_by_root=manifests_by_root, root=root_path, manifest=asset_manifest)# Now handle step-step dependenciesif step_id is not None:logger.echo(f"Finding step-step dependency manifests for step: {step_id}")# Get Step-Step dependencies with paginationnext_token = ""while next_token is not None:step_dep_response = deadline.list_step_dependencies(farmId=farm_id,queueId=queue_id,jobId=job_id,stepId=step_id,nextToken=next_token,)for dependent_step in step_dep_response["dependencies"]:logger.echo(f"Found Step-Step dependency. 
{dependent_step['stepId']}")# Get manifests for the step-step dependencystep_manifests_by_root: Dict[str, List[BaseAssetManifest]] = (get_output_manifests_by_asset_root(s3_settings=queue_s3_settings,farm_id=farm_id,queue_id=queue_id,job_id=job_id,step_id=dependent_step["stepId"],session=queue_role_session,))# Merge all manifests by root.for root in step_manifests_by_root.keys():for manifest in step_manifests_by_root[root]:logger.echo(f"Found step-step output manifest for root: {root}")add_manifest_by_root(manifests_by_root=manifests_by_root, root=root, manifest=manifest)next_token = step_dep_response.get("nextToken")# If output manifests need to be downloadedif download_output:output_manifests_by_root: Dict[str, List[BaseAssetManifest]]if step_id is not None:logger.echo(f"Downloading output manifests step: {step_id} of job: {job_id}")# Only get the output manifests for selected stepoutput_manifests_by_root = get_output_manifests_by_asset_root(s3_settings=queue_s3_settings,farm_id=farm_id,queue_id=queue_id,job_id=job_id,step_id=step_id,session=queue_role_session,)else:logger.echo(f"Downloading output manifests for job: {job_id}")# Get output manifests for all steps of the joboutput_manifests_by_root = get_output_manifests_by_asset_root(s3_settings=queue_s3_settings,farm_id=farm_id,queue_id=queue_id,job_id=job_id,session=queue_role_session,)# Merge all output manifests by root.for root in output_manifests_by_root.keys():for manifest in output_manifests_by_root[root]:logger.echo(f"Found output manifest for root: {root}")add_manifest_by_root(manifests_by_root=manifests_by_root, root=root, manifest=manifest)# Finally, merge all manifest paths to create unified manifests.# TODO: Filter outputs by pathmerged_manifests: Dict[str, BaseAssetManifest] = {}for root in manifests_by_root.keys():merged_manifest = merge_asset_manifests(manifests_by_root[root])if merged_manifest:merged_manifests[root] = merged_manifest# Save the manifest files to disk.for root in 
merged_manifests.keys():# Save the merged manifest as {root}_{hash}_timestamp.root_hash: str = hash_data(root.encode("utf-8"), merged_manifests[root].get_default_hash_alg())timestamp = datetime.datetime.now().strftime("%Y-%m-%dT%H-%M-%S")manifest_name = root.replace("/", "_")manifest_name = manifest_name[1:] if manifest_name[0] == "_" else manifest_namemanifest_name = f"{manifest_name}-{root_hash}-{timestamp}.manifest"local_manifest_file_path = os.path.join(download_dir, manifest_name)with open(local_manifest_file_path, "w") as file:file.write(merged_manifests[root].encode())successful_downloads.append(ManifestDownload(manifest_root=root, local_manifest_path=str(local_manifest_file_path)))logger.echo(f"Downloaded merged manifest for root: {root} to: {local_manifest_file_path}")# JSON output at the end.output = ManifestDownloadResponse(downloaded=successful_downloads)return output
22
131
8
828
10
337
535
337
download_dir,farm_id,queue_id,job_id,boto3_session,step_id,asset_type,logger
['timestamp', 'step_dep_response', 'output', 'queue_s3_settings', 'deadline', 'merged_manifest', 'output_manifests_by_root', 'manifest_name', 'local_manifest_file_path', 'next_token']
ManifestDownloadResponse
{"AnnAssign": 15, "Assign": 16, "Expr": 16, "For": 8, "If": 7, "Return": 1, "While": 1, "With": 1}
51
199
51
["ClickLogger", "boto3_session.client", "get_default_client_config", "deadline.get_queue", "_get_queue_user_boto3_session", "JobAttachmentS3Settings", "Path", "dict", "append", "deadline.get_job", "logger.echo", "job.get", "manifest.get", "attachments.get", "get_manifest_from_s3", "as_posix", "logger.echo", "add_manifest_by_root", "logger.echo", "deadline.list_step_dependencies", "logger.echo", "get_output_manifests_by_asset_root", "step_manifests_by_root.keys", "logger.echo", "add_manifest_by_root", "step_dep_response.get", "logger.echo", "get_output_manifests_by_asset_root", "logger.echo", "get_output_manifests_by_asset_root", "output_manifests_by_root.keys", "logger.echo", "add_manifest_by_root", "manifests_by_root.keys", "merge_asset_manifests", "merged_manifests.keys", "hash_data", "root.encode", "get_default_hash_alg", "strftime", "datetime.datetime.now", "root.replace", "os.path.join", "open", "file.write", "encode", "successful_downloads.append", "ManifestDownload", "str", "logger.echo", "ManifestDownloadResponse"]
3
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.client.cli._groups.manifest_group_py.manifest_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_download_py.TestManifestDownload.test_download_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_download_py.TestManifestDownload.test_download_job_paginate_through_step_dependencies"]
The function (_manifest_download) defined within the public class called public.The function start at line 337 and ends at 535. It contains 131 lines of code and it has a cyclomatic complexity of 22. It takes 8 parameters, represented as [337.0] and does not return any value. It declares 51.0 functions, It has 51.0 functions called inside which are ["ClickLogger", "boto3_session.client", "get_default_client_config", "deadline.get_queue", "_get_queue_user_boto3_session", "JobAttachmentS3Settings", "Path", "dict", "append", "deadline.get_job", "logger.echo", "job.get", "manifest.get", "attachments.get", "get_manifest_from_s3", "as_posix", "logger.echo", "add_manifest_by_root", "logger.echo", "deadline.list_step_dependencies", "logger.echo", "get_output_manifests_by_asset_root", "step_manifests_by_root.keys", "logger.echo", "add_manifest_by_root", "step_dep_response.get", "logger.echo", "get_output_manifests_by_asset_root", "logger.echo", "get_output_manifests_by_asset_root", "output_manifests_by_root.keys", "logger.echo", "add_manifest_by_root", "manifests_by_root.keys", "merge_asset_manifests", "merged_manifests.keys", "hash_data", "root.encode", "get_default_hash_alg", "strftime", "datetime.datetime.now", "root.replace", "os.path.join", "open", "file.write", "encode", "successful_downloads.append", "ManifestDownload", "str", "logger.echo", "ManifestDownloadResponse"], It has 3.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.client.cli._groups.manifest_group_py.manifest_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_download_py.TestManifestDownload.test_download_job", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_download_py.TestManifestDownload.test_download_job_paginate_through_step_dependencies"].
aws-deadline_deadline-cloud
public
public
0
0
_manifest_merge
def _manifest_merge(root: str,manifest_files: List[str],destination: str,name: Optional[str],logger: ClickLogger = ClickLogger(False),) -> Optional[ManifestMerge]:"""BETA API - API to merge multiple manifests into one.root: Root path for the manifest.manifest_files: List of manifest files to merge.destination: Destination directory for the merged manifest.name: Name of the merged manifest.logger: Click Logger instance to print to CLI as text or JSON.return ManifestMerge object containing the merged manifest."""manifests: List[BaseAssetManifest] = list(_read_manifests(manifest_paths=manifest_files).values())merged_manifest = merge_asset_manifests(manifests)if not merged_manifest:return Nonelocal_manifest_file = _write_manifest(root=root, manifest=merged_manifest, destination=destination, name=name)logger.echo(f"Manifest generated at {local_manifest_file}")return ManifestMerge(manifest_root=root, local_manifest_path=local_manifest_file)
2
18
5
111
2
538
569
538
root,manifest_files,destination,name,logger
['local_manifest_file', 'merged_manifest']
Optional[ManifestMerge]
{"AnnAssign": 1, "Assign": 2, "Expr": 2, "If": 1, "Return": 2}
8
32
8
["ClickLogger", "list", "values", "_read_manifests", "merge_asset_manifests", "_write_manifest", "logger.echo", "ManifestMerge"]
2
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_merge_py.TestMergeAPI.test_merge_different_files", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_merge_py.TestMergeAPI.test_merge_same_file"]
The function (_manifest_merge) is defined within the public class called public. The function starts at line 538 and ends at 569. It contains 18 lines of code and has a cyclomatic complexity of 2. It takes 5 parameters and returns a value (Optional[ManifestMerge]). It declares 8 functions; it has 8 functions called inside, which are ["ClickLogger", "list", "values", "_read_manifests", "merge_asset_manifests", "_write_manifest", "logger.echo", "ManifestMerge"], and it has 2 functions calling this function, which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_merge_py.TestMergeAPI.test_merge_different_files", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_manifest_merge_py.TestMergeAPI.test_merge_same_file"].
aws-deadline_deadline-cloud
public
public
0
0
canonical_path_comparator
def canonical_path_comparator(path: BaseManifestPath):"""Comparator for sorting paths."""# Sort by UTF-16 values as per the spec# https://www.rfc-editor.org/rfc/rfc8785.html#name-sorting-of-object-propertie# Use the "surrogatepass" error handler because filenames encountered in the wild# include surrogates.return path.path.encode("utf-16_be", errors="surrogatepass")
1
2
1
21
0
13
21
13
path
[]
Returns
{"Expr": 1, "Return": 1}
1
9
1
["path.path.encode"]
0
[]
The function (canonical_path_comparator) is defined within the public class called public. The function starts at line 13 and ends at 21. It contains 2 lines of code and has a cyclomatic complexity of 1. It takes 1 parameter and returns a value. It declares 1 function, and it has 1 function called inside, which is ["path.path.encode"].
aws-deadline_deadline-cloud
public
public
0
0
manifest_to_canonical_json_string
def manifest_to_canonical_json_string(manifest: BaseAssetManifest) -> str:"""Return a canonicalized JSON string based on the following:* The JSON file *MUST* adhere to the JSON canonicalization guidelinesoutlined here (https://www.rfc-editor.org/rfc/rfc8785.html).* For now this is a simplification of this spec. Whitespace between JSON tokens arenot emitted, and the keys are lexographically sorted. However the current implementation doesn'tserialize Literals, String, Numbers, etc. to the letter of the spec explicitly.It implicitly follows the spec as the object keys all fall within the ASCII range of charactersand this version of the Asset Manifest only serializes strings and integers.* The paths array *MUST* be in lexicographical order by path."""return json.dumps(dataclasses.asdict(manifest), sort_keys=True, separators=(",", ":"), ensure_ascii=True)
1
4
1
38
0
24
38
24
manifest
[]
str
{"Expr": 1, "Return": 1}
2
15
2
["json.dumps", "dataclasses.asdict"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.asset_manifest_py.AssetManifest.encode"]
The function (manifest_to_canonical_json_string) is defined within the public class called public. The function starts at line 24 and ends at 38. It contains 4 lines of code and has a cyclomatic complexity of 1. It takes 1 parameter and returns a value (str). It declares 2 functions; it has 2 functions called inside, which are ["json.dumps", "dataclasses.asdict"], and it has 1 function calling this function, which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.asset_manifest_py.AssetManifest.encode"].
aws-deadline_deadline-cloud
public
public
0
0
_create_manifest_for_single_root
def _create_manifest_for_single_root(files: List[str],root: str,logger: ClickLogger,) -> Optional[BaseAssetManifest]:"""Shared logic to create a manifest file from a single root.:param files: Input files to create a manifest with.:param root: Asset root of the files.:param logger: Click logger for stdout.:return"""# Placeholder Asset Managerasset_manager = S3AssetManager()hash_callback_manager = _ProgressBarCallbackManager(length=100, label="Hashing Attachments")upload_group = asset_manager.prepare_paths_for_upload(input_paths=files, output_paths=[root], referenced_paths=[])# We only provided 1 root path, so output should only have 1 group.assert len(upload_group.asset_groups) == 1if upload_group.asset_groups:_, manifests = _hash_attachments(asset_manager=asset_manager,asset_groups=upload_group.asset_groups,total_input_files=upload_group.total_input_files,total_input_bytes=upload_group.total_input_bytes,print_function_callback=logger.echo,hashing_progress_callback=(hash_callback_manager.callback if not logger.is_json() else None),)if not manifests or len(manifests) == 0:logger.echo("No manifest generated")return Noneelse:# This is a hard failure, we are snapshotting 1 directory.assert len(manifests) == 1output_manifest = manifests[0].asset_manifestif output_manifest is None:raise ManifestCreationException()# Return the generated manifest.return output_manifest
6
31
3
176
4
13
60
13
files,root,logger
['hash_callback_manager', 'upload_group', 'output_manifest', 'asset_manager']
Optional[BaseAssetManifest]
{"Assign": 5, "Expr": 2, "If": 3, "Return": 2}
10
48
10
["S3AssetManager", "_ProgressBarCallbackManager", "asset_manager.prepare_paths_for_upload", "len", "_hash_attachments", "logger.is_json", "len", "logger.echo", "len", "ManifestCreationException"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_snapshot"]
The function (_create_manifest_for_single_root) is defined within the public class called public. The function starts at line 13 and ends at 60. It contains 31 lines of code and has a cyclomatic complexity of 6. It takes 3 parameters and returns a value (Optional[BaseAssetManifest]). It declares 10 functions; it has 10 functions called inside, which are ["S3AssetManager", "_ProgressBarCallbackManager", "asset_manager.prepare_paths_for_upload", "len", "_hash_attachments", "logger.is_json", "len", "logger.echo", "len", "ManifestCreationException"], and it has 1 function calling this function, which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_snapshot"].
aws-deadline_deadline-cloud
BaseManifestPath
public
1
1
__init__
def __init__(self, *, path: str, hash: str, size: int, mtime: int) -> None:self.path = pathself.hash = hashself.size = sizeself.mtime = mtime
1
5
5
45
0
27
31
27
self,path,hash,size,mtime
[]
None
{"Assign": 4}
0
5
0
[]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called BaseManifestPath, implement an interface, and it inherit another class.The function start at line 27 and ends at 31. It contains 5 lines of code and it has a cyclomatic complexity of 1. It takes 5 parameters, represented as [27.0] and does not return any value. It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
BaseManifestPath
public
1
1
__eq__
def __eq__(self, other: object) -> bool:"""By default dataclasses still check ClassVars for equality.We only want to compare fields.:param other::return: True if all fields are equal, False otherwise."""if not isinstance(other, BaseManifestPath):return NotImplementedreturn fields(self) == fields(other)
2
4
2
33
0
33
42
33
self,other
[]
bool
{"Expr": 1, "If": 1, "Return": 2}
3
10
3
["isinstance", "fields", "fields"]
2
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.parser.tokens_base_py.TokenOfCommand.__eq__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.86980582_aws_neuron_transformers_neuronx.src.transformers_neuronx.constants_py.Layout.__eq__"]
The function (__eq__) defined within the public class called BaseManifestPath, implement an interface, and it inherit another class.The function start at line 33 and ends at 42. It contains 4 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [33.0] and does not return any value. It declares 3.0 functions, It has 3.0 functions called inside which are ["isinstance", "fields", "fields"], It has 2.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.parser.tokens_base_py.TokenOfCommand.__eq__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.86980582_aws_neuron_transformers_neuronx.src.transformers_neuronx.constants_py.Layout.__eq__"].
aws-deadline_deadline-cloud
BaseManifestPath
public
1
1
__init__
def __init__(self,*,paths: list[BaseManifestPath],hash_alg: HashAlgorithm,):self.paths = pathsself.hashAlg = hash_alg
1
8
3
29
0
53
60
53
self,path,hash,size,mtime
[]
None
{"Assign": 4}
0
5
0
[]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called BaseManifestPath, implement an interface, and it inherit another class.The function start at line 53 and ends at 60. It contains 8 lines of code and it has a cyclomatic complexity of 1. It takes 3 parameters, represented as [53.0] and does not return any value. It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
BaseAssetManifest
public
1
1
get_default_hash_alg
def get_default_hash_alg(cls) -> HashAlgorithm:# pragma: no cover"""Returns the default hashing algorithm for the Asset Manifest"""raise NotImplementedError("Asset Manifest base class does not implement get_default_hash_alg")
1
4
1
13
0
64
68
64
cls
[]
HashAlgorithm
{"Expr": 1}
1
5
1
["NotImplementedError"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_download"]
The function (get_default_hash_alg) defined within the public class called BaseAssetManifest, implement an interface, and it inherit another class.The function start at line 64 and ends at 68. It contains 4 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["NotImplementedError"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_download"].
aws-deadline_deadline-cloud
BaseAssetManifest
public
1
1
decode
def decode(cls, *, manifest_data: dict[str, Any]) -> BaseAssetManifest:# pragma: no cover"""Turn a dictionary for a manifest into an AssetManifest object"""raise NotImplementedError("Asset Manifest base class does not implement decode")
1
2
2
24
0
72
74
72
cls,manifest_data
[]
BaseAssetManifest
{"Expr": 1}
1
3
1
["NotImplementedError"]
635
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.19906768_dragon_userbot_dragon_userbot.modules.squotes_py.render_message", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3527343_frigg_frigg_hq.tests.webhooks.test_commands_py.FetchWebhookPayloadCommandTests.test_command_handle_event_failure", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.commands_py.SourceCommand.run", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.console_py.Console.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.repl_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3550345_napalm_automation_napalm_junos.test.unit.conftest_py.FakeRPCObject.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3551156_parrot_developers_arsdkbuildutils.Utils.Python.ARSDK_PrebuildActions_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.osx_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex_commands_py.ExReadShellOut.run", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3596930_mengskysama_shadowsocks_rm.shadowsocks.shell_py.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.debug_py.read_line", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.examples_py.DescribePreStartHook.it_runs_before_debugging_a_service", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Frame.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Traceback.exception", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3652361_pylons_pyramid_jinja2.tests.test_it_py.DummyTemplate.render", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.tests.unit.test_datagen_py.FuzzDatagenUnittest.test_fuzz_data_xml", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3664958_ldtp_ldtp2.ldtpd.utils_py.Utils._match_name_to_acc", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3673636_bandwidth_python_bandwidth.bandwidth.voice.bxml_py.Response.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.testing.pytest_plugin_py.get_process_ids", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.connection_py.WebSocket.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.frames_py.Frame.payload"]
The function (decode) defined within the public class called BaseAssetManifest, implement an interface, and it inherit another class.The function start at line 72 and ends at 74. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [72.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["NotImplementedError"], It has 635.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.19906768_dragon_userbot_dragon_userbot.modules.squotes_py.render_message", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3527343_frigg_frigg_hq.tests.webhooks.test_commands_py.FetchWebhookPayloadCommandTests.test_command_handle_event_failure", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.commands_py.SourceCommand.run", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.console_py.Console.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.repl_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3550345_napalm_automation_napalm_junos.test.unit.conftest_py.FakeRPCObject.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3551156_parrot_developers_arsdkbuildutils.Utils.Python.ARSDK_PrebuildActions_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.osx_py.run_and_read", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex_commands_py.ExReadShellOut.run", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3596930_mengskysama_shadowsocks_rm.shadowsocks.shell_py.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.debug_py.read_line", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.examples_py.DescribePreStartHook.it_runs_before_debugging_a_service", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Frame.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Traceback.exception", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3652361_pylons_pyramid_jinja2.tests.test_it_py.DummyTemplate.render", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.tests.unit.test_datagen_py.FuzzDatagenUnittest.test_fuzz_data_xml", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3664958_ldtp_ldtp2.ldtpd.utils_py.Utils._match_name_to_acc", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3673636_bandwidth_python_bandwidth.bandwidth.voice.bxml_py.Response.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.testing.pytest_plugin_py.get_process_ids", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.connection_py.WebSocket.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.frames_py.Frame.payload"].
aws-deadline_deadline-cloud
BaseAssetManifest
public
1
1
encode
def encode(self) -> str:# pragma: no cover"""Recursively encode the Asset Manifest into a string according towhatever format the Asset Manifest was written for."""raise NotImplementedError("Asset Manifest base class does not implement encode")
1
2
1
13
0
77
82
77
self
[]
str
{"Expr": 1}
1
6
1
["NotImplementedError"]
354
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.bin.make_version_py.update_changelog", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3581153_zetaops_pyoko.pyoko.node_py.Node.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3619342_bread_and_pepper_django_userena.userena.forms_py.SignupFormOnlyEmail.save", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.comm.irc_py.Client.irc_CAP", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages_with_multiline_content", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3640866_pelagicore_qface.qface.helper.generic_py.hash", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.PgctlApp.config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.unbuf_print", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_frozendict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_other", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3646098_developers_against_repressions_network_case.update_readme_py.load_signed", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.requests_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.urllib_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.syntribos.extensions.basic_http.client_py.basic_auth", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_header", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_shortcode", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.mock_tracker_py.TrackerHandler.do_POST", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.random_result_py.MockResult.generate_shortcode", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.util.externalsort_py.GNUExternalSort.input_many", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.add_urllib3_response", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.deserialize_prepared_request", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3696527_singnet_snet_cli.snet.cli.commands.mpe_service_py.MPEServiceCommand._publish_metadata_in_ipfs"]
The function (encode) defined within the public class called BaseAssetManifest, implement an interface, and it inherit another class.The function start at line 77 and ends at 82. It contains 2 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["NotImplementedError"], It has 354.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.bin.make_version_py.update_changelog", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3581153_zetaops_pyoko.pyoko.node_py.Node.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3619342_bread_and_pepper_django_userena.userena.forms_py.SignupFormOnlyEmail.save", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.comm.irc_py.Client.irc_CAP", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages_with_multiline_content", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3640866_pelagicore_qface.qface.helper.generic_py.hash", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.PgctlApp.config", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.unbuf_print", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_frozendict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_other", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3646098_developers_against_repressions_network_case.update_readme_py.load_signed", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.requests_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.urllib_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.syntribos.extensions.basic_http.client_py.basic_auth", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_header", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_shortcode", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.mock_tracker_py.TrackerHandler.do_POST", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.random_result_py.MockResult.generate_shortcode", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.util.externalsort_py.GNUExternalSort.input_many", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.add_urllib3_response", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.deserialize_prepared_request", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3696527_singnet_snet_cli.snet.cli.commands.mpe_service_py.MPEServiceCommand._publish_metadata_in_ipfs"].
aws-deadline_deadline-cloud
public
public
0
0
validate_manifest
def validate_manifest(manifest: dict[str, Any], version: ManifestVersion) -> Tuple[bool, Optional[str]]:"""Checks if the given manifest is valid for the given manifest version. Returns True if the manifestis valid for the given version. Returns False and a string explaining the error if the manifest is not valid."""if version == ManifestVersion.v2023_03_03:return validate_manifest_2023_03_03(manifest)else:return False, f"Version {version} is not supported"
2
7
2
46
0
20
30
20
manifest,version
[]
Tuple[bool, Optional[str]]
{"Expr": 1, "If": 1, "Return": 2}
1
11
1
["validate_manifest_2023_03_03"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.decode_py.decode_manifest"]
The function (validate_manifest) defined within the public class called public.The function start at line 20 and ends at 30. It contains 7 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [20.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["validate_manifest_2023_03_03"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.decode_py.decode_manifest"].
aws-deadline_deadline-cloud
public
public
0
0
decode_manifest
def decode_manifest(manifest: str) -> BaseAssetManifest:"""Takes in a manifest string and returns an Asset Manifest object.A ManifestDecodeValidationError will be raised if the manifest version is unknown orthe manifest is not valid."""document: dict[str, Any] = json.loads(manifest)try:version = ManifestVersion(document["manifestVersion"])except ValueError:# Value of the manifest version is not one we know.supported_versions = ", ".join([v.value for v in ManifestVersion if v != ManifestVersion.UNDEFINED])raise ManifestDecodeValidationError(f"Unknown manifest version: {document['manifestVersion']} "f"(Currently supported Manifest versions: {supported_versions})")except KeyError:raise ManifestDecodeValidationError('Manifest is missing the required "manifestVersion" field')manifest_valid, error_string = validate_manifest(document, version)if not manifest_valid:raise ManifestDecodeValidationError(error_string)manifest_model = ManifestModelRegistry.get_manifest_model(version=version)decoded_manifest = manifest_model.AssetManifest.decode(manifest_data=document)# Validate hashes are alphanumericfor path in decoded_manifest.paths:if alphanum_regex.fullmatch(path.hash) is None:raise ManifestDecodeValidationError(f"The hash {path.hash} for path {path.path} is not alphanumeric")return decoded_manifest
8
27
1
145
4
33
72
33
manifest
['decoded_manifest', 'version', 'manifest_model', 'supported_versions']
BaseAssetManifest
{"AnnAssign": 1, "Assign": 5, "Expr": 1, "For": 1, "If": 2, "Return": 1, "Try": 1}
11
40
11
["json.loads", "ManifestVersion", "join", "ManifestDecodeValidationError", "ManifestDecodeValidationError", "validate_manifest", "ManifestDecodeValidationError", "ManifestModelRegistry.get_manifest_model", "manifest_model.AssetManifest.decode", "alphanum_regex.fullmatch", "ManifestDecodeValidationError"]
36
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api._utils_py._read_manifests", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_diff", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_snapshot", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.download_py._get_asset_root_and_manifest_from_s3_with_last_modified", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.download_py._read_manifest_file", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_manifest_download_py.TestManifestDownload.validate_manifest_is_not_None", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentDownload.test_download_conflict_resolution", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentDownload.test_download_multiple_to_current", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentDownload.test_download_single_to_current", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentUpload.test_upload_single_from_mapped", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentUpload.test_upload_single_map_from_root", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_aggregate_asset_root_manifests_and_write", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_404_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_no_space_left", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_successful", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_successful_using_vfs_fallback", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_with_step_dependencies", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_with_step_dependencies_same_root_vfs_on_posix", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_with_storage_profiles_path_mapping_rules", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_404_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_no_space_left", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_successful", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_successful_using_vfs_fallback", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_with_step_dependencies", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_with_step_dependencies_same_root_vfs_on_posix"]
The function (decode_manifest) defined within the public class called public.The function start at line 33 and ends at 72. It contains 27 lines of code and it has a cyclomatic complexity of 8. The function does not take any parameters and does not return any value. It declares 11.0 functions, It has 11.0 functions called inside which are ["json.loads", "ManifestVersion", "join", "ManifestDecodeValidationError", "ManifestDecodeValidationError", "validate_manifest", "ManifestDecodeValidationError", "ManifestModelRegistry.get_manifest_model", "manifest_model.AssetManifest.decode", "alphanum_regex.fullmatch", "ManifestDecodeValidationError"], It has 36.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api._utils_py._read_manifests", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_diff", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_snapshot", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.download_py._get_asset_root_and_manifest_from_s3_with_last_modified", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.download_py._read_manifest_file", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_manifest_download_py.TestManifestDownload.validate_manifest_is_not_None", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentDownload.test_download_conflict_resolution", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentDownload.test_download_multiple_to_current", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentDownload.test_download_single_to_current", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentUpload.test_upload_single_from_mapped", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentUpload.test_upload_single_map_from_root", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_aggregate_asset_root_manifests_and_write", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_404_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_no_space_left", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_successful", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_successful_using_vfs_fallback", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_with_step_dependencies", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_with_step_dependencies_same_root_vfs_on_posix", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_attachment_sync_inputs_with_storage_profiles_path_mapping_rules", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_404_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_no_space_left", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_successful", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_successful_using_vfs_fallback", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_with_step_dependencies", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.test_asset_sync_py.TestAssetSync.test_sync_inputs_with_step_dependencies_same_root_vfs_on_posix"].
aws-deadline_deadline-cloud
public
public
0
0
hash_file
def hash_file(file_path: str, hash_alg: HashAlgorithm) -> str:"""Hashes the given file using the given hashing algorithm."""if hash_alg == HashAlgorithm.XXH128:from xxhash import xxh3_128hasher = xxh3_128()else:raise UnsupportedHashingAlgorithmError(f"Unsupported hashing algorithm provided: {hash_alg}")with open(file_path, "rb") as file:while True:chunk = file.read(io.DEFAULT_BUFFER_SIZE)if not chunk:breakhasher.update(chunk)return hasher.hexdigest()
4
15
2
78
2
24
41
24
file_path,hash_alg
['chunk', 'hasher']
str
{"Assign": 2, "Expr": 2, "If": 2, "Return": 1, "While": 1, "With": 1}
6
18
6
["xxh3_128", "UnsupportedHashingAlgorithmError", "open", "file.read", "hasher.update", "hasher.hexdigest"]
5
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_sync_py.AssetSync._get_output_files", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetManager._process_input_path", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.conftest_py.upload_input_files_one_asset_in_cas", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.deadline_job_attachments.test_job_attachments_py.sync_outputs", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.deadline_job_attachments.test_job_attachments_py.upload_input_files_one_asset_in_cas"]
The function (hash_file) defined within the public class called public.The function start at line 24 and ends at 41. It contains 15 lines of code and it has a cyclomatic complexity of 4. It takes 2 parameters, represented as [24.0] and does not return any value. It declares 6.0 functions, It has 6.0 functions called inside which are ["xxh3_128", "UnsupportedHashingAlgorithmError", "open", "file.read", "hasher.update", "hasher.hexdigest"], It has 5.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_sync_py.AssetSync._get_output_files", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetManager._process_input_path", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.conftest_py.upload_input_files_one_asset_in_cas", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.deadline_job_attachments.test_job_attachments_py.sync_outputs", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.deadline_job_attachments.test_job_attachments_py.upload_input_files_one_asset_in_cas"].
aws-deadline_deadline-cloud
public
public
0
0
hash_data
def hash_data(data: bytes, hash_alg: HashAlgorithm) -> str:"""Hashes the given data bytes using the given hashing algorithm."""if hash_alg == HashAlgorithm.XXH128:from xxhash import xxh3_128hasher = xxh3_128()else:raise UnsupportedHashingAlgorithmError(f"Unsupported hashing algorithm provided: {hash_alg}")hasher.update(data)return hasher.hexdigest()
2
10
2
50
1
44
56
44
data,hash_alg
['hasher']
str
{"Assign": 1, "Expr": 2, "If": 1, "Return": 1}
4
13
4
["xxh3_128", "UnsupportedHashingAlgorithmError", "hasher.update", "hasher.hexdigest"]
14
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._write_manifest", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_sync_py.AssetSync._upload_output_manifest_to_s3", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.models_py.PathMappingRule.get_hashed_source_path", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader._gather_upload_metadata", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader._get_hashed_file_name_from_root_str", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader._snapshot_assets", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader.upload_assets", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_attachment_py.TestAttachment._run_attachment_basic_flow", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_attachment_py.TestAttachment.test_attachment_path_mapping_flow", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_attachment_py.TestAttachment.test_attachment_s3_cross_account_access_denied", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.deadline_job_attachments.test_job_attachments_py.test_upload_input_files_no_download_paths", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentUpload.test_upload_single_map_from_root", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.incremental_downloads.test_manifest_download_py.generate_fake_job_with_output_manifest"]
The function (hash_data) defined within the public class called public.The function start at line 44 and ends at 56. It contains 10 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [44.0] and does not return any value. It declares 4.0 functions, It has 4.0 functions called inside which are ["xxh3_128", "UnsupportedHashingAlgorithmError", "hasher.update", "hasher.hexdigest"], It has 14.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._write_manifest", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_sync_py.AssetSync._upload_output_manifest_to_s3", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.models_py.PathMappingRule.get_hashed_source_path", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader._gather_upload_metadata", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader._get_hashed_file_name_from_root_str", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader._snapshot_assets", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetUploader.upload_assets", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_attachment_py.TestAttachment._run_attachment_basic_flow", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_attachment_py.TestAttachment.test_attachment_path_mapping_flow", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_attachment_py.TestAttachment.test_attachment_s3_cross_account_access_denied", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.deadline_job_attachments.test_job_attachments_py.test_upload_input_files_no_download_paths", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.api.test_attachment_py.TestAttachmentUpload.test_upload_single_map_from_root", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.incremental_downloads.test_manifest_download_py.generate_fake_job_with_output_manifest"].
aws-deadline_deadline-cloud
ManifestModelRegistry
public
0
0
register
def register(cls) -> None:"""Register the availble manifest models."""# Import here to avoid circular dependancies.from .v2023_03_03 import ManifestModel as _ManifestModel2023_03_03new_manifests = {ManifestVersion.v2023_03_03: _ManifestModel2023_03_03,}cls._asset_manifest_mapping = {**cls._asset_manifest_mapping, **new_manifests}
1
6
1
38
0
28
38
28
cls
[]
None
{"Assign": 2, "Expr": 1}
0
11
0
[]
221
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.syntribos.signal_py.SignalHolder.compare", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3688162_zalando_incubator_kopf.kopf.reactor.registries_py.OperatorRegistry.register_resource_changing_handler", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3688162_zalando_incubator_kopf.kopf.reactor.registries_py.OperatorRegistry.register_resource_watching_handler", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3700918_etianen_django_reversion.reversion.admin_py.VersionAdmin.reversion_register", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3700918_etianen_django_reversion.reversion.revisions_py.register", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.ban", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.chat_unlock", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.demote", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.emergency_lock", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.get_admin", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.gspider", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.kick", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.muter", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.nothanos", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.pin", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.promote", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.rm_deletedacc", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.set_group_photo", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.spider", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.ungmoot", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.unmoot", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.afk_on_pm", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.mention_afk", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.set_afk", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.type_afk_is_not_true"]
The function (register) defined within the public class called ManifestModelRegistry.The function start at line 28 and ends at 38. It contains 6 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It has 221.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.syntribos.signal_py.SignalHolder.compare", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3688162_zalando_incubator_kopf.kopf.reactor.registries_py.OperatorRegistry.register_resource_changing_handler", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3688162_zalando_incubator_kopf.kopf.reactor.registries_py.OperatorRegistry.register_resource_watching_handler", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3700918_etianen_django_reversion.reversion.admin_py.VersionAdmin.reversion_register", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3700918_etianen_django_reversion.reversion.revisions_py.register", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.ban", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.chat_unlock", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.demote", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.emergency_lock", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.get_admin", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.gspider", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.kick", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.muter", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.nothanos", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.pin", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.promote", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.rm_deletedacc", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.set_group_photo", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.spider", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.ungmoot", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.admin_py.unmoot", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.afk_on_pm", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.mention_afk", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.set_afk", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3953407_raphielgang_telegram_paperplane.userbot.modules.afk_py.type_afk_is_not_true"].
aws-deadline_deadline-cloud
ManifestModelRegistry
public
0
0
get_manifest_model
def get_manifest_model(cls, *, version: ManifestVersion) -> Type[BaseManifestModel]:"""Get the manifest model for the specified version."""manifest_model = cls._asset_manifest_mapping.get(version, None)if not manifest_model:raise RuntimeError(f"No model for asset manifest version: {version}")return manifest_model
2
5
2
41
0
41
48
41
cls,version
[]
Type[BaseManifestModel]
{"Assign": 1, "Expr": 1, "If": 1, "Return": 1}
2
8
2
["cls._asset_manifest_mapping.get", "RuntimeError"]
6
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.decode_py.decode_manifest", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_sync_py.AssetSync.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetManager._create_manifest_file", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetManager._process_input_path", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.asset_manifests.test_manifest_model_py.test_get_manifest_model", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.asset_manifests.test_manifest_model_py.test_get_manifest_model_no_manifest_for_version"]
The function (get_manifest_model) defined within the public class called ManifestModelRegistry.The function start at line 41 and ends at 48. It contains 5 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [41.0] and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["cls._asset_manifest_mapping.get", "RuntimeError"], It has 6.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.decode_py.decode_manifest", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_sync_py.AssetSync.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetManager._create_manifest_file", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.upload_py.S3AssetManager._process_input_path", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.asset_manifests.test_manifest_model_py.test_get_manifest_model", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.asset_manifests.test_manifest_model_py.test_get_manifest_model_no_manifest_for_version"].
aws-deadline_deadline-cloud
ManifestPath
public
0
1
__init__
def __init__(self, *, path: str, hash: str, size: int, mtime: int) -> None:super().__init__(path=path, hash=hash, size=size, mtime=mtime)
1
2
5
47
0
30
31
30
self,path,hash,size,mtime
[]
None
{"Expr": 1}
2
2
2
["__init__", "super"]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called ManifestPath, that inherit another class.The function start at line 30 and ends at 31. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 5 parameters, represented as [30.0] and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["__init__", "super"], It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
ManifestPath
public
0
1
__init__
def __init__(self, *, hash_alg: HashAlgorithm, paths: list[BaseManifestPath], total_size: int) -> None:if hash_alg not in SUPPORTED_HASH_ALGS:raise ManifestDecodeValidationError(f"Unsupported hashing algorithm: {hash_alg}. Must be one of: {[e.value for e in SUPPORTED_HASH_ALGS]}")super().__init__(hash_alg=hash_alg, paths=paths)self.totalSize = total_sizeself.manifestVersion = ManifestVersion.v2023_03_03
2
10
4
62
0
40
50
40
self,path,hash,size,mtime
[]
None
{"Expr": 1}
2
2
2
["__init__", "super"]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called ManifestPath, that inherit another class.The function start at line 40 and ends at 50. It contains 10 lines of code and it has a cyclomatic complexity of 2. It takes 4 parameters, represented as [40.0] and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["__init__", "super"], It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
AssetManifest
public
0
1
decode
def decode(cls, *, manifest_data: dict[str, Any]) -> AssetManifest:"""Return an instance of this class given a manifest dictionary.Assumes the manifest has been validated prior to calling."""try:hash_alg: HashAlgorithm = HashAlgorithm(manifest_data["hashAlg"])except ValueError:raise ManifestDecodeValidationError(f"Unsupported hashing algorithm: {hash_alg}. Must be one of: {[e.value for e in SUPPORTED_HASH_ALGS]}")return cls(hash_alg=hash_alg,paths=[ManifestPath(path=path["path"], hash=path["hash"], size=path["size"], mtime=path["mtime"])for path in manifest_data["paths"]],total_size=manifest_data["totalSize"],)
3
17
2
98
0
53
74
53
cls,manifest_data
[]
AssetManifest
{"AnnAssign": 1, "Expr": 1, "Return": 1, "Try": 1}
4
22
4
["HashAlgorithm", "ManifestDecodeValidationError", "cls", "ManifestPath"]
636
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.19906768_dragon_userbot_dragon_userbot.modules.squotes_py.render_message", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3527343_frigg_frigg_hq.tests.webhooks.test_commands_py.FetchWebhookPayloadCommandTests.test_command_handle_event_failure", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.commands_py.SourceCommand.run", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.console_py.Console.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.repl_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3550345_napalm_automation_napalm_junos.test.unit.conftest_py.FakeRPCObject.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3551156_parrot_developers_arsdkbuildutils.Utils.Python.ARSDK_PrebuildActions_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.osx_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex_commands_py.ExReadShellOut.run", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3596930_mengskysama_shadowsocks_rm.shadowsocks.shell_py.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.debug_py.read_line", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.examples_py.DescribePreStartHook.it_runs_before_debugging_a_service", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Frame.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Traceback.exception", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3652361_pylons_pyramid_jinja2.tests.test_it_py.DummyTemplate.render", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.tests.unit.test_datagen_py.FuzzDatagenUnittest.test_fuzz_data_xml", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3664958_ldtp_ldtp2.ldtpd.utils_py.Utils._match_name_to_acc", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3673636_bandwidth_python_bandwidth.bandwidth.voice.bxml_py.Response.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.testing.pytest_plugin_py.get_process_ids", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.connection_py.WebSocket.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.frames_py.Frame.payload"]
The function (decode) defined within the public class called AssetManifest, that inherit another class.The function start at line 53 and ends at 74. It contains 17 lines of code and it has a cyclomatic complexity of 3. It takes 2 parameters, represented as [53.0] and does not return any value. It declares 4.0 functions, It has 4.0 functions called inside which are ["HashAlgorithm", "ManifestDecodeValidationError", "cls", "ManifestPath"], It has 636.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.19906768_dragon_userbot_dragon_userbot.modules.squotes_py.render_message", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3527343_frigg_frigg_hq.tests.webhooks.test_commands_py.FetchWebhookPayloadCommandTests.test_command_handle_event_failure", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.commands_py.SourceCommand.run", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.console_py.Console.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3549150_freenas_cli.freenas.cli.repl_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3550345_napalm_automation_napalm_junos.test.unit.conftest_py.FakeRPCObject.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3551156_parrot_developers_arsdkbuildutils.Utils.Python.ARSDK_PrebuildActions_py.main", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.linux_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.osx_py.run_and_read", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.run_and_read", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex_commands_py.ExReadShellOut.run", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3596930_mengskysama_shadowsocks_rm.shadowsocks.shell_py.get_config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.debug_py.read_line", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.spec.examples_py.DescribePreStartHook.it_runs_before_debugging_a_service", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Frame.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.exceptions_py.Traceback.exception", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3652361_pylons_pyramid_jinja2.tests.test_it_py.DummyTemplate.render", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.tests.unit.test_datagen_py.FuzzDatagenUnittest.test_fuzz_data_xml", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3664958_ldtp_ldtp2.ldtpd.utils_py.Utils._match_name_to_acc", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3673636_bandwidth_python_bandwidth.bandwidth.voice.bxml_py.Response.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.testing.pytest_plugin_py.get_process_ids", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.connection_py.WebSocket.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3682933_noisyboiler_wampy.wampy.transports.websocket.frames_py.Frame.payload"].
aws-deadline_deadline-cloud
AssetManifest
public
0
1
get_default_hash_alg
def get_default_hash_alg(cls) -> HashAlgorithm:# pragma: no cover"""Returns the default hashing algorithm for the Asset Manifest, represented as a string"""return DEFAULT_HASH_ALG
1
2
1
10
0
77
79
77
cls
[]
HashAlgorithm
{"Expr": 1, "Return": 1}
0
3
0
[]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_download"]
The function (get_default_hash_alg) defined within the public class called AssetManifest, that inherit another class.The function start at line 77 and ends at 79. It contains 2 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.api.manifest_py._manifest_download"].
aws-deadline_deadline-cloud
AssetManifest
public
0
1
encode
def encode(self) -> str:"""Return a canonicalized JSON string of the manifest"""self.paths.sort(key=canonical_path_comparator)return manifest_to_canonical_json_string(manifest=self)
1
3
1
25
0
81
86
81
self
[]
str
{"Expr": 2, "Return": 1}
2
6
2
["self.paths.sort", "manifest_to_canonical_json_string"]
354
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.bin.make_version_py.update_changelog", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3581153_zetaops_pyoko.pyoko.node_py.Node.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3619342_bread_and_pepper_django_userena.userena.forms_py.SignupFormOnlyEmail.save", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.comm.irc_py.Client.irc_CAP", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages_with_multiline_content", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3640866_pelagicore_qface.qface.helper.generic_py.hash", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.PgctlApp.config", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.unbuf_print", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_frozendict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_other", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3646098_developers_against_repressions_network_case.update_readme_py.load_signed", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.requests_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.urllib_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.syntribos.extensions.basic_http.client_py.basic_auth", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_header", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_shortcode", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.mock_tracker_py.TrackerHandler.do_POST", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.random_result_py.MockResult.generate_shortcode", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.util.externalsort_py.GNUExternalSort.input_many", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.add_urllib3_response", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.deserialize_prepared_request", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3696527_singnet_snet_cli.snet.cli.commands.mpe_service_py.MPEServiceCommand._publish_metadata_in_ipfs"]
The function (encode) defined within the public class called AssetManifest, that inherit another class.The function start at line 81 and ends at 86. It contains 3 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["self.paths.sort", "manifest_to_canonical_json_string"], It has 354.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.bin.make_version_py.update_changelog", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3574420_guillermooo_vintageous.ex.plat.windows_py.filter_region", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3581153_zetaops_pyoko.pyoko.node_py.Node.__str__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3619342_bread_and_pepper_django_userena.userena.forms_py.SignupFormOnlyEmail.save", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.comm.irc_py.Client.irc_CAP", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_download", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3632606_shaunduncan_helga.helga.tests.webhooks.test_logger_py.TestChannelLogView.test_messages_with_multiline_content", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3640866_pelagicore_qface.qface.helper.generic_py.hash", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.PgctlApp.config", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.pgctl.cli_py.unbuf_print", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_frozendict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3643700_yelp_pgctl.tests.unit.functions_py.DescribeJSONEncoder.it_encodes_other", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3646098_developers_against_repressions_network_case.update_readme_py.load_signed", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.requests_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3647266_appenlight_appenlight_client_python.appenlight_client.transports.urllib_py.HTTPTransport.remote_call", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3657361_openstack_archive_syntribos.syntribos.extensions.basic_http.client_py.basic_auth", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_header", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.format.beacon_py.BEACONWriter.write_shortcode", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.mock_tracker_py.TrackerHandler.do_POST", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.test.random_result_py.MockResult.generate_shortcode", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3692282_archiveteam_terroroftinytown.terroroftinytown.util.externalsort_py.GNUExternalSort.input_many", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.add_urllib3_response", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3694885_betamaxpy_betamax.src.betamax.util_py.deserialize_prepared_request", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3696527_singnet_snet_cli.snet.cli.commands.mpe_service_py.MPEServiceCommand._publish_metadata_in_ipfs"].
aws-deadline_deadline-cloud
public
public
0
0
_get_missing_fields
def _get_missing_fields(obj: dict[str, Any], required: list[str]) -> list[str]:missing = []for field in required:if field not in obj:missing.append(field)return missing
3
6
2
47
1
26
31
26
obj,required
['missing']
list[str]
{"Assign": 1, "Expr": 1, "For": 1, "If": 1, "Return": 1}
1
6
1
["missing.append"]
2
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.validate_py._validate_path_2023_03_03", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.validate_py.validate_manifest_2023_03_03"]
The function (_get_missing_fields) defined within the public class called public.The function start at line 26 and ends at 31. It contains 6 lines of code and it has a cyclomatic complexity of 3. It takes 2 parameters, represented as [26.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["missing.append"], It has 2.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.validate_py._validate_path_2023_03_03", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.validate_py.validate_manifest_2023_03_03"].
aws-deadline_deadline-cloud
public
public
0
0
_validate_path_2023_03_03
def _validate_path_2023_03_03(path_object: dict[str, Any]) -> Tuple[bool, Optional[str]]:missing = _get_missing_fields(path_object, _PATH_REQUIRED_FIELDS_2023_03_03)if len(missing) > 0:return False, f"path is missing required field(s) {missing}"path = path_object["path"]if not isinstance(path, str):return False, "path must be a string"hash = path_object["hash"]if not isinstance(hash, str):return False, "hash must be a string"size = path_object["size"]if not isinstance(size, int):return False, "size must be an integer"mtime = path_object["mtime"]if not isinstance(mtime, int):return False, "mtime must be an integer"return True, None
6
17
1
123
5
34
55
34
path_object
['mtime', 'hash', 'path', 'missing', 'size']
Tuple[bool, Optional[str]]
{"Assign": 5, "If": 5, "Return": 6}
6
22
6
["_get_missing_fields", "len", "isinstance", "isinstance", "isinstance", "isinstance"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.validate_py.validate_manifest_2023_03_03"]
The function (_validate_path_2023_03_03) defined within the public class called public.The function start at line 34 and ends at 55. It contains 17 lines of code and it has a cyclomatic complexity of 6. The function does not take any parameters and does not return any value. It declares 6.0 functions, It has 6.0 functions called inside which are ["_get_missing_fields", "len", "isinstance", "isinstance", "isinstance", "isinstance"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.v2023_03_03.validate_py.validate_manifest_2023_03_03"].
aws-deadline_deadline-cloud
public
public
0
0
validate_manifest_2023_03_03
def validate_manifest_2023_03_03(manifest: dict[str, Any]) -> Tuple[bool, Optional[str]]:missing = _get_missing_fields(manifest, _REQUIRED_FIELDS_2023_03_03)if len(missing) > 0:return False, f"manifest is missing required field(s) {missing}"manifest_version = manifest["manifestVersion"]if not isinstance(manifest_version, str) or manifest_version != "2023-03-03":return False, 'manifestVersion must be "2023-03-03"'hash_alg = manifest["hashAlg"]if not isinstance(hash_alg, str) or hash_alg not in _HASH_ALGS_2023_03_03:return False, f"hashAlg must be one of {_HASH_ALGS_2023_03_03}"total_size = manifest["totalSize"]if not isinstance(total_size, int):return False, "totalSize must be an integer"paths = manifest["paths"]if not isinstance(paths, list):return False, "paths must be a list"elif len(paths) < 1:return False, "paths must have a least one item"else:for path_object in paths:ok, message = _validate_path_2023_03_03(path_object)if not ok:return False, messagereturn True, None
11
24
1
168
5
58
86
58
manifest
['hash_alg', 'total_size', 'manifest_version', 'missing', 'paths']
Tuple[bool, Optional[str]]
{"Assign": 6, "For": 1, "If": 7, "Return": 8}
8
29
8
["_get_missing_fields", "len", "isinstance", "isinstance", "isinstance", "isinstance", "len", "_validate_path_2023_03_03"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.decode_py.validate_manifest"]
The function (validate_manifest_2023_03_03) defined within the public class called public.The function start at line 58 and ends at 86. It contains 24 lines of code and it has a cyclomatic complexity of 11. The function does not take any parameters and does not return any value. It declares 8.0 functions, It has 8.0 functions called inside which are ["_get_missing_fields", "len", "isinstance", "isinstance", "isinstance", "isinstance", "len", "_validate_path_2023_03_03"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.src.deadline.job_attachments.asset_manifests.decode_py.validate_manifest"].
aws-deadline_deadline-cloud
CacheDB
public
1
1
__init__
def __init__(self, cache_name: str, table_name: str, create_query: str, cache_dir: Optional[str] = None) -> None:if not cache_name or not table_name or not create_query:raise JobAttachmentsError("Constructor strings for CacheDB cannot be empty.")self.cache_name: str = cache_nameself.table_name: str = table_nameself.create_query: str = create_queryself.local = threading.local()self.local_connections: set = set()try:# SQLite is included in Python installers, but might not exist if building python from source.import sqlite3# noqaself.enabled = Trueexcept ImportError:logger.warning(f"SQLite was not found, {cache_name} will not be used.")self.enabled = Falsereturnif cache_dir is None:cache_dir = self.get_default_cache_db_file_dir()if cache_dir is None:raise JobAttachmentsError(f"No default cache path found. Please provide a directory for {self.cache_name}.")os.makedirs(cache_dir, exist_ok=True)self.cache_dir: str = os.path.join(cache_dir, f"{self.cache_name}.db")self.db_lock = Lock()
7
26
5
164
0
30
59
30
self,cache_name,table_name,create_query,cache_dir
[]
None
{"AnnAssign": 5, "Assign": 5, "Expr": 2, "If": 3, "Return": 1, "Try": 1}
9
30
9
["JobAttachmentsError", "threading.local", "set", "logger.warning", "self.get_default_cache_db_file_dir", "JobAttachmentsError", "os.makedirs", "os.path.join", "Lock"]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called CacheDB, implement an interface, and it inherit another class.The function start at line 30 and ends at 59. It contains 26 lines of code and it has a cyclomatic complexity of 7. It takes 5 parameters, represented as [30.0] and does not return any value. It declares 9.0 functions, It has 9.0 functions called inside which are ["JobAttachmentsError", "threading.local", "set", "logger.warning", "self.get_default_cache_db_file_dir", "JobAttachmentsError", "os.makedirs", "os.path.join", "Lock"], It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
CacheDB
public
1
1
__enter__
def __enter__(self):"""Called when entering the context manager."""if self.enabled:import sqlite3try:self.db_connection: sqlite3.Connection = sqlite3.connect(self.cache_dir, check_same_thread=False)except sqlite3.OperationalError as oe:raise JobAttachmentsError(f"Could not access cache file in {self.cache_dir}") from oetry:self.db_connection.execute(f"SELECT * FROM {self.table_name}")except Exception:# DB file doesn't have our table, so we need to create itlogger.info(f"No cache entries for the current library version were found. Creating a new cache for {self.cache_name}")self.db_connection.execute(self.create_query)return self
4
19
1
83
0
61
83
61
self
[]
Returns
{"AnnAssign": 1, "Expr": 4, "If": 1, "Return": 1, "Try": 2}
5
23
5
["sqlite3.connect", "JobAttachmentsError", "self.db_connection.execute", "logger.info", "self.db_connection.execute"]
3
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3955847_kinto_kinto_http_py.tests.test_replication_py.test_new_records_are_sent_to_the_destination", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3955847_kinto_kinto_http_py.tests.test_replication_py.test_removed_records_are_deleted_on_the_destination", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94654328_simonsobs_sotodlib.sotodlib.utils.procs_pool_py._get_mpi_comm"]
The function (__enter__) defined within the public class called CacheDB, implement an interface, and it inherit another class.The function start at line 61 and ends at 83. It contains 19 lines of code and it has a cyclomatic complexity of 4. The function does not take any parameters, and this function return a value. It declares 5.0 functions, It has 5.0 functions called inside which are ["sqlite3.connect", "JobAttachmentsError", "self.db_connection.execute", "logger.info", "self.db_connection.execute"], It has 3.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3955847_kinto_kinto_http_py.tests.test_replication_py.test_new_records_are_sent_to_the_destination", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3955847_kinto_kinto_http_py.tests.test_replication_py.test_removed_records_are_deleted_on_the_destination", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94654328_simonsobs_sotodlib.sotodlib.utils.procs_pool_py._get_mpi_comm"].
aws-deadline_deadline-cloud
CacheDB
public
1
1
__exit__
def __exit__(self, exc_type, exc_value, exc_traceback):"""Called when exiting the context manager."""if self.enabled:import sqlite3self.db_connection.close()for conn in self.local_connections:try:conn.close()except sqlite3.Error as e:logger.warning(f"SQLite connection failed to close with error {e}")self.local_connections.clear()
4
10
4
61
0
85
98
85
self,exc_type,exc_value,exc_traceback
[]
None
{"Expr": 5, "For": 1, "If": 1, "Try": 1}
4
14
4
["self.db_connection.close", "conn.close", "logger.warning", "self.local_connections.clear"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.84022601_jaraco_jaraco_windows.jaraco.windows.tempfile_py.TemporaryDirectory.__exit__"]
The function (__exit__) defined within the public class called CacheDB, implement an interface, and it inherit another class.The function start at line 85 and ends at 98. It contains 10 lines of code and it has a cyclomatic complexity of 4. It takes 4 parameters, represented as [85.0] and does not return any value. It declares 4.0 functions, It has 4.0 functions called inside which are ["self.db_connection.close", "conn.close", "logger.warning", "self.local_connections.clear"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.84022601_jaraco_jaraco_windows.jaraco.windows.tempfile_py.TemporaryDirectory.__exit__"].
aws-deadline_deadline-cloud
CacheDB
public
1
1
get_local_connection
def get_local_connection(self):"""Create and/or returns a thread local connection to the SQLite database."""if not self.enabled:return Noneimport sqlite3if not hasattr(self.local, "connection"):try:self.local.connection = sqlite3.connect(self.cache_dir, check_same_thread=False)self.local_connections.add(self.local.connection)except sqlite3.OperationalError as oe:raise JobAttachmentsError(f"Could not create connection to cache in {self.cache_dir}") from oereturn self.local.connection
4
13
1
80
0
100
115
100
self
[]
Returns
{"Assign": 1, "Expr": 2, "If": 2, "Return": 2, "Try": 1}
4
16
4
["hasattr", "sqlite3.connect", "self.local_connections.add", "JobAttachmentsError"]
0
[]
The function (get_local_connection) defined within the public class called CacheDB, implement an interface, and it inherit another class.The function start at line 100 and ends at 115. It contains 13 lines of code and it has a cyclomatic complexity of 4. The function does not take any parameters, and this function return a value. It declares 4.0 functions, and It has 4.0 functions called inside which are ["hasattr", "sqlite3.connect", "self.local_connections.add", "JobAttachmentsError"].
aws-deadline_deadline-cloud
CacheDB
public
1
1
get_default_cache_db_file_dir
def get_default_cache_db_file_dir(cls) -> Optional[str]:"""Gets the expected directory for the cache database file based on OS environment variables.If a directory cannot be found, defaults to the working directory."""default_path = os.environ.get("HOME")if default_path:default_path = os.path.join(default_path, CONFIG_ROOT, COMPONENT_NAME)return default_path
2
5
1
40
0
118
126
118
cls
[]
Optional[str]
{"Assign": 2, "Expr": 1, "If": 1, "Return": 1}
2
9
2
["os.environ.get", "os.path.join"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.caches.test_caches_py.TestCacheDB.test_get_default_cache_db_file_dir_env_var_path_exists"]
The function (get_default_cache_db_file_dir) defined within the public class called CacheDB, implement an interface, and it inherit another class.The function start at line 118 and ends at 126. It contains 5 lines of code and it has a cyclomatic complexity of 2. The function does not take any parameters and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["os.environ.get", "os.path.join"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_job_attachments.caches.test_caches_py.TestCacheDB.test_get_default_cache_db_file_dir_env_var_path_exists"].
aws-deadline_deadline-cloud
CacheDB
public
1
1
remove_cache
def remove_cache(self) -> None:"""Removes the underlying cache contents from the file system."""if self.enabled:import sqlite3self.db_connection.close()conn_list = list(self.local_connections)for conn in conn_list:try:conn.close()self.local_connections.remove(conn)except sqlite3.Error as e:logger.warning(f"SQLite connection failed to close with error {e}")logger.debug(f"The cache {self.cache_dir} will be removed")try:os.remove(self.cache_dir)except Exception as e:logger.error(f"Error occurred while removing the cache file {self.cache_dir}: {e}")raise e
5
17
1
95
0
128
151
128
self
[]
None
{"Assign": 1, "Expr": 8, "For": 1, "If": 1, "Try": 2}
8
24
8
["self.db_connection.close", "list", "conn.close", "self.local_connections.remove", "logger.warning", "logger.debug", "os.remove", "logger.error"]
0
[]
The function (remove_cache) defined within the public class called CacheDB, implement an interface, and it inherit another class.The function start at line 128 and ends at 151. It contains 17 lines of code and it has a cyclomatic complexity of 5. The function does not take any parameters and does not return any value. It declares 8.0 functions, and It has 8.0 functions called inside which are ["self.db_connection.close", "list", "conn.close", "self.local_connections.remove", "logger.warning", "logger.debug", "os.remove", "logger.error"].
aws-deadline_deadline-cloud
HashCacheEntry
public
0
0
to_dict
def to_dict(self) -> Dict[str, Any]:return {"file_path": self.file_path,"hash_algorithm": self.hash_algorithm.value,"file_hash": self.file_hash,"last_modified_time": self.last_modified_time,}
1
7
1
41
0
29
35
29
self
[]
Dict[str, Any]
{"Return": 1}
0
7
0
[]
35
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.omit_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.pick_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3967770_docusign_code_examples_python.app.docusign.ds_client_py.DSClient.get_token", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.model.base_py.ProperDictMixin.to_proper_dict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.utils_py.init_cls_from_base", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.models_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.tasks_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.59624560_openwpm_openwpm.openwpm.browser_manager_py.BrowserManagerHandle._unpack_pickled_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_collection_export_results", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_export_results", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69737800_eprbell_dali_rp2.src.dali.plugin.pair_converter.coinbase_advanced_py.PairConverterPlugin.get_historic_bar_from_native_source", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69848748_scverse_squidpy.src.squidpy.pl._ligrec_py.ligrec", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69936553_pandas_dev_pandas_stubs.tests.series.test_series_py.test_change_to_dict_return_type", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.dynaconf.loaders.__init___py.write", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.tests.test_base_py.test_dotted_set", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_paging", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_user_id", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.dataset.arrow.dec_py.ArrowDecoder.decode_batch", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.ext.rotbaum._model_py.QRX.fit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.daf.tslib.engine.hyperopt_py.HyperOptManager.load_records", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m1_py.generate_m1_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m3_py.generate_m3_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m4_py.generate_m4_dataset", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.tsbench.src.tsbench.surrogate.transformers.config_py.DatasetCatch22Encoder.fit"]
The function (to_dict) defined within the public class called HashCacheEntry.The function start at line 29 and ends at 35. It contains 7 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It has 35.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.omit_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.pick_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3967770_docusign_code_examples_python.app.docusign.ds_client_py.DSClient.get_token", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.model.base_py.ProperDictMixin.to_proper_dict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.utils_py.init_cls_from_base", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.models_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.tasks_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.59624560_openwpm_openwpm.openwpm.browser_manager_py.BrowserManagerHandle._unpack_pickled_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_collection_export_results", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_export_results", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69737800_eprbell_dali_rp2.src.dali.plugin.pair_converter.coinbase_advanced_py.PairConverterPlugin.get_historic_bar_from_native_source", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69848748_scverse_squidpy.src.squidpy.pl._ligrec_py.ligrec", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69936553_pandas_dev_pandas_stubs.tests.series.test_series_py.test_change_to_dict_return_type", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.dynaconf.loaders.__init___py.write", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.tests.test_base_py.test_dotted_set", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_paging", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_user_id", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.dataset.arrow.dec_py.ArrowDecoder.decode_batch", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.ext.rotbaum._model_py.QRX.fit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.daf.tslib.engine.hyperopt_py.HyperOptManager.load_records", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m1_py.generate_m1_dataset", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m3_py.generate_m3_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m4_py.generate_m4_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.tsbench.src.tsbench.surrogate.transformers.config_py.DatasetCatch22Encoder.fit"].
aws-deadline_deadline-cloud
HashCache
public
0
1
__init__
def __init__(self, cache_dir: Optional[str] = None) -> None:table_name: str = f"hashesV{self.CACHE_DB_VERSION}"create_query: str = f"CREATE TABLE IF NOT EXISTS hashesV{self.CACHE_DB_VERSION}(file_path blob primary key, hash_algorithm text secondary key, file_hash text, last_modified_time timestamp)"super().__init__(cache_name=self.CACHE_NAME,table_name=table_name,create_query=create_query,cache_dir=cache_dir,)
1
9
2
53
0
52
60
52
self,cache_dir
[]
None
{"AnnAssign": 2, "Expr": 1}
2
9
2
["__init__", "super"]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called HashCache, that inherit another class.The function start at line 52 and ends at 60. It contains 9 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [52.0] and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["__init__", "super"], It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
HashCache
public
0
1
get_connection_entry
def get_connection_entry(self, file_path_key: str, hash_algorithm: HashAlgorithm, connection) -> Optional[HashCacheEntry]:"""Returns an entry from the hash cache, if it exists."""if not self.enabled:return Noneentry_vals = connection.execute(f"SELECT * FROM {self.table_name} WHERE file_path=? AND hash_algorithm=?",[file_path_key.encode(encoding="utf-8", errors="surrogatepass"),hash_algorithm.value,],).fetchone()if entry_vals:return HashCacheEntry(file_path=str(entry_vals[0], encoding="utf-8", errors="surrogatepass"),hash_algorithm=HashAlgorithm(entry_vals[1]),file_hash=entry_vals[2],last_modified_time=str(entry_vals[3]),)else:return None
3
21
4
119
0
62
86
62
self,file_path_key,hash_algorithm,connection
[]
Optional[HashCacheEntry]
{"Assign": 1, "Expr": 1, "If": 2, "Return": 3}
7
25
7
["fetchone", "connection.execute", "file_path_key.encode", "HashCacheEntry", "str", "HashAlgorithm", "str"]
0
[]
The function (get_connection_entry) defined within the public class called HashCache, that inherit another class.The function start at line 62 and ends at 86. It contains 21 lines of code and it has a cyclomatic complexity of 3. It takes 4 parameters, represented as [62.0] and does not return any value. It declares 7.0 functions, and It has 7.0 functions called inside which are ["fetchone", "connection.execute", "file_path_key.encode", "HashCacheEntry", "str", "HashAlgorithm", "str"].
aws-deadline_deadline-cloud
HashCache
public
0
1
get_entry
def get_entry(self, file_path_key: str, hash_algorithm: HashAlgorithm) -> Optional[HashCacheEntry]:"""Returns an entry from the hash cache, if it exists."""if not self.enabled:return Nonewith self.db_lock, self.db_connection:return self.get_connection_entry(file_path_key, hash_algorithm, self.db_connection)
2
7
3
49
0
88
98
88
self,file_path_key,hash_algorithm
[]
Optional[HashCacheEntry]
{"Expr": 1, "If": 1, "Return": 2, "With": 1}
1
11
1
["self.get_connection_entry"]
0
[]
The function (get_entry) defined within the public class called HashCache, that inherit another class.The function start at line 88 and ends at 98. It contains 7 lines of code and it has a cyclomatic complexity of 2. It takes 3 parameters, represented as [88.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["self.get_connection_entry"].
aws-deadline_deadline-cloud
HashCache
public
0
1
put_entry
def put_entry(self, entry: HashCacheEntry) -> None:"""Inserts or replaces an entry into the hash cache database after acquiring the lock."""if self.enabled:with self.db_lock, self.db_connection:entry_dict = entry.to_dict()entry_dict["file_path"] = entry_dict["file_path"].encode(encoding="utf-8", errors="surrogatepass")self.db_connection.execute(f"INSERT OR REPLACE INTO {self.table_name} VALUES(:file_path, :hash_algorithm, :file_hash, :last_modified_time)",entry_dict,)
2
11
2
65
0
100
111
100
self,entry
[]
None
{"Assign": 2, "Expr": 2, "If": 1, "With": 1}
3
12
3
["entry.to_dict", "encode", "self.db_connection.execute"]
0
[]
The function (put_entry) defined within the public class called HashCache, that inherit another class.The function start at line 100 and ends at 111. It contains 11 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [100.0] and does not return any value. It declares 3.0 functions, and It has 3.0 functions called inside which are ["entry.to_dict", "encode", "self.db_connection.execute"].
aws-deadline_deadline-cloud
S3CheckCacheEntry
public
0
0
to_dict
def to_dict(self) -> Dict[str, Any]:return {"s3_key": self.s3_key,"last_seen_time": self.last_seen_time,}
1
5
1
27
0
25
29
25
self
[]
Dict[str, Any]
{"Return": 1}
0
5
0
[]
35
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.omit_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.pick_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3967770_docusign_code_examples_python.app.docusign.ds_client_py.DSClient.get_token", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.model.base_py.ProperDictMixin.to_proper_dict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.utils_py.init_cls_from_base", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.models_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.tasks_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.59624560_openwpm_openwpm.openwpm.browser_manager_py.BrowserManagerHandle._unpack_pickled_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_collection_export_results", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_export_results", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69737800_eprbell_dali_rp2.src.dali.plugin.pair_converter.coinbase_advanced_py.PairConverterPlugin.get_historic_bar_from_native_source", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69848748_scverse_squidpy.src.squidpy.pl._ligrec_py.ligrec", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69936553_pandas_dev_pandas_stubs.tests.series.test_series_py.test_change_to_dict_return_type", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.dynaconf.loaders.__init___py.write", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.tests.test_base_py.test_dotted_set", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_paging", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_user_id", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.dataset.arrow.dec_py.ArrowDecoder.decode_batch", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.ext.rotbaum._model_py.QRX.fit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.daf.tslib.engine.hyperopt_py.HyperOptManager.load_records", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m1_py.generate_m1_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m3_py.generate_m3_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m4_py.generate_m4_dataset", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.tsbench.src.tsbench.surrogate.transformers.config_py.DatasetCatch22Encoder.fit"]
The function (to_dict) defined within the public class called S3CheckCacheEntry.The function start at line 25 and ends at 29. It contains 5 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It has 35.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.omit_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3957978_dgilland_pydash.src.pydash.objects_py.pick_by", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3967770_docusign_code_examples_python.app.docusign.ds_client_py.DSClient.get_token", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.model.base_py.ProperDictMixin.to_proper_dict", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.database.utils_py.init_cls_from_base", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.models_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3981965_allegroai_clearml_server.apiserver.services.tasks_py.edit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.59624560_openwpm_openwpm.openwpm.browser_manager_py.BrowserManagerHandle._unpack_pickled_error", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_collection_export_results", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69272947_misp_misp_stix.tests._test_stix_export_py.TestCollectionSTIX1Export._check_stix1_export_results", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69737800_eprbell_dali_rp2.src.dali.plugin.pair_converter.coinbase_advanced_py.PairConverterPlugin.get_historic_bar_from_native_source", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69848748_scverse_squidpy.src.squidpy.pl._ligrec_py.ligrec", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.69936553_pandas_dev_pandas_stubs.tests.series.test_series_py.test_change_to_dict_return_type", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.dynaconf.loaders.__init___py.write", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70258989_dynaconf_dynaconf.tests.test_base_py.test_dotted_set", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_paging", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70538610_asfhyp3_hyp3_sdk.tests.test_hyp3_py.test_find_jobs_user_id", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.dataset.arrow.dec_py.ArrowDecoder.decode_batch", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.ext.rotbaum._model_py.QRX.fit", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.daf.tslib.engine.hyperopt_py.HyperOptManager.load_records", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m1_py.generate_m1_dataset", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m3_py.generate_m3_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.few_shot_prediction.src.meta.datasets.m4_py.generate_m4_dataset", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.70598532_awslabs_gluonts.src.gluonts.nursery.tsbench.src.tsbench.surrogate.transformers.config_py.DatasetCatch22Encoder.fit"].
aws-deadline_deadline-cloud
S3CheckCache
public
0
1
__init__
def __init__(self, cache_dir: Optional[str] = None) -> None:table_name: str = f"s3checkV{self.CACHE_DB_VERSION}"create_query: str = f"CREATE TABLE IF NOT EXISTS s3checkV{self.CACHE_DB_VERSION}(s3_key text primary key, last_seen_time timestamp)"super().__init__(cache_name=self.CACHE_NAME,table_name=table_name,create_query=create_query,cache_dir=cache_dir,)
1
9
2
53
0
49
57
49
self,cache_dir
[]
None
{"AnnAssign": 2, "Expr": 1}
2
9
2
["__init__", "super"]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called S3CheckCache, that inherit another class.The function start at line 49 and ends at 57. It contains 9 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [49.0] and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["__init__", "super"], It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
S3CheckCache
public
0
1
get_connection_entry
def get_connection_entry(self, s3_key: str, connection) -> Optional[S3CheckCacheEntry]:"""Checks if an entry exists in the cache, and returns it if it hasn't expired."""entry_vals = connection.execute(f"SELECT * FROM {self.table_name} WHERE s3_key=?",[s3_key],).fetchone()if entry_vals:entry = S3CheckCacheEntry(s3_key=entry_vals[0],last_seen_time=str(entry_vals[1]),)try:last_seen = datetime.fromtimestamp(float(entry.last_seen_time))if (datetime.now() - last_seen).days < self.ENTRY_EXPIRY_DAYS:return entryexcept ValueError:logger.warning(f"Timestamp for S3 key {s3_key} is not valid. Ignoring.")return None
4
17
3
106
0
59
80
59
self,s3_key,connection
[]
Optional[S3CheckCacheEntry]
{"Assign": 3, "Expr": 2, "If": 2, "Return": 2, "Try": 1}
8
22
8
["fetchone", "connection.execute", "S3CheckCacheEntry", "str", "datetime.fromtimestamp", "float", "datetime.now", "logger.warning"]
0
[]
The function (get_connection_entry) defined within the public class called S3CheckCache, that inherit another class.The function start at line 59 and ends at 80. It contains 17 lines of code and it has a cyclomatic complexity of 4. It takes 3 parameters, represented as [59.0] and does not return any value. It declares 8.0 functions, and It has 8.0 functions called inside which are ["fetchone", "connection.execute", "S3CheckCacheEntry", "str", "datetime.fromtimestamp", "float", "datetime.now", "logger.warning"].
aws-deadline_deadline-cloud
S3CheckCache
public
0
1
get_entry
def get_entry(self, s3_key: str) -> Optional[S3CheckCacheEntry]:"""Checks if an entry exists in the cache, and returns it if it hasn't expired."""if not self.enabled:return Nonewith self.db_lock, self.db_connection:return self.get_connection_entry(s3_key, self.db_connection)
2
5
2
43
0
82
90
82
self,s3_key
[]
Optional[S3CheckCacheEntry]
{"Expr": 1, "If": 1, "Return": 2, "With": 1}
1
9
1
["self.get_connection_entry"]
0
[]
The function (get_entry) defined within the public class called S3CheckCache, that inherit another class.The function start at line 82 and ends at 90. It contains 5 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [82.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["self.get_connection_entry"].
aws-deadline_deadline-cloud
S3CheckCache
public
0
1
put_entry
def put_entry(self, entry: S3CheckCacheEntry) -> None:"""Inserts or replaces an entry into the cache database."""if self.enabled:with self.db_lock, self.db_connection:self.db_connection.execute(f"INSERT OR REPLACE INTO {self.table_name} VALUES(:s3_key, :last_seen_time)",entry.to_dict(),)
2
7
2
42
0
92
99
92
self,entry
[]
None
{"Expr": 2, "If": 1, "With": 1}
2
8
2
["self.db_connection.execute", "entry.to_dict"]
0
[]
The function (put_entry) defined within the public class called S3CheckCache, that inherit another class.The function start at line 92 and ends at 99. It contains 7 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [92.0] and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["self.db_connection.execute", "entry.to_dict"].
aws-deadline_deadline-cloud
public
public
0
0
_is_admin
def _is_admin() -> bool:"""Platform independent utility to determine if the tests are running withelevated privileges"""# sys.platform helps mypy type checking ignore other platformsif sys.platform != "win32":return os.getuid() == 0import ctypestry:return ctypes.windll.shell32.IsUserAnAdmin() == 1except Exception:return False
3
8
0
43
0
19
31
19
[]
bool
{"Expr": 1, "If": 1, "Return": 3, "Try": 1}
2
13
2
["os.getuid", "ctypes.windll.shell32.IsUserAnAdmin"]
3
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestLinuxAndMacOS.test_system_permissions", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestWindows._verify_windows_least_privilege", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestWindows.test_system_permissions"]
The function (_is_admin) defined within the public class called public.The function start at line 19 and ends at 31. It contains 8 lines of code and it has a cyclomatic complexity of 3. The function does not take any parameters and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["os.getuid", "ctypes.windll.shell32.IsUserAnAdmin"], It has 3.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestLinuxAndMacOS.test_system_permissions", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestWindows._verify_windows_least_privilege", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestWindows.test_system_permissions"].
aws-deadline_deadline-cloud
public
public
0
0
installer_path
def installer_path():path = "DeadlineCloudClient-{platform}-installer.{ext}"if platform.system() == "Darwin":path = os.path.join(path.format(platform="osx", ext="app"),"Contents","MacOS","installbuilder.sh",)elif platform.system() == "Windows":path = path.format(platform="windows-x64", ext="exe")elif platform.system() == "Linux":path = path.format(platform="linux-x64", ext="run")if not os.path.isfile(path):raise FileNotFoundError(f"Installer not found at '{path}'")if not os.access(path, os.X_OK) and not platform.system() == "Darwin":raise PermissionError(f"Installer at '{path}' is not executable")yield Path(path).absolute()
7
18
0
144
1
35
56
35
['path']
None
{"Assign": 4, "Expr": 1, "If": 5}
15
22
15
["platform.system", "os.path.join", "path.format", "platform.system", "path.format", "platform.system", "path.format", "os.path.isfile", "FileNotFoundError", "os.access", "platform.system", "PermissionError", "absolute", "Path", "pytest.fixture"]
0
[]
The function (installer_path) defined within the public class called public.The function start at line 35 and ends at 56. It contains 18 lines of code and it has a cyclomatic complexity of 7. The function does not take any parameters and does not return any value. It declares 15.0 functions, and It has 15.0 functions called inside which are ["platform.system", "os.path.join", "path.format", "platform.system", "path.format", "platform.system", "path.format", "os.path.isfile", "FileNotFoundError", "os.access", "platform.system", "PermissionError", "absolute", "Path", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
_run_installer
def _run_installer(installer_path, install_scope, installation_path) -> Path:# use a path that does not existinstallation_path = installation_path / "dne"args = [installer_path,"--mode","unattended","--installscope",install_scope,"--prefix",installation_path,]subprocess.run(args, check=True)return Path(installation_path)
1
13
3
49
2
59
73
59
installer_path,install_scope,installation_path
['args', 'installation_path']
Path
{"Assign": 2, "Expr": 1, "Return": 1}
2
15
2
["subprocess.run", "Path"]
4
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.per_test_system_installation", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.per_test_user_installation", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.system_installation", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.user_installation"]
The function (_run_installer) defined within the public class called public.The function start at line 59 and ends at 73. It contains 13 lines of code and it has a cyclomatic complexity of 1. It takes 3 parameters, represented as [59.0] and does not return any value. It declares 2.0 functions, It has 2.0 functions called inside which are ["subprocess.run", "Path"], It has 4.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.per_test_system_installation", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.per_test_user_installation", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.system_installation", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.user_installation"].
aws-deadline_deadline-cloud
public
public
0
0
_validate_files
def _validate_files(installation_path: Path) -> None:if platform.system() == "Darwin":uninstaller = "uninstall.app"elif platform.system() == "Windows":uninstaller = "uninstall.exe"else:uninstaller = "uninstall"# THENtop_level_dir = [f.name for f in installation_path.iterdir()]assert "DeadlineClient" in top_level_dirdeadline_client_dir = [f.name for f in (Path(installation_path, "DeadlineClient")).iterdir()]assert "installer_version.txt" in deadline_client_dirassert uninstaller in deadline_client_dir# Check main CLI runscli_path = installation_path / "DeadlineClient" / "deadline"cli_result = subprocess.run([cli_path, "--version"], capture_output=True, text=True)version_regex = r"deadline, version (\d+)\.(\d+)\.(\d+)"match = re.search(version_regex, cli_result.stdout)assert match is not None, f"--version output is not as expected, got: {cli_result.stdout}"assert cli_result.returncode == 0# Just check that we have dependencies in this foldercli_dir = installation_path / "DeadlineClient" / "cli"cli_dir_contents = [f.name for f in (cli_dir).iterdir()]assert "deadline" in cli_dir_contentsassert "xxhash" in cli_dir_contents# Check the deadline module is here and there's a version fileclient_dir = [f.name for f in (cli_dir.joinpath("deadline", "client")).iterdir()]assert "_version.py" in client_dir
7
24
1
204
10
76
107
76
installation_path
['version_regex', 'client_dir', 'uninstaller', 'top_level_dir', 'cli_dir', 'match', 'cli_dir_contents', 'deadline_client_dir', 'cli_path', 'cli_result']
None
{"Assign": 12, "If": 2}
10
32
10
["platform.system", "platform.system", "installation_path.iterdir", "iterdir", "Path", "subprocess.run", "re.search", "cli_dir.iterdir", "iterdir", "cli_dir.joinpath"]
2
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestSystemInstall.test_install", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestUserInstall.test_install"]
The function (_validate_files) defined within the public class called public.The function start at line 76 and ends at 107. It contains 24 lines of code and it has a cyclomatic complexity of 7. The function does not take any parameters and does not return any value. It declares 10.0 functions, It has 10.0 functions called inside which are ["platform.system", "platform.system", "installation_path.iterdir", "iterdir", "Path", "subprocess.run", "re.search", "cli_dir.iterdir", "iterdir", "cli_dir.joinpath"], It has 2.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestSystemInstall.test_install", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.installer.test_installer_py.TestUserInstall.test_install"].
aws-deadline_deadline-cloud
public
public
0
0
user_installation
def user_installation(installer_path, tmp_path_factory):"""Used for tests that just want to assert some facts around the install but do not modify"""tmp_path = tmp_path_factory.mktemp("install")yield _run_installer(installer_path, "user", tmp_path)
1
3
2
25
1
111
114
111
installer_path,tmp_path_factory
['tmp_path']
None
{"Assign": 1, "Expr": 2}
3
4
3
["tmp_path_factory.mktemp", "_run_installer", "pytest.fixture"]
0
[]
The function (user_installation) defined within the public class called public.The function start at line 111 and ends at 114. It contains 3 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [111.0] and does not return any value. It declares 3.0 functions, and It has 3.0 functions called inside which are ["tmp_path_factory.mktemp", "_run_installer", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
system_installation
def system_installation(installer_path, tmp_path_factory):"""Used for tests that just want to assert some facts around the install but do not modify"""tmp_path = tmp_path_factory.mktemp("install")yield _run_installer(installer_path, "system", tmp_path)
1
3
2
25
1
118
121
118
installer_path,tmp_path_factory
['tmp_path']
None
{"Assign": 1, "Expr": 2}
3
4
3
["tmp_path_factory.mktemp", "_run_installer", "pytest.fixture"]
0
[]
The function (system_installation) defined within the public class called public.The function start at line 118 and ends at 121. It contains 3 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [118.0] and does not return any value. It declares 3.0 functions, and It has 3.0 functions called inside which are ["tmp_path_factory.mktemp", "_run_installer", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
per_test_user_installation
def per_test_user_installation(installer_path, tmp_path):"""Used for tests that modify the installation"""yield _run_installer(installer_path, "user", tmp_path)
1
2
2
17
0
125
127
125
installer_path,tmp_path
[]
None
{"Expr": 2}
2
3
2
["_run_installer", "pytest.fixture"]
0
[]
The function (per_test_user_installation) defined within the public class called public.The function start at line 125 and ends at 127. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [125.0] and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["_run_installer", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
per_test_system_installation
def per_test_system_installation(installer_path, tmp_path):"""Used for tests that modify the installation"""yield _run_installer(installer_path, "system", tmp_path)
1
2
2
17
0
131
133
131
installer_path,tmp_path
[]
None
{"Expr": 2}
2
3
2
["_run_installer", "pytest.fixture"]
0
[]
The function (per_test_system_installation) defined within the public class called public.The function start at line 131 and ends at 133. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [131.0] and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["_run_installer", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
uninstaller_path
def uninstaller_path():uninstaller_path = Path("DeadlineClient", "uninstall")if platform.system() == "Darwin":uninstaller_path = Path("DeadlineClient", "uninstall.app", "Contents", "MacOS", "installbuilder.sh")elif platform.system() == "Windows":uninstaller_path = uninstaller_path.with_suffix(".exe")yield uninstaller_path
3
9
0
54
1
137
146
137
['uninstaller_path']
None
{"Assign": 3, "Expr": 1, "If": 2}
6
10
6
["Path", "platform.system", "Path", "platform.system", "uninstaller_path.with_suffix", "pytest.fixture"]
0
[]
The function (uninstaller_path) defined within the public class called public.The function start at line 137 and ends at 146. It contains 9 lines of code and it has a cyclomatic complexity of 3. The function does not take any parameters and does not return any value. It declares 6.0 functions, and It has 6.0 functions called inside which are ["Path", "platform.system", "Path", "platform.system", "uninstaller_path.with_suffix", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
test_default_location
def test_default_location(installer_path: Path):"""Ensures that the default output location reported by the installer is accurate. The help text will only show it for the default scope (user). Example help output:--prefix <prefix> Installation DirectoryDefault: /home/<user>/DeadlineCloudClient"""# GIVENdefault_install_location = Path("~/DeadlineCloudClient").expanduser()default_pattern = r"Default: (.*)"location = None# WHENtext_mode = [] if sys.platform == "win32" else ["--mode", "text"]# Since windows doesn't have text mode, it'll pop-up a gui. We use the timeout to ensure it stopstry:help_result = subprocess.run([installer_path, *text_mode, "--help"], capture_output=True, text=True, timeout=5)assert help_result.returncode == 0, (f"Installer exited with non-zero code: {help_result.returncode}")assert help_result.stdout is not None, "No stdout from --help"help_output = iter(help_result.stdout.splitlines())except subprocess.TimeoutExpired as e:assert e.stdout is not None, "No stdout from --help"# mypy/docs say this should be bytes, but was str?assert isinstance(e.stdout, str)help_output = iter(e.stdout.splitlines())# THENwhile (line := next(help_output, None)) is not None:if line.strip().startswith("--prefix"):location = re.match(default_pattern, next(help_output, "").strip(), flags=re.IGNORECASE)breakassert location is not None, (f"Could not find default install location in help output:\n{help_result.stdout}")if platform.system() != "Windows":assert location.group(1) == default_install_location.as_posix()else:assert str(Path(location.group(1))) == str(default_install_location)
6
29
1
244
6
149
191
149
installer_path
['help_result', 'text_mode', 'default_pattern', 'default_install_location', 'help_output', 'location']
None
{"Assign": 8, "Expr": 1, "If": 2, "Try": 1, "While": 1}
21
43
21
["expanduser", "Path", "subprocess.run", "iter", "help_result.stdout.splitlines", "isinstance", "iter", "e.stdout.splitlines", "next", "startswith", "line.strip", "re.match", "strip", "next", "platform.system", "location.group", "default_install_location.as_posix", "str", "Path", "location.group", "str"]
0
[]
The function (test_default_location) defined within the public class called public.The function start at line 149 and ends at 191. It contains 29 lines of code and it has a cyclomatic complexity of 6. The function does not take any parameters and does not return any value. It declares 21.0 functions, and It has 21.0 functions called inside which are ["expanduser", "Path", "subprocess.run", "iter", "help_result.stdout.splitlines", "isinstance", "iter", "e.stdout.splitlines", "next", "startswith", "line.strip", "re.match", "strip", "next", "platform.system", "location.group", "default_install_location.as_posix", "str", "Path", "location.group", "str"].
aws-deadline_deadline-cloud
public
public
0
0
test_did_not_build_with_evaluation_mode
def test_did_not_build_with_evaluation_mode(installer_path: Path, tmp_path: Path):"""Tests to see if there's an evaluation version header from installbuilder.This is done by launching the installer, exiting before it completes, and checking the outputdoes NOT contain a specific line entry. Unfortunately this is makes the test pretty fragile, butthe behaviour has existed for years. If it exists it's expected to be the second line, but we checkthe top few lines that we're guaranteed to have in case it shifts a tiny bit.note: tmp_path is leveraged to ensure that the user's install is not messed with if the test does notbehave correctly."""# GIVENeval_text = r"Created with an evaluation version of InstallBuilder"output = []# WHENproc = subprocess.Popen([installer_path, "--mode", "text", "--prefix", tmp_path],stdin=subprocess.PIPE,stdout=subprocess.PIPE,stderr=subprocess.STDOUT,text=True,)try:# We want to fail the installer fast so that it doesn't proceed to install with the defaultsproc.wait(0.5)except subprocess.TimeoutExpired:# expected behaviour due to the above wait happening before the installer finishespassfinally:proc.terminate()proc.kill()# THENassert proc.stdout is not None# Example header from installbuilder"""----------------------------------------------------------------------------Created with an evaluation version of InstallBuilderWelcome to the AWS Deadline Cloud Client Setup Wizard.----------------------------------------------------------------------------"""output = [proc.stdout.readline().strip() for _ in range(6)]assert outputfor line in output:assert eval_text not in line, ("Installer was detected to have been built with Evaluation mode")
5
24
2
135
3
201
250
201
installer_path,tmp_path
['proc', 'output', 'eval_text']
None
{"Assign": 4, "Expr": 5, "For": 1, "Try": 1}
12
50
12
["subprocess.Popen", "proc.wait", "proc.terminate", "proc.kill", "strip", "proc.stdout.readline", "range", "pytest.mark.skipif", "os.getenv", "lower", "os.getenv", "pytest.mark.skipif"]
0
[]
The function (test_did_not_build_with_evaluation_mode) defined within the public class called public.The function start at line 201 and ends at 250. It contains 24 lines of code and it has a cyclomatic complexity of 5. It takes 2 parameters, represented as [201.0] and does not return any value. It declares 12.0 functions, and It has 12.0 functions called inside which are ["subprocess.Popen", "proc.wait", "proc.terminate", "proc.kill", "strip", "proc.stdout.readline", "range", "pytest.mark.skipif", "os.getenv", "lower", "os.getenv", "pytest.mark.skipif"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
test_user_permissions
def test_user_permissions(self, user_installation: Path):# GIVEN / WHEN / THENself._validate_posix_permissions(user_installation)
1
2
2
15
0
255
257
255
self,user_installation
[]
None
{"Expr": 1}
1
3
1
["self._validate_posix_permissions"]
0
[]
The function (test_user_permissions) defined within the public class called TestLinuxAndMacOS.The function start at line 255 and ends at 257. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [255.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["self._validate_posix_permissions"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
test_system_permissions
def test_system_permissions(self, system_installation):# GIVEN / WHEN / THENself._validate_posix_permissions(system_installation)
1
2
2
13
0
260
262
260
self,system_installation
[]
None
{"Expr": 1}
3
3
3
["self._validate_posix_permissions", "pytest.mark.skipif", "_is_admin"]
0
[]
The function (test_system_permissions) defined within the public class called TestLinuxAndMacOS.The function start at line 260 and ends at 262. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [260.0] and does not return any value. It declares 3.0 functions, and It has 3.0 functions called inside which are ["self._validate_posix_permissions", "pytest.mark.skipif", "_is_admin"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
_has_group_other_write
def _has_group_other_write(self, mode: int) -> bool:return bool(mode & (stat.S_IWGRP | stat.S_IWOTH))
1
2
2
26
0
264
265
264
self,mode
[]
bool
{"Return": 1}
1
2
1
["bool"]
0
[]
The function (_has_group_other_write) defined within the public class called TestLinuxAndMacOS.The function start at line 264 and ends at 265. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [264.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["bool"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
_has_user_read_write
def _has_user_read_write(self, mode: int) -> bool:return bool(mode & (stat.S_IRUSR | stat.S_IWUSR))
1
2
2
26
0
267
268
267
self,mode
[]
bool
{"Return": 1}
1
2
1
["bool"]
0
[]
The function (_has_user_read_write) defined within the public class called TestLinuxAndMacOS.The function start at line 267 and ends at 268. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [267.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["bool"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
_has_user_group_other_execute
def _has_user_group_other_execute(self, mode: int) -> bool:return bool(mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
1
2
2
30
0
270
271
270
self,mode
[]
bool
{"Return": 1}
1
2
1
["bool"]
0
[]
The function (_has_user_group_other_execute) defined within the public class called TestLinuxAndMacOS.The function start at line 270 and ends at 271. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [270.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["bool"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
_validate_posix_permissions
def _validate_posix_permissions(self, installation_path: Path):# assists mypy type checking to ignore this on Windowsassert sys.platform != "win32"# pwd is not available on Windowsimport pwd# GIVENcurrent_user = pwd.getpwuid(os.getuid())[0]# type: ignore# WHENbad_perms: DefaultDict[Path, List[str]] = defaultdict(list)for entry in [installation_path, *installation_path.rglob("*")]:mode = stat.S_IMODE(entry.stat().st_mode)if self._has_group_other_write(mode):bad_perms[entry].append("should not have group/other write permissions")if not self._has_user_read_write(mode):bad_perms[entry].append("should have user read/write permissions")if entry.is_dir() and not self._has_user_group_other_execute(mode):bad_perms[entry].append("is a directory and should have execute permissions")if entry.owner() != current_user:# type: ignorebad_perms[entry].append(f"is not owned by the '{current_user}'")error_message = [f"Found {len(bad_perms)} instance(s) of incorrect permissions"]for i, entry in enumerate(bad_perms):fmted_reasons = "\n- ".join(reason for reason in bad_perms[entry])error_message.append(f"{i + 1}. ({stat.S_IMODE(entry.stat().st_mode):o}) '{Path(*entry.parts[len(installation_path.parts) :])!s:>3}'\n"f"- {fmted_reasons}")# THENassert len(bad_perms) == 0, "\n".join(error_message)
9
23
2
209
0
273
304
273
self,installation_path
[]
None
{"AnnAssign": 1, "Assign": 4, "Expr": 5, "For": 2, "If": 4}
25
32
25
["pwd.getpwuid", "os.getuid", "defaultdict", "installation_path.rglob", "stat.S_IMODE", "entry.stat", "self._has_group_other_write", "append", "self._has_user_read_write", "append", "entry.is_dir", "self._has_user_group_other_execute", "append", "entry.owner", "append", "len", "enumerate", "join", "error_message.append", "stat.S_IMODE", "entry.stat", "Path", "len", "len", "join"]
0
[]
The function (_validate_posix_permissions) defined within the public class called TestLinuxAndMacOS.The function start at line 273 and ends at 304. It contains 23 lines of code and it has a cyclomatic complexity of 9. It takes 2 parameters, represented as [273.0] and does not return any value. It declares 25.0 functions, and It has 25.0 functions called inside which are ["pwd.getpwuid", "os.getuid", "defaultdict", "installation_path.rglob", "stat.S_IMODE", "entry.stat", "self._has_group_other_write", "append", "self._has_user_read_write", "append", "entry.is_dir", "self._has_user_group_other_execute", "append", "entry.owner", "append", "len", "enumerate", "join", "error_message.append", "stat.S_IMODE", "entry.stat", "Path", "len", "len", "join"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
test_user_permissions
def test_user_permissions(self, user_installation):# GIVEN / WHEN / THENself._verify_windows_least_privilege(user_installation)
1
2
2
13
0
309
311
309
self,user_installation
[]
None
{"Expr": 1}
1
3
1
["self._validate_posix_permissions"]
0
[]
The function (test_user_permissions) defined within the public class called TestLinuxAndMacOS.The function start at line 309 and ends at 311. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [309.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["self._validate_posix_permissions"].
aws-deadline_deadline-cloud
TestLinuxAndMacOS
public
0
0
test_system_permissions
def test_system_permissions(self, system_installation):# GIVEN / WHEN / THENself._validate_posix_permissions(system_installation)
1
2
2
13
0
314
316
260
self,system_installation
[]
None
{"Expr": 1}
3
3
3
["self._validate_posix_permissions", "pytest.mark.skipif", "_is_admin"]
0
[]
The function (test_system_permissions) defined within the public class called TestLinuxAndMacOS.The function start at line 314 and ends at 316. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [260.0] and does not return any value. It declares 3.0 functions, and It has 3.0 functions called inside which are ["self._validate_posix_permissions", "pytest.mark.skipif", "_is_admin"].
aws-deadline_deadline-cloud
TestWindows
public
0
0
_running_in_container
def _running_in_container(self) -> bool:"""Check to see if the cexecsvc service exists and is runningto determine if we're running on a container."""# assists mypy type checking to ignore this on non-Windowsassert sys.platform == "win32"import win32serviceimport win32serviceutiltry:service_status = win32serviceutil.QueryServiceStatus("cexecsvc")return service_status[1] == win32service.SERVICE_RUNNINGexcept win32service.error:# Service doesn't exist, not on a containerreturn False
2
9
1
44
0
318
333
318
self
[]
bool
{"Assign": 1, "Expr": 1, "Return": 2, "Try": 1}
1
16
1
["win32serviceutil.QueryServiceStatus"]
0
[]
The function (_running_in_container) defined within the public class called TestWindows.The function start at line 318 and ends at 333. It contains 9 lines of code and it has a cyclomatic complexity of 2. The function does not take any parameters and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["win32serviceutil.QueryServiceStatus"].
aws-deadline_deadline-cloud
TestWindows
public
0
0
_verify_windows_least_privilege
def _verify_windows_least_privilege(self, installation_path: Path):# assists mypy type checking to ignore this on non-Windowsassert sys.platform == "win32"import ntsecurityconimport win32conimport win32fileimport win32security# GIVENwindows_user = getpass.getuser()if self._running_in_container():# The admin group is different when running# in a container.admin_group = "ContainerAdministrator"else:admin_group = "Administrators"builtin_admin_group_sid, _, _ = win32security.LookupAccountName(None, admin_group)user_sid, _, _ = win32security.LookupAccountName(None, windows_user)# WHENbad_perms: DefaultDict[Path, List[str]] = defaultdict(list)for path in [installation_path, *installation_path.rglob("*")]:sd = win32security.GetFileSecurity(str(path),win32con.DACL_SECURITY_INFORMATION | win32con.OWNER_SECURITY_INFORMATION,)# Verify ownershipowner_sid = sd.GetSecurityDescriptorOwner()if _is_admin():if builtin_admin_group_sid != owner_sid:bad_perms[path].append(f"Expected to be owned by '{admin_group}' but got '{win32security.LookupAccountSid(None, owner_sid)}'")elif user_sid != owner_sid:bad_perms[path].append(f"Expected to be owned by '{win32security.LookupAccountSid(None, user_sid)}' but got '{win32security.LookupAccountSid(None, owner_sid)}'")# Verify all ACEsdacl = sd.GetSecurityDescriptorDacl()if dacl.GetAceCount() != 3:bad_perms[path].append(f"Expected 3 ACEs, but was {dacl.GetAceCount()}")for ace in [dacl.GetAce(i) for i in range(dacl.GetAceCount())]:_ace_info, mask, sid = aceace_type, ace_flags = _ace_infoif sid not in [builtin_admin_group_sid, user_sid]:continueif ace_type != ntsecuritycon.ACCESS_ALLOWED_ACE_TYPE:bad_perms[path].append(f"Expected ACE type {ntsecuritycon.ACCESS_ALLOWED_ACE_TYPE} but got {ace_type}")if (path.is_dir()and ace_flags!= ntsecuritycon.OBJECT_INHERIT_ACE | ntsecuritycon.CONTAINER_INHERIT_ACE):bad_perms[path].append(f"Expected inheritance in ACE to be {ntsecuritycon.OBJECT_INHERIT_ACE | ntsecuritycon.CONTAINER_INHERIT_ACE} but got 
{ace_flags}")if mask != win32file.FILE_ALL_ACCESS:bad_perms[path].append(f"Expected only FILE_ALL_ACCESS ({win32file.FILE_ALL_ACCESS}) ACEs but got {mask}")error_message = [f"Found {len(bad_perms)} instance(s) of incorrect permissions"]for i, path in enumerate(bad_perms):fmted_reasons = "\n- ".join(reason for reason in bad_perms[path])error_message.append(f"{i + 1}. '{Path(*path.parts[len(installation_path.parts) :])!s:>3}'\n"f"- {fmted_reasons}")# THENassert len(bad_perms) == 0, "\n".join(error_message)
16
61
2
355
0
335
413
335
self,installation_path
[]
None
{"AnnAssign": 1, "Assign": 12, "Expr": 7, "For": 3, "If": 9}
34
79
34
["getpass.getuser", "self._running_in_container", "win32security.LookupAccountName", "win32security.LookupAccountName", "defaultdict", "installation_path.rglob", "win32security.GetFileSecurity", "str", "sd.GetSecurityDescriptorOwner", "_is_admin", "append", "win32security.LookupAccountSid", "append", "win32security.LookupAccountSid", "win32security.LookupAccountSid", "sd.GetSecurityDescriptorDacl", "dacl.GetAceCount", "append", "dacl.GetAceCount", "dacl.GetAce", "range", "dacl.GetAceCount", "append", "path.is_dir", "append", "append", "len", "enumerate", "join", "error_message.append", "Path", "len", "len", "join"]
0
[]
The function (_verify_windows_least_privilege) defined within the public class called TestWindows.The function start at line 335 and ends at 413. It contains 61 lines of code and it has a cyclomatic complexity of 16. It takes 2 parameters, represented as [335.0] and does not return any value. It declares 34.0 functions, and It has 34.0 functions called inside which are ["getpass.getuser", "self._running_in_container", "win32security.LookupAccountName", "win32security.LookupAccountName", "defaultdict", "installation_path.rglob", "win32security.GetFileSecurity", "str", "sd.GetSecurityDescriptorOwner", "_is_admin", "append", "win32security.LookupAccountSid", "append", "win32security.LookupAccountSid", "win32security.LookupAccountSid", "sd.GetSecurityDescriptorDacl", "dacl.GetAceCount", "append", "dacl.GetAceCount", "dacl.GetAce", "range", "dacl.GetAceCount", "append", "path.is_dir", "append", "append", "len", "enumerate", "join", "error_message.append", "Path", "len", "len", "join"].
aws-deadline_deadline-cloud
TestUserInstall
public
0
0
test_install
def test_install(self, user_installation: Path):# GIVEN / WHEN / THEN_validate_files(user_installation)
1
2
2
13
0
421
423
421
self,user_installation
[]
None
{"Expr": 1}
1
3
1
["_validate_files"]
0
[]
The function (test_install) defined within the public class called TestUserInstall.The function start at line 421 and ends at 423. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [421.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["_validate_files"].
aws-deadline_deadline-cloud
TestUserInstall
public
0
0
test_uninstall
def test_uninstall(self, per_test_user_installation: Path, uninstaller_path: Path):# GIVEN / WHENresult = subprocess.run([per_test_user_installation / uninstaller_path, "--mode", "unattended"])# THENassert result.returncode == 0# On Windows, the uninstall process will return before the uninstallation is complete.# If necessary, wait for up to 1 minute 40 seconds before timing out.if platform.system() == "Windows":for _ in range(WINDOWS_MAX_RETRIES):if not per_test_user_installation.exists():breaktime.sleep(WINDOWS_RETRY_DELAY)assert not per_test_user_installation.exists()
4
11
3
74
0
425
441
425
self,per_test_user_installation,uninstaller_path
[]
None
{"Assign": 1, "Expr": 1, "For": 1, "If": 2}
6
17
6
["subprocess.run", "platform.system", "range", "per_test_user_installation.exists", "time.sleep", "per_test_user_installation.exists"]
0
[]
The function (test_uninstall) defined within the public class called TestUserInstall.The function start at line 425 and ends at 441. It contains 11 lines of code and it has a cyclomatic complexity of 4. It takes 3 parameters, represented as [425.0] and does not return any value. It declares 6.0 functions, and It has 6.0 functions called inside which are ["subprocess.run", "platform.system", "range", "per_test_user_installation.exists", "time.sleep", "per_test_user_installation.exists"].
aws-deadline_deadline-cloud
TestUserInstall
public
0
0
test_install
def test_install(self, user_installation: Path):# GIVEN / WHEN / THEN_validate_files(user_installation)
1
2
2
13
0
446
448
421
self,user_installation
[]
None
{"Expr": 1}
1
3
1
["_validate_files"]
0
[]
The function (test_install) defined within the public class called TestUserInstall.The function start at line 446 and ends at 448. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [421.0] and does not return any value. It declare 1.0 function, and It has 1.0 function called inside which is ["_validate_files"].
aws-deadline_deadline-cloud
TestUserInstall
public
0
0
test_uninstall
def test_uninstall(self, per_test_system_installation: Path, uninstaller_path: Path):# GIVEN / WHENresult = subprocess.run([per_test_system_installation / uninstaller_path, "--mode", "unattended"],capture_output=True,text=True,)# THENassert result.returncode == 0# On Windows, the uninstall process will return before the uninstallation is complete.# If necessary, wait for up to 1 minute 40 seconds before timing out.if platform.system() == "Windows":for _ in range(WINDOWS_MAX_RETRIES):if not per_test_system_installation.exists():breaktime.sleep(WINDOWS_RETRY_DELAY)assert not per_test_system_installation.exists()
4
13
3
83
0
450
468
450
self,per_test_user_installation,uninstaller_path
[]
None
{"Assign": 1, "Expr": 1, "For": 1, "If": 2}
6
17
6
["subprocess.run", "platform.system", "range", "per_test_user_installation.exists", "time.sleep", "per_test_user_installation.exists"]
0
[]
The function (test_uninstall) defined within the public class called TestUserInstall.The function start at line 450 and ends at 468. It contains 13 lines of code and it has a cyclomatic complexity of 4. It takes 3 parameters, represented as [450.0] and does not return any value. It declares 6.0 functions, and It has 6.0 functions called inside which are ["subprocess.run", "platform.system", "range", "per_test_user_installation.exists", "time.sleep", "per_test_user_installation.exists"].
aws-deadline_deadline-cloud
TestVerifySigning
public
0
0
test_windows_signing
def test_windows_signing(self, installer_path):"""Assumes that the Windows SDK is installed so we can find signtool:C:/Program Files*/Windows Kits/*/bin/*/x64/signtool.exeExample success:IndexAlgorithmTimestamp========================================0sha256 AuthenticodeSuccessfully verified: C:\\*.exeExample failure:IndexAlgorithmTimestamp========================================SignTool Error: No signature found.Number of errors: 1"""# GIVEN# Check PATH, then SDK installation locationsigntool = shutil.which("signtool")if not signtool:signtool = next(glob.iglob("C:/Program Files*/Windows Kits/*/bin/*/x64/signtool.exe"), None)assert signtool, "signtool not found in expected location"# WHENresult = subprocess.run([signtool, "verify", "/pa", installer_path], capture_output=True, text=True)# THENassert "SignTool Error:" not in result.stderr, "signtool did not succeed"assert result.returncode == 0assert "Successfully verified:" in result.stdout
2
13
2
82
0
477
513
477
self,installer_path
[]
None
{"Assign": 3, "Expr": 1, "If": 1}
6
37
6
["shutil.which", "next", "glob.iglob", "subprocess.run", "pytest.mark.skipif", "platform.system"]
0
[]
The function (test_windows_signing) defined within the public class called TestVerifySigning.The function start at line 477 and ends at 513. It contains 13 lines of code and it has a cyclomatic complexity of 2. It takes 2 parameters, represented as [477.0] and does not return any value. It declares 6.0 functions, and It has 6.0 functions called inside which are ["shutil.which", "next", "glob.iglob", "subprocess.run", "pytest.mark.skipif", "platform.system"].
aws-deadline_deadline-cloud
TestVerifySigning
public
0
0
test_linux_signing
def test_linux_signing(self, installer_path):"""Assumes that gpg is on the PATH, and that the public key has already been imported"""# GIVENgpg = shutil.which("gpg")assert gpg, "gpg not found in PATH"# WHENresult = subprocess.run([gpg, "--verify", f"{installer_path}.sig", installer_path],capture_output=True,text=True,)# THENassert "Can't check signature: No public key" not in result.stderr, ("Missing Public Key in keyring")assert result.returncode == 0, "Code signing validation failed"# gpg shoves the success message into stderr for some reason# This matches what customers are told to do via the public docs:# https://docs.aws.amazon.com/deadline-cloud/latest/userguide/submitter.html#verify-installerassert ('Good signature from "AWS Deadline Cloud <aws-deadline@amazon.com>"' in result.stderr), "Signing succeeded, but did not match docs instructions"
1
15
2
75
0
516
539
516
self,installer_path
[]
None
{"Assign": 2, "Expr": 1}
4
24
4
["shutil.which", "subprocess.run", "pytest.mark.skipif", "platform.system"]
0
[]
The function (test_linux_signing) defined within the public class called TestVerifySigning.The function start at line 516 and ends at 539. It contains 15 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [516.0] and does not return any value. It declares 4.0 functions, and It has 4.0 functions called inside which are ["shutil.which", "subprocess.run", "pytest.mark.skipif", "platform.system"].
aws-deadline_deadline-cloud
TestVerifySigning
public
0
0
test_macos_signing
def test_macos_signing(self, installer_path):"""Tests that the relevant files are signed on MacOS. Assumes codesign and spectl are on the PATHLeverages codesign to check signing, and spctl to verify that MacOS does not complain about running it"""# GIVENcodesign = shutil.which("codesign")spctl = shutil.which("spctl")assert codesign, "codesign not found in PATH"assert spctl, "spctl not found in PATH"# WHEN / THENcodesign_result = subprocess.run([codesign, "--verify", "--deep", "--verbose", installer_path],capture_output=True,text=True,)assert "code object is not signed at all" not in codesign_result.stdout, ("The file is not signed")assert codesign_result.returncode == 0# WHEN / THENspctl_result = subprocess.run([spctl, "--verbose", "--assess", "--type", "execute", installer_path],capture_output=True,text=True,)assert "rejected" not in spctl_result.stderr, ("MacOS will not allow this file to be executed")assert spctl_result.returncode == 0# success message is sent to stderrassert "accepted" in spctl_result.stderr, "File was not accepted by MacOS"
1
24
2
130
0
542
575
542
self,installer_path
[]
None
{"Assign": 4, "Expr": 1}
6
34
6
["shutil.which", "shutil.which", "subprocess.run", "subprocess.run", "pytest.mark.skipif", "platform.system"]
0
[]
The function (test_macos_signing) defined within the public class called TestVerifySigning.The function start at line 542 and ends at 575. It contains 24 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [542.0] and does not return any value. It declares 6.0 functions, and It has 6.0 functions called inside which are ["shutil.which", "shutil.which", "subprocess.run", "subprocess.run", "pytest.mark.skipif", "platform.system"].
aws-deadline_deadline-cloud
public
public
0
0
external_bucket
def external_bucket() -> str:"""Return a bucket that all developers and test accounts have access to, but isn't in the testers account."""return os.environ.get("INTEG_TEST_JA_CROSS_ACCOUNT_BUCKET", "job-attachment-bucket-snipe-test")
1
2
0
18
0
9
13
9
[]
str
{"Expr": 1, "Return": 1}
2
5
2
["os.environ.get", "pytest.fixture"]
0
[]
The function (external_bucket) defined within the public class called public.The function start at line 9 and ends at 13. It contains 2 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["os.environ.get", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
default_job_template
def default_job_template() -> str:"""A generic job template with 2 steps. First step has 2 tasks and the second step has 1 task."""return json.dumps({"name": "custom-job","specificationVersion": "jobtemplate-2023-09","steps": [{"name": "custom-step","parameterSpace": {"taskParameterDefinitions": [{"name": "frame", "type": "INT", "range": ["0", "1"]}]},"script": {"actions": {"onRun": {"command": "{{ Task.File.run }}"}},"embeddedFiles": [{"name": "run","data": "#!/bin/env bash\nset -ex\necho 'First Step'","runnable": True,"type": "TEXT",}],},},{"name": "custom-step-2","parameterSpace": {"taskParameterDefinitions": [{"name": "frame", "type": "INT", "range": ["0"]}]},"script": {"actions": {"onRun": {"command": "{{ Task.File.run }}"}},"embeddedFiles": [{"name": "run","data": "#!/bin/env bash\nset -ex\necho 'Second step'","runnable": True,"type": "TEXT",}],},},],})
1
47
0
172
0
17
66
17
[]
str
{"Expr": 1, "Return": 1}
2
50
2
["json.dumps", "pytest.fixture"]
0
[]
The function (default_job_template) defined within the public class called public.The function start at line 17 and ends at 66. It contains 47 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["json.dumps", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
default_job_template_step_step_dependency
def default_job_template_step_step_dependency() -> str:"""A generic job template with 2 steps. Second step depends on first step. Both steps have 1 task."""return json.dumps({"name": "custom-step-step-job","specificationVersion": "jobtemplate-2023-09","steps": [{"name": "custom-step","parameterSpace": {"taskParameterDefinitions": [{"name": "frame", "type": "INT", "range": ["0"]}]},"script": {"actions": {"onRun": {"command": "{{ Task.File.run }}"}},"embeddedFiles": [{"name": "run","data": "#!/bin/env bash\nset -ex\necho 'First Step'","runnable": True,"type": "TEXT",}],},},{"name": "custom-step-2","dependencies": [{"dependsOn": "custom-step"}],"parameterSpace": {"taskParameterDefinitions": [{"name": "frame", "type": "INT", "range": ["0"]}]},"script": {"actions": {"onRun": {"command": "{{ Task.File.run }}"}},"embeddedFiles": [{"name": "run","data": "#!/bin/env bash\nset -ex\necho 'Second step'","runnable": True,"type": "TEXT",}],},},],})
1
48
0
180
0
70
120
70
[]
str
{"Expr": 1, "Return": 1}
2
51
2
["json.dumps", "pytest.fixture"]
0
[]
The function (default_job_template_step_step_dependency) defined within the public class called public.The function start at line 70 and ends at 120. It contains 48 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["json.dumps", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
default_job_template_one_task_one_step
def default_job_template_one_task_one_step() -> str:"""A generic job template with one step and one task."""return json.dumps({"name": "custom-job","specificationVersion": "jobtemplate-2023-09","steps": [{"name": "custom-step","parameterSpace": {"taskParameterDefinitions": [{"name": "frame", "type": "INT", "range": ["0"]}]},"script": {"actions": {"onRun": {"command": "{{ Task.File.run }}"}},"embeddedFiles": [{"name": "run","data": "#!/bin/env bash\nset -ex\necho 'First Step'","runnable": True,"type": "TEXT",}],},},],})
1
28
0
99
0
124
154
124
[]
str
{"Expr": 1, "Return": 1}
2
31
2
["json.dumps", "pytest.fixture"]
0
[]
The function (default_job_template_one_task_one_step) defined within the public class called public.The function start at line 124 and ends at 154. It contains 28 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["json.dumps", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
fresh_deadline_config
def fresh_deadline_config():"""Fixture to start with a blank AWS Deadline Cloud config file.This fixture is configured for autouse, so that every test is isolated fromthe user's config file."""# Clear the session cache as part of switching out the config.from deadline.client.api._session import invalidate_boto3_session_cacheinvalidate_boto3_session_cache()try:original_config_file_value = os.environ.get("DEADLINE_CONFIG_FILE_PATH")# Create an empty temp file to set as the AWS Deadline Cloud configtemp_dir = tempfile.TemporaryDirectory()temp_dir_path = Path(temp_dir.name)temp_file_path = temp_dir_path / "config"with open(temp_file_path, "w+t", encoding="utf8") as temp_file:temp_file.write("")# Use the environment variable to override the path for both the# current process and subprocessesos.environ["DEADLINE_CONFIG_FILE_PATH"] = str(temp_file_path)# Write a telemetry id to force it getting saved to the config file. If we don't, then# an ID will get generated and force a save of the config file in the middle of a test.# Writing the config file may be undesirable in the middle of a test.config_file.set_setting("telemetry.identifier", "00000000-0000-0000-0000-000000000000")yield str(temp_file_path)finally:if original_config_file_value is None:del os.environ["DEADLINE_CONFIG_FILE_PATH"]else:os.environ["DEADLINE_CONFIG_FILE_PATH"] = original_config_file_valuetemp_dir.cleanup()
3
19
0
123
4
18
56
18
['temp_dir_path', 'temp_file_path', 'original_config_file_value', 'temp_dir']
None
{"Assign": 6, "Expr": 6, "If": 1, "Try": 1, "With": 1}
11
39
11
["invalidate_boto3_session_cache", "os.environ.get", "tempfile.TemporaryDirectory", "Path", "open", "temp_file.write", "str", "config_file.set_setting", "str", "temp_dir.cleanup", "pytest.fixture"]
0
[]
The function (fresh_deadline_config) defined within the public class called public.The function start at line 18 and ends at 56. It contains 19 lines of code and it has a cyclomatic complexity of 3. The function does not take any parameters and does not return any value. It declares 11.0 functions, and It has 11.0 functions called inside which are ["invalidate_boto3_session_cache", "os.environ.get", "tempfile.TemporaryDirectory", "Path", "open", "temp_file.write", "str", "config_file.set_setting", "str", "temp_dir.cleanup", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
deadline_cli_test
def deadline_cli_test() -> DeadlineCliTest:"""Fixture to get the sessions DeadlineCliTest object."""return DeadlineCliTest()
1
2
0
11
0
60
65
60
[]
DeadlineCliTest
{"Expr": 1, "Return": 1}
2
6
2
["DeadlineCliTest", "pytest.fixture"]
0
[]
The function (deadline_cli_test) defined within the public class called public.The function start at line 60 and ends at 65. It contains 2 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["DeadlineCliTest", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
job_attachment_test
def job_attachment_test(tmp_path_factory: pytest.TempPathFactory,request: pytest.FixtureRequest,):"""Fixture to get the session's JobAttachmentTest object."""return JobAttachmentTest(tmp_path_factory, manifest_version=ManifestVersion.v2023_03_03)
1
5
2
28
0
69
77
69
tmp_path_factory,request
[]
Returns
{"Expr": 1, "Return": 1}
2
9
2
["JobAttachmentTest", "pytest.fixture"]
0
[]
The function (job_attachment_test) defined within the public class called public.The function start at line 69 and ends at 77. It contains 5 lines of code and it has a cyclomatic complexity of 1. It takes 2 parameters, represented as [69.0], and this function return a value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["JobAttachmentTest", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
upload_input_files_assets_not_in_cas
def upload_input_files_assets_not_in_cas(job_attachment_test: JobAttachmentTest):"""When no assets are in the CAS, make sure all files are uploaded."""# IFjob_attachment_settings = get_queue(farm_id=job_attachment_test.farm_id,queue_id=job_attachment_test.queue_id,deadline_endpoint_url=job_attachment_test.deadline_endpoint,).jobAttachmentSettingsif job_attachment_settings is None:raise TypeError("Job attachment settings must be set for this test.")asset_manager = upload.S3AssetManager(farm_id=job_attachment_test.farm_id,queue_id=job_attachment_test.queue_id,job_attachment_settings=job_attachment_settings,asset_manifest_version=job_attachment_test.manifest_version,)mock_on_preparing_to_submit = MagicMock(return_value=True)mock_on_uploading_files = MagicMock(return_value=True)# WHENupload_group = asset_manager.prepare_paths_for_upload(input_paths=[str(job_attachment_test.SCENE_MA_PATH)],output_paths=[str(job_attachment_test.OUTPUT_PATH)],referenced_paths=[],)(_, manifests) = asset_manager.hash_assets_and_create_manifest(asset_groups=upload_group.asset_groups,total_input_files=upload_group.total_input_files,total_input_bytes=upload_group.total_input_bytes,hash_cache_dir=str(job_attachment_test.hash_cache_dir),on_preparing_to_submit=mock_on_preparing_to_submit,)asset_manager.upload_assets(manifests,on_uploading_assets=mock_on_uploading_files,s3_check_cache_dir=str(job_attachment_test.s3_cache_dir),)# THENscene_ma_s3_path = (f"{job_attachment_settings.full_cas_prefix()}/{job_attachment_test.SCENE_MA_HASH}.xxh128")object_summary_iterator = job_attachment_test.bucket.objects.filter(Prefix=scene_ma_s3_path,)assert list(object_summary_iterator)[0].key == scene_ma_s3_path
2
40
1
217
7
81
134
81
job_attachment_test
['object_summary_iterator', 'job_attachment_settings', 'mock_on_uploading_files', 'scene_ma_s3_path', 'upload_group', 'mock_on_preparing_to_submit', 'asset_manager']
None
{"Assign": 8, "Expr": 2, "If": 1}
16
54
16
["get_queue", "TypeError", "upload.S3AssetManager", "MagicMock", "MagicMock", "asset_manager.prepare_paths_for_upload", "str", "str", "asset_manager.hash_assets_and_create_manifest", "str", "asset_manager.upload_assets", "str", "job_attachment_settings.full_cas_prefix", "job_attachment_test.bucket.objects.filter", "list", "pytest.fixture"]
0
[]
The function (upload_input_files_assets_not_in_cas) defined within the public class called public.The function start at line 81 and ends at 134. It contains 40 lines of code and it has a cyclomatic complexity of 2. The function does not take any parameters and does not return any value. It declares 16.0 functions, and It has 16.0 functions called inside which are ["get_queue", "TypeError", "upload.S3AssetManager", "MagicMock", "MagicMock", "asset_manager.prepare_paths_for_upload", "str", "str", "asset_manager.hash_assets_and_create_manifest", "str", "asset_manager.upload_assets", "str", "job_attachment_settings.full_cas_prefix", "job_attachment_test.bucket.objects.filter", "list", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
upload_input_files_one_asset_in_cas
def upload_input_files_one_asset_in_cas(job_attachment_test: JobAttachmentTest, upload_input_files_assets_not_in_cas: None) -> UploadInputFilesOneAssetInCasOutputs:"""Test that when one asset is already in the CAS, that every file except for the one in the CAS is uploaded."""# IFjob_attachment_settings = get_queue(farm_id=job_attachment_test.farm_id,queue_id=job_attachment_test.queue_id,deadline_endpoint_url=job_attachment_test.deadline_endpoint,).jobAttachmentSettingsif job_attachment_settings is None:raise Exception("Job attachment settings must be set for this test.")asset_manager = upload.S3AssetManager(farm_id=job_attachment_test.farm_id,queue_id=job_attachment_test.queue_id,job_attachment_settings=job_attachment_settings,asset_manifest_version=job_attachment_test.manifest_version,)input_paths = [str(job_attachment_test.SCENE_MA_PATH),str(job_attachment_test.BRICK_PNG_PATH),str(job_attachment_test.CLOTH_PNG_PATH),str(job_attachment_test.INPUT_IN_OUTPUT_DIR_PATH),]scene_ma_s3_path = (f"{job_attachment_settings.full_cas_prefix()}/{job_attachment_test.SCENE_MA_HASH}.xxh128")# This file has already been uploadedscene_ma_upload_time = job_attachment_test.bucket.Object(scene_ma_s3_path).last_modifiedmock_on_preparing_to_submit = MagicMock(return_value=True)mock_on_uploading_files = MagicMock(return_value=True)# WHENupload_group = asset_manager.prepare_paths_for_upload(input_paths=input_paths,output_paths=[str(job_attachment_test.OUTPUT_PATH)],referenced_paths=[],)(_, manifests) = asset_manager.hash_assets_and_create_manifest(asset_groups=upload_group.asset_groups,total_input_files=upload_group.total_input_files,total_input_bytes=upload_group.total_input_bytes,hash_cache_dir=str(job_attachment_test.hash_cache_dir),on_preparing_to_submit=mock_on_preparing_to_submit,)(_, attachments) = asset_manager.upload_assets(manifests,on_uploading_assets=mock_on_uploading_files,s3_check_cache_dir=str(job_attachment_test.s3_cache_dir),)# THENbrick_png_hash = 
hash_file(str(job_attachment_test.BRICK_PNG_PATH), HashAlgorithm.XXH128)cloth_png_hash = hash_file(str(job_attachment_test.CLOTH_PNG_PATH), HashAlgorithm.XXH128)input_in_output_dir_hash = hash_file(str(job_attachment_test.INPUT_IN_OUTPUT_DIR_PATH), HashAlgorithm.XXH128)brick_png_s3_path = f"{job_attachment_settings.full_cas_prefix()}/{brick_png_hash}.xxh128"cloth_png_s3_path = f"{job_attachment_settings.full_cas_prefix()}/{cloth_png_hash}.xxh128"input_in_output_dir_s3_path = (f"{job_attachment_settings.full_cas_prefix()}/{input_in_output_dir_hash}.xxh128")object_summary_iterator = job_attachment_test.bucket.objects.filter(Prefix=f"{job_attachment_settings.full_cas_prefix()}/",)s3_objects = {obj.key: obj for obj in object_summary_iterator}assert {brick_png_s3_path, cloth_png_s3_path, input_in_output_dir_s3_path} <= set(map(lambda x: x.key, object_summary_iterator))assert brick_png_s3_path in s3_objectsassert cloth_png_s3_path in s3_objectsassert input_in_output_dir_s3_path in s3_objects# Make sure that the file hasn't been modified/reuploadedassert s3_objects[scene_ma_s3_path].last_modified == scene_ma_upload_timereturn UploadInputFilesOneAssetInCasOutputs(attachments)
3
67
2
376
16
138
227
138
job_attachment_test,upload_input_files_assets_not_in_cas
['object_summary_iterator', 'brick_png_s3_path', 'job_attachment_settings', 'cloth_png_hash', 's3_objects', 'input_in_output_dir_hash', 'mock_on_uploading_files', 'scene_ma_s3_path', 'input_paths', 'brick_png_hash', 'cloth_png_s3_path', 'input_in_output_dir_s3_path', 'upload_group', 'scene_ma_upload_time', 'mock_on_preparing_to_submit', 'asset_manager']
UploadInputFilesOneAssetInCasOutputs
{"Assign": 18, "Expr": 1, "If": 1, "Return": 1}
32
90
32
["get_queue", "Exception", "upload.S3AssetManager", "str", "str", "str", "str", "job_attachment_settings.full_cas_prefix", "job_attachment_test.bucket.Object", "MagicMock", "MagicMock", "asset_manager.prepare_paths_for_upload", "str", "asset_manager.hash_assets_and_create_manifest", "str", "asset_manager.upload_assets", "str", "hash_file", "str", "hash_file", "str", "hash_file", "str", "job_attachment_settings.full_cas_prefix", "job_attachment_settings.full_cas_prefix", "job_attachment_settings.full_cas_prefix", "job_attachment_test.bucket.objects.filter", "job_attachment_settings.full_cas_prefix", "set", "map", "UploadInputFilesOneAssetInCasOutputs", "pytest.fixture"]
0
[]
The function (upload_input_files_one_asset_in_cas) defined within the public class called public.The function start at line 138 and ends at 227. It contains 67 lines of code and it has a cyclomatic complexity of 3. It takes 2 parameters, represented as [138.0] and does not return any value. It declares 32.0 functions, and It has 32.0 functions called inside which are ["get_queue", "Exception", "upload.S3AssetManager", "str", "str", "str", "str", "job_attachment_settings.full_cas_prefix", "job_attachment_test.bucket.Object", "MagicMock", "MagicMock", "asset_manager.prepare_paths_for_upload", "str", "asset_manager.hash_assets_and_create_manifest", "str", "asset_manager.upload_assets", "str", "hash_file", "str", "hash_file", "str", "hash_file", "str", "job_attachment_settings.full_cas_prefix", "job_attachment_settings.full_cas_prefix", "job_attachment_settings.full_cas_prefix", "job_attachment_test.bucket.objects.filter", "job_attachment_settings.full_cas_prefix", "set", "map", "UploadInputFilesOneAssetInCasOutputs", "pytest.fixture"].
aws-deadline_deadline-cloud
public
public
0
0
get_job_bundle_path
def get_job_bundle_path(name: str) -> str:"""Get the path to a job bundle directory.Args:name: Name of the bundle (e.g., 'make_many_small_files')Returns:Path to the job bundle directory"""current_dir = Path(__file__).parentbundle_path = current_dir / "job_bundles" / nameif not bundle_path.exists():raise FileNotFoundError(f"Job bundle not found: {bundle_path}")return str(bundle_path)
2
6
1
44
2
14
30
14
name
['bundle_path', 'current_dir']
str
{"Assign": 2, "Expr": 1, "If": 1, "Return": 1}
4
17
4
["Path", "bundle_path.exists", "FileNotFoundError", "str"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_job_bundle"]
The function (get_job_bundle_path) defined within the public class called public.The function start at line 14 and ends at 30. It contains 6 lines of code and it has a cyclomatic complexity of 2. The function does not take any parameters and does not return any value. It declares 4.0 functions, It has 4.0 functions called inside which are ["Path", "bundle_path.exists", "FileNotFoundError", "str"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_job_bundle"].
aws-deadline_deadline-cloud
public
public
0
0
submit_job_bundle
def submit_job_bundle(farm_id: str, queue_id: str, template_name: str, parameters: Optional[dict] = None) -> str:"""Submit a job using a local job bundle template.Args:farm_id: The farm ID to usequeue_id: The queue ID to usetemplate_name: Name of the template directoryparameters: Optional parameters to pass to the jobReturns:The job ID of the submitted job"""bundle_path = get_job_bundle_path(template_name)# Convert parameters to the format expected by create_job_from_job_bundlejob_parameters = []if parameters:job_parameters = [{"name": key, "value": value} for key, value in parameters.items()]# Set farm and queue in configconfig = config_file.read_config()config_file.set_setting("defaults.farm_id", farm_id, config)config_file.set_setting("defaults.queue_id", queue_id, config)job_id = create_job_from_job_bundle(job_bundle_dir=bundle_path, job_parameters=job_parameters, config=config)assert job_id is not Nonereturn job_id
3
15
4
113
4
33
64
33
farm_id,queue_id,template_name,parameters
['bundle_path', 'config', 'job_id', 'job_parameters']
str
{"Assign": 5, "Expr": 3, "If": 1, "Return": 1}
6
32
6
["get_job_bundle_path", "parameters.items", "config_file.read_config", "config_file.set_setting", "config_file.set_setting", "create_job_from_job_bundle"]
4
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_dep_chain_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_dep_data_flow_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_make_many_small_files_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_make_many_small_files_slow_job"]
The function (submit_job_bundle) defined within the public class called public.The function start at line 33 and ends at 64. It contains 15 lines of code and it has a cyclomatic complexity of 3. It takes 4 parameters, represented as [33.0] and does not return any value. It declares 6.0 functions, It has 6.0 functions called inside which are ["get_job_bundle_path", "parameters.items", "config_file.read_config", "config_file.set_setting", "config_file.set_setting", "create_job_from_job_bundle"], It has 4.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_dep_chain_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_dep_data_flow_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_make_many_small_files_job", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.job_templates_py.submit_make_many_small_files_slow_job"].
aws-deadline_deadline-cloud
public
public
0
0
submit_make_many_small_files_job
def submit_make_many_small_files_job(farm_id: str,queue_id: str,files_per_task: int = 100,task_count: int = 100,output_dir: str = "output",) -> str:"""Submit a job that creates many small files.Args:farm_id: The farm ID to usequeue_id: The queue ID to usefiles_per_task: Number of files to create per tasktask_count: Number of tasks to runoutput_dir: The output directory to use (defaults to "output")Returns:The job ID of the submitted job"""parameters = {"FilesPerTask": files_per_task, "Tasks": f"1-{task_count}", "DataDir": output_dir}return submit_job_bundle(farm_id, queue_id, "make_many_small_files", parameters)
1
9
5
60
1
67
89
67
farm_id,queue_id,files_per_task,task_count,output_dir
['parameters']
str
{"Assign": 1, "Expr": 1, "Return": 1}
1
23
1
["submit_job_bundle"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_incremental_download_many_small_files"]
The function (submit_make_many_small_files_job) defined within the public class called public.The function start at line 67 and ends at 89. It contains 9 lines of code and it has a cyclomatic complexity of 1. It takes 5 parameters, represented as [67.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["submit_job_bundle"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_incremental_download_many_small_files"].
aws-deadline_deadline-cloud
public
public
0
0
submit_make_many_small_files_slow_job
def submit_make_many_small_files_slow_job(farm_id: str,queue_id: str,files_per_task: int = 100,task_count: int = 100,output_dir: str = "output",) -> str:"""Submit a job that creates many small files with longer task duration (for requeue tests).Args:farm_id: The farm ID to usequeue_id: The queue ID to usefiles_per_task: Number of files to create per tasktask_count: Number of tasks to runoutput_dir: The output directory to use (defaults to "output")Returns:The job ID of the submitted job"""parameters = {"FilesPerTask": files_per_task, "Tasks": f"1-{task_count}", "DataDir": output_dir}return submit_job_bundle(farm_id, queue_id, "make_many_small_files_slow", parameters)
1
9
5
60
1
92
114
92
farm_id,queue_id,files_per_task,task_count,output_dir
['parameters']
str
{"Assign": 1, "Expr": 1, "Return": 1}
1
23
1
["submit_job_bundle"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_conflict_resolution_with_requeue"]
The function (submit_make_many_small_files_slow_job) defined within the public class called public.The function start at line 92 and ends at 114. It contains 9 lines of code and it has a cyclomatic complexity of 1. It takes 5 parameters, represented as [92.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["submit_job_bundle"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_conflict_resolution_with_requeue"].
aws-deadline_deadline-cloud
public
public
0
0
submit_dep_data_flow_job
def submit_dep_data_flow_job(farm_id: str, queue_id: str, data_dir: Optional[str] = None, input_dir: Optional[str] = None) -> str:"""Submit a job with branching and merging step dependencies.Args:farm_id: The farm ID to usequeue_id: The queue ID to usedata_dir: The data directory to use (optional, defaults to ./data_dir)input_dir: The input directory to use (optional, defaults to ./input_dir)Returns:The job ID of the submitted job"""parameters = {"JobName": "Step-Step Dependency Test", "Frames": "8-11"}# Override DataDir if provided, otherwise use default ./data_dir from templateif data_dir:parameters["DataDir"] = data_dir# Override InputDir if provided, otherwise use default ./input_dir from templateif input_dir:parameters["InputDir"] = input_dirreturn submit_job_bundle(farm_id, queue_id, "dep_data_flow", parameters)
3
9
4
72
1
117
142
117
farm_id,queue_id,data_dir,input_dir
['parameters']
str
{"Assign": 3, "Expr": 1, "If": 2, "Return": 1}
1
26
1
["submit_job_bundle"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_incremental_download_dep_data_flow"]
The function (submit_dep_data_flow_job) defined within the public class called public.The function start at line 117 and ends at 142. It contains 9 lines of code and it has a cyclomatic complexity of 3. It takes 4 parameters, represented as [117.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["submit_job_bundle"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_incremental_download_dep_data_flow"].
aws-deadline_deadline-cloud
public
public
0
0
submit_dep_chain_job
def submit_dep_chain_job(farm_id: str, queue_id: str, output_dir: str = "output") -> str:"""Submit a job with a chain of step dependencies.Args:farm_id: The farm ID to usequeue_id: The queue ID to useoutput_dir: The output directory to useReturns:The job ID of the submitted job:param output_dir:"""parameters = {"JobName": "Step-Step Chain JA Output Test","OutputPath": output_dir,# JobScriptDir uses default "scripts" from template}return submit_job_bundle(farm_id, queue_id, "dep_chain", parameters)
1
6
3
43
1
145
164
145
farm_id,queue_id,output_dir
['parameters']
str
{"Assign": 1, "Expr": 1, "Return": 1}
1
20
1
["submit_job_bundle"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_incremental_download_dependency_chain"]
The function (submit_dep_chain_job) defined within the public class called public.The function start at line 145 and ends at 164. It contains 6 lines of code and it has a cyclomatic complexity of 1. It takes 3 parameters, represented as [145.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["submit_job_bundle"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.test_incremental_download_dependency_chain"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
temp_dir
def temp_dir(self):with tempfile.TemporaryDirectory() as tmpdir_path:asset_dir: str = os.path.join(tmpdir_path, "files")os.makedirs(asset_dir)with open(os.path.join(asset_dir, "file1.txt"),"w",encoding="utf8",) as f:f.write(MOCK_FILE_CASE["TEST_CASE_1"])with open(os.path.join(asset_dir, "file2.txt"),"w",encoding="utf8",) as f:f.write(MOCK_FILE_CASE["TEST_CASE_2"])yield tmpdir_path
1
17
1
102
0
61
79
61
self
[]
None
{"AnnAssign": 1, "Expr": 4, "With": 3}
9
19
9
["tempfile.TemporaryDirectory", "os.path.join", "os.makedirs", "open", "os.path.join", "f.write", "open", "os.path.join", "f.write"]
1
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3924769_jaraco_zipp.tests.test_path_py.TestPath.zipfile_ondisk"]
The function (temp_dir) defined within the public class called TestAttachment.The function start at line 61 and ends at 79. It contains 17 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 9.0 functions, It has 9.0 functions called inside which are ["tempfile.TemporaryDirectory", "os.path.join", "os.makedirs", "open", "os.path.join", "f.write", "open", "os.path.join", "f.write"], It has 1.0 function calling this function which is ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.3924769_jaraco_zipp.tests.test_path_py.TestPath.zipfile_ondisk"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
job_attachment_resources
def job_attachment_resources(self, deploy_job_attachment_resources: JobAttachmentManager):if deploy_job_attachment_resources.farm_id is None:raise TypeError("The Farm ID was not properly retrieved when initializing resources.")if (deploy_job_attachment_resources.queue is Noneor deploy_job_attachment_resources.queue_with_no_settings is None):raise TypeError("The Queues were not properly created when initializing resources.")yield deploy_job_attachment_resources
4
9
2
43
0
82
91
82
self,deploy_job_attachment_resources
[]
None
{"Expr": 1, "If": 2}
2
10
2
["TypeError", "TypeError"]
0
[]
The function (job_attachment_resources) defined within the public class called TestAttachment.The function start at line 82 and ends at 91. It contains 9 lines of code and it has a cyclomatic complexity of 4. It takes 2 parameters, represented as [82.0] and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["TypeError", "TypeError"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
test_attachment_s3_cross_account_access_denied
def test_attachment_s3_cross_account_access_denied(self, external_bucket, temp_dir):# Givenfile_name: str = f"{hash_data(temp_dir.encode('utf-8'), HashAlgorithm.XXH128)}_output"manifest_path: str = os.path.join(temp_dir, file_name)with open(manifest_path,"w",encoding="utf8",) as f:json.dump(MOCK_MANIFEST_CASE.get("TEST_CASE_1"), f)# When - test upload the local asset filerunner = CliRunner()result = runner.invoke(main,["attachment","upload","--manifests",manifest_path,"--root-dirs",temp_dir,"--profile","default","--s3-root-uri",f"s3://{external_bucket}/test",],)assert result.exit_code != 0, (f"Expecting cross-account s3 access to fail but not, CLI output {result.output}")assert "HTTP Status Code: 403, Access denied." in result.outputresult = runner.invoke(main,["attachment","download","--manifests",manifest_path,"--profile","default","--s3-root-uri",f"s3://{external_bucket}/test",],)assert result.exit_code != 0, (f"Expecting cross-account s3 access to fail but not, CLI output {result.output}")assert "HTTP Status Code: 403, Forbidden or Access denied." in result.output
1
46
3
158
0
95
146
95
self,external_bucket,temp_dir
[]
None
{"AnnAssign": 2, "Assign": 3, "Expr": 1, "With": 1}
9
52
9
["hash_data", "temp_dir.encode", "os.path.join", "open", "json.dump", "MOCK_MANIFEST_CASE.get", "CliRunner", "runner.invoke", "runner.invoke"]
0
[]
The function (test_attachment_s3_cross_account_access_denied) defined within the public class called TestAttachment.The function start at line 95 and ends at 146. It contains 46 lines of code and it has a cyclomatic complexity of 1. It takes 3 parameters, represented as [95.0] and does not return any value. It declares 9.0 functions, and It has 9.0 functions called inside which are ["hash_data", "temp_dir.encode", "os.path.join", "open", "json.dump", "MOCK_MANIFEST_CASE.get", "CliRunner", "runner.invoke", "runner.invoke"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
_run_attachment_basic_flow
def _run_attachment_basic_flow(self, temp_dir, job_attachment_resources, manifest_case_key) -> Tuple[str, str, str]:"""Helper function that runs the basic attachment flow and returns file info.This is used by both the basic flow test and the file override test."""# Givenfile_name: str = f"{hash_data(temp_dir.encode('utf-8'), HashAlgorithm.XXH128)}_output"manifest_path: str = os.path.join(temp_dir, file_name)with open(manifest_path,"w",encoding="utf8",) as f:json.dump(MOCK_MANIFEST_CASE[manifest_case_key], f)s3_root_uri = f"s3://{job_attachment_resources.bucket_name}/{job_attachment_resources.bucket_root_prefix}"runner = CliRunner()# When - test upload the local asset fileresult = runner.invoke(main,["attachment","upload","--manifests",manifest_path,"--root-dirs",temp_dir,"--profile","default","--s3-root-uri",s3_root_uri,],)# Thenassert result.exit_code == 0, f"Non-Zeo exit code, CLI output {result.output}"# When - test download the file just uploadedresult = runner.invoke(main,["attachment","download","--manifests",manifest_path,"--profile","default","--s3-root-uri",s3_root_uri,"--json",],)# Thenassert result.exit_code == 0, f"Non-Zeo exit code, CLI output {result.output}"assert json.loads(result.output)["processed_bytes"] == len(MOCK_FILE_CASE[manifest_case_key])assert file_name in os.listdir(os.getcwd()), ("Expecting downloaded folder named with data hash created in the working directory with downloaded files but not.")asset_files = os.listdir(os.path.join(os.getcwd(), file_name, "files"))assert len(asset_files) == 1return file_name, manifest_path, s3_root_uri
1
53
4
229
0
148
215
148
self,temp_dir,job_attachment_resources,manifest_case_key
[]
Tuple[str, str, str]
{"AnnAssign": 2, "Assign": 5, "Expr": 2, "Return": 1, "With": 1}
16
68
16
["hash_data", "temp_dir.encode", "os.path.join", "open", "json.dump", "CliRunner", "runner.invoke", "runner.invoke", "json.loads", "len", "os.listdir", "os.getcwd", "os.listdir", "os.path.join", "os.getcwd", "len"]
0
[]
The function (_run_attachment_basic_flow) defined within the public class called TestAttachment.The function start at line 148 and ends at 215. It contains 53 lines of code and it has a cyclomatic complexity of 1. It takes 4 parameters, represented as [148.0] and does not return any value. It declares 16.0 functions, and It has 16.0 functions called inside which are ["hash_data", "temp_dir.encode", "os.path.join", "open", "json.dump", "CliRunner", "runner.invoke", "runner.invoke", "json.loads", "len", "os.listdir", "os.getcwd", "os.listdir", "os.path.join", "os.getcwd", "len"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
test_attachment_basic_flow
def test_attachment_basic_flow(self, temp_dir, job_attachment_resources, manifest_case_key):"""Test the basic attachment upload and download flow."""# Run the basic attachment flow and verify the results through assertions# (all assertions are done within _run_attachment_basic_flow)self._run_attachment_basic_flow(temp_dir, job_attachment_resources, manifest_case_key)
1
2
4
22
0
219
223
219
self,temp_dir,job_attachment_resources,manifest_case_key
[]
None
{"Expr": 2}
3
5
3
["self._run_attachment_basic_flow", "pytest.mark.parametrize", "MOCK_MANIFEST_CASE.keys"]
0
[]
The function (test_attachment_basic_flow) defined within the public class called TestAttachment.The function start at line 219 and ends at 223. It contains 2 lines of code and it has a cyclomatic complexity of 1. It takes 4 parameters, represented as [219.0] and does not return any value. It declares 3.0 functions, and It has 3.0 functions called inside which are ["self._run_attachment_basic_flow", "pytest.mark.parametrize", "MOCK_MANIFEST_CASE.keys"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
test_attachment_path_mapping_flow
def test_attachment_path_mapping_flow(self, temp_dir, job_attachment_resources, manifest_case_key):# Givensource_path: str = os.path.join(temp_dir, "virtual_source")destination_path: str = temp_dirfile_name: str = f"{hash_data(source_path.encode('utf-8'), HashAlgorithm.XXH128)}_output"manifest_path: str = os.path.join(temp_dir, file_name)with open(manifest_path,"w",encoding="utf8",) as f:json.dump(MOCK_MANIFEST_CASE[manifest_case_key], f)path_mapping_file_path: str = os.path.join(temp_dir, "path_mapping")with open(path_mapping_file_path, "w", encoding="utf8") as f:f.write(json.dumps([asdict(PathMappingRule(source_path_format="posix",source_path=source_path,destination_path=destination_path,))]))s3_root_uri = f"s3://{job_attachment_resources.bucket_name}/{job_attachment_resources.bucket_root_prefix}"runner = CliRunner()# When - test upload the local asset file with path mappingresult = runner.invoke(main,["attachment","upload","--manifests",manifest_path,"--path-mapping-rules",path_mapping_file_path,"--profile","default","--s3-root-uri",s3_root_uri,],)# Thenassert result.exit_code == 0, f"Non-Zeo exit code, CLI output {result.output}"# When - test download the file just uploaded with path mappingresult = runner.invoke(main,["attachment","download","--manifests",manifest_path,"--path-mapping-rules",path_mapping_file_path,"--profile","default","--s3-root-uri",s3_root_uri,"--json",],)# Thenassert result.exit_code == 0, f"Non-Zeo exit code, CLI output {result.output}"assert json.loads(result.output)["processed_bytes"] == len(MOCK_FILE_CASE[manifest_case_key])asset_files = os.listdir(os.path.join(destination_path, "files"))assert len(asset_files) == 3, (f"Expecting 3 asset files, 2 from upload and 1 from download, but got {len(asset_files)}.")
1
70
4
276
0
227
309
227
self,temp_dir,job_attachment_resources,manifest_case_key
[]
None
{"AnnAssign": 5, "Assign": 5, "Expr": 2, "With": 2}
23
83
23
["os.path.join", "hash_data", "source_path.encode", "os.path.join", "open", "json.dump", "os.path.join", "open", "f.write", "json.dumps", "asdict", "PathMappingRule", "CliRunner", "runner.invoke", "runner.invoke", "json.loads", "len", "os.listdir", "os.path.join", "len", "len", "pytest.mark.parametrize", "MOCK_MANIFEST_CASE.keys"]
0
[]
The function (test_attachment_path_mapping_flow) defined within the public class called TestAttachment.The function start at line 227 and ends at 309. It contains 70 lines of code and it has a cyclomatic complexity of 1. It takes 4 parameters, represented as [227.0] and does not return any value. It declares 23.0 functions, and It has 23.0 functions called inside which are ["os.path.join", "hash_data", "source_path.encode", "os.path.join", "open", "json.dump", "os.path.join", "open", "f.write", "json.dumps", "asdict", "PathMappingRule", "CliRunner", "runner.invoke", "runner.invoke", "json.loads", "len", "os.listdir", "os.path.join", "len", "len", "pytest.mark.parametrize", "MOCK_MANIFEST_CASE.keys"].
aws-deadline_deadline-cloud
TestAttachment
public
0
0
test_attachment_file_override
def test_attachment_file_override(self, temp_dir, job_attachment_resources, override_mode, expected_num_files, expected_files):test_case_key = "TEST_CASE_1"file_name, manifest_path, s3_root_uri = self._run_attachment_basic_flow(temp_dir=temp_dir,job_attachment_resources=job_attachment_resources,manifest_case_key=test_case_key,)# When 2 - test download again with file override mode.runner = CliRunner()result = runner.invoke(main,["attachment","download","--manifests",manifest_path,"--profile","default","--s3-root-uri",s3_root_uri,"--conflict-resolution",override_mode,"--json",],)# Thenassert result.exit_code == 0, f"Non-Zeo exit code, CLI output {result.output}"# How many bytes downloaded.expected_processed_bytes = (0 if override_mode == "SKIP" else len(MOCK_FILE_CASE[test_case_key]))assert json.loads(result.output)["processed_bytes"] == expected_processed_bytesassert file_name in os.listdir(os.getcwd()), ("Expecting downloaded folder named with data hash created in the working directory with downloaded files but not.")asset_files = os.listdir(os.path.join(os.getcwd(), file_name, "files"))assert len(asset_files) == expected_num_files# Make sure the files are named correctly and what we expected to download.for file in expected_files:assert file in asset_files
3
38
6
176
0
320
364
320
self,temp_dir,job_attachment_resources,override_mode,expected_num_files,expected_files
[]
None
{"Assign": 6, "For": 1}
15
45
15
["self._run_attachment_basic_flow", "CliRunner", "runner.invoke", "len", "json.loads", "os.listdir", "os.getcwd", "os.listdir", "os.path.join", "os.getcwd", "len", "pytest.mark.parametrize", "pytest.param", "pytest.param", "pytest.param"]
0
[]
The function (test_attachment_file_override) defined within the public class called TestAttachment.The function start at line 320 and ends at 364. It contains 38 lines of code and it has a cyclomatic complexity of 3. It takes 6 parameters, represented as [320.0] and does not return any value. It declares 15.0 functions, and It has 15.0 functions called inside which are ["self._run_attachment_basic_flow", "CliRunner", "runner.invoke", "len", "json.loads", "os.listdir", "os.getcwd", "os.listdir", "os.path.join", "os.getcwd", "len", "pytest.mark.parametrize", "pytest.param", "pytest.param", "pytest.param"].
aws-deadline_deadline-cloud
public
public
0
0
test_farm_get
def test_farm_get(deadline_cli_test: DeadlineCliTest) -> None:runner = CliRunner()result = runner.invoke(main,["farm", "get", "--farm-id", deadline_cli_test.farm_id],)assert result.exit_code == 0assert f"farmId: {deadline_cli_test.farm_id}" in result.output# The following vary from farm to farm, so just make sure the general layout is there.# Unit tests are able to test the output more throughly. We'll only look for the required fields.assert "displayName:" in result.outputassert "description:" in result.output
1
10
1
60
2
9
23
9
deadline_cli_test
['runner', 'result']
None
{"Assign": 2}
2
15
2
["CliRunner", "runner.invoke"]
0
[]
The function (test_farm_get) defined within the public class called public.The function start at line 9 and ends at 23. It contains 10 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["CliRunner", "runner.invoke"].
aws-deadline_deadline-cloud
public
public
0
0
test_farm_list
def test_farm_list(deadline_cli_test: DeadlineCliTest) -> None:runner = CliRunner()result = runner.invoke(main,["farm","list",],)assert result.exit_code == 0assert f"- farmId: {deadline_cli_test.farm_id}" in result.output# The following vary from farm to farm, so just make sure the general layout is there.# Unit tests are able to test the output more throughly. We'll only look for the required fields.assert "displayName:" in result.output
1
12
1
49
2
26
42
26
deadline_cli_test
['runner', 'result']
None
{"Assign": 2}
2
17
2
["CliRunner", "runner.invoke"]
0
[]
The function (test_farm_list) defined within the public class called public.The function start at line 26 and ends at 42. It contains 12 lines of code and it has a cyclomatic complexity of 1. The function does not take any parameters and does not return any value. It declares 2.0 functions, and It has 2.0 functions called inside which are ["CliRunner", "runner.invoke"].
aws-deadline_deadline-cloud
IncrementalDownloadTest
public
0
0
__init__
def __init__(self, farm_id: str, queue_id: str):self.farm_id = farm_idself.queue_id = queue_idself.deadline_client = boto3.client("deadline")
1
4
3
33
0
32
35
32
self,farm_id,queue_id
[]
None
{"Assign": 3}
1
4
1
["boto3.client"]
4,993
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"]
The function (__init__) defined within the public class called IncrementalDownloadTest.The function start at line 32 and ends at 35. It contains 4 lines of code and it has a cyclomatic complexity of 1. It takes 3 parameters, represented as [32.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["boto3.client"], It has 4993.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.logging_py.LoggerHandler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16379211_lorcalhost_btb_manager_telegram.btb_manager_telegram.schedule_py.TgScheduler.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.AmountMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.CalculatedAmountDiscrepancyError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ExchangeRateMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.InvalidTransactionError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.ParsingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.PriceMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.QuantityNotPositiveError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.SymbolMissingError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedColumnCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.exceptions_py.UnexpectedRowCountError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.raw_py.RawTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_equity_award_json_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.schwab_py.SchwabTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.trading212_py.Trading212Transaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.16798231_kapji_capital_gains_calculator.cgt_calc.parsers.vanguard_py.VanguardTransaction.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.ExporterError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordFileDoesNotExistError.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.config.exceptions_py.FritzPasswordTooLongError.__init__", 
"_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.DeviceInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HomeAutomation.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostInfo.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.HostNumberOfEntries.__init__", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.18106662_pdreker_fritz_exporter.fritzexporter.fritzcapabilities_py.LanInterfaceConfig.__init__"].
aws-deadline_deadline-cloud
IncrementalDownloadTest
public
0
0
wait_for_job_completion
def wait_for_job_completion(self, job_id: str, timeout: int = 600, poll_interval: int = 5) -> Tuple[bool, str]:"""Wait for a job to complete. Returns (isSuccess, final_status)."""try:result = wait_for_job_completion(farm_id=self.farm_id,queue_id=self.queue_id,job_id=job_id,timeout=timeout,max_poll_interval=poll_interval,)return result.status == "SUCCEEDED", result.statusexcept Exception as e:return False, f"TIMEOUT! Received downstream exception: {e}"
2
14
4
80
0
37
51
37
self,job_id,timeout,poll_interval
[]
Tuple[bool, str]
{"Assign": 1, "Expr": 1, "Return": 2, "Try": 1}
1
15
1
["wait_for_job_completion"]
7
["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.IncrementalDownloadTest.wait_for_job_completion", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_exponential_backoff", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_failure", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_max_interval_cap", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_success", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_timeout", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_with_pagination"]
The function (wait_for_job_completion) defined within the public class called IncrementalDownloadTest.The function start at line 37 and ends at 51. It contains 14 lines of code and it has a cyclomatic complexity of 2. It takes 4 parameters, represented as [37.0] and does not return any value. It declare 1.0 function, It has 1.0 function called inside which is ["wait_for_job_completion"], It has 7.0 functions calling this function which are ["_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.integ.cli.test_cli_incremental_download_py.IncrementalDownloadTest.wait_for_job_completion", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_exponential_backoff", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_failure", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_max_interval_cap", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_success", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_timeout", "_.content.gdrive.MyDrive.Phd_Thesis.Dataset_Creation.Output.Cloned_Repo_3.94876780_aws_deadline_deadline_cloud.test.unit.deadline_client.api.test_job_monitoring_py.test_wait_for_job_completion_with_pagination"].
aws-deadline_deadline-cloud
IncrementalDownloadTest
public
0
0
wait_for_all_files
def wait_for_all_files(self,tmp_path: Path,expected_files: dict,test_name: str,file_pattern: str = "**/*.out",content_check: Optional[str] = None,exact_match: bool = False,count_only: bool = False,):"""Generic function to wait for all expected files to be downloaded with correct content."""result = self.run_incremental_download_without_storage_profiles(str(tmp_path), test_name=test_name)assert result.returncode == 0, f"Download failed: {result.stderr}"downloaded_files = list(tmp_path.glob(file_pattern))if count_only:# For tests that only care about file count (like make_many_small_files)expected_count = expected_files.get("count", 0)print(f"[{test_name}] Found {len(downloaded_files)}/{expected_count} files")assert len(downloaded_files) == expected_count, (f"Expected exactly {expected_count} files, but found {len(downloaded_files)}")returnverified_files = []for downloaded_file in downloaded_files:filename = downloaded_file.nameif filename in expected_files:expected_marker = expected_files[filename]content = downloaded_file.read_text()if exact_match:# For exact content matching (like dep_chain)if content.strip() == expected_marker:verified_files.append(filename)else:# For content containing markers (like dep_data_flow)content_valid = Trueif content_check:content_valid = content_check in contentif content_valid and expected_marker in content:verified_files.append(filename)expected_count = len(expected_files)print(f"[{test_name}] Found {len(verified_files)}/{expected_count} files")assert len(verified_files) == expected_count, (f"Expected exactly {expected_count} verified output files, found {len(verified_files)}: {verified_files}")
9
42
8
211
0
54
106
54
self,tmp_path,expected_files,test_name,file_pattern,content_check,exact_match,count_only
[]
None
{"Assign": 10, "Expr": 5, "For": 1, "If": 6, "Return": 1}
19
53
19
["self.run_incremental_download_without_storage_profiles", "str", "list", "tmp_path.glob", "expected_files.get", "print", "len", "len", "len", "downloaded_file.read_text", "content.strip", "verified_files.append", "verified_files.append", "len", "print", "len", "len", "len", "_retry"]
0
[]
The function (wait_for_all_files) defined within the public class called IncrementalDownloadTest.The function start at line 54 and ends at 106. It contains 42 lines of code and it has a cyclomatic complexity of 9. It takes 8 parameters, represented as [54.0] and does not return any value. It declares 19.0 functions, and It has 19.0 functions called inside which are ["self.run_incremental_download_without_storage_profiles", "str", "list", "tmp_path.glob", "expected_files.get", "print", "len", "len", "len", "downloaded_file.read_text", "content.strip", "verified_files.append", "verified_files.append", "len", "print", "len", "len", "len", "_retry"].