language stringclasses 1
value | repo stringclasses 346
values | path stringlengths 6 201 | class_span dict | source stringlengths 21 2.38M | target stringlengths 1 96 |
|---|---|---|---|---|---|
python | pydantic__pydantic | tests/mypy/modules/plugin_success_baseConfig.py | {
"start": 3063,
"end": 3250
} | class ____(FrozenModel):
a: int = 1
model_config = dict(frozen=False, from_attributes=True)
NotFrozenModel(x=1).x = 2
NotFrozenModel.model_validate(model.__dict__)
| NotFrozenModel |
python | justquick__django-activity-stream | actstream/feeds.py | {
"start": 7831,
"end": 8265
} | class ____:
def get_object(self, request, content_type_id, object_id):
ct = get_object_or_404(ContentType, pk=content_type_id)
try:
obj = ct.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
raise Http404('No %s matches the given query.' % ct.model_class()._meta.object_name)
return obj
def get_stream(self):
return any_stream
| ObjectActivityMixin |
python | davidhalter__jedi | jedi/inference/arguments.py | {
"start": 5453,
"end": 9944
} | class ____(AbstractArguments):
def __init__(self, inference_state, context, argument_node, trailer=None):
"""
:param argument_node: May be an argument_node or a list of nodes.
"""
self.argument_node = argument_node
self.context = context
self._inference_state = inference_state
self.trailer = trailer # Can be None, e.g. in a class definition.
@classmethod
@inference_state_as_method_param_cache()
def create_cached(cls, *args, **kwargs):
return cls(*args, **kwargs)
def unpack(self, funcdef=None):
named_args = []
for star_count, el in unpack_arglist(self.argument_node):
if star_count == 1:
arrays = self.context.infer_node(el)
iterators = [_iterate_star_args(self.context, a, el, funcdef)
for a in arrays]
for values in list(zip_longest(*iterators)):
yield None, get_merged_lazy_value(
[v for v in values if v is not None]
)
elif star_count == 2:
arrays = self.context.infer_node(el)
for dct in arrays:
yield from _star_star_dict(self.context, dct, el, funcdef)
else:
if el.type == 'argument':
c = el.children
if len(c) == 3: # Keyword argument.
named_args.append((c[0].value, LazyTreeValue(self.context, c[2]),))
else: # Generator comprehension.
# Include the brackets with the parent.
sync_comp_for = el.children[1]
if sync_comp_for.type == 'comp_for':
sync_comp_for = sync_comp_for.children[1]
comp = iterable.GeneratorComprehension(
self._inference_state,
defining_context=self.context,
sync_comp_for_node=sync_comp_for,
entry_node=el.children[0],
)
yield None, LazyKnownValue(comp)
else:
yield None, LazyTreeValue(self.context, el)
# Reordering arguments is necessary, because star args sometimes appear
# after named argument, but in the actual order it's prepended.
yield from named_args
def _as_tree_tuple_objects(self):
for star_count, argument in unpack_arglist(self.argument_node):
default = None
if argument.type == 'argument':
if len(argument.children) == 3: # Keyword argument.
argument, default = argument.children[::2]
yield argument, default, star_count
def iter_calling_names_with_star(self):
for name, default, star_count in self._as_tree_tuple_objects():
# TODO this function is a bit strange. probably refactor?
if not star_count or not isinstance(name, tree.Name):
continue
yield TreeNameDefinition(self.context, name)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.argument_node)
def get_calling_nodes(self):
old_arguments_list = []
arguments = self
while arguments not in old_arguments_list:
if not isinstance(arguments, TreeArguments):
break
old_arguments_list.append(arguments)
for calling_name in reversed(list(arguments.iter_calling_names_with_star())):
names = calling_name.goto()
if len(names) != 1:
break
if isinstance(names[0], AnonymousParamName):
# Dynamic parameters should not have calling nodes, because
# they are dynamic and extremely random.
return []
if not isinstance(names[0], ParamName):
break
executed_param_name = names[0].get_executed_param_name()
arguments = executed_param_name.arguments
break
if arguments.argument_node is not None:
return [ContextualizedNode(arguments.context, arguments.argument_node)]
if arguments.trailer is not None:
return [ContextualizedNode(arguments.context, arguments.trailer)]
return []
| TreeArguments |
python | run-llama__llama_index | llama-index-integrations/vector_stores/llama-index-vector-stores-nile/tests/test_vector_stores_nile.py | {
"start": 359,
"end": 3828
} | class ____(unittest.TestCase):
@pytest.fixture(autouse=True)
@mock.patch("psycopg.connect")
def vector_store_setup(self, mock_connect):
# Mock the psycopg connection and cursor
self.mock_connection = (
mock_connect.return_value
) # result of psycopg2.connect(**connection_stuff)
self.mock_cursor = (
self.mock_connection.cursor.return_value
) # result of con.cursor(cursor_factory=DictCursor)
self.vector_store = NileVectorStore(
service_url="postgresql://user:password@localhost/dbname",
table_name="test_table",
)
self.tenant_aware_vector_store = NileVectorStore(
service_url="postgresql://user:password@localhost/dbname",
table_name="test_table",
tenant_aware=True,
)
def test_class(self):
names_of_base_classes = [b.__name__ for b in NileVectorStore.__mro__]
self.assertIn(BasePydanticVectorStore.__name__, names_of_base_classes)
def test_add(self):
node = TextNode(
text="Test node", embedding=np.array([0.1, 0.2, 0.3]), id_="test_id"
)
self.vector_store.add([node])
# create table twice when initializing the fixture, and insert one row
assert self.mock_connection.commit.call_count == 3
# expected to fail if we don't have tenant_id in the node metadata
with pytest.raises(Exception) as e_info:
self.tenant_aware_vector_store.add([node])
assert "tenant_id cannot be None if tenant_aware is True" in str(
e_info.value
)
# one more insert for the new node
node.metadata["tenant_id"] = "test_tenant"
self.tenant_aware_vector_store.add([node])
assert self.mock_connection.commit.call_count == 4
def test_delete(self):
self.vector_store.delete("test_id")
# create table twice when initializing the fixture, and delete one row
assert self.mock_connection.commit.call_count == 3
# expected to fail if we don't have tenant_id in kwargs
with pytest.raises(Exception) as e_info:
self.tenant_aware_vector_store.delete("test_id")
assert (
"tenant_id must be specified in delete_kwargs if tenant_aware is True"
in str(e_info.value)
)
# delete the node with tenant_id
self.tenant_aware_vector_store.delete("test_id", tenant_id="test_tenant")
assert self.mock_connection.commit.call_count == 4
def test_query(self):
query_embedding = VectorStoreQuery(
query_embedding=np.array([0.1, 0.2, 0.3]), similarity_top_k=2
)
results = self.vector_store.query(query_embedding)
assert isinstance(results, VectorStoreQueryResult)
# expected to fail if we don't have tenant_id in kwargs
with pytest.raises(Exception) as e_info:
self.tenant_aware_vector_store.query(query_embedding)
assert (
"tenant_id must be specified in query_kwargs if tenant_aware is True"
in str(e_info.value)
)
# query the node with tenant_id
results = self.tenant_aware_vector_store.query(
query_embedding, tenant_id="test_tenant"
)
assert isinstance(results, VectorStoreQueryResult)
if __name__ == "__main__":
unittest.main()
| TestNileVectorStore |
python | sphinx-doc__sphinx | sphinx/testing/util.py | {
"start": 8684,
"end": 9886
} | class ____(SphinxTestApp):
"""A wrapper for SphinxTestApp.
This class is used to speed up the test by skipping ``app.build()``
if it has already been built and there are any output files.
"""
def build(self, force_all: bool = False, filenames: Sequence[Path] = ()) -> None:
if not list(self.outdir.iterdir()):
# if listdir is empty, do build.
super().build(force_all, filenames)
# otherwise, we can use built cache
def _clean_up_global_state() -> None:
# clean up Docutils global state
directives._directives.clear() # type: ignore[attr-defined]
roles._roles.clear() # type: ignore[attr-defined]
for node in additional_nodes:
delattr(nodes.GenericNodeVisitor, f'visit_{node.__name__}')
delattr(nodes.GenericNodeVisitor, f'depart_{node.__name__}')
delattr(nodes.SparseNodeVisitor, f'visit_{node.__name__}')
delattr(nodes.SparseNodeVisitor, f'depart_{node.__name__}')
additional_nodes.clear()
# clean up Sphinx global state
sphinx.locale.translators.clear()
# clean up autodoc global state
sphinx.pycode.ModuleAnalyzer.cache.clear()
| SphinxTestAppWrapperForSkipBuilding |
python | pytorch__pytorch | torch/distributed/fsdp/_flat_param.py | {
"start": 3725,
"end": 4162
} | class ____(Enum):
FULL_SHARD = auto()
SHARD_GRAD_OP = auto()
NO_SHARD = auto()
HYBRID_SHARD = auto()
_HYBRID_SHARD_ZERO2 = auto()
RESHARD_AFTER_FORWARD_HANDLE_STRATEGIES = (
HandleShardingStrategy.FULL_SHARD,
HandleShardingStrategy.HYBRID_SHARD,
)
NO_RESHARD_AFTER_FORWARD_HANDLE_STRATEGIES = (
HandleShardingStrategy.SHARD_GRAD_OP,
HandleShardingStrategy._HYBRID_SHARD_ZERO2,
)
| HandleShardingStrategy |
python | jd__tenacity | tenacity/__init__.py | {
"start": 6470,
"end": 16095
} | class ____(ABC):
def __init__(
self,
sleep: t.Callable[[t.Union[int, float]], None] = sleep,
stop: "StopBaseT" = stop_never,
wait: "WaitBaseT" = wait_none(),
retry: "RetryBaseT" = retry_if_exception_type(),
before: t.Callable[["RetryCallState"], None] = before_nothing,
after: t.Callable[["RetryCallState"], None] = after_nothing,
before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
reraise: bool = False,
retry_error_cls: t.Type[RetryError] = RetryError,
retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
):
self.sleep = sleep
self.stop = stop
self.wait = wait
self.retry = retry
self.before = before
self.after = after
self.before_sleep = before_sleep
self.reraise = reraise
self._local = threading.local()
self.retry_error_cls = retry_error_cls
self.retry_error_callback = retry_error_callback
def copy(
self,
sleep: t.Union[t.Callable[[t.Union[int, float]], None], object] = _unset,
stop: t.Union["StopBaseT", object] = _unset,
wait: t.Union["WaitBaseT", object] = _unset,
retry: t.Union[retry_base, object] = _unset,
before: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
after: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
before_sleep: t.Union[
t.Optional[t.Callable[["RetryCallState"], None]], object
] = _unset,
reraise: t.Union[bool, object] = _unset,
retry_error_cls: t.Union[t.Type[RetryError], object] = _unset,
retry_error_callback: t.Union[
t.Optional[t.Callable[["RetryCallState"], t.Any]], object
] = _unset,
) -> "Self":
"""Copy this object with some parameters changed if needed."""
return self.__class__(
sleep=_first_set(sleep, self.sleep),
stop=_first_set(stop, self.stop),
wait=_first_set(wait, self.wait),
retry=_first_set(retry, self.retry),
before=_first_set(before, self.before),
after=_first_set(after, self.after),
before_sleep=_first_set(before_sleep, self.before_sleep),
reraise=_first_set(reraise, self.reraise),
retry_error_cls=_first_set(retry_error_cls, self.retry_error_cls),
retry_error_callback=_first_set(
retry_error_callback, self.retry_error_callback
),
)
def __repr__(self) -> str:
return (
f"<{self.__class__.__name__} object at 0x{id(self):x} ("
f"stop={self.stop}, "
f"wait={self.wait}, "
f"sleep={self.sleep}, "
f"retry={self.retry}, "
f"before={self.before}, "
f"after={self.after})>"
)
@property
def statistics(self) -> t.Dict[str, t.Any]:
"""Return a dictionary of runtime statistics.
This dictionary will be empty when the controller has never been
ran. When it is running or has ran previously it should have (but
may not) have useful and/or informational keys and values when
running is underway and/or completed.
.. warning:: The keys in this dictionary **should** be some what
stable (not changing), but there existence **may**
change between major releases as new statistics are
gathered or removed so before accessing keys ensure that
they actually exist and handle when they do not.
.. note:: The values in this dictionary are local to the thread
running call (so if multiple threads share the same retrying
object - either directly or indirectly) they will each have
there own view of statistics they have collected (in the
future we may provide a way to aggregate the various
statistics from each thread).
"""
if not hasattr(self._local, "statistics"):
self._local.statistics = t.cast(t.Dict[str, t.Any], {})
return self._local.statistics # type: ignore[no-any-return]
@property
def iter_state(self) -> IterState:
if not hasattr(self._local, "iter_state"):
self._local.iter_state = IterState()
return self._local.iter_state # type: ignore[no-any-return]
def wraps(self, f: WrappedFn) -> WrappedFn:
"""Wrap a function for retrying.
:param f: A function to wraps for retrying.
"""
@functools.wraps(
f, functools.WRAPPER_ASSIGNMENTS + ("__defaults__", "__kwdefaults__")
)
def wrapped_f(*args: t.Any, **kw: t.Any) -> t.Any:
# Always create a copy to prevent overwriting the local contexts when
# calling the same wrapped functions multiple times in the same stack
copy = self.copy()
wrapped_f.statistics = copy.statistics # type: ignore[attr-defined]
return copy(f, *args, **kw)
def retry_with(*args: t.Any, **kwargs: t.Any) -> WrappedFn:
return self.copy(*args, **kwargs).wraps(f)
# Preserve attributes
wrapped_f.retry = self # type: ignore[attr-defined]
wrapped_f.retry_with = retry_with # type: ignore[attr-defined]
wrapped_f.statistics = {} # type: ignore[attr-defined]
return wrapped_f # type: ignore[return-value]
def begin(self) -> None:
self.statistics.clear()
self.statistics["start_time"] = time.monotonic()
self.statistics["attempt_number"] = 1
self.statistics["idle_for"] = 0
def _add_action_func(self, fn: t.Callable[..., t.Any]) -> None:
self.iter_state.actions.append(fn)
def _run_retry(self, retry_state: "RetryCallState") -> None:
self.iter_state.retry_run_result = self.retry(retry_state)
def _run_wait(self, retry_state: "RetryCallState") -> None:
if self.wait:
sleep = self.wait(retry_state)
else:
sleep = 0.0
retry_state.upcoming_sleep = sleep
def _run_stop(self, retry_state: "RetryCallState") -> None:
self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start
self.iter_state.stop_run_result = self.stop(retry_state)
def iter(self, retry_state: "RetryCallState") -> t.Union[DoAttempt, DoSleep, t.Any]: # noqa
self._begin_iter(retry_state)
result = None
for action in self.iter_state.actions:
result = action(retry_state)
return result
def _begin_iter(self, retry_state: "RetryCallState") -> None: # noqa
self.iter_state.reset()
fut = retry_state.outcome
if fut is None:
if self.before is not None:
self._add_action_func(self.before)
self._add_action_func(lambda rs: DoAttempt())
return
self.iter_state.is_explicit_retry = fut.failed and isinstance(
fut.exception(), TryAgain
)
if not self.iter_state.is_explicit_retry:
self._add_action_func(self._run_retry)
self._add_action_func(self._post_retry_check_actions)
def _post_retry_check_actions(self, retry_state: "RetryCallState") -> None:
if not (self.iter_state.is_explicit_retry or self.iter_state.retry_run_result):
self._add_action_func(lambda rs: rs.outcome.result())
return
if self.after is not None:
self._add_action_func(self.after)
self._add_action_func(self._run_wait)
self._add_action_func(self._run_stop)
self._add_action_func(self._post_stop_check_actions)
def _post_stop_check_actions(self, retry_state: "RetryCallState") -> None:
if self.iter_state.stop_run_result:
if self.retry_error_callback:
self._add_action_func(self.retry_error_callback)
return
def exc_check(rs: "RetryCallState") -> None:
fut = t.cast(Future, rs.outcome)
retry_exc = self.retry_error_cls(fut)
if self.reraise:
raise retry_exc.reraise()
raise retry_exc from fut.exception()
self._add_action_func(exc_check)
return
def next_action(rs: "RetryCallState") -> None:
sleep = rs.upcoming_sleep
rs.next_action = RetryAction(sleep)
rs.idle_for += sleep
self.statistics["idle_for"] += sleep
self.statistics["attempt_number"] += 1
self._add_action_func(next_action)
if self.before_sleep is not None:
self._add_action_func(self.before_sleep)
self._add_action_func(lambda rs: DoSleep(rs.upcoming_sleep))
def __iter__(self) -> t.Generator[AttemptManager, None, None]:
self.begin()
retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
while True:
do = self.iter(retry_state=retry_state)
if isinstance(do, DoAttempt):
yield AttemptManager(retry_state=retry_state)
elif isinstance(do, DoSleep):
retry_state.prepare_for_next_attempt()
self.sleep(do)
else:
break
@abstractmethod
def __call__(
self,
fn: t.Callable[..., WrappedFnReturnT],
*args: t.Any,
**kwargs: t.Any,
) -> WrappedFnReturnT:
pass
| BaseRetrying |
python | has2k1__plotnine | plotnine/scales/scale_identity.py | {
"start": 306,
"end": 885
} | class ____:
"""
Override map and train methods
"""
def map(self, x, limits=None) -> Sequence[Any]:
"""
Identity map
Notes
-----
Identity scales bypass the palette completely since the
map is the identity function.
"""
return x
def train(self, x, drop=False):
# do nothing if no guide,
# otherwise train so we know what breaks to use
if self.guide is None: # pyright: ignore
return
return super().train(x) # pyright: ignore
@dataclass
| MapTrainMixin |
python | requests__requests-oauthlib | tests/test_compliance_fixes.py | {
"start": 8114,
"end": 10027
} | class ____(TestCase):
def setUp(self):
mocker = requests_mock.Mocker()
mocker.request(
method="GET",
url="https://api.instagram.com/v1/users/self",
json={
"data": {
"id": "1574083",
"username": "snoopdogg",
"full_name": "Snoop Dogg",
"profile_picture": "http://distillery.s3.amazonaws.com/profiles/profile_1574083_75sq_1295469061.jpg",
"bio": "This is my bio",
"website": "http://snoopdogg.com",
"is_business": False,
"counts": {"media": 1320, "follows": 420, "followed_by": 3410},
}
},
)
mocker.start()
self.addCleanup(mocker.stop)
instagram = OAuth2Session("someclientid", redirect_uri="https://i.b")
self.session = instagram_compliance_fix(instagram)
def test_protected_request(self):
self.session.token = {"access_token": "dummy-access-token"}
response = self.session.get("https://api.instagram.com/v1/users/self")
url = response.request.url
query = parse_qs(urlparse(url).query)
self.assertIn("access_token", query)
self.assertEqual(query["access_token"], ["dummy-access-token"])
def test_protected_request_dont_override(self):
"""check that if the access_token param
already exist we don't override it"""
self.session.token = {"access_token": "dummy-access-token"}
response = self.session.get(
"https://api.instagram.com/v1/users/self?access_token=correct-access-token"
)
url = response.request.url
query = parse_qs(urlparse(url).query)
self.assertIn("access_token", query)
self.assertEqual(query["access_token"], ["correct-access-token"])
| InstagramComplianceFixTest |
python | readthedocs__readthedocs.org | readthedocs/api/v3/tests/test_notifications.py | {
"start": 750,
"end": 2584
} | class ____(APIEndpointMixin):
def test_notifications_list(self):
url = reverse("notifications-list")
self.client.logout()
response = self.client.get(url)
self.assertEqual(response.status_code, 401)
self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.token.key}")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertDictEqual(
response.json(),
self._get_response_dict("notifications-list"),
)
# Adding a CANCELLED/DISMISSED notification won't be returned on this endpoint
fixture.get(
Notification,
attached_to_content_type=ContentType.objects.get_for_model(self.project),
attached_to_id=self.project.id,
message_id=MESSAGE_PROJECT_SKIP_BUILDS,
state=CANCELLED,
)
fixture.get(
Notification,
attached_to_content_type=ContentType.objects.get_for_model(self.project),
attached_to_id=self.project.id,
message_id=MESSAGE_PROJECT_SKIP_BUILDS,
state=DISMISSED,
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.assertDictEqual(
response.json(),
self._get_response_dict("notifications-list"),
)
def test_notifications_list_post(self):
url = reverse("notifications-list")
self.client.logout()
response = self.client.post(url)
self.assertEqual(response.status_code, 401)
self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.token.key}")
response = self.client.post(url)
# We don't allow POST on this endpoint
self.assertEqual(response.status_code, 405)
| NotificationsEndpointTests |
python | lepture__authlib | tests/flask/test_oauth1/oauth1_server.py | {
"start": 3141,
"end": 8952
} | class ____(db.Model):
__table_args__ = (
db.UniqueConstraint(
"client_id", "timestamp", "nonce", "oauth_token", name="unique_nonce"
),
)
id = db.Column(db.Integer, primary_key=True)
client_id = db.Column(db.String(48), nullable=False)
timestamp = db.Column(db.Integer, nullable=False)
nonce = db.Column(db.String(48), nullable=False)
oauth_token = db.Column(db.String(84))
def exists_nonce(nonce, timestamp, client_id, oauth_token):
q = TimestampNonce.query.filter_by(
nonce=nonce,
timestamp=timestamp,
client_id=client_id,
)
if oauth_token:
q = q.filter_by(oauth_token=oauth_token)
rv = q.first()
if rv:
return True
item = TimestampNonce(
nonce=nonce,
timestamp=timestamp,
client_id=client_id,
oauth_token=oauth_token,
)
db.session.add(item)
db.session.commit()
return False
def create_temporary_credential(token, client_id, redirect_uri):
item = TemporaryCredential(
client_id=client_id,
oauth_token=token["oauth_token"],
oauth_token_secret=token["oauth_token_secret"],
oauth_callback=redirect_uri,
)
db.session.add(item)
db.session.commit()
return item
def get_temporary_credential(oauth_token):
return TemporaryCredential.query.filter_by(oauth_token=oauth_token).first()
def delete_temporary_credential(oauth_token):
q = TemporaryCredential.query.filter_by(oauth_token=oauth_token)
q.delete(synchronize_session=False)
db.session.commit()
def create_authorization_verifier(credential, grant_user, verifier):
credential.user_id = grant_user.id # assuming your end user model has `.id`
credential.oauth_verifier = verifier
db.session.add(credential)
db.session.commit()
return credential
def create_token_credential(token, temporary_credential):
credential = TokenCredential(
oauth_token=token["oauth_token"],
oauth_token_secret=token["oauth_token_secret"],
client_id=temporary_credential.get_client_id(),
)
credential.user_id = temporary_credential.get_user_id()
db.session.add(credential)
db.session.commit()
return credential
def create_authorization_server(app, use_cache=False, lazy=False):
def query_client(client_id):
return Client.query.filter_by(client_id=client_id).first()
if lazy:
server = AuthorizationServer()
server.init_app(app, query_client)
else:
server = AuthorizationServer(app, query_client=query_client)
if use_cache:
cache = SimpleCache()
register_nonce_hooks(server, cache)
register_temporary_credential_hooks(server, cache)
server.register_hook("create_token_credential", create_token_credential)
else:
server.register_hook("exists_nonce", exists_nonce)
server.register_hook("create_temporary_credential", create_temporary_credential)
server.register_hook("get_temporary_credential", get_temporary_credential)
server.register_hook("delete_temporary_credential", delete_temporary_credential)
server.register_hook(
"create_authorization_verifier", create_authorization_verifier
)
server.register_hook("create_token_credential", create_token_credential)
@app.route("/oauth/initiate", methods=["GET", "POST"])
def initiate():
return server.create_temporary_credentials_response()
@app.route("/oauth/authorize", methods=["GET", "POST"])
def authorize():
if request.method == "GET":
try:
server.check_authorization_request()
return "ok"
except OAuth1Error:
return "error"
user_id = request.form.get("user_id")
if user_id:
grant_user = db.session.get(User, int(user_id))
else:
grant_user = None
try:
return server.create_authorization_response(grant_user=grant_user)
except OAuth1Error as error:
return url_encode(error.get_body())
@app.route("/oauth/token", methods=["POST"])
def issue_token():
return server.create_token_response()
return server
def create_resource_server(app, use_cache=False, lazy=False):
if use_cache:
cache = SimpleCache()
exists_nonce = create_cache_exists_nonce_func(cache)
else:
def exists_nonce(nonce, timestamp, client_id, oauth_token):
q = db.session.query(TimestampNonce.nonce).filter_by(
nonce=nonce,
timestamp=timestamp,
client_id=client_id,
)
if oauth_token:
q = q.filter_by(oauth_token=oauth_token)
rv = q.first()
if rv:
return True
tn = TimestampNonce(
nonce=nonce,
timestamp=timestamp,
client_id=client_id,
oauth_token=oauth_token,
)
db.session.add(tn)
db.session.commit()
return False
def query_client(client_id):
return Client.query.filter_by(client_id=client_id).first()
def query_token(client_id, oauth_token):
return TokenCredential.query.filter_by(
client_id=client_id, oauth_token=oauth_token
).first()
if lazy:
require_oauth = ResourceProtector()
require_oauth.init_app(app, query_client, query_token, exists_nonce)
else:
require_oauth = ResourceProtector(app, query_client, query_token, exists_nonce)
@app.route("/user")
@require_oauth()
def user_profile():
user = current_credential.user
return jsonify(id=user.id, username=user.username)
| TimestampNonce |
python | spack__spack | lib/spack/spack/vendor/typing_extensions.py | {
"start": 12647,
"end": 100425
} | class ____(GenericMeta):
def __subclasscheck__(self, subclass):
"""This mimics a more modern GenericMeta.__subclasscheck__() logic
(that does not have problems with recursion) to work around interactions
between collections, typing, and spack.vendor.typing_extensions on older
versions of Python, see https://github.com/python/typing/issues/501.
"""
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if not self.__extra__:
return super().__subclasscheck__(subclass)
res = self.__extra__.__subclasshook__(subclass)
if res is not NotImplemented:
return res
if self.__extra__ in subclass.__mro__:
return True
for scls in self.__extra__.__subclasses__():
if isinstance(scls, GenericMeta):
continue
if issubclass(subclass, scls):
return True
return False
Awaitable = typing.Awaitable
Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
# 3.6.1+
if hasattr(typing, 'Deque'):
Deque = typing.Deque
# 3.6.0
else:
class Deque(collections.deque, typing.MutableSequence[T],
metaclass=_ExtensionsGenericMeta,
extra=collections.deque):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Deque:
return collections.deque(*args, **kwds)
return typing._generic_new(collections.deque, cls, *args, **kwds)
ContextManager = typing.ContextManager
# 3.6.2+
if hasattr(typing, 'AsyncContextManager'):
AsyncContextManager = typing.AsyncContextManager
# 3.6.0-3.6.1
else:
from _collections_abc import _check_methods as _check_methods_in_mro # noqa
class AsyncContextManager(typing.Generic[T_co]):
__slots__ = ()
async def __aenter__(self):
return self
@abc.abstractmethod
async def __aexit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is AsyncContextManager:
return _check_methods_in_mro(C, "__aenter__", "__aexit__")
return NotImplemented
DefaultDict = typing.DefaultDict
# 3.7.2+
if hasattr(typing, 'OrderedDict'):
OrderedDict = typing.OrderedDict
# 3.7.0-3.7.2
elif (3, 7, 0) <= sys.version_info[:3] < (3, 7, 2):
OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
# 3.6
else:
class OrderedDict(collections.OrderedDict, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.OrderedDict):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is OrderedDict:
return collections.OrderedDict(*args, **kwds)
return typing._generic_new(collections.OrderedDict, cls, *args, **kwds)
# 3.6.2+
if hasattr(typing, 'Counter'):
Counter = typing.Counter
# 3.6.0-3.6.1
else:
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Counter:
return collections.Counter(*args, **kwds)
return typing._generic_new(collections.Counter, cls, *args, **kwds)
# 3.6.1+
if hasattr(typing, 'ChainMap'):
ChainMap = typing.ChainMap
elif hasattr(collections, 'ChainMap'):
class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.ChainMap):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is ChainMap:
return collections.ChainMap(*args, **kwds)
return typing._generic_new(collections.ChainMap, cls, *args, **kwds)
# 3.6.1+
if hasattr(typing, 'AsyncGenerator'):
AsyncGenerator = typing.AsyncGenerator
# 3.6.0
else:
class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
metaclass=_ExtensionsGenericMeta,
extra=collections.abc.AsyncGenerator):
__slots__ = ()
NewType = typing.NewType
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING
def _gorg(cls):
"""This function exists for compatibility with old typing versions."""
assert isinstance(cls, GenericMeta)
if hasattr(cls, '_gorg'):
return cls._gorg
while cls.__origin__ is not None:
cls = cls.__origin__
return cls
_PROTO_WHITELIST = ['Callable', 'Awaitable',
'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
'ContextManager', 'AsyncContextManager']
def _get_protocol_attrs(cls):
attrs = set()
for base in cls.__mro__[:-1]: # without object
if base.__name__ in ('Protocol', 'Generic'):
continue
annotations = getattr(base, '__annotations__', {})
for attr in list(base.__dict__.keys()) + list(annotations.keys()):
if (not attr.startswith('_abc_') and attr not in (
'__abstractmethods__', '__annotations__', '__weakref__',
'_is_protocol', '_is_runtime_protocol', '__dict__',
'__args__', '__slots__',
'__next_in_mro__', '__parameters__', '__origin__',
'__orig_bases__', '__extra__', '__tree_hash__',
'__doc__', '__subclasshook__', '__init__', '__new__',
'__module__', '_MutableMapping__marker', '_gorg')):
attrs.add(attr)
return attrs
def _is_callable_members_only(cls):
return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
# 3.8+
if hasattr(typing, 'Protocol'):
Protocol = typing.Protocol
# 3.7
elif PEP_560:
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
class _ProtocolMeta(abc.ABCMeta):
# This metaclass is a bit unfortunate and exists only because of the lack
# of __instancehook__.
def __instancecheck__(cls, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(cls, '_is_protocol', False) or
_is_callable_members_only(cls)) and
issubclass(instance.__class__, cls)):
return True
if cls._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(cls, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(cls)):
return True
return super().__instancecheck__(instance)
class Protocol(metaclass=_ProtocolMeta):
# There is quite a lot of overlapping code with typing.Generic.
# Unfortunately it is hard to avoid this while these live in two different
# modules. The duplicated code will be removed when Protocol is moved to typing.
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@spack.vendor.typing_extensions.runtime act as simple-minded runtime protocol that checks
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if cls is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can only be used as a base class")
return super().__new__(cls)
@typing._tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple):
params = (params,)
if not params and cls is not typing.Tuple:
raise TypeError(
f"Parameter list to {cls.__qualname__}[...] cannot be empty")
msg = "Parameters to generic types must be types."
params = tuple(typing._type_check(p, msg) for p in params) # noqa
if cls is Protocol:
# Generic can only be subscripted with unique type variables.
if not all(isinstance(p, typing.TypeVar) for p in params):
i = 0
while isinstance(params[i], typing.TypeVar):
i += 1
raise TypeError(
"Parameters to Protocol[...] must all be type variables."
f" Parameter {i + 1} is {params[i]}")
if len(set(params)) != len(params):
raise TypeError(
"Parameters to Protocol[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
_check_generic(cls, params, len(cls.__parameters__))
return typing._GenericAlias(cls, params)
def __init_subclass__(cls, *args, **kwargs):
tvars = []
if '__orig_bases__' in cls.__dict__:
error = typing.Generic in cls.__orig_bases__
else:
error = typing.Generic in cls.__bases__
if error:
raise TypeError("Cannot inherit from plain Generic")
if '__orig_bases__' in cls.__dict__:
tvars = typing._collect_type_vars(cls.__orig_bases__)
# Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
# If found, tvars must be a subset of it.
# If not found, tvars is it.
# Also check for and reject plain Generic,
# and reject multiple Generic[...] and/or Protocol[...].
gvars = None
for base in cls.__orig_bases__:
if (isinstance(base, typing._GenericAlias) and
base.__origin__ in (typing.Generic, Protocol)):
# for error messages
the_base = base.__origin__.__name__
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...]"
" and/or Protocol[...] multiple types.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
s_args = ', '.join(str(g) for g in gvars)
raise TypeError(f"Some type variables ({s_vars}) are"
f" not listed in {the_base}[{s_args}]")
tvars = gvars
cls.__parameters__ = tuple(tvars)
# Determine if this is a protocol or a concrete subclass.
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol for b in cls.__bases__)
# Set (or override) the protocol subclass hook.
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not getattr(cls, '_is_runtime_protocol', False):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if not _is_callable_members_only(cls):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
# We have nothing more to do for non-protocols.
if not cls._is_protocol:
return
# Check consistency of bases.
for base in cls.__bases__:
if not (base in (object, typing.Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, _ProtocolMeta) and base._is_protocol):
raise TypeError('Protocols can only inherit from other'
f' protocols, got {repr(base)}')
cls.__init__ = _no_init
# 3.6
else:
from typing import _next_in_mro, _type_check # noqa
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
class _ProtocolMeta(GenericMeta):
"""Internal metaclass for Protocol.
This exists so Protocol classes can be generic without deriving
from Generic.
"""
def __new__(cls, name, bases, namespace,
tvars=None, args=None, origin=None, extra=None, orig_bases=None):
# This is just a version copied from GenericMeta.__new__ that
# includes "Protocol" special treatment. (Comments removed for brevity.)
assert extra is None # Protocols should not have extra
if tvars is not None:
assert origin is not None
assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars
else:
tvars = _type_vars(bases)
gvars = None
for base in bases:
if base is typing.Generic:
raise TypeError("Cannot inherit from plain Generic")
if (isinstance(base, GenericMeta) and
base.__origin__ in (typing.Generic, Protocol)):
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...] or"
" Protocol[...] multiple times.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
s_vars = ", ".join(str(t) for t in tvars if t not in gvarset)
s_args = ", ".join(str(g) for g in gvars)
cls_name = "Generic" if any(b.__origin__ is typing.Generic
for b in bases) else "Protocol"
raise TypeError(f"Some type variables ({s_vars}) are"
f" not listed in {cls_name}[{s_args}]")
tvars = gvars
initial_bases = bases
if (extra is not None and type(extra) is abc.ABCMeta and
extra not in bases):
bases = (extra,) + bases
bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
for b in bases)
if any(isinstance(b, GenericMeta) and b is not typing.Generic for b in bases):
bases = tuple(b for b in bases if b is not typing.Generic)
namespace.update({'__origin__': origin, '__extra__': extra})
self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
_root=True)
super(GenericMeta, self).__setattr__('_gorg',
self if not origin else
_gorg(origin))
self.__parameters__ = tvars
self.__args__ = tuple(... if a is typing._TypingEllipsis else
() if a is typing._TypingEmpty else
a for a in args) if args else None
self.__next_in_mro__ = _next_in_mro(self)
if orig_bases is None:
self.__orig_bases__ = initial_bases
elif origin is not None:
self._abc_registry = origin._abc_registry
self._abc_cache = origin._abc_cache
if hasattr(self, '_subs_tree'):
self.__tree_hash__ = (hash(self._subs_tree()) if origin else
super(GenericMeta, self).__hash__())
return self
def __init__(cls, *args, **kwargs):
super().__init__(*args, **kwargs)
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol or
isinstance(b, _ProtocolMeta) and
b.__origin__ is Protocol
for b in cls.__bases__)
if cls._is_protocol:
for base in cls.__mro__[1:]:
if not (base in (object, typing.Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, typing.TypingMeta) and base._is_protocol or
isinstance(base, GenericMeta) and
base.__origin__ is typing.Generic):
raise TypeError(f'Protocols can only inherit from other'
f' protocols, got {repr(base)}')
cls.__init__ = _no_init
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
def __instancecheck__(self, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(self, '_is_protocol', False) or
_is_callable_members_only(self)) and
issubclass(instance.__class__, self)):
return True
if self._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(self, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(self)):
return True
return super(GenericMeta, self).__instancecheck__(instance)
def __subclasscheck__(self, cls):
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if (self.__dict__.get('_is_protocol', None) and
not self.__dict__.get('_is_runtime_protocol', None)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return False
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if (self.__dict__.get('_is_runtime_protocol', None) and
not _is_callable_members_only(self)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return super(GenericMeta, self).__subclasscheck__(cls)
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
return super(GenericMeta, self).__subclasscheck__(cls)
@typing._tp_cache
def __getitem__(self, params):
# We also need to copy this from GenericMeta.__getitem__ to get
# special treatment of "Protocol". (Comments removed for brevity.)
if not isinstance(params, tuple):
params = (params,)
if not params and _gorg(self) is not typing.Tuple:
raise TypeError(
f"Parameter list to {self.__qualname__}[...] cannot be empty")
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if self in (typing.Generic, Protocol):
if not all(isinstance(p, typing.TypeVar) for p in params):
raise TypeError(
f"Parameters to {repr(self)}[...] must all be type variables")
if len(set(params)) != len(params):
raise TypeError(
f"Parameters to {repr(self)}[...] must all be unique")
tvars = params
args = params
elif self in (typing.Tuple, typing.Callable):
tvars = _type_vars(params)
args = params
elif self.__origin__ in (typing.Generic, Protocol):
raise TypeError(f"Cannot subscript already-subscripted {repr(self)}")
else:
_check_generic(self, params, len(self.__parameters__))
tvars = _type_vars(params)
args = params
prepend = (self,) if self.__origin__ is None else ()
return self.__class__(self.__name__,
prepend + self.__bases__,
_no_slots_copy(self.__dict__),
tvars=tvars,
args=args,
origin=self,
extra=self.__extra__,
orig_bases=self.__orig_bases__)
class Protocol(metaclass=_ProtocolMeta):
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@spack.vendor.typing_extensions.runtime act as simple-minded runtime protocol that checks
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if _gorg(cls) is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can be used only as a base class")
return typing._generic_new(cls.__next_in_mro__, cls, *args, **kwds)
# 3.8+
if hasattr(typing, 'runtime_checkable'):
runtime_checkable = typing.runtime_checkable
# 3.6-3.7
else:
def runtime_checkable(cls):
"""Mark a protocol class as a runtime protocol, so that it
can be used with isinstance() and issubclass(). Raise TypeError
if applied to a non-protocol class.
This allows a simple-minded structural check very similar to the
one-offs in collections.abc such as Hashable.
"""
if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
raise TypeError('@runtime_checkable can be only applied to protocol classes,'
f' got {cls!r}')
cls._is_runtime_protocol = True
return cls
# Exists for backwards compatibility.
runtime = runtime_checkable
# 3.8+
if hasattr(typing, 'SupportsIndex'):
SupportsIndex = typing.SupportsIndex
# 3.6-3.7
else:
    # Backport of typing.SupportsIndex: structural type for objects that can
    # be converted losslessly to an integer index via __index__ (PEP 357).
    @runtime_checkable
    class SupportsIndex(Protocol):
        __slots__ = ()
        @abc.abstractmethod
        def __index__(self) -> int:
            pass
if hasattr(typing, "Required"):
# The standard library TypedDict in Python 3.8 does not store runtime information
# about which (if any) keys are optional. See https://bugs.python.org/issue38834
# The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
# keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
# The standard library TypedDict below Python 3.11 does not store runtime
# information about optional and required keys when using Required or NotRequired.
TypedDict = typing.TypedDict
_TypedDictMeta = typing._TypedDictMeta
is_typeddict = typing.is_typeddict
else:
def _check_fails(cls, other):
try:
if sys._getframe(1).f_globals['__name__'] not in ['abc',
'functools',
'typing']:
# Typed dicts are only for static structural subtyping.
raise TypeError('TypedDict does not support instance and class checks')
except (AttributeError, ValueError):
pass
return False
def _dict_new(*args, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
_, args = args[0], args[1:] # allow the "cls" keyword be passed
return dict(*args, **kwargs)
_dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
    def _typeddict_new(*args, total=True, **kwargs):
        """Implement the functional forms TypedDict('Name', {...}) and
        TypedDict('Name', k=v, ...).

        NOTE(review): the argument juggling below also supports the
        deprecated '_typename'/'_fields' keyword spellings; the order of
        the checks matters and must not be rearranged.
        """
        if not args:
            raise TypeError('TypedDict.__new__(): not enough arguments')
        _, args = args[0], args[1:]  # allow the "cls" keyword be passed
        if args:
            typename, args = args[0], args[1:]  # allow the "_typename" keyword be passed
        elif '_typename' in kwargs:
            typename = kwargs.pop('_typename')
            import warnings
            warnings.warn("Passing '_typename' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            raise TypeError("TypedDict.__new__() missing 1 required positional "
                            "argument: '_typename'")
        if args:
            try:
                fields, = args  # allow the "_fields" keyword be passed
            except ValueError:
                raise TypeError('TypedDict.__new__() takes from 2 to 3 '
                                f'positional arguments but {len(args) + 2} '
                                'were given')
        elif '_fields' in kwargs and len(kwargs) == 1:
            fields = kwargs.pop('_fields')
            import warnings
            warnings.warn("Passing '_fields' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            fields = None
        # With no explicit fields dict, the remaining keyword arguments ARE
        # the field declarations; mixing both forms is an error.
        if fields is None:
            fields = kwargs
        elif kwargs:
            raise TypeError("TypedDict takes either a dict or keyword arguments,"
                            " but not both")
        ns = {'__annotations__': dict(fields)}
        try:
            # Setting correct module is necessary to make typed dict classes pickleable.
            ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
        return _TypedDictMeta(typename, (), ns, total=total)
    _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
                                         ' /, *, total=True, **kwargs)')
    class _TypedDictMeta(type):
        """Metaclass backing the TypedDict backport.

        Builds classes that are plain dict subclasses at runtime while
        recording __annotations__, __required_keys__, __optional_keys__
        and __total__ for introspection.
        """
        def __init__(cls, name, bases, ns, total=True):
            # Swallow the extra 'total' argument that type.__init__ rejects.
            super().__init__(name, bases, ns)
        def __new__(cls, name, bases, ns, total=True):
            # Create new typed dict class object.
            # This method is called directly when TypedDict is subclassed,
            # or via _typeddict_new when TypedDict is instantiated. This way
            # TypedDict supports all three syntaxes described in its docstring.
            # Subclasses and instances of TypedDict return actual dictionaries
            # via _dict_new.
            ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
            tp_dict = super().__new__(cls, name, (dict,), ns)
            annotations = {}
            own_annotations = ns.get('__annotations__', {})
            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
            # Validate each annotation is an acceptable type expression.
            own_annotations = {
                n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
            }
            required_keys = set()
            optional_keys = set()
            # Inherit annotations and key classifications from the bases first;
            # this class's own annotations then override/extend them.
            for base in bases:
                annotations.update(base.__dict__.get('__annotations__', {}))
                required_keys.update(base.__dict__.get('__required_keys__', ()))
                optional_keys.update(base.__dict__.get('__optional_keys__', ()))
            annotations.update(own_annotations)
            if PEP_560:
                # Classify each own key as required/optional, honouring
                # Required[...]/NotRequired[...] (possibly wrapped in
                # Annotated[...]) before falling back to 'total'.
                for annotation_key, annotation_type in own_annotations.items():
                    annotation_origin = get_origin(annotation_type)
                    if annotation_origin is Annotated:
                        annotation_args = get_args(annotation_type)
                        if annotation_args:
                            annotation_type = annotation_args[0]
                            annotation_origin = get_origin(annotation_type)
                    if annotation_origin is Required:
                        required_keys.add(annotation_key)
                    elif annotation_origin is NotRequired:
                        optional_keys.add(annotation_key)
                    elif total:
                        required_keys.add(annotation_key)
                    else:
                        optional_keys.add(annotation_key)
            else:
                # Pre-PEP-560 interpreters cannot introspect Required/
                # NotRequired, so only 'total' decides the classification.
                own_annotation_keys = set(own_annotations.keys())
                if total:
                    required_keys.update(own_annotation_keys)
                else:
                    optional_keys.update(own_annotation_keys)
            tp_dict.__annotations__ = annotations
            tp_dict.__required_keys__ = frozenset(required_keys)
            tp_dict.__optional_keys__ = frozenset(optional_keys)
            if not hasattr(tp_dict, '__total__'):
                tp_dict.__total__ = total
            return tp_dict
        # Runtime isinstance()/issubclass() are not supported for TypedDicts.
        __instancecheck__ = __subclasscheck__ = _check_fails
TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
TypedDict.__module__ = __name__
TypedDict.__doc__ = \
"""A simple typed name space. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type that expects all of its
instances to have a certain set of keys, with each key
associated with a value of a consistent type. This expectation
is not checked at runtime but is only enforced by type checkers.
Usage::
class Point2D(TypedDict):
x: int
y: int
label: str
a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
The type info can be accessed via the Point2D.__annotations__ dict, and
the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
TypedDict supports two additional equivalent forms::
Point2D = TypedDict('Point2D', x=int, y=int, label=str)
Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
The class syntax is only supported in Python 3.6+, while two other
syntax forms work for Python 2.7 and 3.2+
"""
if hasattr(typing, "_TypedDictMeta"):
_TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
else:
_TYPEDDICT_TYPES = (_TypedDictMeta,)
def is_typeddict(tp):
"""Check if an annotation is a TypedDict class
For example::
class Film(TypedDict):
title: str
year: int
is_typeddict(Film) # => True
is_typeddict(Union[list, str]) # => False
"""
return isinstance(tp, tuple(_TYPEDDICT_TYPES))
if hasattr(typing, "Required"):
get_type_hints = typing.get_type_hints
elif PEP_560:
import functools
import types
# replaces _strip_annotations()
    def _strip_extras(t):
        """Strips Annotated, Required and NotRequired from a given type."""
        # NOTE(review): the branch order matters — Annotated and
        # Required/NotRequired wrappers must be unwrapped before the
        # generic-alias cases are considered.
        if isinstance(t, _AnnotatedAlias):
            return _strip_extras(t.__origin__)
        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
            return _strip_extras(t.__args__[0])
        if isinstance(t, typing._GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            # Return the original alias unchanged when nothing was stripped.
            if stripped_args == t.__args__:
                return t
            return t.copy_with(stripped_args)
        if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            return types.GenericAlias(t.__origin__, stripped_args)
        if hasattr(types, "UnionType") and isinstance(t, types.UnionType):
            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
            if stripped_args == t.__args__:
                return t
            # Rebuild the X | Y union from the stripped members.
            return functools.reduce(operator.or_, stripped_args)
        return t
def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
forward references encoded as string literals, adds Optional[t] if a
default value equal to None is set and recursively replaces all
'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
(unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
inherited members.
TypeError is raised if the argument is not of a type that can contain
annotations, and an empty dictionary is returned if no annotations are
present.
BEWARE -- the behavior of globalns and localns is counterintuitive
(unless you are familiar with how eval() and exec() work). The
search order is locals first, then globals.
- If no dict arguments are passed, an attempt is made to use the
globals from obj (or the respective module's globals for classes),
and these are also used as the locals. If the object does not appear
to have globals, an empty dictionary is used.
- If one dict argument is passed, it is used for both globals and
locals.
- If two dict arguments are passed, they specify globals and
locals, respectively.
"""
if hasattr(typing, "Annotated"):
hint = typing.get_type_hints(
obj, globalns=globalns, localns=localns, include_extras=True
)
else:
hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
if include_extras:
return hint
return {k: _strip_extras(t) for k, t in hint.items()}
# Python 3.9+ has PEP 593 (Annotated)
if hasattr(typing, 'Annotated'):
Annotated = typing.Annotated
# Not exported and not a public API, but needed for get_origin() and get_args()
# to work.
_AnnotatedAlias = typing._AnnotatedAlias
# 3.7-3.8
elif PEP_560:
    class _AnnotatedAlias(typing._GenericAlias, _root=True):
        """Runtime representation of an annotated type.
        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
        with extra annotations. The alias behaves like a normal typing alias,
        instantiating is the same as instantiating the underlying type, binding
        it to types is also the same.
        """
        def __init__(self, origin, metadata):
            # Nested Annotated are flattened: the inner metadata comes first.
            if isinstance(origin, _AnnotatedAlias):
                metadata = origin.__metadata__ + metadata
                origin = origin.__origin__
            super().__init__(origin, origin)
            self.__metadata__ = metadata
        def copy_with(self, params):
            # Re-wrap a substituted origin with the same metadata.
            assert len(params) == 1
            new_type = params[0]
            return _AnnotatedAlias(new_type, self.__metadata__)
        def __repr__(self):
            return (f"spack.vendor.typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
        def __reduce__(self):
            # Pickle by re-subscripting Annotated with origin + metadata.
            return operator.getitem, (
                Annotated, (self.__origin__,) + self.__metadata__
            )
        def __eq__(self, other):
            # Equal only to other _AnnotatedAlias with same origin AND metadata.
            if not isinstance(other, _AnnotatedAlias):
                return NotImplemented
            if self.__origin__ != other.__origin__:
                return False
            return self.__metadata__ == other.__metadata__
        def __hash__(self):
            return hash((self.__origin__, self.__metadata__))
class Annotated:
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type (and will be in
the __origin__ field), the remaining arguments are kept as a tuple in
the __extra__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
__slots__ = ()
def __new__(cls, *args, **kwargs):
raise TypeError("Type Annotated cannot be instantiated.")
@typing._tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be used "
"with at least two arguments (a type and an "
"annotation).")
allowed_special_forms = (ClassVar, Final)
if get_origin(params[0]) in allowed_special_forms:
origin = params[0]
else:
msg = "Annotated[t, ...]: t must be a type."
origin = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return _AnnotatedAlias(origin, metadata)
def __init_subclass__(cls, *args, **kwargs):
raise TypeError(
f"Cannot subclass {cls.__module__}.Annotated"
)
# 3.6
else:
def _is_dunder(name):
"""Returns True if name is a __dunder_variable_name__."""
return len(name) > 4 and name.startswith('__') and name.endswith('__')
# Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
# checks, argument expansion etc. are done on the _subs_tre. As a result we
# can't provide a get_type_hints function that strips out annotations.
class AnnotatedMeta(typing.GenericMeta):
"""Metaclass for Annotated"""
def __new__(cls, name, bases, namespace, **kwargs):
if any(b is not object for b in bases):
raise TypeError("Cannot subclass " + str(Annotated))
return super().__new__(cls, name, bases, namespace, **kwargs)
@property
def __metadata__(self):
return self._subs_tree()[2]
def _tree_repr(self, tree):
cls, origin, metadata = tree
if not isinstance(origin, tuple):
tp_repr = typing._type_repr(origin)
else:
tp_repr = origin[0]._tree_repr(origin)
metadata_reprs = ", ".join(repr(arg) for arg in metadata)
return f'{cls}[{tp_repr}, {metadata_reprs}]'
def _subs_tree(self, tvars=None, args=None): # noqa
if self is Annotated:
return Annotated
res = super()._subs_tree(tvars=tvars, args=args)
# Flatten nested Annotated
if isinstance(res[1], tuple) and res[1][0] is Annotated:
sub_tp = res[1][1]
sub_annot = res[1][2]
return (Annotated, sub_tp, sub_annot + res[2])
return res
def _get_cons(self):
"""Return the class used to create instance of this type."""
if self.__origin__ is None:
raise TypeError("Cannot get the underlying type of a "
"non-specialized Annotated type.")
tree = self._subs_tree()
while isinstance(tree, tuple) and tree[0] is Annotated:
tree = tree[1]
if isinstance(tree, tuple):
return tree[0]
else:
return tree
@typing._tp_cache
def __getitem__(self, params):
if not isinstance(params, tuple):
params = (params,)
if self.__origin__ is not None: # specializing an instantiated type
return super().__getitem__(params)
elif not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be instantiated "
"with at least two arguments (a type and an "
"annotation).")
else:
if (
isinstance(params[0], typing._TypingBase) and
type(params[0]).__name__ == "_ClassVar"
):
tp = params[0]
else:
msg = "Annotated[t, ...]: t must be a type."
tp = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return self.__class__(
self.__name__,
self.__bases__,
_no_slots_copy(self.__dict__),
tvars=_type_vars((tp,)),
# Metadata is a tuple so it won't be touched by _replace_args et al.
args=(tp, metadata),
origin=self,
)
def __call__(self, *args, **kwargs):
cons = self._get_cons()
result = cons(*args, **kwargs)
try:
result.__orig_class__ = self
except AttributeError:
pass
return result
def __getattr__(self, attr):
# For simplicity we just don't relay all dunder names
if self.__origin__ is not None and not _is_dunder(attr):
return getattr(self._get_cons(), attr)
raise AttributeError(attr)
def __setattr__(self, attr, value):
if _is_dunder(attr) or attr.startswith('_abc_'):
super().__setattr__(attr, value)
elif self.__origin__ is None:
raise AttributeError(attr)
else:
setattr(self._get_cons(), attr, value)
def __instancecheck__(self, obj):
raise TypeError("Annotated cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Annotated cannot be used with issubclass().")
class Annotated(metaclass=AnnotatedMeta):
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type, the remaining
arguments are kept as a tuple in the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
# Python 3.8 has get_origin() and get_args() but those implementations aren't
# Annotated-aware, so we can't use those. Python 3.9's versions don't support
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
if sys.version_info[:2] >= (3, 10):
get_origin = typing.get_origin
get_args = typing.get_args
# 3.7-3.9
elif PEP_560:
try:
# 3.9+
from typing import _BaseGenericAlias
except ImportError:
_BaseGenericAlias = typing._GenericAlias
try:
# 3.9+
from typing import GenericAlias
except ImportError:
GenericAlias = typing._GenericAlias
def get_origin(tp):
"""Get the unsubscripted version of a type.
This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
and Annotated. Return None for unsupported types. Examples::
get_origin(Literal[42]) is Literal
get_origin(int) is None
get_origin(ClassVar[int]) is ClassVar
get_origin(Generic) is Generic
get_origin(Generic[T]) is Generic
get_origin(Union[T, int]) is Union
get_origin(List[Tuple[T, T]][int]) == list
get_origin(P.args) is P
"""
if isinstance(tp, _AnnotatedAlias):
return Annotated
if isinstance(tp, (typing._GenericAlias, GenericAlias, _BaseGenericAlias,
ParamSpecArgs, ParamSpecKwargs)):
return tp.__origin__
if tp is typing.Generic:
return typing.Generic
return None
def get_args(tp):
"""Get type arguments with all substitutions performed.
For unions, basic simplifications used by Union constructor are performed.
Examples::
get_args(Dict[str, int]) == (str, int)
get_args(int) == ()
get_args(Union[int, Union[T, int], str][int]) == (int, str)
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
if isinstance(tp, _AnnotatedAlias):
return (tp.__origin__,) + tp.__metadata__
if isinstance(tp, (typing._GenericAlias, GenericAlias)):
if getattr(tp, "_special", False):
return ()
res = tp.__args__
if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
return ()
# 3.10+
if hasattr(typing, 'TypeAlias'):
TypeAlias = typing.TypeAlias
# 3.9
elif sys.version_info[:2] >= (3, 9):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
@_TypeAliasForm
def TypeAlias(self, parameters):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
raise TypeError(f"{self} is not subscriptable")
# 3.7-3.8
elif sys.version_info[:2] >= (3, 7):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
TypeAlias = _TypeAliasForm('TypeAlias',
doc="""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example
above.""")
# 3.6
else:
class _TypeAliasMeta(typing.TypingMeta):
"""Metaclass for TypeAlias"""
def __repr__(self):
return 'spack.vendor.typing_extensions.TypeAlias'
class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("TypeAlias cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("TypeAlias cannot be used with issubclass().")
def __repr__(self):
return 'spack.vendor.typing_extensions.TypeAlias'
TypeAlias = _TypeAliasBase(_root=True)
# Python 3.10+ has PEP 612
if hasattr(typing, 'ParamSpecArgs'):
ParamSpecArgs = typing.ParamSpecArgs
ParamSpecKwargs = typing.ParamSpecKwargs
# 3.6-3.9
else:
class _Immutable:
"""Mixin to indicate that object should not be copied."""
__slots__ = ()
def __copy__(self):
return self
def __deepcopy__(self, memo):
return self
class ParamSpecArgs(_Immutable):
"""The args for a ParamSpec object.
Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
ParamSpecArgs objects have a reference back to their ParamSpec:
P.args.__origin__ is P
This type is meant for runtime introspection and has no special meaning to
static type checkers.
"""
def __init__(self, origin):
self.__origin__ = origin
def __repr__(self):
return f"{self.__origin__.__name__}.args"
def __eq__(self, other):
if not isinstance(other, ParamSpecArgs):
return NotImplemented
return self.__origin__ == other.__origin__
class ParamSpecKwargs(_Immutable):
"""The kwargs for a ParamSpec object.
Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
ParamSpecKwargs objects have a reference back to their ParamSpec:
P.kwargs.__origin__ is P
This type is meant for runtime introspection and has no special meaning to
static type checkers.
"""
def __init__(self, origin):
self.__origin__ = origin
def __repr__(self):
return f"{self.__origin__.__name__}.kwargs"
def __eq__(self, other):
if not isinstance(other, ParamSpecKwargs):
return NotImplemented
return self.__origin__ == other.__origin__
# 3.10+
if hasattr(typing, 'ParamSpec'):
ParamSpec = typing.ParamSpec
# 3.6-3.9
else:
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
class ParamSpec(list):
"""Parameter specification variable.
Usage::
P = ParamSpec('P')
Parameter specification variables exist primarily for the benefit of static
type checkers. They are used to forward the parameter types of one
callable to another callable, a pattern commonly found in higher order
functions and decorators. They are only valid when used in ``Concatenate``,
or s the first argument to ``Callable``. In Python 3.10 and higher,
they are also supported in user-defined Generics at runtime.
See class Generic for more information on generic types. An
example for annotating a decorator::
T = TypeVar('T')
P = ParamSpec('P')
def add_logging(f: Callable[P, T]) -> Callable[P, T]:
'''A type-safe decorator to add logging to a function.'''
def inner(*args: P.args, **kwargs: P.kwargs) -> T:
logging.info(f'{f.__name__} was called')
return f(*args, **kwargs)
return inner
@add_logging
def add_two(x: float, y: float) -> float:
'''Add two numbers together.'''
return x + y
Parameter specification variables defined with covariant=True or
contravariant=True can be used to declare covariant or contravariant
generic types. These keyword arguments are valid, but their actual semantics
are yet to be decided. See PEP 612 for details.
Parameter specification variables can be introspected. e.g.:
P.__name__ == 'T'
P.__bound__ == None
P.__covariant__ == False
P.__contravariant__ == False
Note that only parameter specification variables defined in global scope can
be pickled.
"""
# Trick Generic __parameters__.
__class__ = typing.TypeVar
@property
def args(self):
return ParamSpecArgs(self)
@property
def kwargs(self):
return ParamSpecKwargs(self)
def __init__(self, name, *, bound=None, covariant=False, contravariant=False):
super().__init__([self])
self.__name__ = name
self.__covariant__ = bool(covariant)
self.__contravariant__ = bool(contravariant)
if bound:
self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
else:
self.__bound__ = None
# for pickling:
try:
def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
def_mod = None
if def_mod != 'spack.vendor.typing_extensions':
self.__module__ = def_mod
def __repr__(self):
if self.__covariant__:
prefix = '+'
elif self.__contravariant__:
prefix = '-'
else:
prefix = '~'
return prefix + self.__name__
def __hash__(self):
return object.__hash__(self)
def __eq__(self, other):
return self is other
def __reduce__(self):
return self.__name__
# Hack to get typing._type_check to pass.
def __call__(self, *args, **kwargs):
pass
if not PEP_560:
# Only needed in 3.6.
def _get_type_vars(self, tvars):
if self not in tvars:
tvars.append(self)
# 3.6-3.9
if not hasattr(typing, 'Concatenate'):
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
class _ConcatenateGenericAlias(list):
# Trick Generic into looking into this for __parameters__.
if PEP_560:
__class__ = typing._GenericAlias
else:
__class__ = typing._TypingBase
# Flag in 3.8.
_special = False
# Attribute in 3.6 and earlier.
_gorg = typing.Generic
def __init__(self, origin, args):
super().__init__(args)
self.__origin__ = origin
self.__args__ = args
def __repr__(self):
_type_repr = typing._type_repr
return (f'{_type_repr(self.__origin__)}'
f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
def __hash__(self):
return hash((self.__origin__, self.__args__))
# Hack to get typing._type_check to pass in Generic.
def __call__(self, *args, **kwargs):
pass
@property
def __parameters__(self):
return tuple(
tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
)
if not PEP_560:
# Only required in 3.6.
def _get_type_vars(self, tvars):
if self.__origin__ and self.__parameters__:
typing._get_type_vars(self.__parameters__, tvars)
# 3.6-3.9
@typing._tp_cache
def _concatenate_getitem(self, parameters):
if parameters == ():
raise TypeError("Cannot take a Concatenate of no types.")
if not isinstance(parameters, tuple):
parameters = (parameters,)
if not isinstance(parameters[-1], ParamSpec):
raise TypeError("The last parameter to Concatenate should be a "
"ParamSpec variable.")
msg = "Concatenate[arg, ...]: each arg must be a type."
parameters = tuple(typing._type_check(p, msg) for p in parameters)
return _ConcatenateGenericAlias(self, parameters)
# 3.10+
if hasattr(typing, 'Concatenate'):
Concatenate = typing.Concatenate
_ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa
# 3.9
elif sys.version_info[:2] >= (3, 9):
@_TypeAliasForm
def Concatenate(self, parameters):
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612 for detailed information.
"""
return _concatenate_getitem(self, parameters)
# 3.7-8
elif sys.version_info[:2] >= (3, 7):
class _ConcatenateForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
def __getitem__(self, parameters):
return _concatenate_getitem(self, parameters)
Concatenate = _ConcatenateForm(
'Concatenate',
doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612 for detailed information.
""")
# 3.6
else:
class _ConcatenateAliasMeta(typing.TypingMeta):
"""Metaclass for Concatenate."""
def __repr__(self):
return 'spack.vendor.typing_extensions.Concatenate'
class _ConcatenateAliasBase(typing._FinalTypingBase,
metaclass=_ConcatenateAliasMeta,
_root=True):
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
higher order function which adds, removes or transforms parameters of a
callable.
For example::
Callable[Concatenate[int, P], int]
See PEP 612 for detailed information.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("Concatenate cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Concatenate cannot be used with issubclass().")
def __repr__(self):
return 'spack.vendor.typing_extensions.Concatenate'
def __getitem__(self, parameters):
return _concatenate_getitem(self, parameters)
Concatenate = _ConcatenateAliasBase(_root=True)
# 3.10+
if hasattr(typing, 'TypeGuard'):
TypeGuard = typing.TypeGuard
# 3.9
elif sys.version_info[:2] >= (3, 9):
class _TypeGuardForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
@_TypeGuardForm
def TypeGuard(self, parameters):
"""Special typing form used to annotate the return type of a user-defined
type guard function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
as a type guard. Such a function should use ``TypeGuard[...]`` as its
return type to alert static type checkers to this intention.
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
For example::
def is_str(val: Union[str, float]):
# "isinstance" type guard
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
else:
# Else, type of ``val`` is narrowed to ``float``.
...
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
form of ``TypeA`` (it can even be a wider form) and this may lead to
type-unsafe results. The main reason is to allow for things like
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
a subtype of the former, since ``List`` is invariant. The responsibility of
writing type-safe type guards is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
"""
item = typing._type_check(parameters, f'{self} accepts only single type.')
return typing._GenericAlias(self, (item,))
# 3.7-3.8
elif sys.version_info[:2] >= (3, 7):
class _TypeGuardForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
f'{self._name} accepts only a single type')
return typing._GenericAlias(self, (item,))
TypeGuard = _TypeGuardForm(
'TypeGuard',
doc="""Special typing form used to annotate the return type of a user-defined
type guard function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
as a type guard. Such a function should use ``TypeGuard[...]`` as its
return type to alert static type checkers to this intention.
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
For example::
def is_str(val: Union[str, float]):
# "isinstance" type guard
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
else:
# Else, type of ``val`` is narrowed to ``float``.
...
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
form of ``TypeA`` (it can even be a wider form) and this may lead to
type-unsafe results. The main reason is to allow for things like
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
a subtype of the former, since ``List`` is invariant. The responsibility of
writing type-safe type guards is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
""")
# 3.6
else:
class _TypeGuard(typing._FinalTypingBase, _root=True):
"""Special typing form used to annotate the return type of a user-defined
type guard function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
as a type guard. Such a function should use ``TypeGuard[...]`` as its
return type to alert static type checkers to this intention.
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
For example::
def is_str(val: Union[str, float]):
# "isinstance" type guard
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
else:
# Else, type of ``val`` is narrowed to ``float``.
...
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
form of ``TypeA`` (it can even be a wider form) and this may lead to
type-unsafe results. The main reason is to allow for things like
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
a subtype of the former, since ``List`` is invariant. The responsibility of
writing type-safe type guards is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
f'{cls.__name__[1:]} accepts only a single type.'),
_root=True)
raise TypeError(f'{cls.__name__[1:]} cannot be further subscripted')
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += f'[{typing._type_repr(self.__type__)}]'
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _TypeGuard):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
TypeGuard = _TypeGuard(_root=True)
if sys.version_info[:2] >= (3, 7):
# Vendored from cpython typing._SpecialFrom
class _SpecialForm(typing._Final, _root=True):
__slots__ = ('_name', '__doc__', '_getitem')
def __init__(self, getitem):
self._getitem = getitem
self._name = getitem.__name__
self.__doc__ = getitem.__doc__
def __getattr__(self, item):
if item in {'__name__', '__qualname__'}:
return self._name
raise AttributeError(item)
def __mro_entries__(self, bases):
raise TypeError(f"Cannot subclass {self!r}")
def __repr__(self):
return f'spack.vendor.typing_extensions.{self._name}'
def __reduce__(self):
return self._name
def __call__(self, *args, **kwds):
raise TypeError(f"Cannot instantiate {self!r}")
def __or__(self, other):
return typing.Union[self, other]
def __ror__(self, other):
return typing.Union[other, self]
def __instancecheck__(self, obj):
raise TypeError(f"{self} cannot be used with isinstance()")
def __subclasscheck__(self, cls):
raise TypeError(f"{self} cannot be used with issubclass()")
@typing._tp_cache
def __getitem__(self, parameters):
return self._getitem(self, parameters)
if hasattr(typing, "LiteralString"):
LiteralString = typing.LiteralString
elif sys.version_info[:2] >= (3, 7):
@_SpecialForm
def LiteralString(self, params):
"""Represents an arbitrary literal string.
Example::
from spack.vendor.typing_extensions import LiteralString
def query(sql: LiteralString) -> ...:
...
query("SELECT * FROM table") # ok
query(f"SELECT * FROM {input()}") # not ok
See PEP 675 for details.
"""
raise TypeError(f"{self} is not subscriptable")
else:
class _LiteralString(typing._FinalTypingBase, _root=True):
"""Represents an arbitrary literal string.
Example::
from spack.vendor.typing_extensions import LiteralString
def query(sql: LiteralString) -> ...:
...
query("SELECT * FROM table") # ok
query(f"SELECT * FROM {input()}") # not ok
See PEP 675 for details.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError(f"{self} cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError(f"{self} cannot be used with issubclass().")
LiteralString = _LiteralString(_root=True)
if hasattr(typing, "Self"):
Self = typing.Self
elif sys.version_info[:2] >= (3, 7):
@_SpecialForm
def Self(self, params):
"""Used to spell the type of "self" in classes.
Example::
from typing import Self
class ReturnsSelf:
def parse(self, data: bytes) -> Self:
...
return self
"""
raise TypeError(f"{self} is not subscriptable")
else:
class _Self(typing._FinalTypingBase, _root=True):
"""Used to spell the type of "self" in classes.
Example::
from typing import Self
class ReturnsSelf:
def parse(self, data: bytes) -> Self:
...
return self
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError(f"{self} cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError(f"{self} cannot be used with issubclass().")
Self = _Self(_root=True)
if hasattr(typing, "Never"):
Never = typing.Never
elif sys.version_info[:2] >= (3, 7):
@_SpecialForm
def Never(self, params):
"""The bottom type, a type that has no members.
This can be used to define a function that should never be
called, or a function that never returns::
from spack.vendor.typing_extensions import Never
def never_call_me(arg: Never) -> None:
pass
def int_or_str(arg: int | str) -> None:
never_call_me(arg) # type checker error
match arg:
case int():
print("It's an int")
case str():
print("It's a str")
case _:
never_call_me(arg) # ok, arg is of type Never
"""
raise TypeError(f"{self} is not subscriptable")
else:
class _Never(typing._FinalTypingBase, _root=True):
"""The bottom type, a type that has no members.
This can be used to define a function that should never be
called, or a function that never returns::
from spack.vendor.typing_extensions import Never
def never_call_me(arg: Never) -> None:
pass
def int_or_str(arg: int | str) -> None:
never_call_me(arg) # type checker error
match arg:
case int():
print("It's an int")
case str():
print("It's a str")
case _:
never_call_me(arg) # ok, arg is of type Never
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError(f"{self} cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError(f"{self} cannot be used with issubclass().")
Never = _Never(_root=True)
if hasattr(typing, 'Required'):
Required = typing.Required
NotRequired = typing.NotRequired
elif sys.version_info[:2] >= (3, 9):
class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
@_ExtensionsSpecialForm
def Required(self, parameters):
"""A special typing construct to mark a key of a total=False TypedDict
as required. For example:
class Movie(TypedDict, total=False):
title: Required[str]
year: int
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
There is no runtime checking that a required key is actually provided
when instantiating a related TypedDict.
"""
item = typing._type_check(parameters, f'{self._name} accepts only single type')
return typing._GenericAlias(self, (item,))
@_ExtensionsSpecialForm
def NotRequired(self, parameters):
"""A special typing construct to mark a key of a TypedDict as
potentially missing. For example:
class Movie(TypedDict):
title: str
year: NotRequired[int]
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
"""
item = typing._type_check(parameters, f'{self._name} accepts only single type')
return typing._GenericAlias(self, (item,))
elif sys.version_info[:2] >= (3, 7):
class _RequiredForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
'{} accepts only single type'.format(self._name))
return typing._GenericAlias(self, (item,))
Required = _RequiredForm(
'Required',
doc="""A special typing construct to mark a key of a total=False TypedDict
as required. For example:
class Movie(TypedDict, total=False):
title: Required[str]
year: int
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
There is no runtime checking that a required key is actually provided
when instantiating a related TypedDict.
""")
NotRequired = _RequiredForm(
'NotRequired',
doc="""A special typing construct to mark a key of a TypedDict as
potentially missing. For example:
class Movie(TypedDict):
title: str
year: NotRequired[int]
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
""")
else:
# NOTE: Modeled after _Final's implementation when _FinalTypingBase available
class _MaybeRequired(typing._FinalTypingBase, _root=True):
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'{} accepts only single type.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, type(self)):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
class _Required(_MaybeRequired, _root=True):
"""A special typing construct to mark a key of a total=False TypedDict
as required. For example:
class Movie(TypedDict, total=False):
title: Required[str]
year: int
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
There is no runtime checking that a required key is actually provided
when instantiating a related TypedDict.
"""
class _NotRequired(_MaybeRequired, _root=True):
"""A special typing construct to mark a key of a TypedDict as
potentially missing. For example:
class Movie(TypedDict):
title: str
year: NotRequired[int]
m = Movie(
title='The Matrix', # typechecker error if key is omitted
year=1999,
)
"""
Required = _Required(_root=True)
NotRequired = _NotRequired(_root=True)
if sys.version_info[:2] >= (3, 9):
class _UnpackSpecialForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
class _UnpackAlias(typing._GenericAlias, _root=True):
__class__ = typing.TypeVar
@_UnpackSpecialForm
def Unpack(self, parameters):
"""A special typing construct to unpack a variadic type. For example:
Shape = TypeVarTuple('Shape')
Batch = NewType('Batch', int)
def add_batch_axis(
x: Array[Unpack[Shape]]
) -> Array[Batch, Unpack[Shape]]: ...
"""
item = typing._type_check(parameters, f'{self._name} accepts only single type')
return _UnpackAlias(self, (item,))
def _is_unpack(obj):
return isinstance(obj, _UnpackAlias)
elif sys.version_info[:2] >= (3, 7):
class _UnpackAlias(typing._GenericAlias, _root=True):
__class__ = typing.TypeVar
class _UnpackForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'spack.vendor.typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
f'{self._name} accepts only single type')
return _UnpackAlias(self, (item,))
Unpack = _UnpackForm(
'Unpack',
doc="""A special typing construct to unpack a variadic type. For example:
Shape = TypeVarTuple('Shape')
Batch = NewType('Batch', int)
def add_batch_axis(
x: Array[Unpack[Shape]]
) -> Array[Batch, Unpack[Shape]]: ...
""")
def _is_unpack(obj):
return isinstance(obj, _UnpackAlias)
else:
# NOTE: Modeled after _Final's implementation when _FinalTypingBase available
class _Unpack(typing._FinalTypingBase, _root=True):
"""A special typing construct to unpack a variadic type. For example:
Shape = TypeVarTuple('Shape')
Batch = NewType('Batch', int)
def add_batch_axis(
x: Array[Unpack[Shape]]
) -> Array[Batch, Unpack[Shape]]: ...
"""
__slots__ = ('__type__',)
__class__ = typing.TypeVar
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'Unpack accepts only single type.'),
_root=True)
raise TypeError('Unpack cannot be further subscripted')
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _Unpack):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
# For 3.6 only
def _get_type_vars(self, tvars):
self.__type__._get_type_vars(tvars)
Unpack = _Unpack(_root=True)
def _is_unpack(obj):
return isinstance(obj, _Unpack)
| _ExtensionsGenericMeta |
python | facebook__pyre-check | tools/playground/application.py | {
"start": 2272,
"end": 4756
} | class ____:
def __init__(self) -> None:
self._directory: Path = Path(tempfile.mkdtemp())
LOG.debug(f"Starting server in `{self._directory}`...")
pyre_configuration = json.dumps(
{
"source_directories": ["."],
}
)
LOG.debug(f"Writing configuration:\n{pyre_configuration}")
pyre_configuration_path = self._directory / PYRE_CONFIG_FILE
pyre_configuration_path.write_text(pyre_configuration)
LOG.debug("Writing watchman configuration")
watchman_configuration_path = self._directory / WATCHMAN_CONFIG_FILE
watchman_configuration_path.write_text("{}\n")
LOG.debug("Initializing the code")
code_path = self._directory / INPUT_FILE
code_path.write_text("x = 0\n")
LOG.debug("Starting watchman")
subprocess.check_call(["watchman", "watch", str(self._directory)])
LOG.debug("Priming the server")
subprocess.check_call(
["pyre", "--noninteractive", "--sequential"],
cwd=self._directory,
)
def check(self, input: str) -> Response:
LOG.debug("Running pyre check")
code_path = self._directory / INPUT_FILE
code_path.write_text(input)
with subprocess.Popen(
["pyre", "--output=json", "--noninteractive", "--sequential"],
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
cwd=self._directory,
text=True,
) as process:
# pyre-fixme[6]: Expected `IO[bytes]` for 1st param but got
# `Optional[IO[typing.Any]]`.
stderr = _consume(process.stderr)
# pyre-fixme[6]: Expected `IO[bytes]` for 1st param but got
# `Optional[IO[typing.Any]]`.
stdout = _consume(process.stdout)
return_code = process.wait()
if return_code > 1:
LOG.error(f"Returning error: {stderr}")
result = jsonify(errors=[stderr])
else:
try:
errors = json.loads(stdout)
result = jsonify(data={"errors": errors, "stderr": stderr})
except BaseException:
LOG.error(f"Could not parse json from stdout: {stdout!r}")
result = jsonify(
errors=[f"Invalid json from pyre --output=json: {stdout!r}"],
)
return result
| Pyre |
python | airbytehq__airbyte | airbyte-integrations/connectors/source-github/source_github/github_schema.py | {
"start": 648286,
"end": 649076
} | class ____(sgqlc.types.relay.Connection):
"""The connection type for EnterpriseRepositoryInfo."""
__schema__ = github_schema
__field_names__ = ("edges", "nodes", "page_info", "total_count")
edges = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseRepositoryInfoEdge"), graphql_name="edges")
"""A list of edges."""
nodes = sgqlc.types.Field(sgqlc.types.list_of("EnterpriseRepositoryInfo"), graphql_name="nodes")
"""A list of nodes."""
page_info = sgqlc.types.Field(sgqlc.types.non_null("PageInfo"), graphql_name="pageInfo")
"""Information to aid in pagination."""
total_count = sgqlc.types.Field(sgqlc.types.non_null(Int), graphql_name="totalCount")
"""Identifies the total count of items in the connection."""
| EnterpriseRepositoryInfoConnection |
python | redis__redis-py | redis/commands/core.py | {
"start": 121559,
"end": 125070
} | class ____(ScanCommands):
async def scan_iter(
self,
match: Union[PatternT, None] = None,
count: Optional[int] = None,
_type: Optional[str] = None,
**kwargs,
) -> AsyncIterator:
"""
Make an iterator using the SCAN command so that the client doesn't
need to remember the cursor position.
``match`` allows for filtering the keys by pattern
``count`` provides a hint to Redis about the number of keys to
return per batch.
``_type`` filters the returned values by a particular Redis type.
Stock Redis instances allow for the following types:
HASH, LIST, SET, STREAM, STRING, ZSET
Additionally, Redis modules can expose other types as well.
"""
cursor = "0"
while cursor != 0:
cursor, data = await self.scan(
cursor=cursor, match=match, count=count, _type=_type, **kwargs
)
for d in data:
yield d
async def sscan_iter(
self,
name: KeyT,
match: Union[PatternT, None] = None,
count: Optional[int] = None,
) -> AsyncIterator:
"""
Make an iterator using the SSCAN command so that the client doesn't
need to remember the cursor position.
``match`` allows for filtering the keys by pattern
``count`` allows for hint the minimum number of returns
"""
cursor = "0"
while cursor != 0:
cursor, data = await self.sscan(
name, cursor=cursor, match=match, count=count
)
for d in data:
yield d
async def hscan_iter(
self,
name: str,
match: Union[PatternT, None] = None,
count: Optional[int] = None,
no_values: Union[bool, None] = None,
) -> AsyncIterator:
"""
Make an iterator using the HSCAN command so that the client doesn't
need to remember the cursor position.
``match`` allows for filtering the keys by pattern
``count`` allows for hint the minimum number of returns
``no_values`` indicates to return only the keys, without values
"""
cursor = "0"
while cursor != 0:
cursor, data = await self.hscan(
name, cursor=cursor, match=match, count=count, no_values=no_values
)
if no_values:
for it in data:
yield it
else:
for it in data.items():
yield it
async def zscan_iter(
self,
name: KeyT,
match: Union[PatternT, None] = None,
count: Optional[int] = None,
score_cast_func: Union[type, Callable] = float,
) -> AsyncIterator:
"""
Make an iterator using the ZSCAN command so that the client doesn't
need to remember the cursor position.
``match`` allows for filtering the keys by pattern
``count`` allows for hint the minimum number of returns
``score_cast_func`` a callable used to cast the score return value
"""
cursor = "0"
while cursor != 0:
cursor, data = await self.zscan(
name,
cursor=cursor,
match=match,
count=count,
score_cast_func=score_cast_func,
)
for d in data:
yield d
| AsyncScanCommands |
python | allegroai__clearml | clearml/backend_api/services/v2_23/models.py | {
"start": 110944,
"end": 112079
} | class ____(Response):
"""
Response of models.make_public endpoint.
:param updated: Number of models updated
:type updated: int
"""
_service = "models"
_action = "make_public"
_version = "2.23"
_schema = {
"definitions": {},
"properties": {
"updated": {
"description": "Number of models updated",
"type": ["integer", "null"],
}
},
"type": "object",
}
def __init__(self, updated: Optional[int] = None, **kwargs: Any) -> None:
super(MakePublicResponse, self).__init__(**kwargs)
self.updated = updated
@schema_property("updated")
def updated(self) -> Optional[int]:
return self._property_updated
@updated.setter
def updated(self, value: Optional[int]) -> None:
if value is None:
self._property_updated = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "updated", six.integer_types)
self._property_updated = value
| MakePublicResponse |
python | langchain-ai__langchain | libs/langchain_v1/tests/integration_tests/cache/fake_embeddings.py | {
"start": 1007,
"end": 2108
} | class ____(FakeEmbeddings):
"""Consistent fake embeddings.
Fake embeddings which remember all the texts seen so far to return consistent
vectors for the same texts.
"""
def __init__(self, dimensionality: int = 10) -> None:
self.known_texts: list[str] = []
self.dimensionality = dimensionality
def embed_documents(self, texts: list[str]) -> list[list[float]]:
"""Return consistent embeddings for each text seen so far."""
out_vectors = []
for text in texts:
if text not in self.known_texts:
self.known_texts.append(text)
vector = [1.0] * (self.dimensionality - 1) + [
float(self.known_texts.index(text)),
]
out_vectors.append(vector)
return out_vectors
def embed_query(self, text: str) -> list[float]:
"""Return consistent embeddings.
Return consistent embeddings for the text, if seen before, or a constant
one if the text is unknown.
"""
return self.embed_documents([text])[0]
| ConsistentFakeEmbeddings |
python | pytorch__pytorch | test/quantization/core/experimental/test_bits.py | {
"start": 1639,
"end": 3538
} | class ____(TestCase):
@skipIfRocm
def test_types(self, device):
bits_types = [torch.bits1x8, torch.bits2x4, torch.bits4x2, torch.bits8, torch.bits16]
for bits_type in bits_types:
_ = torch.zeros(20, dtype=torch.int32, device=device).view(bits_type)
_ = torch.empty(20, dtype=bits_type, device=device)
x = torch.randint(100, (20, 20), dtype=torch.int8, device=device).view(bits_type)
y = x.t().contiguous()
view_type = torch.int8 if x.element_size() == 1 else torch.int16
self.assertEqual(x.t().view(view_type), y.view(view_type))
y = x.t().clone()
self.assertEqual(x.t().view(view_type), y.view(view_type))
def test_cat(self, device):
bits_types = [torch.bits1x8, torch.bits2x4, torch.bits4x2, torch.bits8, torch.bits16]
for bits_type in bits_types:
view_type = torch.int8 if bits_type.itemsize == 1 else torch.int16
x_int = torch.randint(100, (512, 512), dtype=view_type, device=device)
x = x_int.view(bits_type)
y_int = torch.randint(100, (512, 512), dtype=view_type, device=device)
y = y_int.view(bits_type)
for dim, transpose in itertools.product(range(x_int.ndim), (True, False)):
y_ref = y_int.t() if transpose else y_int
y_b = y.t() if transpose else y
z_ref = torch.cat([x_int, y_ref], dim=dim)
z = torch.cat([x, y_b], dim=dim)
self.assertEqual(z_ref, z.view(view_type))
def test_subclass(self):
t = torch.zeros(20, dtype=torch.int16).view(torch.bits16)
s = Int16Tensor(t)
s = s + 1 - 1
self.assertTrue(torch.allclose(s, torch.zeros(20, dtype=torch.bits16)))
instantiate_device_type_tests(TestBits, globals())
if __name__ == '__main__':
run_tests()
| TestBits |
python | py-pdf__pypdf | pypdf/_doc_common.py | {
"start": 8636,
"end": 51883
} | class ____:
"""
Common functions from PdfWriter and PdfReader objects.
This root class is strongly abstracted.
"""
strict: bool = False # default
flattened_pages: Optional[list[PageObject]] = None
_encryption: Optional[Encryption] = None
_readonly: bool = False
@property
@abstractmethod
def root_object(self) -> DictionaryObject:
... # pragma: no cover
@property
@abstractmethod
def pdf_header(self) -> str:
... # pragma: no cover
@abstractmethod
def get_object(
self, indirect_reference: Union[int, IndirectObject]
) -> Optional[PdfObject]:
... # pragma: no cover
@abstractmethod
def _replace_object(self, indirect: IndirectObject, obj: PdfObject) -> PdfObject:
... # pragma: no cover
@property
@abstractmethod
def _info(self) -> Optional[DictionaryObject]:
... # pragma: no cover
@property
def metadata(self) -> Optional[DocumentInformation]:
"""
Retrieve the PDF file's document information dictionary, if it exists.
Note that some PDF files use metadata streams instead of document
information dictionaries, and these metadata streams will not be
accessed by this function.
"""
retval = DocumentInformation()
if self._info is None:
return None
retval.update(self._info)
return retval
@property
def xmp_metadata(self) -> Optional[XmpInformation]:
... # pragma: no cover
@property
def viewer_preferences(self) -> Optional[ViewerPreferences]:
"""Returns the existing ViewerPreferences as an overloaded dictionary."""
o = self.root_object.get(CD.VIEWER_PREFERENCES, None)
if o is None:
return None
o = o.get_object()
if not isinstance(o, ViewerPreferences):
o = ViewerPreferences(o)
if hasattr(o, "indirect_reference") and o.indirect_reference is not None:
self._replace_object(o.indirect_reference, o)
else:
self.root_object[NameObject(CD.VIEWER_PREFERENCES)] = o
return o
def get_num_pages(self) -> int:
"""
Calculate the number of pages in this PDF file.
Returns:
The number of pages of the parsed PDF file.
Raises:
PdfReadError: If restrictions prevent this action.
"""
# Flattened pages will not work on an encrypted PDF;
# the PDF file's page count is used in this case. Otherwise,
# the original method (flattened page count) is used.
if self.is_encrypted:
return self.root_object["/Pages"]["/Count"] # type: ignore
if self.flattened_pages is None:
self._flatten(self._readonly)
assert self.flattened_pages is not None
return len(self.flattened_pages)
def get_page(self, page_number: int) -> PageObject:
"""
Retrieve a page by number from this PDF file.
Most of the time ``.pages[page_number]`` is preferred.
Args:
page_number: The page number to retrieve
(pages begin at zero)
Returns:
A :class:`PageObject<pypdf._page.PageObject>` instance.
"""
if self.flattened_pages is None:
self._flatten(self._readonly)
assert self.flattened_pages is not None, "hint for mypy"
return self.flattened_pages[page_number]
def _get_page_in_node(
self,
page_number: int,
) -> tuple[DictionaryObject, int]:
"""
Retrieve the node and position within the /Kids containing the page.
If page_number is greater than the number of pages, it returns the top node, -1.
"""
top = cast(DictionaryObject, self.root_object["/Pages"])
def recursive_call(
node: DictionaryObject, mi: int
) -> tuple[Optional[PdfObject], int]:
ma = cast(int, node.get("/Count", 1)) # default 1 for /Page types
if node["/Type"] == "/Page":
if page_number == mi:
return node, -1
return None, mi + 1
if (page_number - mi) >= ma: # not in nodes below
if node == top:
return top, -1
return None, mi + ma
for idx, kid in enumerate(cast(ArrayObject, node["/Kids"])):
kid = cast(DictionaryObject, kid.get_object())
n, i = recursive_call(kid, mi)
if n is not None: # page has just been found ...
if i < 0: # ... just below!
return node, idx
# ... at lower levels
return n, i
mi = i
raise PyPdfError("Unexpectedly cannot find the node.")
node, idx = recursive_call(top, 0)
assert isinstance(node, DictionaryObject), "mypy"
return node, idx
@property
def named_destinations(self) -> dict[str, Destination]:
"""A read-only dictionary which maps names to destinations."""
return self._get_named_destinations()
def get_named_dest_root(self) -> ArrayObject:
named_dest = ArrayObject()
if CA.NAMES in self.root_object and isinstance(
self.root_object[CA.NAMES], DictionaryObject
):
names = cast(DictionaryObject, self.root_object[CA.NAMES])
if CA.DESTS in names and isinstance(names[CA.DESTS], DictionaryObject):
# §3.6.3 Name Dictionary (PDF spec 1.7)
dests = cast(DictionaryObject, names[CA.DESTS])
dests_ref = dests.indirect_reference
if CA.NAMES in dests:
# §7.9.6, entries in a name tree node dictionary
named_dest = cast(ArrayObject, dests[CA.NAMES])
else:
named_dest = ArrayObject()
dests[NameObject(CA.NAMES)] = named_dest
elif hasattr(self, "_add_object"):
dests = DictionaryObject()
dests_ref = self._add_object(dests)
names[NameObject(CA.DESTS)] = dests_ref
dests[NameObject(CA.NAMES)] = named_dest
elif hasattr(self, "_add_object"):
names = DictionaryObject()
names_ref = self._add_object(names)
self.root_object[NameObject(CA.NAMES)] = names_ref
dests = DictionaryObject()
dests_ref = self._add_object(dests)
names[NameObject(CA.DESTS)] = dests_ref
dests[NameObject(CA.NAMES)] = named_dest
return named_dest
## common
def _get_named_destinations(
self,
tree: Union[TreeObject, None] = None,
retval: Optional[dict[str, Destination]] = None,
) -> dict[str, Destination]:
"""
Retrieve the named destinations present in the document.
Args:
tree: The current tree.
retval: The previously retrieved destinations for nested calls.
Returns:
A dictionary which maps names to destinations.
"""
if retval is None:
retval = {}
catalog = self.root_object
# get the name tree
if CA.DESTS in catalog:
tree = cast(TreeObject, catalog[CA.DESTS])
elif CA.NAMES in catalog:
names = cast(DictionaryObject, catalog[CA.NAMES])
if CA.DESTS in names:
tree = cast(TreeObject, names[CA.DESTS])
if is_null_or_none(tree):
return retval
assert tree is not None, "mypy"
if PagesAttributes.KIDS in tree:
# recurse down the tree
for kid in cast(ArrayObject, tree[PagesAttributes.KIDS]):
self._get_named_destinations(kid.get_object(), retval)
# §7.9.6, entries in a name tree node dictionary
elif CA.NAMES in tree: # /Kids and /Names are exclusives (§7.9.6)
names = cast(DictionaryObject, tree[CA.NAMES])
i = 0
while i < len(names):
key = names[i].get_object()
i += 1
if not isinstance(key, (bytes, str)):
continue
try:
value = names[i].get_object()
except IndexError:
break
i += 1
if isinstance(value, DictionaryObject):
if "/D" in value:
value = value["/D"]
else:
continue
dest = self._build_destination(key, value)
if dest is not None:
retval[cast(str, dest["/Title"])] = dest
# Remain backwards-compatible.
retval[str(key)] = dest
else: # case where Dests is in root catalog (PDF 1.7 specs, §2 about PDF 1.1)
for k__, v__ in tree.items():
val = v__.get_object()
if isinstance(val, DictionaryObject):
if "/D" in val:
val = val["/D"].get_object()
else:
continue
dest = self._build_destination(k__, val)
if dest is not None:
retval[k__] = dest
return retval
# A select group of relevant field attributes. For the complete list,
# see §12.3.2 of the PDF 1.7 or PDF 2.0 specification.
def get_fields(
self,
tree: Optional[TreeObject] = None,
retval: Optional[dict[Any, Any]] = None,
fileobj: Optional[Any] = None,
stack: Optional[list[PdfObject]] = None,
) -> Optional[dict[str, Any]]:
"""
Extract field data if this PDF contains interactive form fields.
The *tree*, *retval*, *stack* parameters are for recursive use.
Args:
tree: Current object to parse.
retval: In-progress list of fields.
fileobj: A file object (usually a text file) to write
a report to on all interactive form fields found.
stack: List of already parsed objects.
Returns:
A dictionary where each key is a field name, and each
value is a :class:`Field<pypdf.generic.Field>` object. By
default, the mapping name is used for keys.
``None`` if form data could not be located.
"""
field_attributes = FA.attributes_dict()
field_attributes.update(CheckboxRadioButtonAttributes.attributes_dict())
if retval is None:
retval = {}
catalog = self.root_object
stack = []
# get the AcroForm tree
if CD.ACRO_FORM in catalog:
tree = cast(Optional[TreeObject], catalog[CD.ACRO_FORM])
else:
return None
if tree is None:
return retval
assert stack is not None
if "/Fields" in tree:
fields = cast(ArrayObject, tree["/Fields"])
for f in fields:
field = f.get_object()
self._build_field(field, retval, fileobj, field_attributes, stack)
elif any(attr in tree for attr in field_attributes):
# Tree is a field
self._build_field(tree, retval, fileobj, field_attributes, stack)
return retval
def _get_qualified_field_name(self, parent: DictionaryObject) -> str:
if "/TM" in parent:
return cast(str, parent["/TM"])
if "/Parent" in parent:
return (
self._get_qualified_field_name(
cast(DictionaryObject, parent["/Parent"])
)
+ "."
+ cast(str, parent.get("/T", ""))
)
return cast(str, parent.get("/T", ""))
def _build_field(
self,
field: Union[TreeObject, DictionaryObject],
retval: dict[Any, Any],
fileobj: Any,
field_attributes: Any,
stack: list[PdfObject],
) -> None:
if all(attr not in field for attr in ("/T", "/TM")):
return
key = self._get_qualified_field_name(field)
if fileobj:
self._write_field(fileobj, field, field_attributes)
fileobj.write("\n")
retval[key] = Field(field)
obj = retval[key].indirect_reference.get_object() # to get the full object
if obj.get(FA.FT, "") == "/Ch":
retval[key][NameObject("/_States_")] = obj[NameObject(FA.Opt)]
if obj.get(FA.FT, "") == "/Btn" and "/AP" in obj:
# Checkbox
retval[key][NameObject("/_States_")] = ArrayObject(
list(obj["/AP"]["/N"].keys())
)
if "/Off" not in retval[key]["/_States_"]:
retval[key][NameObject("/_States_")].append(NameObject("/Off"))
elif obj.get(FA.FT, "") == "/Btn" and obj.get(FA.Ff, 0) & FA.FfBits.Radio != 0:
states: list[str] = []
retval[key][NameObject("/_States_")] = ArrayObject(states)
for k in obj.get(FA.Kids, {}):
k = k.get_object()
for s in list(k["/AP"]["/N"].keys()):
if s not in states:
states.append(s)
retval[key][NameObject("/_States_")] = ArrayObject(states)
if (
obj.get(FA.Ff, 0) & FA.FfBits.NoToggleToOff != 0
and "/Off" in retval[key]["/_States_"]
):
del retval[key]["/_States_"][retval[key]["/_States_"].index("/Off")]
# at last for order
self._check_kids(field, retval, fileobj, stack)
def _check_kids(
self,
tree: Union[TreeObject, DictionaryObject],
retval: Any,
fileobj: Any,
stack: list[PdfObject],
) -> None:
if tree in stack:
logger_warning(
f"{self._get_qualified_field_name(tree)} already parsed", __name__
)
return
stack.append(tree)
if PagesAttributes.KIDS in tree:
# recurse down the tree
for kid in tree[PagesAttributes.KIDS]: # type: ignore
kid = kid.get_object()
self.get_fields(kid, retval, fileobj, stack)
def _write_field(self, fileobj: Any, field: Any, field_attributes: Any) -> None:
field_attributes_tuple = FA.attributes()
field_attributes_tuple = (
field_attributes_tuple + CheckboxRadioButtonAttributes.attributes()
)
for attr in field_attributes_tuple:
if attr in (
FA.Kids,
FA.AA,
):
continue
attr_name = field_attributes[attr]
try:
if attr == FA.FT:
# Make the field type value clearer
types = {
"/Btn": "Button",
"/Tx": "Text",
"/Ch": "Choice",
"/Sig": "Signature",
}
if field[attr] in types:
fileobj.write(f"{attr_name}: {types[field[attr]]}\n")
elif attr == FA.Parent:
# Let's just write the name of the parent
try:
name = field[attr][FA.TM]
except KeyError:
name = field[attr][FA.T]
fileobj.write(f"{attr_name}: {name}\n")
else:
fileobj.write(f"{attr_name}: {field[attr]}\n")
except KeyError:
# Field attribute is N/A or unknown, so don't write anything
pass
def get_form_text_fields(self, full_qualified_name: bool = False) -> dict[str, Any]:
"""
Retrieve form fields from the document with textual data.
Args:
full_qualified_name: to get full name
Returns:
A dictionary. The key is the name of the form field,
the value is the content of the field.
If the document contains multiple form fields with the same name, the
second and following will get the suffix .2, .3, ...
"""
def indexed_key(k: str, fields: dict[Any, Any]) -> str:
if k not in fields:
return k
return (
k
+ "."
+ str(sum(1 for kk in fields if kk.startswith(k + ".")) + 2)
)
# Retrieve document form fields
formfields = self.get_fields()
if formfields is None:
return {}
ff = {}
for field, value in formfields.items():
if value.get("/FT") == "/Tx":
if full_qualified_name:
ff[field] = value.get("/V")
else:
ff[indexed_key(cast(str, value["/T"]), ff)] = value.get("/V")
return ff
def get_pages_showing_field(
self, field: Union[Field, PdfObject, IndirectObject]
) -> list[PageObject]:
"""
Provides list of pages where the field is called.
Args:
field: Field Object, PdfObject or IndirectObject referencing a Field
Returns:
List of pages:
- Empty list:
The field has no widgets attached
(either hidden field or ancestor field).
- Single page list:
Page where the widget is present
(most common).
- Multi-page list:
Field with multiple kids widgets
(example: radio buttons, field repeated on multiple pages).
"""
def _get_inherited(obj: DictionaryObject, key: str) -> Any:
if key in obj:
return obj[key]
if "/Parent" in obj:
return _get_inherited(
cast(DictionaryObject, obj["/Parent"].get_object()), key
)
return None
try:
# to cope with all types
field = cast(DictionaryObject, field.indirect_reference.get_object()) # type: ignore
except Exception as exc:
raise ValueError("Field type is invalid") from exc
if is_null_or_none(_get_inherited(field, "/FT")):
raise ValueError("Field is not valid")
ret = []
if field.get("/Subtype", "") == "/Widget":
if "/P" in field:
ret = [field["/P"].get_object()]
else:
ret = [
p
for p in self.pages
if field.indirect_reference in p.get("/Annots", "")
]
else:
kids = field.get("/Kids", ())
for k in kids:
k = k.get_object()
if (k.get("/Subtype", "") == "/Widget") and ("/T" not in k):
# Kid that is just a widget, not a field:
if "/P" in k:
ret += [k["/P"].get_object()]
else:
ret += [
p
for p in self.pages
if k.indirect_reference in p.get("/Annots", "")
]
return [
x
if isinstance(x, PageObject)
else (self.pages[self._get_page_number_by_indirect(x.indirect_reference)]) # type: ignore
for x in ret
]
@property
def open_destination(
self,
) -> Union[None, Destination, TextStringObject, ByteStringObject]:
"""
Property to access the opening destination (``/OpenAction`` entry in
the PDF catalog). It returns ``None`` if the entry does not exist
or is not set.
Raises:
Exception: If a destination is invalid.
"""
if "/OpenAction" not in self.root_object:
return None
oa: Any = self.root_object["/OpenAction"]
if isinstance(oa, bytes): # pragma: no cover
oa = oa.decode()
if isinstance(oa, str):
return create_string_object(oa)
if isinstance(oa, ArrayObject):
try:
page, typ, *array = oa
fit = Fit(typ, tuple(array))
return Destination("OpenAction", page, fit)
except Exception as exc:
raise Exception(f"Invalid Destination {oa}: {exc}")
else:
return None
@open_destination.setter
def open_destination(self, dest: Union[None, str, Destination, PageObject]) -> None:
raise NotImplementedError("No setter for open_destination")
@property
def outline(self) -> OutlineType:
"""
Read-only property for the outline present in the document
(i.e., a collection of 'outline items' which are also known as
'bookmarks').
"""
return self._get_outline()
def _get_outline(
self, node: Optional[DictionaryObject] = None, outline: Optional[Any] = None
) -> OutlineType:
if outline is None:
outline = []
catalog = self.root_object
# get the outline dictionary and named destinations
if CO.OUTLINES in catalog:
lines = cast(DictionaryObject, catalog[CO.OUTLINES])
if isinstance(lines, NullObject):
return outline
# §12.3.3 Document outline, entries in the outline dictionary
if not is_null_or_none(lines) and "/First" in lines:
node = cast(DictionaryObject, lines["/First"])
self._named_destinations = self._get_named_destinations()
if node is None:
return outline
# see if there are any more outline items
while True:
outline_obj = self._build_outline_item(node)
if outline_obj:
outline.append(outline_obj)
# check for sub-outline
if "/First" in node:
sub_outline: list[Any] = []
self._get_outline(cast(DictionaryObject, node["/First"]), sub_outline)
if sub_outline:
outline.append(sub_outline)
if "/Next" not in node:
break
node = cast(DictionaryObject, node["/Next"])
return outline
@property
def threads(self) -> Optional[ArrayObject]:
"""
Read-only property for the list of threads.
See §12.4.3 from the PDF 1.7 or 2.0 specification.
It is an array of dictionaries with "/F" (the first bead in the thread)
and "/I" (a thread information dictionary containing information about
the thread, such as its title, author, and creation date) properties or
None if there are no articles.
Since PDF 2.0 it can also contain an indirect reference to a metadata
stream containing information about the thread, such as its title,
author, and creation date.
"""
catalog = self.root_object
if CO.THREADS in catalog:
return cast("ArrayObject", catalog[CO.THREADS])
return None
@abstractmethod
def _get_page_number_by_indirect(
self, indirect_reference: Union[None, int, NullObject, IndirectObject]
) -> Optional[int]:
... # pragma: no cover
def get_page_number(self, page: PageObject) -> Optional[int]:
"""
Retrieve page number of a given PageObject.
Args:
page: The page to get page number. Should be
an instance of :class:`PageObject<pypdf._page.PageObject>`
Returns:
The page number or None if page is not found
"""
return self._get_page_number_by_indirect(page.indirect_reference)
def get_destination_page_number(self, destination: Destination) -> Optional[int]:
"""
Retrieve page number of a given Destination object.
Args:
destination: The destination to get page number.
Returns:
The page number or None if page is not found
"""
return self._get_page_number_by_indirect(destination.page)
def _build_destination(
self,
title: Union[str, bytes],
array: Optional[
list[
Union[NumberObject, IndirectObject, None, NullObject, DictionaryObject]
]
],
) -> Destination:
page, typ = None, None
# handle outline items with missing or invalid destination
if (
isinstance(array, (NullObject, str))
or (isinstance(array, ArrayObject) and len(array) == 0)
or array is None
):
page = NullObject()
return Destination(title, page, Fit.fit())
page, typ, *array = array # type: ignore
try:
return Destination(title, page, Fit(fit_type=typ, fit_args=array)) # type: ignore
except PdfReadError:
logger_warning(f"Unknown destination: {title!r} {array}", __name__)
if self.strict:
raise
# create a link to first Page
tmp = self.pages[0].indirect_reference
indirect_reference = NullObject() if tmp is None else tmp
return Destination(title, indirect_reference, Fit.fit())
def _build_outline_item(self, node: DictionaryObject) -> Optional[Destination]:
dest, title, outline_item = None, None, None
# title required for valid outline
# §12.3.3, entries in an outline item dictionary
try:
title = cast("str", node["/Title"])
except KeyError:
if self.strict:
raise PdfReadError(f"Outline Entry Missing /Title attribute: {node!r}")
title = ""
if "/A" in node:
# Action, PDF 1.7 and PDF 2.0 §12.6 (only type GoTo supported)
action = cast(DictionaryObject, node["/A"])
action_type = cast(NameObject, action[GoToActionArguments.S])
if action_type == "/GoTo":
if GoToActionArguments.D in action:
dest = action[GoToActionArguments.D]
elif self.strict:
raise PdfReadError(f"Outline Action Missing /D attribute: {node!r}")
elif "/Dest" in node:
# Destination, PDF 1.7 and PDF 2.0 §12.3.2
dest = node["/Dest"]
# if array was referenced in another object, will be a dict w/ key "/D"
if isinstance(dest, DictionaryObject) and "/D" in dest:
dest = dest["/D"]
if isinstance(dest, ArrayObject):
outline_item = self._build_destination(title, dest)
elif isinstance(dest, str):
# named destination, addresses NameObject Issue #193
# TODO: Keep named destination instead of replacing it?
try:
outline_item = self._build_destination(
title, self._named_destinations[dest].dest_array
)
except KeyError:
# named destination not found in Name Dict
outline_item = self._build_destination(title, None)
elif dest is None:
# outline item not required to have destination or action
# PDFv1.7 Table 153
outline_item = self._build_destination(title, dest)
else:
if self.strict:
raise PdfReadError(f"Unexpected destination {dest!r}")
logger_warning(
f"Removed unexpected destination {dest!r} from destination",
__name__,
)
outline_item = self._build_destination(title, None)
# if outline item created, add color, format, and child count if present
if outline_item:
if "/C" in node:
# Color of outline item font in (R, G, B) with values ranging 0.0-1.0
outline_item[NameObject("/C")] = ArrayObject(FloatObject(c) for c in node["/C"]) # type: ignore
if "/F" in node:
# specifies style characteristics bold and/or italic
# with 1=italic, 2=bold, 3=both
outline_item[NameObject("/F")] = node["/F"]
if "/Count" in node:
# absolute value = num. visible children
# with positive = open/unfolded, negative = closed/folded
outline_item[NameObject("/Count")] = node["/Count"]
# if count is 0 we will consider it as open (to have available is_open)
outline_item[NameObject("/%is_open%")] = BooleanObject(
node.get("/Count", 0) >= 0
)
outline_item.node = node
try:
outline_item.indirect_reference = node.indirect_reference
except AttributeError:
pass
return outline_item
@property
def pages(self) -> list[PageObject]:
"""
Property that emulates a list of :class:`PageObject<pypdf._page.PageObject>`.
This property allows to get a page or a range of pages.
Note:
For PdfWriter only: Provides the capability to remove a page/range of
page from the list (using the del operator). Remember: Only the page
entry is removed, as the objects beneath can be used elsewhere. A
solution to completely remove them - if they are not used anywhere - is
to write to a buffer/temporary file and then load it into a new
PdfWriter.
"""
return _VirtualList(self.get_num_pages, self.get_page) # type: ignore
@property
def page_labels(self) -> list[str]:
"""
A list of labels for the pages in this document.
This property is read-only. The labels are in the order that the pages
appear in the document.
"""
return [page_index2page_label(self, i) for i in range(len(self.pages))]
@property
def page_layout(self) -> Optional[str]:
"""
Get the page layout currently being used.
.. list-table:: Valid ``layout`` values
:widths: 50 200
* - /NoLayout
- Layout explicitly not specified
* - /SinglePage
- Show one page at a time
* - /OneColumn
- Show one column at a time
* - /TwoColumnLeft
- Show pages in two columns, odd-numbered pages on the left
* - /TwoColumnRight
- Show pages in two columns, odd-numbered pages on the right
* - /TwoPageLeft
- Show two pages at a time, odd-numbered pages on the left
* - /TwoPageRight
- Show two pages at a time, odd-numbered pages on the right
"""
try:
return cast(NameObject, self.root_object[CD.PAGE_LAYOUT])
except KeyError:
return None
@property
def page_mode(self) -> Optional[PagemodeType]:
"""
Get the page mode currently being used.
.. list-table:: Valid ``mode`` values
:widths: 50 200
* - /UseNone
- Do not show outline or thumbnails panels
* - /UseOutlines
- Show outline (aka bookmarks) panel
* - /UseThumbs
- Show page thumbnails panel
* - /FullScreen
- Fullscreen view
* - /UseOC
- Show Optional Content Group (OCG) panel
* - /UseAttachments
- Show attachments panel
"""
try:
return self.root_object["/PageMode"] # type: ignore
except KeyError:
return None
def _flatten(
self,
list_only: bool = False,
pages: Union[None, DictionaryObject, PageObject] = None,
inherit: Optional[dict[str, Any]] = None,
indirect_reference: Optional[IndirectObject] = None,
) -> None:
"""
Process the document pages to ease searching.
Attributes of a page may inherit from ancestor nodes
in the page tree. Flattening means moving
any inheritance data into descendant nodes,
effectively removing the inheritance dependency.
Note: It is distinct from another use of "flattening" applied to PDFs.
Flattening a PDF also means combining all the contents into one single layer
and making the file less editable.
Args:
list_only: Will only list the pages within _flatten_pages.
pages:
inherit:
indirect_reference: Used recursively to flatten the /Pages object.
"""
inheritable_page_attributes = (
NameObject(PG.RESOURCES),
NameObject(PG.MEDIABOX),
NameObject(PG.CROPBOX),
NameObject(PG.ROTATE),
)
if inherit is None:
inherit = {}
if pages is None:
# Fix issue 327: set flattened_pages attribute only for
# decrypted file
catalog = self.root_object
pages = catalog.get("/Pages").get_object() # type: ignore
if not isinstance(pages, DictionaryObject):
raise PdfReadError("Invalid object in /Pages")
self.flattened_pages = []
if PagesAttributes.TYPE in pages:
t = cast(str, pages[PagesAttributes.TYPE])
# if the page tree node has no /Type, consider as a page if /Kids is also missing
elif PagesAttributes.KIDS not in pages:
t = "/Page"
else:
t = "/Pages"
if t == "/Pages":
for attr in inheritable_page_attributes:
if attr in pages:
inherit[attr] = pages[attr]
for page in cast(ArrayObject, pages[PagesAttributes.KIDS]):
addt = {}
if isinstance(page, IndirectObject):
addt["indirect_reference"] = page
obj = page.get_object()
if obj:
# damaged file may have invalid child in /Pages
try:
self._flatten(list_only, obj, inherit, **addt)
except RecursionError:
raise PdfReadError(
"Maximum recursion depth reached during page flattening."
)
elif t == "/Page":
for attr_in, value in inherit.items():
# if the page has its own value, it does not inherit the
# parent's value
if attr_in not in pages:
pages[attr_in] = value
page_obj = PageObject(self, indirect_reference)
if not list_only:
page_obj.update(pages)
# TODO: Could flattened_pages be None at this point?
self.flattened_pages.append(page_obj) # type: ignore
def remove_page(
self,
page: Union[int, PageObject, IndirectObject],
clean: bool = False,
) -> None:
"""
Remove page from pages list.
Args:
page:
* :class:`int`: Page number to be removed.
* :class:`~pypdf._page.PageObject`: page to be removed. If the page appears many times
only the first one will be removed.
* :class:`~pypdf.generic.IndirectObject`: Reference to page to be removed.
clean: replace PageObject with NullObject to prevent annotations
or destinations to reference a detached page.
"""
if self.flattened_pages is None:
self._flatten(self._readonly)
assert self.flattened_pages is not None
if isinstance(page, IndirectObject):
p = page.get_object()
if not isinstance(p, PageObject):
logger_warning("IndirectObject is not referencing a page", __name__)
return
page = p
if not isinstance(page, int):
try:
page = self.flattened_pages.index(page)
except ValueError:
logger_warning("Cannot find page in pages", __name__)
return
if not (0 <= page < len(self.flattened_pages)):
logger_warning("Page number is out of range", __name__)
return
ind = self.pages[page].indirect_reference
del self.pages[page]
if clean and ind is not None:
self._replace_object(ind, NullObject())
def _get_indirect_object(self, num: int, gen: int) -> Optional[PdfObject]:
"""
Used to ease development.
This is equivalent to generic.IndirectObject(num,gen,self).get_object()
Args:
num: The object number of the indirect object.
gen: The generation number of the indirect object.
Returns:
A PdfObject
"""
return IndirectObject(num, gen, self).get_object()
def decode_permissions(
self, permissions_code: int
) -> dict[str, bool]: # pragma: no cover
"""Take the permissions as an integer, return the allowed access."""
deprecation_with_replacement(
old_name="decode_permissions",
new_name="user_access_permissions",
removed_in="5.0.0",
)
permissions_mapping = {
"print": UserAccessPermissions.PRINT,
"modify": UserAccessPermissions.MODIFY,
"copy": UserAccessPermissions.EXTRACT,
"annotations": UserAccessPermissions.ADD_OR_MODIFY,
"forms": UserAccessPermissions.FILL_FORM_FIELDS,
# Do not fix typo, as part of official, but deprecated API.
"accessability": UserAccessPermissions.EXTRACT_TEXT_AND_GRAPHICS,
"assemble": UserAccessPermissions.ASSEMBLE_DOC,
"print_high_quality": UserAccessPermissions.PRINT_TO_REPRESENTATION,
}
return {
key: permissions_code & flag != 0
for key, flag in permissions_mapping.items()
}
@property
def user_access_permissions(self) -> Optional[UserAccessPermissions]:
"""Get the user access permissions for encrypted documents. Returns None if not encrypted."""
if self._encryption is None:
return None
return UserAccessPermissions(self._encryption.P)
@property
@abstractmethod
def is_encrypted(self) -> bool:
"""
Read-only boolean property showing whether this PDF file is encrypted.
Note that this property, if true, will remain true even after the
:meth:`decrypt()<pypdf.PdfReader.decrypt>` method is called.
"""
... # pragma: no cover
@property
def xfa(self) -> Optional[dict[str, Any]]:
tree: Optional[TreeObject] = None
retval: dict[str, Any] = {}
catalog = self.root_object
if "/AcroForm" not in catalog or not catalog["/AcroForm"]:
return None
tree = cast(TreeObject, catalog["/AcroForm"])
if "/XFA" in tree:
fields = cast(ArrayObject, tree["/XFA"])
i = iter(fields)
for f in i:
tag = f
f = next(i)
if isinstance(f, IndirectObject):
field = cast(Optional[EncodedStreamObject], f.get_object())
if field:
es = zlib.decompress(field._data)
retval[tag] = es
return retval
@property
def attachments(self) -> Mapping[str, list[bytes]]:
"""Mapping of attachment filenames to their content."""
return LazyDict(
{
name: (self._get_attachment_list, name)
for name in self._list_attachments()
}
)
@property
def attachment_list(self) -> Generator[EmbeddedFile, None, None]:
"""Iterable of attachment objects."""
yield from EmbeddedFile._load(self.root_object)
def _list_attachments(self) -> list[str]:
"""
Retrieves the list of filenames of file attachments.
Returns:
list of filenames
"""
names = []
for entry in self.attachment_list:
names.append(entry.name)
if (name := entry.alternative_name) != entry.name and name:
names.append(name)
return names
def _get_attachment_list(self, name: str) -> list[bytes]:
out = self._get_attachments(name)[name]
if isinstance(out, list):
return out
return [out]
def _get_attachments(
self, filename: Optional[str] = None
) -> dict[str, Union[bytes, list[bytes]]]:
"""
Retrieves all or selected file attachments of the PDF as a dictionary of file names
and the file data as a bytestring.
Args:
filename: If filename is None, then a dictionary of all attachments
will be returned, where the key is the filename and the value
is the content. Otherwise, a dictionary with just a single key
- the filename - and its content will be returned.
Returns:
dictionary of filename -> Union[bytestring or List[ByteString]]
If the filename exists multiple times a list of the different versions will be provided.
"""
attachments: dict[str, Union[bytes, list[bytes]]] = {}
for entry in self.attachment_list:
names = set()
alternative_name = entry.alternative_name
if filename is not None:
if filename in {entry.name, alternative_name}:
name = entry.name if filename == entry.name else alternative_name
names.add(name)
else:
continue
else:
names = {entry.name, alternative_name}
for name in names:
if name is None:
continue
if name in attachments:
if not isinstance(attachments[name], list):
attachments[name] = [attachments[name]] # type:ignore
attachments[name].append(entry.content) # type:ignore
else:
attachments[name] = entry.content
return attachments
@abstractmethod
def _repr_mimebundle_(
self,
include: Union[None, Iterable[str]] = None,
exclude: Union[None, Iterable[str]] = None,
) -> dict[str, Any]:
"""
Integration into Jupyter Notebooks.
This method returns a dictionary that maps a mime-type to its
representation.
.. seealso::
https://ipython.readthedocs.io/en/stable/config/integrating.html
"""
... # pragma: no cover
| PdfDocCommon |
python | eventlet__eventlet | tests/mysqldb_test.py | {
"start": 917,
"end": 6930
} | class ____(tests.LimitedTestCase):
TEST_TIMEOUT = 50
def setUp(self):
self._auth = tests.get_database_auth()['MySQLdb']
self.create_db()
self.connection = None
self.connection = MySQLdb.connect(**self._auth)
cursor = self.connection.cursor()
cursor.execute("""CREATE TABLE gargleblatz
(
a INTEGER
);""")
self.connection.commit()
cursor.close()
super().setUp()
def tearDown(self):
if self.connection:
self.connection.close()
self.drop_db()
super().tearDown()
@tests.skip_unless(mysql_requirement)
def create_db(self):
auth = self._auth.copy()
try:
self.drop_db()
except Exception:
pass
dbname = 'test_%d_%d' % (os.getpid(), int(time.time() * 1000))
db = MySQLdb.connect(**auth).cursor()
db.execute("create database " + dbname)
db.close()
self._auth['db'] = dbname
del db
def drop_db(self):
db = MySQLdb.connect(**self._auth).cursor()
db.execute("drop database IF EXISTS " + self._auth['db'])
db.close()
del db
def set_up_dummy_table(self, connection=None):
close_connection = False
if connection is None:
close_connection = True
if self.connection is None:
connection = MySQLdb.connect(**self._auth)
else:
connection = self.connection
cursor = connection.cursor()
cursor.execute(self.dummy_table_sql)
connection.commit()
cursor.close()
if close_connection:
connection.close()
dummy_table_sql = """CREATE TEMPORARY TABLE test_table
(
row_id INTEGER PRIMARY KEY AUTO_INCREMENT,
value_int INTEGER,
value_float FLOAT,
value_string VARCHAR(200),
value_uuid CHAR(36),
value_binary BLOB,
value_binary_string VARCHAR(200) BINARY,
value_enum ENUM('Y','N'),
created TIMESTAMP
) ENGINE=InnoDB;"""
def assert_cursor_yields(self, curs):
counter = [0]
def tick():
while True:
counter[0] += 1
eventlet.sleep()
gt = eventlet.spawn(tick)
curs.execute("select 1")
rows = curs.fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(len(rows[0]), 1)
self.assertEqual(rows[0][0], 1)
assert counter[0] > 0, counter[0]
gt.kill()
def assert_cursor_works(self, cursor):
cursor.execute("select 1")
rows = cursor.fetchall()
self.assertEqual(len(rows), 1)
self.assertEqual(len(rows[0]), 1)
self.assertEqual(rows[0][0], 1)
self.assert_cursor_yields(cursor)
def assert_connection_works(self, conn):
curs = conn.cursor()
self.assert_cursor_works(curs)
def test_module_attributes(self):
import MySQLdb as orig
for key in dir(orig):
if key not in ('__author__', '__path__', '__revision__',
'__version__', '__loader__'):
assert hasattr(MySQLdb, key), "%s %s" % (key, getattr(orig, key))
def test_connecting(self):
assert self.connection is not None
def test_connecting_annoyingly(self):
self.assert_connection_works(MySQLdb.Connect(**self._auth))
self.assert_connection_works(MySQLdb.Connection(**self._auth))
self.assert_connection_works(MySQLdb.connections.Connection(**self._auth))
def test_create_cursor(self):
cursor = self.connection.cursor()
cursor.close()
def test_run_query(self):
cursor = self.connection.cursor()
self.assert_cursor_works(cursor)
cursor.close()
def test_run_bad_query(self):
cursor = self.connection.cursor()
try:
cursor.execute("garbage blah blah")
assert False
except AssertionError:
raise
except Exception:
pass
cursor.close()
def fill_up_table(self, conn):
curs = conn.cursor()
for i in range(1000):
curs.execute('insert into test_table (value_int) values (%s)' % i)
conn.commit()
def test_yields(self):
conn = self.connection
self.set_up_dummy_table(conn)
self.fill_up_table(conn)
curs = conn.cursor()
results = []
SHORT_QUERY = "select * from test_table"
evt = event.Event()
def a_query():
self.assert_cursor_works(curs)
curs.execute(SHORT_QUERY)
results.append(2)
evt.send()
eventlet.spawn(a_query)
results.append(1)
self.assertEqual([1], results)
evt.wait()
self.assertEqual([1, 2], results)
def test_visibility_from_other_connections(self):
conn = MySQLdb.connect(**self._auth)
conn2 = MySQLdb.connect(**self._auth)
curs = conn.cursor()
try:
curs2 = conn2.cursor()
curs2.execute("insert into gargleblatz (a) values (%s)" % (314159))
self.assertEqual(curs2.rowcount, 1)
conn2.commit()
selection_query = "select * from gargleblatz"
curs2.execute(selection_query)
self.assertEqual(curs2.rowcount, 1)
del curs2, conn2
# create a new connection, it should see the addition
conn3 = MySQLdb.connect(**self._auth)
curs3 = conn3.cursor()
curs3.execute(selection_query)
self.assertEqual(curs3.rowcount, 1)
# now, does the already-open connection see it?
curs.execute(selection_query)
self.assertEqual(curs.rowcount, 1)
del curs3, conn3
finally:
# clean up my litter
curs.execute("delete from gargleblatz where a=314159")
conn.commit()
| TestMySQLdb |
python | apache__airflow | providers/amazon/src/airflow/providers/amazon/aws/triggers/eks.py | {
"start": 9486,
"end": 11009
} | class ____(AwsBaseWaiterTrigger):
"""
Asynchronously wait for the fargate profile to be created.
:param cluster_name: The name of the EKS cluster
:param fargate_profile_name: The name of the fargate profile
:param waiter_delay: The amount of time in seconds to wait between attempts.
:param waiter_max_attempts: The maximum number of attempts to be made.
:param aws_conn_id: The Airflow connection used for AWS credentials.
"""
def __init__(
self,
cluster_name: str,
fargate_profile_name: str,
waiter_delay: int,
waiter_max_attempts: int,
aws_conn_id: str | None,
region_name: str | None = None,
):
super().__init__(
serialized_fields={"cluster_name": cluster_name, "fargate_profile_name": fargate_profile_name},
waiter_name="fargate_profile_active",
waiter_args={"clusterName": cluster_name, "fargateProfileName": fargate_profile_name},
failure_message="Failure while creating Fargate profile",
status_message="Fargate profile not created yet",
status_queries=["fargateProfile.status"],
return_value=None,
waiter_delay=waiter_delay,
waiter_max_attempts=waiter_max_attempts,
aws_conn_id=aws_conn_id,
region_name=region_name,
)
def hook(self) -> AwsGenericHook:
return EksHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)
| EksCreateFargateProfileTrigger |
python | sympy__sympy | sympy/stats/stochastic_process_types.py | {
"start": 86402,
"end": 88562
} | class ____(CountingProcess):
r"""
A Gamma process is a random process with independent gamma distributed
increments. It is a pure-jump increasing Levy process.
Parameters
==========
sym : Symbol/str
lamda : Positive number
Jump size of the process, ``lamda > 0``
gamma : Positive number
Rate of jump arrivals, `\gamma > 0`
Examples
========
>>> from sympy.stats import GammaProcess, E, P, variance
>>> from sympy import symbols, Contains, Interval, Not
>>> t, d, x, l, g = symbols('t d x l g', positive=True)
>>> X = GammaProcess("X", l, g)
>>> E(X(t))
g*t/l
>>> variance(X(t)).simplify()
g*t/l**2
>>> X = GammaProcess('X', 1, 2)
>>> P(X(t) < 1).simplify()
lowergamma(2*t, 1)/gamma(2*t)
>>> P(Not((X(t) < 5) & (X(d) > 3)), Contains(t, Interval.Ropen(2, 4)) &
... Contains(d, Interval.Lopen(7, 8))).simplify()
-4*exp(-3) + 472*exp(-8)/3 + 1
>>> E(X(2) + x*E(X(5)))
10*x + 4
References
==========
.. [1] https://en.wikipedia.org/wiki/Gamma_process
"""
def __new__(cls, sym, lamda, gamma):
_value_check(lamda > 0, 'lamda should be a positive number')
_value_check(gamma > 0, 'gamma should be a positive number')
sym = _symbol_converter(sym)
gamma = _sympify(gamma)
lamda = _sympify(lamda)
return Basic.__new__(cls, sym, lamda, gamma)
@property
def lamda(self):
return self.args[1]
@property
def gamma(self):
return self.args[2]
@property
def state_space(self):
return _set_converter(Interval(0, oo))
def distribution(self, key):
if isinstance(key, RandomIndexedSymbol):
self._deprecation_warn_distribution()
return GammaDistribution(self.gamma*key.key, 1/self.lamda)
return GammaDistribution(self.gamma*key, 1/self.lamda)
def density(self, x):
k = self.gamma*x.key
theta = 1/self.lamda
return x**(k - 1) * exp(-x/theta) / (gamma(k)*theta**k)
def simple_rv(self, rv):
return Gamma(rv.name, self.gamma*rv.key, 1/self.lamda)
| GammaProcess |
python | tensorflow__tensorflow | tensorflow/python/ops/ragged/ragged_squeeze_op_test.py | {
"start": 1233,
"end": 8635
} | class ____(test_util.TensorFlowTestCase,
parameterized.TestCase):
@parameterized.parameters([
{
'input_list': []
},
{
'input_list': [[]],
'squeeze_ranks': [0]
},
{
'input_list': [[[[], []], [[], []]]],
'squeeze_ranks': [0]
},
])
def test_passing_empty(self, input_list, squeeze_ranks=None):
rt = ragged_squeeze_op.squeeze(
ragged_factory_ops.constant(input_list), squeeze_ranks)
dt = array_ops.squeeze(constant_op.constant(input_list), squeeze_ranks)
self.assertAllEqual(ragged_conversion_ops.to_tensor(rt), dt)
@parameterized.parameters([
{
'input_list': [[1]],
'squeeze_ranks': [0]
},
{
'input_list': [[1]],
'squeeze_ranks': [0, 1]
},
{
'input_list': [[1, 2]],
'squeeze_ranks': [0]
},
{
'input_list': [[1], [2]],
'squeeze_ranks': [1]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [1]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [3]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 3]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 1]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [1, 3]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 1, 3]
},
{
'input_list': [[[1], [2]], [[3], [4]]],
'squeeze_ranks': [2]
},
{
'input_list': [[1], [2]],
'squeeze_ranks': [-1]
},
])
def test_passing_simple(self, input_list, squeeze_ranks=None):
rt = ragged_squeeze_op.squeeze(
ragged_factory_ops.constant(input_list), squeeze_ranks)
dt = array_ops.squeeze(constant_op.constant(input_list), squeeze_ranks)
self.assertAllEqual(ragged_conversion_ops.to_tensor(rt), dt)
@parameterized.parameters([
# ragged_conversion_ops.from_tensor does not work for this
# {'input_list': [1]},
{
'input_list': [[1]],
'squeeze_ranks': [0]
},
{
'input_list': [[1, 2]],
'squeeze_ranks': [0]
},
{
'input_list': [[1], [2]],
'squeeze_ranks': [1]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [1]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [3]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 3]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 1]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [1, 3]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 1, 3]
},
{
'input_list': [[[1], [2]], [[3], [4]]],
'squeeze_ranks': [2]
},
])
def test_passing_simple_from_dense(self, input_list, squeeze_ranks=None):
dt = constant_op.constant(input_list)
rt = ragged_conversion_ops.from_tensor(dt)
rt_s = ragged_squeeze_op.squeeze(rt, squeeze_ranks)
dt_s = array_ops.squeeze(dt, squeeze_ranks)
self.assertAllEqual(ragged_conversion_ops.to_tensor(rt_s), dt_s)
@parameterized.parameters([
{
'input_list': [[[[[[1]], [[1, 2]]]], [[[[]], [[]]]]]],
'output_list': [[[1], [1, 2]], [[], []]],
'squeeze_ranks': [0, 2, 4]
},
{
'input_list': [[[[[[1]], [[1, 2]]]], [[[[]], [[]]]]]],
'output_list': [[[[[1]], [[1, 2]]]], [[[[]], [[]]]]],
'squeeze_ranks': [0]
},
])
def test_passing_ragged(self, input_list, output_list, squeeze_ranks=None):
rt = ragged_factory_ops.constant(input_list)
rt_s = ragged_squeeze_op.squeeze(rt, squeeze_ranks)
ref = ragged_factory_ops.constant(output_list)
self.assertAllEqual(rt_s, ref)
def test_passing_text(self):
rt = ragged_factory_ops.constant([[[[[[[['H']], [['e']], [['l']], [['l']],
[['o']]],
[[['W']], [['o']], [['r']], [['l']],
[['d']], [['!']]]]],
[[[[['T']], [['h']], [['i']], [['s']]],
[[['i']], [['s']]],
[[['M']], [['e']], [['h']], [['r']],
[['d']], [['a']], [['d']]],
[[['.']]]]]]]])
output_list = [[['H', 'e', 'l', 'l', 'o'], ['W', 'o', 'r', 'l', 'd', '!']],
[['T', 'h', 'i', 's'], ['i', 's'],
['M', 'e', 'h', 'r', 'd', 'a', 'd'], ['.']]]
ref = ragged_factory_ops.constant(output_list)
rt_s = ragged_squeeze_op.squeeze(rt, [0, 1, 3, 6, 7])
self.assertAllEqual(rt_s, ref)
@parameterized.parameters([
{
'input_list': [[]],
'squeeze_ranks': [1]
},
{
'input_list': [[1, 2]],
'squeeze_ranks': [1]
},
{
'input_list': [[1], [2]],
'squeeze_ranks': [0]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 2]
},
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [2]
},
{
'input_list': [[[1], [2]], [[3], [4]]],
'squeeze_ranks': [0]
},
{
'input_list': [[[1], [2]], [[3], [4]]],
'squeeze_ranks': [1]
},
{
'input_list': [[], []],
'squeeze_ranks': [1]
},
{
'input_list': [[[], []], [[], []]],
'squeeze_ranks': [1]
},
])
def test_failing_InvalidArgumentError(self, input_list, squeeze_ranks):
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(
ragged_squeeze_op.squeeze(
ragged_factory_ops.constant(input_list), squeeze_ranks))
@parameterized.parameters([
{
'input_list': [[]]
},
{
'input_list': [[1]]
},
{
'input_list': [[1, 2]]
},
{
'input_list': [[[1], [2]], [[3], [4]]]
},
{
'input_list': [[1]]
},
{
'input_list': [[[1], [2]], [[3], [4]]]
},
{
'input_list': [[[[12], [11]]]]
},
])
def test_failing_no_squeeze_dim_specified(self, input_list):
with self.assertRaises(ValueError):
ragged_squeeze_op.squeeze(ragged_factory_ops.constant(input_list))
@parameterized.parameters([
{
'input_list': [[[[12], [11]]]],
'squeeze_ranks': [0, 1, 3]
},
])
def test_failing_axis_is_not_a_list(self, input_list, squeeze_ranks):
with self.assertRaises(TypeError):
tensor_ranks = constant_op.constant(squeeze_ranks)
ragged_squeeze_op.squeeze(
ragged_factory_ops.constant(input_list), tensor_ranks)
if __name__ == '__main__':
googletest.main()
| RaggedSqueezeTest |
python | huggingface__transformers | src/transformers/models/autoformer/configuration_autoformer.py | {
"start": 816,
"end": 12192
} | class ____(PreTrainedConfig):
r"""
This is the configuration class to store the configuration of an [`AutoformerModel`]. It is used to instantiate an
Autoformer model according to the specified arguments, defining the model architecture. Instantiating a
configuration with the defaults will yield a similar configuration to that of the Autoformer
[huggingface/autoformer-tourism-monthly](https://huggingface.co/huggingface/autoformer-tourism-monthly)
architecture.
Configuration objects inherit from [`PreTrainedConfig`] can be used to control the model outputs. Read the
documentation from [`PreTrainedConfig`] for more information.
Args:
prediction_length (`int`):
The prediction length for the decoder. In other words, the prediction horizon of the model.
context_length (`int`, *optional*, defaults to `prediction_length`):
The context length for the encoder. If unset, the context length will be the same as the
`prediction_length`.
distribution_output (`string`, *optional*, defaults to `"student_t"`):
The distribution emission head for the model. Could be either "student_t", "normal" or "negative_binomial".
loss (`string`, *optional*, defaults to `"nll"`):
The loss function for the model corresponding to the `distribution_output` head. For parametric
distributions it is the negative log likelihood (nll) - which currently is the only supported one.
input_size (`int`, *optional*, defaults to 1):
The size of the target variable which by default is 1 for univariate targets. Would be > 1 in case of
multivariate targets.
lags_sequence (`list[int]`, *optional*, defaults to `[1, 2, 3, 4, 5, 6, 7]`):
The lags of the input time series as covariates often dictated by the frequency. Default is `[1, 2, 3, 4,
5, 6, 7]`.
scaling (`bool`, *optional* defaults to `True`):
Whether to scale the input targets.
num_time_features (`int`, *optional*, defaults to 0):
The number of time features in the input time series.
num_dynamic_real_features (`int`, *optional*, defaults to 0):
The number of dynamic real valued features.
num_static_categorical_features (`int`, *optional*, defaults to 0):
The number of static categorical features.
num_static_real_features (`int`, *optional*, defaults to 0):
The number of static real valued features.
cardinality (`list[int]`, *optional*):
The cardinality (number of different values) for each of the static categorical features. Should be a list
of integers, having the same length as `num_static_categorical_features`. Cannot be `None` if
`num_static_categorical_features` is > 0.
embedding_dimension (`list[int]`, *optional*):
The dimension of the embedding for each of the static categorical features. Should be a list of integers,
having the same length as `num_static_categorical_features`. Cannot be `None` if
`num_static_categorical_features` is > 0.
d_model (`int`, *optional*, defaults to 64):
Dimensionality of the transformer layers.
encoder_layers (`int`, *optional*, defaults to 2):
Number of encoder layers.
decoder_layers (`int`, *optional*, defaults to 2):
Number of decoder layers.
encoder_attention_heads (`int`, *optional*, defaults to 2):
Number of attention heads for each attention layer in the Transformer encoder.
decoder_attention_heads (`int`, *optional*, defaults to 2):
Number of attention heads for each attention layer in the Transformer decoder.
encoder_ffn_dim (`int`, *optional*, defaults to 32):
Dimension of the "intermediate" (often named feed-forward) layer in encoder.
decoder_ffn_dim (`int`, *optional*, defaults to 32):
Dimension of the "intermediate" (often named feed-forward) layer in decoder.
activation_function (`str` or `function`, *optional*, defaults to `"gelu"`):
The non-linear activation function (function or string) in the encoder and decoder. If string, `"gelu"` and
`"relu"` are supported.
dropout (`float`, *optional*, defaults to 0.1):
The dropout probability for all fully connected layers in the encoder, and decoder.
encoder_layerdrop (`float`, *optional*, defaults to 0.1):
The dropout probability for the attention and fully connected layers for each encoder layer.
decoder_layerdrop (`float`, *optional*, defaults to 0.1):
The dropout probability for the attention and fully connected layers for each decoder layer.
attention_dropout (`float`, *optional*, defaults to 0.1):
The dropout probability for the attention probabilities.
activation_dropout (`float`, *optional*, defaults to 0.1):
The dropout probability used between the two layers of the feed-forward networks.
num_parallel_samples (`int`, *optional*, defaults to 100):
The number of samples to generate in parallel for each time step of inference.
init_std (`float`, *optional*, defaults to 0.02):
The standard deviation of the truncated normal weight initialization distribution.
use_cache (`bool`, *optional*, defaults to `True`):
Whether to use the past key/values attentions (if applicable to the model) to speed up decoding.
label_length (`int`, *optional*, defaults to 10):
Start token length of the Autoformer decoder, which is used for direct multi-step prediction (i.e.
non-autoregressive generation).
moving_average (`int`, *optional*, defaults to 25):
The window size of the moving average. In practice, it's the kernel size in AvgPool1d of the Decomposition
Layer.
autocorrelation_factor (`int`, *optional*, defaults to 3):
"Attention" (i.e. AutoCorrelation mechanism) factor which is used to find top k autocorrelations delays.
It's recommended in the paper to set it to a number between 1 and 5.
Example:
```python
>>> from transformers import AutoformerConfig, AutoformerModel
>>> # Initializing a default Autoformer configuration
>>> configuration = AutoformerConfig()
>>> # Randomly initializing a model (with random weights) from the configuration
>>> model = AutoformerModel(configuration)
>>> # Accessing the model configuration
>>> configuration = model.config
```"""
model_type = "autoformer"
attribute_map = {
"hidden_size": "d_model",
"num_attention_heads": "encoder_attention_heads",
"num_hidden_layers": "encoder_layers",
}
def __init__(
self,
prediction_length: Optional[int] = None,
context_length: Optional[int] = None,
distribution_output: str = "student_t",
loss: str = "nll",
input_size: int = 1,
lags_sequence: list[int] = [1, 2, 3, 4, 5, 6, 7],
scaling: bool = True,
num_time_features: int = 0,
num_dynamic_real_features: int = 0,
num_static_categorical_features: int = 0,
num_static_real_features: int = 0,
cardinality: Optional[list[int]] = None,
embedding_dimension: Optional[list[int]] = None,
d_model: int = 64,
encoder_attention_heads: int = 2,
decoder_attention_heads: int = 2,
encoder_layers: int = 2,
decoder_layers: int = 2,
encoder_ffn_dim: int = 32,
decoder_ffn_dim: int = 32,
activation_function: str = "gelu",
dropout: float = 0.1,
encoder_layerdrop: float = 0.1,
decoder_layerdrop: float = 0.1,
attention_dropout: float = 0.1,
activation_dropout: float = 0.1,
num_parallel_samples: int = 100,
init_std: float = 0.02,
use_cache: bool = True,
is_encoder_decoder=True,
# Autoformer arguments
label_length: int = 10,
moving_average: int = 25,
autocorrelation_factor: int = 3,
**kwargs,
):
# time series specific configuration
self.prediction_length = prediction_length
self.context_length = context_length if context_length is not None else prediction_length
self.distribution_output = distribution_output
self.loss = loss
self.input_size = input_size
self.num_time_features = num_time_features
self.lags_sequence = lags_sequence
self.scaling = scaling
self.num_dynamic_real_features = num_dynamic_real_features
self.num_static_real_features = num_static_real_features
self.num_static_categorical_features = num_static_categorical_features
if cardinality is not None and num_static_categorical_features > 0:
if len(cardinality) != num_static_categorical_features:
raise ValueError(
"The cardinality should be a list of the same length as `num_static_categorical_features`"
)
self.cardinality = cardinality
else:
self.cardinality = [0]
if embedding_dimension is not None and num_static_categorical_features > 0:
if len(embedding_dimension) != num_static_categorical_features:
raise ValueError(
"The embedding dimension should be a list of the same length as `num_static_categorical_features`"
)
self.embedding_dimension = embedding_dimension
else:
self.embedding_dimension = [min(50, (cat + 1) // 2) for cat in self.cardinality]
self.num_parallel_samples = num_parallel_samples
# Transformer architecture configuration
self.feature_size = input_size * len(self.lags_sequence) + self._number_of_features
self.d_model = d_model
self.encoder_attention_heads = encoder_attention_heads
self.decoder_attention_heads = decoder_attention_heads
self.encoder_ffn_dim = encoder_ffn_dim
self.decoder_ffn_dim = decoder_ffn_dim
self.encoder_layers = encoder_layers
self.decoder_layers = decoder_layers
self.dropout = dropout
self.attention_dropout = attention_dropout
self.activation_dropout = activation_dropout
self.encoder_layerdrop = encoder_layerdrop
self.decoder_layerdrop = decoder_layerdrop
self.activation_function = activation_function
self.init_std = init_std
self.use_cache = use_cache
# Autoformer
self.label_length = label_length
self.moving_average = moving_average
self.autocorrelation_factor = autocorrelation_factor
super().__init__(is_encoder_decoder=is_encoder_decoder, **kwargs)
@property
def _number_of_features(self) -> int:
return (
sum(self.embedding_dimension)
+ self.num_dynamic_real_features
+ self.num_time_features
+ self.num_static_real_features
+ self.input_size * 2 # the log1p(abs(loc)) and log(scale) features
)
__all__ = ["AutoformerConfig"]
| AutoformerConfig |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/genericType20.py | {
"start": 242,
"end": 316
} | class ____(Parent[Any]):
# This should generate an error.
y = 42
| Child |
python | getsentry__sentry | tests/sentry/issues/test_json_schemas.py | {
"start": 107,
"end": 753
} | class ____(TestCase):
def test_loads_json_schema(self) -> None:
assert json_schemas.EVENT_PAYLOAD_SCHEMA != json_schemas.LEGACY_EVENT_PAYLOAD_SCHEMA
assert (
json_schemas.EVENT_PAYLOAD_SCHEMA.get("description")
== " The sentry v7 event structure."
)
def test_falls_back_to_legacy(self) -> None:
with mock.patch(
"sentry.issues.json_schemas.open", mock.mock_open(read_data="invalid json")
):
reload(json_schemas)
assert json_schemas.EVENT_PAYLOAD_SCHEMA == json_schemas.LEGACY_EVENT_PAYLOAD_SCHEMA
reload(json_schemas)
| JsonSchemasTest |
python | PyCQA__pylint | tests/functional/i/init_not_called.py | {
"start": 956,
"end": 1091
} | class ____(NewStyleC):
"""No init called, but abstract so that is fine."""
def __init__(self):
self.arg = 0
| AssignedInit |
python | doocs__leetcode | lcof/面试题03. 数组中重复的数字/Solution.py | {
"start": 0,
"end": 164
} | class ____:
def findRepeatNumber(self, nums: List[int]) -> int:
for a, b in pairwise(sorted(nums)):
if a == b:
return a
| Solution |
python | gevent__gevent | src/gevent/tests/known_failures.py | {
"start": 5795,
"end": 17690
} | class ____(metaclass=DefinitionsMeta):
test__util = RunAlone(
"""
If we have extra greenlets hanging around due to changes in GC, we won't
match the expected output.
So far, this is only seen on one version, in CI environment.
""",
when=(CI & (PY312B3_EXACTLY | PY312B4_EXACTLY))
)
test__issue6 = Flaky(
"""test__issue6 (see comments in test file) is really flaky on both Travis and Appveyor;
on Travis we could just run the test again (but that gets old fast), but on appveyor
we don't have that option without a new commit---and sometimes we really need a build
to succeed in order to get a release wheel"""
)
test__core_fork = Ignored(
"""fork watchers don't get called on windows
because fork is not a concept windows has.
See this file for a detailed explanation.""",
when=WIN
)
test__greenletset = Flaky(
when=WIN,
ignore_coverage=PYPY
)
test__example_udp_client = test__example_udp_server = Flaky(
"""
These both run on port 9000 and can step on each other...seems
like the appveyor containers aren't fully port safe? Or it
takes longer for the processes to shut down? Or we run them in
a different order in the process pool than we do other places?
On PyPy on Travis, this fails to get the correct results,
sometimes. I can't reproduce locally
""",
when=APPVEYOR | (PYPY & TRAVIS)
)
# This one sometimes randomly closes connections, but no indication
# of a server crash, only a client side close.
test__server_pywsgi = Flaky(when=APPVEYOR)
test_threading = Multi().ignored(
"""
This one seems to just stop right after patching is done. It
passes on a local win 10 vm, and the main test_threading_2.py
does as well. Based on the printouts we added, it appears to
not even finish importing:
https://ci.appveyor.com/project/denik/gevent/build/1.0.1277/job/tpvhesij5gldjxqw#L1190
Ignored because it takes two minutes to time out.
""",
when=APPVEYOR & LIBUV & PYPY
).flaky(
"""
test_set_and_clear in Py3 relies on 5 threads all starting and
coming to an Event wait point while a sixth thread sleeps for a half
second. The sixth thread then does something and checks that
the 5 threads were all at the wait point. But the timing is sometimes
too tight for appveyor. This happens even if Event isn't
monkey-patched
""",
when=APPVEYOR & PY3
)
test_ftplib = Flaky(
r"""
could be a problem of appveyor - not sure
======================================================================
ERROR: test_af (__main__.TestIPv6Environment)
----------------------------------------------------------------------
File "C:\Python27-x64\lib\ftplib.py", line 135, in connect
self.sock = socket.create_connection((self.host, self.port), self.timeout)
File "c:\projects\gevent\gevent\socket.py", line 73, in create_connection
raise err
error: [Errno 10049] [Error 10049] The requested address is not valid in its context.
XXX: On Jan 3 2016 this suddenly started passing on Py27/64; no idea why, the python version
was 2.7.11 before and after.
""",
when=APPVEYOR & BIT_64
)
test__backdoor = Flaky(when=LEAKTEST | PYPY)
test__socket_errors = Flaky(when=LEAKTEST)
test_signal = Multi().flaky(
"On Travis, this very frequently fails due to timing",
when=TRAVIS & LEAKTEST,
# Partial workaround for the _testcapi issue on PyPy,
# but also because signal delivery can sometimes be slow, and this
# spawn processes of its own
run_alone=APPVEYOR,
).ignored(
"""
This fails to run a single test. It looks like just importing the module
can hang. All I see is the output from patch_all()
""",
when=APPVEYOR & PYPY3
)
test__monkey_sigchld_2 = Ignored(
"""
This hangs for no apparent reason when run by the testrunner,
even wher maked standalone when run standalone from the
command line, it's fine. Issue in pypy2 6.0?
""",
when=PYPY & LIBUV
)
test_ssl = Ignored(
"""
PyPy 7.0 and 7.1 on Travis with Ubunto Xenial 16.04 can't
allocate SSL Context objects, either in Python 2.7 or 3.6.
There must be some library incompatibility. No point even
running them. XXX: Remember to turn this back on.
On Windows, with PyPy3.7 7.3.7, there seem to be all kind of certificate
errors.
""",
when=(PYPY & TRAVIS) | (PYPY3 & WIN)
)
test_httpservers = Ignored(
"""
All the CGI tests hang. There appear to be subprocess problems.
""",
when=PYPY3 & WIN
)
test__pywsgi = Ignored(
"""
XXX: Re-enable this when we can investigate more. This has
started crashing with a SystemError. I cannot reproduce with
the same version on macOS and I cannot reproduce with the same
version in a Linux vm. Commenting out individual tests just
moves the crash around.
https://bitbucket.org/pypy/pypy/issues/2769/systemerror-unexpected-internal-exception
On Appveyor 3.8.0, for some reason this takes *way* too long, about 100s, which
often goes just over the default timeout of 100s. This makes no sense.
But it also takes nearly that long in 3.7. 3.6 and earlier are much faster.
It also takes just over 100s on PyPy 3.7.
""",
when=(PYPY & TRAVIS & LIBUV) | PY380_EXACTLY,
# https://bitbucket.org/pypy/pypy/issues/2769/systemerror-unexpected-internal-exception
run_alone=(CI & LEAKTEST & PY3) | (PYPY & LIBUV),
# This often takes much longer on PyPy on CI.
options={'timeout': (CI & PYPY, 180)},
)
test_subprocess = Multi().flaky(
"Unknown, can't reproduce locally; times out one test",
when=PYPY & PY3 & TRAVIS,
ignore_coverage=ALWAYS,
).ignored(
"Tests don't even start before the process times out.",
when=PYPY3 & WIN
)
test__threadpool = Ignored(
"""
XXX: Re-enable these when we have more time to investigate.
This test, which normally takes ~60s, sometimes
hangs forever after running several tests. I cannot reproduce,
it seems highly load dependent. Observed with both libev and libuv.
""",
when=TRAVIS & (PYPY | OSX),
# This often takes much longer on PyPy on CI.
options={'timeout': (CI & PYPY, 180)},
)
test__threading_2 = Ignored(
"""
This test, which normally takes 4-5s, sometimes
hangs forever after running two tests. I cannot reproduce,
it seems highly load dependent. Observed with both libev and libuv.
""",
when=TRAVIS & (PYPY | OSX),
# This often takes much longer on PyPy on CI.
options={'timeout': (CI & PYPY, 180)},
)
test__issue230 = Ignored(
"""
This rarely hangs for unknown reasons. I cannot reproduce
locally.
""",
when=TRAVIS & OSX
)
test_selectors = Flaky(
"""
Timing issues on appveyor.
""",
when=PY3 & APPVEYOR,
ignore_coverage=ALWAYS,
)
test__example_portforwarder = Flaky(
"""
This one sometimes times out, often after output "The process
with PID XXX could not be terminated. Reason: There is no
running instance of the task.",
""",
when=APPVEYOR | COVERAGE
)
test__issue302monkey = test__threading_vs_settrace = Flaky(
"""
The gevent concurrency plugin tends to slow things
down and get us past our default timeout value. These
tests in particular are sensitive to it. So in fact we just turn them
off.
""",
when=COVERAGE,
ignore_coverage=ALWAYS,
)
test__hub_join_timeout = Ignored(
r"""
This sometimes times out. It appears to happen when the
times take too long and a test raises a FlakyTestTimeout error,
aka a unittest.SkipTest error. This probably indicates that we're
not cleaning something up correctly:
.....ss
GEVENTTEST_USE_RESOURCES=-network C:\Python38-x64\python.exe -u \
-mgevent.tests.test__hub_join_timeout [code TIMEOUT] [took 100.4s]
""",
when=APPVEYOR
)
test__example_wsgiserver = test__example_webproxy = RunAlone(
"""
These share the same port, which means they can conflict
between concurrent test runs too
XXX: Fix this by dynamically picking a port.
""",
)
test__pool = RunAlone(
"""
On a heavily loaded box, these can all take upwards of 200s.
""",
when=(CI & LEAKTEST) | (PYPY3 & APPVEYOR)
)
test_socket = RunAlone(
"Sometimes has unexpected timeouts",
when=CI & PYPY & PY3,
ignore_coverage=ALWAYS, # times out
)
test__refcount = Ignored(
"Sometimes fails to connect for no reason",
when=(CI & OSX) | (CI & PYPY) | APPVEYOR,
ignore_coverage=PYPY
)
test__doctests = Ignored(
"Sometimes times out during/after gevent._config.Config",
when=CI & OSX
)
test_httplib = Ignored(
"""
Imports ``test.support.testcase.ExtraAssertions``
which doesn't exist yet.
""",
when=PY313LT5,
)
# tests that can't be run when coverage is enabled
# TODO: Now that we have this declarative, we could eliminate this list,
# just add them to the main IGNORED_TESTS list.
IGNORE_COVERAGE = [
]
# A mapping from test file basename to a dictionary of
# options that will be applied on top of the DEFAULT_RUN_OPTIONS.
TEST_FILE_OPTIONS = {
}
FAILING_TESTS = []
IGNORED_TESTS = []
# tests that don't do well when run on busy box
# or that are mutually exclusive
RUN_ALONE = [
]
def populate(): # pylint:disable=too-many-branches
# TODO: Maybe move to the metaclass.
# TODO: This could be better.
for k, v in Definitions.__dict__.items():
if isinstance(v, Multi):
actions = v._conds
else:
actions = (v,)
test_name = k + '.py'
del k, v
for action in actions:
if not isinstance(action, _Action):
continue
if action.run_alone:
RUN_ALONE.append(test_name)
if action.ignore_coverage:
IGNORE_COVERAGE.append(test_name)
if action.options:
for opt_name, (condition, value) in action.options.items():
# TODO: Verify that this doesn't match more than once.
if condition:
TEST_FILE_OPTIONS.setdefault(test_name, {})[opt_name] = value
if action.when:
if isinstance(action, Ignored):
IGNORED_TESTS.append(test_name)
elif isinstance(action, Flaky):
FAILING_TESTS.append('FLAKY ' + test_name)
elif isinstance(action, Failing):
FAILING_TESTS.append(test_name)
FAILING_TESTS.sort()
IGNORED_TESTS.sort()
RUN_ALONE.sort()
populate()
if __name__ == '__main__':
print('known_failures:\n', FAILING_TESTS)
print('ignored tests:\n', IGNORED_TESTS)
print('run alone:\n', RUN_ALONE)
print('options:\n', TEST_FILE_OPTIONS)
print("ignore during coverage:\n", IGNORE_COVERAGE)
| Definitions |
python | pytorch__pytorch | test/dynamo/cpython/3_13/test_with.py | {
"start": 25796,
"end": 29785
} | class ____(__TestCase):
class Dummy(object):
def __init__(self, value=None, gobble=False):
if value is None:
value = self
self.value = value
self.gobble = gobble
self.enter_called = False
self.exit_called = False
def __enter__(self):
self.enter_called = True
return self.value
def __exit__(self, *exc_info):
self.exit_called = True
self.exc_info = exc_info
if self.gobble:
return True
class InitRaises(object):
def __init__(self): raise RuntimeError()
class EnterRaises(object):
def __enter__(self): raise RuntimeError()
def __exit__(self, *exc_info): pass
class ExitRaises(object):
def __enter__(self): pass
def __exit__(self, *exc_info): raise RuntimeError()
def testNoExceptions(self):
with self.Dummy() as a, self.Dummy() as b:
self.assertTrue(a.enter_called)
self.assertTrue(b.enter_called)
self.assertTrue(a.exit_called)
self.assertTrue(b.exit_called)
def testExceptionInExprList(self):
try:
with self.Dummy() as a, self.InitRaises():
pass
except:
pass
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
def testExceptionInEnter(self):
try:
with self.Dummy() as a, self.EnterRaises():
self.fail('body of bad with executed')
except RuntimeError:
pass
else:
self.fail('RuntimeError not reraised')
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
def testExceptionInExit(self):
body_executed = False
with self.Dummy(gobble=True) as a, self.ExitRaises():
body_executed = True
self.assertTrue(a.enter_called)
self.assertTrue(a.exit_called)
self.assertTrue(body_executed)
self.assertNotEqual(a.exc_info[0], None)
def testEnterReturnsTuple(self):
with self.Dummy(value=(1,2)) as (a1, a2), \
self.Dummy(value=(10, 20)) as (b1, b2):
self.assertEqual(1, a1)
self.assertEqual(2, a2)
self.assertEqual(10, b1)
self.assertEqual(20, b2)
def testExceptionLocation(self):
# The location of an exception raised from
# __init__, __enter__ or __exit__ of a context
# manager should be just the context manager expression,
# pinpointing the precise context manager in case there
# is more than one.
def init_raises():
try:
with self.Dummy(), self.InitRaises() as cm, self.Dummy() as d:
pass
except Exception as e:
return e
def enter_raises():
try:
with self.EnterRaises(), self.Dummy() as d:
pass
except Exception as e:
return e
def exit_raises():
try:
with self.ExitRaises(), self.Dummy() as d:
pass
except Exception as e:
return e
for func, expected in [(init_raises, "self.InitRaises()"),
(enter_raises, "self.EnterRaises()"),
(exit_raises, "self.ExitRaises()"),
]:
with self.subTest(func):
exc = func()
f = traceback.extract_tb(exc.__traceback__)[0]
indent = 16
co = func.__code__
self.assertEqual(f.lineno, co.co_firstlineno + 2)
self.assertEqual(f.end_lineno, co.co_firstlineno + 2)
self.assertEqual(f.line[f.colno - indent : f.end_colno - indent],
expected)
if __name__ == '__main__':
run_tests()
| NestedWith |
python | walkccc__LeetCode | solutions/1474. Delete N Nodes After M Nodes of a Linked List/1474.py | {
"start": 0,
"end": 557
} | class ____:
def deleteNodes(
self,
head: ListNode | None,
m: int,
n: int,
) -> ListNode | None:
curr = head
prev = None # prev.next == curr
while curr:
# Set the m-th node as `prev`.
for _ in range(m):
if not curr:
break
prev = curr
curr = curr.next
# Set the (m + n + 1)-th node as `curr`.
for _ in range(n):
if not curr:
break
curr = curr.next
# Delete the nodes [m + 1..n - 1].
prev.next = curr
return head
| Solution |
python | django__django | tests/queries/models.py | {
"start": 14910,
"end": 15028
} | class ____(models.Model):
annotation = models.ForeignKey(Annotation, models.CASCADE, null=True, blank=True)
| BaseUser |
python | giampaolo__psutil | tests/test_contracts.py | {
"start": 4127,
"end": 5395
} | class ____(PsutilTestCase):
def test_win_service_iter(self):
assert hasattr(psutil, "win_service_iter") == WINDOWS
def test_win_service_get(self):
assert hasattr(psutil, "win_service_get") == WINDOWS
@pytest.mark.skipif(MACOS and AARCH64, reason="skipped due to #1892")
def test_cpu_freq(self):
assert hasattr(psutil, "cpu_freq") == (
LINUX or MACOS or WINDOWS or FREEBSD or OPENBSD
)
def test_sensors_temperatures(self):
assert hasattr(psutil, "sensors_temperatures") == (LINUX or FREEBSD)
def test_sensors_fans(self):
assert hasattr(psutil, "sensors_fans") == LINUX
def test_battery(self):
assert hasattr(psutil, "sensors_battery") == (
LINUX or WINDOWS or FREEBSD or MACOS
)
def test_heap_info(self):
hasit = hasattr(psutil, "heap_info")
if LINUX:
assert hasit == bool(platform.libc_ver() != ("", ""))
else:
assert hasit == MACOS or WINDOWS or BSD
def test_heap_trim(self):
hasit = hasattr(psutil, "heap_trim")
if LINUX:
assert hasit == bool(platform.libc_ver() != ("", ""))
else:
assert hasit == MACOS or WINDOWS or BSD
| TestAvailSystemAPIs |
python | celery__celery | celery/exceptions.py | {
"start": 3866,
"end": 3948
} | class ____(UserWarning):
"""Base class for all Celery warnings."""
| CeleryWarning |
python | dagster-io__dagster | python_modules/dagster-graphql/dagster_graphql/schema/partition_keys.py | {
"start": 388,
"end": 539
} | class ____(graphene.ObjectType):
partitionKeys = non_null_list(graphene.String)
class Meta:
name = "PartitionKeys"
| GraphenePartitionKeys |
python | getsentry__sentry | tests/sentry/tasks/test_post_process.py | {
"start": 129878,
"end": 133088
} | class ____(
TestCase,
AssignmentTestMixin,
ProcessCommitsTestMixin,
CorePostProcessGroupTestMixin,
DeriveCodeMappingsProcessGroupTestMixin,
InboxTestMixin,
ResourceChangeBoundsTestMixin,
KickOffSeerAutomationTestMixin,
TriageSignalsV0TestMixin,
SeerAutomationHelperFunctionsTestMixin,
RuleProcessorTestMixin,
ServiceHooksTestMixin,
SnoozeTestMixin,
SnoozeTestSkipSnoozeMixin,
SDKCrashMonitoringTestMixin,
ReplayLinkageTestMixin,
DetectNewEscalationTestMixin,
UserReportEventLinkTestMixin,
DetectBaseUrlsForUptimeTestMixin,
ProcessSimilarityTestMixin,
CheckIfFlagsSentTestMixin,
):
def setUp(self) -> None:
super().setUp()
clear_replay_publisher()
def create_event(self, data, project_id, assert_no_errors=True):
return self.store_event(data=data, project_id=project_id, assert_no_errors=assert_no_errors)
def call_post_process_group(
self, is_new, is_regression, is_new_group_environment, event, cache_key=None
):
if cache_key is None:
cache_key = write_event_to_cache(event)
post_process_group(
is_new=is_new,
is_regression=is_regression,
is_new_group_environment=is_new_group_environment,
cache_key=cache_key,
group_id=event.group_id,
project_id=event.project_id,
eventstream_type=EventStreamEventType.Error.value,
)
return cache_key
@with_feature("organizations:escalating-metrics-backend")
@patch("sentry.sentry_metrics.client.generic_metrics_backend.counter")
@patch("sentry.utils.metrics.incr")
@patch("sentry.utils.metrics.timer")
def test_generic_metrics_backend_counter(
self, metric_timer_mock, metric_incr_mock, generic_metrics_backend_mock
):
min_ago = before_now(minutes=1).isoformat()
event = self.create_event(
data={
"exception": {
"values": [
{
"type": "ZeroDivisionError",
"stacktrace": {"frames": [{"function": f} for f in ["a", "b"]]},
}
]
},
"timestamp": min_ago,
"start_timestamp": min_ago,
"contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
},
project_id=self.project.id,
)
self.call_post_process_group(
is_new=True, is_regression=False, is_new_group_environment=True, event=event
)
assert generic_metrics_backend_mock.call_count == 1
metric_incr_mock.assert_any_call(
"sentry.tasks.post_process.post_process_group.completed",
tags={"issue_category": "error", "pipeline": "process_rules"},
)
metric_timer_mock.assert_any_call(
"tasks.post_process.run_post_process_job.pipeline.duration",
tags={
"pipeline": "process_rules",
"issue_category": "error",
"is_reprocessed": False,
},
)
| PostProcessGroupErrorTest |
python | pallets__werkzeug | src/werkzeug/routing/converters.py | {
"start": 3631,
"end": 4900
} | class ____(BaseConverter):
"""Baseclass for `IntegerConverter` and `FloatConverter`.
:internal:
"""
weight = 50
num_convert: t.Callable[[t.Any], t.Any] = int
def __init__(
self,
map: Map,
fixed_digits: int = 0,
min: int | None = None,
max: int | None = None,
signed: bool = False,
) -> None:
if signed:
self.regex = self.signed_regex
super().__init__(map)
self.fixed_digits = fixed_digits
self.min = min
self.max = max
self.signed = signed
def to_python(self, value: str) -> t.Any:
if self.fixed_digits and len(value) != self.fixed_digits:
raise ValidationError()
value_num = self.num_convert(value)
if (self.min is not None and value_num < self.min) or (
self.max is not None and value_num > self.max
):
raise ValidationError()
return value_num
def to_url(self, value: t.Any) -> str:
value_str = str(self.num_convert(value))
if self.fixed_digits:
value_str = value_str.zfill(self.fixed_digits)
return value_str
@property
def signed_regex(self) -> str:
return f"-?{self.regex}"
| NumberConverter |
python | getlogbook__logbook | src/logbook/handlers.py | {
"start": 14943,
"end": 15802
} | class ____:
"""Mixin class for handlers that are hashing records."""
def hash_record_raw(self, record):
"""Returns a hashlib object with the hash of the record."""
hash = sha1()
hash.update(("%d\x00" % record.level).encode("ascii")) # noqa: UP031
hash.update((record.channel or "").encode("utf-8") + b"\x00")
hash.update(record.filename.encode("utf-8") + b"\x00")
hash.update(str(record.lineno).encode("utf-8"))
return hash
def hash_record(self, record):
"""Returns a hash for a record to keep it apart from other records.
This is used for the `record_limit` feature. By default
The level, channel, filename and location are hashed.
Calls into :meth:`hash_record_raw`.
"""
return self.hash_record_raw(record).hexdigest()
| HashingHandlerMixin |
python | great-expectations__great_expectations | great_expectations/core/run_identifier.py | {
"start": 537,
"end": 3905
} | class ____(DataContextKey):
"""A RunIdentifier identifies a run (collection of validations) by run_name and run_time.
Args:
run_name: a string or None.
run_time: a Datetime.datetime instance, a string, or None.
"""
def __init__(
self,
run_name: Optional[str] = None,
run_time: Optional[Union[datetime.datetime, str]] = None,
) -> None:
super().__init__()
assert run_name is None or isinstance(run_name, str), "run_name must be an instance of str"
assert run_time is None or isinstance(run_time, (datetime.datetime, str)), (
"run_time must be either None or an instance of str or datetime"
)
self._run_name = run_name
if isinstance(run_time, str):
try:
run_time = parse(run_time)
except (ValueError, TypeError):
warnings.warn(
f'Unable to parse provided run_time str ("{run_time}") to datetime. Defaulting '
f"run_time to current time."
)
run_time = datetime.datetime.now(datetime.timezone.utc)
if not run_time:
try:
run_time = parse(run_name) # type: ignore[arg-type] # FIXME CoP
except (ValueError, TypeError):
run_time = None
run_time = run_time or datetime.datetime.now(tz=datetime.timezone.utc)
if not run_time.tzinfo:
# This will change the timzeone to UTC, and convert the time based
# on assuming that the current time is in local.
run_time = run_time.astimezone(tz=datetime.timezone.utc)
self._run_time = run_time
@property
def run_name(self):
return self._run_name
@property
def run_time(self):
return self._run_time
def to_tuple(self): # type: ignore[explicit-override] # FIXME
return (
self._run_name or "__none__",
self._run_time.astimezone(tz=datetime.timezone.utc).strftime("%Y%m%dT%H%M%S.%fZ"),
)
def to_fixed_length_tuple(self): # type: ignore[explicit-override] # FIXME
return (
self._run_name or "__none__",
self._run_time.astimezone(tz=datetime.timezone.utc).strftime("%Y%m%dT%H%M%S.%fZ"),
)
def __repr__(self): # type: ignore[explicit-override] # FIXME
return json.dumps(self.to_json_dict())
@override
def __str__(self):
return json.dumps(self.to_json_dict(), indent=2)
@public_api
def to_json_dict(self) -> Dict[str, JSONValues]:
"""Returns a JSON-serializable dict representation of this RunIdentifier.
Returns:
A JSON-serializable dict representation of this RunIdentifier.
"""
myself = runIdentifierSchema.dump(self)
return myself
def set_run_time_tz(self, tz: datetime.timezone | None):
"""Localize the run_time to the given timezone, or default to system local tz.
Args:
tz: The timezone to localize to.
"""
self._run_time = self._run_time.astimezone(tz=tz)
@classmethod
@override
def from_tuple(cls, tuple_):
return cls(tuple_[0], tuple_[1])
@classmethod
@override
def from_fixed_length_tuple(cls, tuple_):
return cls(tuple_[0], tuple_[1])
| RunIdentifier |
python | sympy__sympy | sympy/codegen/ast.py | {
"start": 16730,
"end": 16799
} | class ____(AugmentedAssignment):
binop = '/'
| DivAugmentedAssignment |
python | getsentry__sentry | src/sentry/workflow_engine/handlers/condition/event_seen_count_handler.py | {
"start": 310,
"end": 620
} | class ____(DataConditionHandler[WorkflowEventData]):
group = DataConditionHandler.Group.ACTION_FILTER
@staticmethod
def evaluate_value(event_data: WorkflowEventData, comparison: Any) -> bool:
group = event_data.group
return group.times_seen == comparison
| EventSeenCountConditionHandler |
python | google__jax | docs/autodidax.py | {
"start": 51772,
"end": 52241
} | class ____:
val: Any
def __init__(self, val):
self.val = val
def __hash__(self) -> int:
return id(self.val)
def __eq__(self, other):
return type(other) is IDHashable and id(self.val) == id(other.val)
# Next, we'll define the evaluation rule for `xla_call`:
# +
import io
from jax.extend.mlir import ir
from jax.extend.mlir.dialects import func
from jax.extend.mlir.dialects import stablehlo as hlo
from jax._src import xla_bridge as xb
| IDHashable |
python | django__django | tests/migrations/migrations_test_apps/mutate_state_a/migrations/0001_initial.py | {
"start": 43,
"end": 785
} | class ____(migrations.Migration):
dependencies = [
("mutate_state_b", "0001_initial"),
]
operations = [
migrations.SeparateDatabaseAndState(
[],
[
migrations.CreateModel(
name="A",
fields=[
(
"id",
models.AutoField(
serialize=False,
verbose_name="ID",
auto_created=True,
primary_key=True,
),
),
],
),
],
)
]
| Migration |
python | pytorch__pytorch | test/dynamo/test_graph_deduplication.py | {
"start": 10377,
"end": 12943
} | class ____(torch.nn.Module):
def forward(self, L_x_: "f32[10, 10]", L_y_: "f32[10, 20]"):
subgraph_1 = self.subgraph_1
subgraph_0 = self.subgraph_0
l_x_ = L_x_
l_y_ = L_y_
x0: "f32[10, 10]" = torch.cos(l_x_)
y0: "f32[10, 20]" = torch.sin(l_y_)
invoke_subgraph = torch.ops.higher_order.invoke_subgraph(subgraph_0, 'subgraph_0', l_x_, l_y_)
getitem: "f32[]" = invoke_subgraph[0]; invoke_subgraph = None
o1: "f32[]" = torch.sin(getitem); getitem = None
invoke_subgraph_1 = torch.ops.higher_order.invoke_subgraph(subgraph_0, 'subgraph_0', l_x_, y0)
getitem_1: "f32[]" = invoke_subgraph_1[0]; invoke_subgraph_1 = None
mul_2: "f32[]" = o1 * getitem_1; o1 = getitem_1 = None
invoke_subgraph_2 = torch.ops.higher_order.invoke_subgraph(subgraph_0, 'subgraph_0', l_x_, l_y_); subgraph_0 = l_x_ = l_y_ = None
getitem_2: "f32[]" = invoke_subgraph_2[0]; invoke_subgraph_2 = None
invoke_subgraph_3 = torch.ops.higher_order.invoke_subgraph(subgraph_1, 'subgraph_1', x0, y0); invoke_subgraph_3 = None
invoke_subgraph_4 = torch.ops.higher_order.invoke_subgraph(subgraph_1, 'subgraph_1', x0, y0); subgraph_1 = x0 = y0 = None
getitem_4: "f32[10, 10]" = invoke_subgraph_4[0]; invoke_subgraph_4 = None
mul_3: "f32[10, 10]" = mul_2 * getitem_4; mul_2 = getitem_4 = None
add_13: "f32[10, 10]" = mul_3 + getitem_2; mul_3 = getitem_2 = None
return (add_13,)
class subgraph_1(torch.nn.Module):
def forward(self, subgraph_input_x0, subgraph_input_y0):
a0: "f32[10, 10]" = subgraph_input_x0 + 2; subgraph_input_x0 = None
b0: "f32[10, 20]" = subgraph_input_y0 + 3; subgraph_input_y0 = None
cos_1: "f32[10, 20]" = b0.cos(); b0 = None
sum_1: "f32[]" = cos_1.sum(); cos_1 = None
c: "f32[10, 10]" = a0 * sum_1; a0 = sum_1 = None
return (c,)
class subgraph_0(torch.nn.Module):
def forward(self, subgraph_input_l_x_, subgraph_input_l_y_):
x1: "f32[10, 10]" = subgraph_input_l_x_ + 1; subgraph_input_l_x_ = None
y1: "f32[10, 20]" = subgraph_input_l_y_ + 2; subgraph_input_l_y_ = None
sum_2: "f32[]" = x1.sum(); x1 = None
sum_3: "f32[]" = y1.sum(); y1 = None
z: "f32[]" = sum_2 + sum_3; sum_2 = sum_3 = None
return (z,)
""",
)
self.assertExpectedInline(
graph_str(fw_graphs[0]),
"""\
| GraphModule |
python | getsentry__sentry | src/sentry/web/forms/accounts.py | {
"start": 8676,
"end": 8937
} | class ____(forms.Form):
otp = forms.CharField(
label=_("Authenticator code"),
max_length=20,
widget=forms.TextInput(
attrs={"placeholder": _("Authenticator or recovery code"), "autofocus": True}
),
)
| TwoFactorForm |
python | getsentry__sentry | src/sentry/integrations/github_enterprise/webhook.py | {
"start": 3798,
"end": 11599
} | class ____(Endpoint):
authentication_classes = ()
permission_classes = ()
_handlers: dict[str, type[GitHubWebhook]] = {}
# https://developer.github.com/webhooks/
def get_handler(self, event_type):
return self._handlers.get(event_type)
def is_valid_signature(self, method, body, secret, signature):
if method != "sha1" and method != "sha256":
raise UnsupportedSignatureAlgorithmError()
if method == "sha256":
expected = hmac.new(
key=secret.encode("utf-8"), msg=body, digestmod=hashlib.sha256
).hexdigest()
else:
expected = hmac.new(
key=secret.encode("utf-8"), msg=body, digestmod=hashlib.sha1
).hexdigest()
return constant_time_compare(expected, signature)
@method_decorator(csrf_exempt)
def dispatch(self, request: HttpRequest, *args, **kwargs) -> HttpResponse:
if request.method != "POST":
return HttpResponse(status=405)
return super().dispatch(request, *args, **kwargs)
def get_secret(self, event, host):
metadata = get_installation_metadata(event, host)
if metadata:
return metadata.get("webhook_secret")
else:
return None
def _handle(self, request: HttpRequest) -> HttpResponse:
clear_tags_and_context()
scope = sentry_sdk.get_isolation_scope()
try:
host = get_host(request=request)
if not host:
raise MissingRequiredHeaderError()
except MissingRequiredHeaderError as e:
logger.exception("github_enterprise.webhook.missing-enterprise-host")
sentry_sdk.capture_exception(e)
return HttpResponse(MISSING_GITHUB_ENTERPRISE_HOST_ERROR, status=400)
extra: dict[str, str | None] = {"host": host}
# If we do tag the host early we can't even investigate
scope.set_tag("host", host)
try:
body = bytes(request.body)
if len(body) == 0:
raise MissingWebhookPayloadError()
except MissingWebhookPayloadError as e:
logger.warning("github_enterprise.webhook.missing-body", extra=extra)
sentry_sdk.capture_exception(e)
return HttpResponse(MISSING_WEBHOOK_PAYLOAD_ERROR, status=400)
try:
github_event = request.headers.get("x-github-event")
if not github_event:
raise MissingRequiredHeaderError()
handler = self.get_handler(github_event)
except MissingRequiredHeaderError as e:
logger.exception("github_enterprise.webhook.missing-event", extra=extra)
sentry_sdk.capture_exception(e)
return HttpResponse(MISSING_GITHUB_EVENT_HEADER_ERROR, status=400)
if not handler:
return HttpResponse(status=204)
try:
# XXX: Sometimes they send us this b'payload=%7B%22ref%22 Support this
# See https://sentry.io/organizations/sentry/issues/2565421410
event = orjson.loads(body)
except orjson.JSONDecodeError:
logger.warning(
"github_enterprise.webhook.invalid-json",
extra=extra,
exc_info=True,
)
logger.exception("Invalid JSON.")
return HttpResponse(status=400)
secret = self.get_secret(event, host)
if not secret:
logger.warning("github_enterprise.webhook.missing-integration", extra=extra)
return HttpResponse(status=400)
try:
sha256_signature = request.headers.get("x-hub-signature-256")
sha1_signature = request.headers.get("x-hub-signature")
if not sha256_signature and not sha1_signature:
raise MissingRequiredHeaderError()
if sha256_signature:
if not re.match(SHA256_PATTERN, sha256_signature):
# before we try to parse the parts of the signature, make sure it
# looks as expected to avoid any IndexErrors when we split it
raise MalformedSignatureError()
_, signature = sha256_signature.split("=", 1)
extra["signature_algorithm"] = "sha256"
is_valid = self.is_valid_signature("sha256", body, secret, signature)
if not is_valid:
raise InvalidSignatureError()
if sha1_signature:
if not re.match(SHA1_PATTERN, sha1_signature):
# before we try to parse the parts of the signature, make sure it
# looks as expected to avoid any IndexErrors when we split it
raise MalformedSignatureError()
_, signature = sha1_signature.split("=", 1)
is_valid = self.is_valid_signature("sha1", body, secret, signature)
extra["signature_algorithm"] = "sha1"
if not is_valid:
raise InvalidSignatureError()
except InvalidSignatureError as e:
logger.warning("github_enterprise.webhook.invalid-signature", extra=extra)
sentry_sdk.capture_exception(e)
return HttpResponse(INVALID_SIGNATURE_ERROR, status=401)
except UnsupportedSignatureAlgorithmError as e:
# we should never end up here with the regex checks above on the signature format,
# but just in case
logger.exception(
"github-enterprise-app.webhook.unsupported-signature-algorithm",
extra=extra,
)
sentry_sdk.capture_exception(e)
return HttpResponse(UNSUPPORTED_SIGNATURE_ALGORITHM_ERROR, 400)
except MissingRequiredHeaderError as e:
# older versions of GitHub 2.14.0 and older do not always send signature headers
# Setting a signature secret is optional in GitHub, but we require it on Sentry
# Only a small subset of legacy hosts are allowed to skip the signature verification
# at the moment.
allowed_legacy_hosts = options.get(
"github-enterprise-app.allowed-hosts-legacy-webhooks"
)
if host not in allowed_legacy_hosts:
# the host is not allowed to skip signature verification by omitting the headers
logger.warning("github_enterprise.webhook.missing-signature", extra=extra)
sentry_sdk.capture_exception(e)
return HttpResponse(MISSING_SIGNATURE_HEADERS_ERROR, status=400)
else:
# the host is allowed to skip signature verification
# log it, and continue on.
extra["github_enterprise_version"] = request.headers.get(
"x-github-enterprise-version"
)
extra["ip_address"] = request.headers.get("x-real-ip")
logger.info("github_enterprise.webhook.allowed-missing-signature", extra=extra)
sentry_sdk.capture_message("Allowed missing signature")
except (MalformedSignatureError, IndexError) as e:
logger.warning("github_enterprise.webhook.malformed-signature", extra=extra)
sentry_sdk.capture_exception(e)
return HttpResponse(MALFORMED_SIGNATURE_ERROR, status=400)
event_handler = handler()
with IntegrationWebhookEvent(
interaction_type=event_handler.event_type,
domain=IntegrationDomain.SOURCE_CODE_MANAGEMENT,
provider_key=event_handler.provider,
).capture():
event_handler(event, host=host)
return HttpResponse(status=204)
@region_silo_endpoint
| GitHubEnterpriseWebhookBase |
python | realpython__materials | python-maze-solver/source_code_final/src/maze_solver/view/primitives.py | {
"start": 1599,
"end": 2171
} | class ____:
content: str
point: Point
def draw(self, **attributes) -> str:
return tag(
"text", self.content, x=self.point.x, y=self.point.y, **attributes
)
def tag(name: str, value: str | None = None, **attributes) -> str:
attrs = (
""
if not attributes
else " "
+ " ".join(
f'{key.replace("_", "-")}="{value}"'
for key, value in attributes.items()
)
)
if value is None:
return f"<{name}{attrs} />"
return f"<{name}{attrs}>{value}</{name}>"
| Text |
python | tensorflow__tensorflow | tensorflow/lite/python/lite.py | {
"start": 9501,
"end": 10467
} | class ____:
"""Representative dataset used to optimize the model.
This is a generator function that provides a small dataset to calibrate or
estimate the range, i.e, (min, max) of all floating-point arrays in the model
(such as model input, activation outputs of intermediate layers, and model
output) for quantization. Usually, this is a small subset of a few hundred
samples randomly chosen, in no particular order, from the training or
evaluation dataset.
"""
def __init__(self, input_gen):
"""Creates a representative dataset.
Args:
input_gen: A generator function that generates input samples for the model
and has the same order, type and shape as the inputs to the model.
Usually, this is a small subset of a few hundred samples randomly
chosen, in no particular order, from the training or evaluation dataset.
"""
self.input_gen = input_gen
@_tf_export("lite.TargetSpec")
| RepresentativeDataset |
python | celery__celery | celery/schedules.py | {
"start": 3037,
"end": 6462
} | class ____(BaseSchedule):
"""Schedule for periodic task.
Arguments:
run_every (float, ~datetime.timedelta): Time interval.
relative (bool): If set to True the run time will be rounded to the
resolution of the interval.
nowfun (Callable): Function returning the current date and time
(:class:`~datetime.datetime`).
app (Celery): Celery app instance.
"""
relative: bool = False
def __init__(self, run_every: float | timedelta | None = None,
relative: bool = False, nowfun: Callable | None = None, app: Celery
| None = None) -> None:
self.run_every = maybe_timedelta(run_every)
self.relative = relative
super().__init__(nowfun=nowfun, app=app)
def remaining_estimate(self, last_run_at: datetime) -> timedelta:
return remaining(
self.maybe_make_aware(last_run_at), self.run_every,
self.maybe_make_aware(self.now()), self.relative,
)
def is_due(self, last_run_at: datetime) -> tuple[bool, datetime]:
"""Return tuple of ``(is_due, next_time_to_check)``.
Notes:
- next time to check is in seconds.
- ``(True, 20)``, means the task should be run now, and the next
time to check is in 20 seconds.
- ``(False, 12.3)``, means the task is not due, but that the
scheduler should check again in 12.3 seconds.
The next time to check is used to save energy/CPU cycles,
it does not need to be accurate but will influence the precision
of your schedule. You must also keep in mind
the value of :setting:`beat_max_loop_interval`,
that decides the maximum number of seconds the scheduler can
sleep between re-checking the periodic task intervals. So if you
have a task that changes schedule at run-time then your next_run_at
check will decide how long it will take before a change to the
schedule takes effect. The max loop interval takes precedence
over the next check at value returned.
.. admonition:: Scheduler max interval variance
The default max loop interval may vary for different schedulers.
For the default scheduler the value is 5 minutes, but for example
the :pypi:`django-celery-beat` database scheduler the value
is 5 seconds.
"""
last_run_at = self.maybe_make_aware(last_run_at)
rem_delta = self.remaining_estimate(last_run_at)
remaining_s = max(rem_delta.total_seconds(), 0)
if remaining_s == 0:
return schedstate(is_due=True, next=self.seconds)
return schedstate(is_due=False, next=remaining_s)
def __repr__(self) -> str:
return f'<freq: {self.human_seconds}>'
def __eq__(self, other: Any) -> bool:
if isinstance(other, schedule):
return self.run_every == other.run_every
return self.run_every == other
def __reduce__(self) -> tuple[type,
tuple[timedelta, bool, Callable | None]]:
return self.__class__, (self.run_every, self.relative, self.nowfun)
@property
def seconds(self) -> int | float:
return max(self.run_every.total_seconds(), 0)
@property
def human_seconds(self) -> str:
return humanize_seconds(self.seconds)
| schedule |
python | numba__numba | numba/tests/test_random.py | {
"start": 4343,
"end": 4627
} | class ____(TestCase):
def _follow_cpython(self, ptr, seed=2):
r = random.Random(seed)
_copy_py_state(r, ptr)
return r
def _follow_numpy(self, ptr, seed=2):
r = np.random.RandomState(seed)
_copy_np_state(r, ptr)
return r
| BaseTest |
python | kubernetes-client__python | kubernetes/client/models/core_v1_event_list.py | {
"start": 383,
"end": 6840
} | class ____(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[CoreV1Event]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""CoreV1EventList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this CoreV1EventList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this CoreV1EventList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this CoreV1EventList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this CoreV1EventList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this CoreV1EventList. # noqa: E501
List of events # noqa: E501
:return: The items of this CoreV1EventList. # noqa: E501
:rtype: list[CoreV1Event]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this CoreV1EventList.
List of events # noqa: E501
:param items: The items of this CoreV1EventList. # noqa: E501
:type: list[CoreV1Event]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this CoreV1EventList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this CoreV1EventList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this CoreV1EventList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this CoreV1EventList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this CoreV1EventList. # noqa: E501
:return: The metadata of this CoreV1EventList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this CoreV1EventList.
:param metadata: The metadata of this CoreV1EventList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CoreV1EventList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, CoreV1EventList):
return True
return self.to_dict() != other.to_dict()
| CoreV1EventList |
python | kamyu104__LeetCode-Solutions | Python/find-number-of-ways-to-reach-the-k-th-stair.py | {
"start": 51,
"end": 608
} | class ____(object):
def waysToReachStair(self, k):
"""
:type k: int
:rtype: int
"""
def ceil_log2_x(x):
return (x-1).bit_length()
l = ceil_log2_x(k)
while (1<<l)-k <= l+1:
l += 1
fact = [1]*(l+1)
for i in xrange(len(fact)-1):
fact[i+1] = fact[i]*(i+1)
def nCr(n, r):
if not (0 <= r <= n):
return 0
return fact[n]//fact[r]//fact[n-r]
return sum(nCr(i+1, (1<<i)-k) for i in xrange(l))
| Solution |
python | html5lib__html5lib-python | html5lib/html5parser.py | {
"start": 86414,
"end": 89246
} | class ____(Phase):
# http://www.whatwg.org/specs/web-apps/current-work/#in-cell
__slots__ = tuple()
# helper
def closeCell(self):
if self.tree.elementInScope("td", variant="table"):
self.endTagTableCell(impliedTagToken("td"))
elif self.tree.elementInScope("th", variant="table"):
self.endTagTableCell(impliedTagToken("th"))
# the rest
def processEOF(self):
self.parser.phases["inBody"].processEOF()
def processCharacters(self, token):
return self.parser.phases["inBody"].processCharacters(token)
def startTagTableOther(self, token):
if (self.tree.elementInScope("td", variant="table") or
self.tree.elementInScope("th", variant="table")):
self.closeCell()
return token
else:
# innerHTML case
assert self.parser.innerHTML
self.parser.parseError()
def startTagOther(self, token):
return self.parser.phases["inBody"].processStartTag(token)
def endTagTableCell(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.tree.generateImpliedEndTags(token["name"])
if self.tree.openElements[-1].name != token["name"]:
self.parser.parseError("unexpected-cell-end-tag",
{"name": token["name"]})
while True:
node = self.tree.openElements.pop()
if node.name == token["name"]:
break
else:
self.tree.openElements.pop()
self.tree.clearActiveFormattingElements()
self.parser.phase = self.parser.phases["inRow"]
else:
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagIgnore(self, token):
self.parser.parseError("unexpected-end-tag", {"name": token["name"]})
def endTagImply(self, token):
if self.tree.elementInScope(token["name"], variant="table"):
self.closeCell()
return token
else:
# sometimes innerHTML case
self.parser.parseError()
def endTagOther(self, token):
return self.parser.phases["inBody"].processEndTag(token)
startTagHandler = _utils.MethodDispatcher([
("html", Phase.startTagHtml),
(("caption", "col", "colgroup", "tbody", "td", "tfoot", "th",
"thead", "tr"), startTagTableOther)
])
startTagHandler.default = startTagOther
endTagHandler = _utils.MethodDispatcher([
(("td", "th"), endTagTableCell),
(("body", "caption", "col", "colgroup", "html"), endTagIgnore),
(("table", "tbody", "tfoot", "thead", "tr"), endTagImply)
])
endTagHandler.default = endTagOther
| InCellPhase |
python | imageio__imageio | imageio/config/plugins.py | {
"start": 74,
"end": 20277
} | class ____:
"""Plugin Configuration Metadata
This class holds the information needed to lazy-import plugins.
Parameters
----------
name : str
The name of the plugin.
class_name : str
The name of the plugin class inside the plugin module.
module_name : str
The name of the module/package from which to import the plugin.
is_legacy : bool
If True, this plugin is a v2 plugin and will be wrapped in a
LegacyPlugin. Default: False.
package_name : str
If the given module name points to a relative module, then the package
name determines the package it is relative to.
install_name : str
The name of the optional dependency that can be used to install this
plugin if it is missing.
legacy_args : Dict
A dictionary of kwargs to pass to the v2 plugin (Format) upon construction.
Examples
--------
>>> PluginConfig(
name="TIFF",
class_name="TiffFormat",
module_name="imageio.plugins.tifffile",
is_legacy=True,
install_name="tifffile",
legacy_args={
"description": "TIFF format",
"extensions": ".tif .tiff .stk .lsm",
"modes": "iIvV",
},
)
>>> PluginConfig(
name="pillow",
class_name="PillowPlugin",
module_name="imageio.plugins.pillow"
)
"""
def __init__(
self,
name,
class_name,
module_name,
*,
is_legacy=False,
package_name=None,
install_name=None,
legacy_args=None,
):
legacy_args = legacy_args or dict()
self.name = name
self.class_name = class_name
self.module_name = module_name
self.package_name = package_name
self.is_legacy = is_legacy
self.install_name = install_name or self.name
self.legacy_args = {"name": name, "description": "A legacy plugin"}
self.legacy_args.update(legacy_args)
@property
def format(self):
"""For backwards compatibility with FormatManager
Delete when migrating to v3
"""
if not self.is_legacy:
raise RuntimeError("Can only get format for legacy plugins.")
module = importlib.import_module(self.module_name, self.package_name)
clazz = getattr(module, self.class_name)
return clazz(**self.legacy_args)
@property
def plugin_class(self):
"""Get the plugin class (import if needed)
Returns
-------
plugin_class : Any
The class that can be used to instantiate plugins.
"""
module = importlib.import_module(self.module_name, self.package_name)
clazz = getattr(module, self.class_name)
if self.is_legacy:
legacy_plugin = clazz(**self.legacy_args)
def partial_legacy_plugin(request):
return LegacyPlugin(request, legacy_plugin)
clazz = partial_legacy_plugin
return clazz
known_plugins = dict()
known_plugins["pillow"] = PluginConfig(
name="pillow", class_name="PillowPlugin", module_name="imageio.plugins.pillow"
)
known_plugins["pyav"] = PluginConfig(
name="pyav", class_name="PyAVPlugin", module_name="imageio.plugins.pyav"
)
known_plugins["opencv"] = PluginConfig(
name="opencv", class_name="OpenCVPlugin", module_name="imageio.plugins.opencv"
)
known_plugins["tifffile"] = PluginConfig(
name="tifffile",
class_name="TifffilePlugin",
module_name="imageio.plugins.tifffile_v3",
)
known_plugins["SPE"] = PluginConfig(
name="spe", class_name="SpePlugin", module_name="imageio.plugins.spe"
)
known_plugins["rawpy"] = PluginConfig(
name="rawpy", class_name="RawPyPlugin", module_name="imageio.plugins.rawpy"
)
# Legacy plugins
# ==============
#
# Which are partly registered by format, partly by plugin, and partly by a mix
# of both. We keep the naming here for backwards compatibility.
# In v3 this should become a single entry per plugin named after the plugin
# We can choose extension-specific priority in ``config.extensions``.
#
# Note: Since python 3.7 order of insertion determines the order of dict().keys()
# This means that the order here determines the order by which plugins are
# checked during the full fallback search. We don't advertise this downstream,
# but it could be a useful thing to keep in mind to choose a sensible default
# search order.
known_plugins["TIFF"] = PluginConfig(
name="TIFF",
class_name="TiffFormat",
module_name="imageio.plugins.tifffile",
is_legacy=True,
install_name="tifffile",
legacy_args={
"description": "TIFF format",
"extensions": ".tif .tiff .stk .lsm",
"modes": "iIvV",
},
)
# PILLOW plugin formats (legacy)
PILLOW_FORMATS = [
("BMP", "Windows Bitmap", ".bmp", "PillowFormat"),
("BUFR", "BUFR", ".bufr", "PillowFormat"),
("CUR", "Windows Cursor", ".cur", "PillowFormat"),
("DCX", "Intel DCX", ".dcx", "PillowFormat"),
("DDS", "DirectDraw Surface", ".dds", "PillowFormat"),
("DIB", "Windows Bitmap", "", "PillowFormat"),
("EPS", "Encapsulated Postscript", ".ps .eps", "PillowFormat"),
("FITS", "FITS", ".fit .fits", "PillowFormat"),
("FLI", "Autodesk FLI/FLC Animation", ".fli .flc", "PillowFormat"),
("FPX", "FlashPix", ".fpx", "PillowFormat"),
("FTEX", "Texture File Format (IW2:EOC)", ".ftc .ftu", "PillowFormat"),
("GBR", "GIMP brush file", ".gbr", "PillowFormat"),
("GIF", "Compuserve GIF", ".gif", "GIFFormat"),
("GRIB", "GRIB", ".grib", "PillowFormat"),
("HDF5", "HDF5", ".h5 .hdf", "PillowFormat"),
("ICNS", "Mac OS icns resource", ".icns", "PillowFormat"),
("ICO", "Windows Icon", ".ico", "PillowFormat"),
("IM", "IFUNC Image Memory", ".im", "PillowFormat"),
("IMT", "IM Tools", "", "PillowFormat"),
("IPTC", "IPTC/NAA", ".iim", "PillowFormat"),
("JPEG", "JPEG (ISO 10918)", ".jfif .jpe .jpg .jpeg", "JPEGFormat"),
(
"JPEG2000",
"JPEG 2000 (ISO 15444)",
".jp2 .j2k .jpc .jpf .jpx .j2c",
"JPEG2000Format",
),
("MCIDAS", "McIdas area file", "", "PillowFormat"),
("MIC", "Microsoft Image Composer", ".mic", "PillowFormat"),
# skipped in legacy pillow
# ("MPEG", "MPEG", ".mpg .mpeg", "PillowFormat"),
("MPO", "MPO (CIPA DC-007)", ".mpo", "PillowFormat"),
("MSP", "Windows Paint", ".msp", "PillowFormat"),
("PCD", "Kodak PhotoCD", ".pcd", "PillowFormat"),
("PCX", "Paintbrush", ".pcx", "PillowFormat"),
("PIXAR", "PIXAR raster image", ".pxr", "PillowFormat"),
("PNG", "Portable network graphics", ".png", "PNGFormat"),
("PPM", "Pbmplus image", ".pbm .pgm .ppm", "PillowFormat"),
("PSD", "Adobe Photoshop", ".psd", "PillowFormat"),
("SGI", "SGI Image File Format", ".bw .rgb .rgba .sgi", "PillowFormat"),
("SPIDER", "Spider 2D image", "", "PillowFormat"),
("SUN", "Sun Raster File", ".ras", "PillowFormat"),
("TGA", "Targa", ".tga", "PillowFormat"),
("TIFF", "Adobe TIFF", ".tif .tiff", "TIFFFormat"),
("WMF", "Windows Metafile", ".wmf .emf", "PillowFormat"),
("XBM", "X11 Bitmap", ".xbm", "PillowFormat"),
("XPM", "X11 Pixel Map", ".xpm", "PillowFormat"),
("XVTHUMB", "XV thumbnail image", "", "PillowFormat"),
]
for id, summary, ext, class_name in PILLOW_FORMATS:
config = PluginConfig(
name=id.upper() + "-PIL",
class_name=class_name,
module_name="imageio.plugins.pillow_legacy",
is_legacy=True,
install_name="pillow",
legacy_args={
"description": summary + " via Pillow",
"extensions": ext,
"modes": "iI" if class_name == "GIFFormat" else "i",
"plugin_id": id,
},
)
known_plugins[config.name] = config
known_plugins["FFMPEG"] = PluginConfig(
name="FFMPEG",
class_name="FfmpegFormat",
module_name="imageio.plugins.ffmpeg",
is_legacy=True,
install_name="ffmpeg",
legacy_args={
"description": "Many video formats and cameras (via ffmpeg)",
"extensions": ".mov .avi .mpg .mpeg .mp4 .mkv .webm .wmv .h264",
"modes": "I",
},
)
known_plugins["BSDF"] = PluginConfig(
name="BSDF",
class_name="BsdfFormat",
module_name="imageio.plugins.bsdf",
is_legacy=True,
install_name="bsdf",
legacy_args={
"description": "Format based on the Binary Structured Data Format",
"extensions": ".bsdf",
"modes": "iIvV",
},
)
known_plugins["DICOM"] = PluginConfig(
name="DICOM",
class_name="DicomFormat",
module_name="imageio.plugins.dicom",
is_legacy=True,
install_name="dicom",
legacy_args={
"description": "Digital Imaging and Communications in Medicine",
"extensions": ".dcm .ct .mri",
"modes": "iIvV",
},
)
known_plugins["FEI"] = PluginConfig(
name="FEI",
class_name="FEISEMFormat",
module_name="imageio.plugins.feisem",
is_legacy=True,
install_name="feisem",
legacy_args={
"description": "FEI-SEM TIFF format",
"extensions": [".tif", ".tiff"],
"modes": "iv",
},
)
known_plugins["FITS"] = PluginConfig(
name="FITS",
class_name="FitsFormat",
module_name="imageio.plugins.fits",
is_legacy=True,
install_name="fits",
legacy_args={
"description": "Flexible Image Transport System (FITS) format",
"extensions": ".fits .fit .fts .fz",
"modes": "iIvV",
},
)
known_plugins["GDAL"] = PluginConfig(
name="GDAL",
class_name="GdalFormat",
module_name="imageio.plugins.gdal",
is_legacy=True,
install_name="gdal",
legacy_args={
"description": "Geospatial Data Abstraction Library",
"extensions": ".tiff .tif .img .ecw .jpg .jpeg",
"modes": "iIvV",
},
)
known_plugins["ITK"] = PluginConfig(
name="ITK",
class_name="ItkFormat",
module_name="imageio.plugins.simpleitk",
is_legacy=True,
install_name="simpleitk",
legacy_args={
"description": "Insight Segmentation and Registration Toolkit (ITK) format",
"extensions": " ".join(
(
".gipl",
".ipl",
".mha",
".mhd",
".nhdr",
".nia",
".hdr",
".nrrd",
".nii",
".nii.gz",
".img",
".img.gz",
".vtk",
".hdf5",
".lsm",
".mnc",
".mnc2",
".mgh",
".mnc",
".pic",
".bmp",
".jpeg",
".jpg",
".png",
".tiff",
".tif",
".dicom",
".dcm",
".gdcm",
)
),
"modes": "iIvV",
},
)
known_plugins["NPZ"] = PluginConfig(
name="NPZ",
class_name="NpzFormat",
module_name="imageio.plugins.npz",
is_legacy=True,
install_name="numpy",
legacy_args={
"description": "Numpy's compressed array format",
"extensions": ".npz",
"modes": "iIvV",
},
)
known_plugins["SWF"] = PluginConfig(
name="SWF",
class_name="SWFFormat",
module_name="imageio.plugins.swf",
is_legacy=True,
install_name="swf",
legacy_args={
"description": "Shockwave flash",
"extensions": ".swf",
"modes": "I",
},
)
known_plugins["SCREENGRAB"] = PluginConfig(
name="SCREENGRAB",
class_name="ScreenGrabFormat",
module_name="imageio.plugins.grab",
is_legacy=True,
install_name="pillow",
legacy_args={
"description": "Grab screenshots (Windows and OS X only)",
"extensions": [],
"modes": "i",
},
)
known_plugins["CLIPBOARDGRAB"] = PluginConfig(
name="CLIPBOARDGRAB",
class_name="ClipboardGrabFormat",
module_name="imageio.plugins.grab",
is_legacy=True,
install_name="pillow",
legacy_args={
"description": "Grab from clipboard (Windows only)",
"extensions": [],
"modes": "i",
},
)
# LYTRO plugin (legacy)
lytro_formats = [
("lytro-lfr", "Lytro Illum lfr image file", ".lfr", "i", "LytroLfrFormat"),
(
"lytro-illum-raw",
"Lytro Illum raw image file",
".raw",
"i",
"LytroIllumRawFormat",
),
("lytro-lfp", "Lytro F01 lfp image file", ".lfp", "i", "LytroLfpFormat"),
("lytro-f01-raw", "Lytro F01 raw image file", ".raw", "i", "LytroF01RawFormat"),
]
for name, des, ext, mode, class_name in lytro_formats:
config = PluginConfig(
name=name.upper(),
class_name=class_name,
module_name="imageio.plugins.lytro",
is_legacy=True,
install_name="lytro",
legacy_args={
"description": des,
"extensions": ext,
"modes": mode,
},
)
known_plugins[config.name] = config
# FreeImage plugin (legacy)
FREEIMAGE_FORMATS = [
(
"BMP",
0,
"Windows or OS/2 Bitmap",
".bmp",
"i",
"FreeimageBmpFormat",
"imageio.plugins.freeimage",
),
(
"CUT",
21,
"Dr. Halo",
".cut",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"DDS",
24,
"DirectX Surface",
".dds",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"EXR",
29,
"ILM OpenEXR",
".exr",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"G3",
27,
"Raw fax format CCITT G.3",
".g3",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"GIF",
25,
"Static and animated gif (FreeImage)",
".gif",
"iI",
"GifFormat",
"imageio.plugins.freeimagemulti",
),
(
"HDR",
26,
"High Dynamic Range Image",
".hdr",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"ICO",
1,
"Windows Icon",
".ico",
"iI",
"IcoFormat",
"imageio.plugins.freeimagemulti",
),
(
"IFF",
5,
"IFF Interleaved Bitmap",
".iff .lbm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"J2K",
30,
"JPEG-2000 codestream",
".j2k .j2c",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"JNG",
3,
"JPEG Network Graphics",
".jng",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"JP2",
31,
"JPEG-2000 File Format",
".jp2",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"JPEG",
2,
"JPEG - JFIF Compliant",
".jpg .jif .jpeg .jpe",
"i",
"FreeimageJpegFormat",
"imageio.plugins.freeimage",
),
(
"JPEG-XR",
36,
"JPEG XR image format",
".jxr .wdp .hdp",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"KOALA",
4,
"C64 Koala Graphics",
".koa",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
# not registered in legacy pillow
# ("MNG", 6, "Multiple-image Network Graphics", ".mng", "i", "FreeimageFormat", "imageio.plugins.freeimage"),
(
"PBM",
7,
"Portable Bitmap (ASCII)",
".pbm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PBMRAW",
8,
"Portable Bitmap (RAW)",
".pbm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PCD",
9,
"Kodak PhotoCD",
".pcd",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PCX",
10,
"Zsoft Paintbrush",
".pcx",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PFM",
32,
"Portable floatmap",
".pfm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PGM",
11,
"Portable Greymap (ASCII)",
".pgm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PGMRAW",
12,
"Portable Greymap (RAW)",
".pgm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PICT",
33,
"Macintosh PICT",
".pct .pict .pic",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"PNG",
13,
"Portable Network Graphics",
".png",
"i",
"FreeimagePngFormat",
"imageio.plugins.freeimage",
),
(
"PPM",
14,
"Portable Pixelmap (ASCII)",
".ppm",
"i",
"FreeimagePnmFormat",
"imageio.plugins.freeimage",
),
(
"PPMRAW",
15,
"Portable Pixelmap (RAW)",
".ppm",
"i",
"FreeimagePnmFormat",
"imageio.plugins.freeimage",
),
(
"PSD",
20,
"Adobe Photoshop",
".psd",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"RAS",
16,
"Sun Raster Image",
".ras",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"RAW",
34,
"RAW camera image",
".3fr .arw .bay .bmq .cap .cine .cr2 .crw .cs1 .dc2 "
".dcr .drf .dsc .dng .erf .fff .ia .iiq .k25 .kc2 .kdc .mdc .mef .mos .mrw .nef .nrw .orf "
".pef .ptx .pxn .qtk .raf .raw .rdc .rw2 .rwl .rwz .sr2 .srf .srw .sti",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"SGI",
28,
"SGI Image Format",
".sgi .rgb .rgba .bw",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"TARGA",
17,
"Truevision Targa",
".tga .targa",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"TIFF",
18,
"Tagged Image File Format",
".tif .tiff",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"WBMP",
19,
"Wireless Bitmap",
".wap .wbmp .wbm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"WebP",
35,
"Google WebP image format",
".webp",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"XBM",
22,
"X11 Bitmap Format",
".xbm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
(
"XPM",
23,
"X11 Pixmap Format",
".xpm",
"i",
"FreeimageFormat",
"imageio.plugins.freeimage",
),
]
for name, i, des, ext, mode, class_name, module_name in FREEIMAGE_FORMATS:
config = PluginConfig(
name=name.upper() + "-FI",
class_name=class_name,
module_name=module_name,
is_legacy=True,
install_name="freeimage",
legacy_args={
"description": des,
"extensions": ext,
"modes": mode,
"fif": i,
},
)
known_plugins[config.name] = config
# exists for backwards compatibility with FormatManager
# delete in V3
_original_order = [x for x, config in known_plugins.items() if config.is_legacy]
| PluginConfig |
python | falconry__falcon | falcon/errors.py | {
"start": 99022,
"end": 101042
} | class ____(HTTPBadRequest):
"""400 Bad Request.
Exception raised by a media handler when trying to parse an empty body.
Note:
Some media handlers, like the one for URL-encoded forms, allow an
empty body. In these cases this exception will not be raised.
Args:
media_type (str): The media type that was expected.
Keyword Args:
headers (dict or list): A ``dict`` of header names and values
to set, or a ``list`` of (*name*, *value*) tuples. Both *name* and
*value* must be of type ``str`` or ``StringType``, and only
character values 0x00 through 0xFF may be used on platforms that
use wide characters.
Note:
The Content-Type header, if present, will be overridden. If
you wish to return custom error messages, you can create
your own HTTP error class, and install an error handler
to convert it into an appropriate HTTP response for the
client
Note:
Falcon can process a list of ``tuple`` slightly faster
than a ``dict``.
href (str): A URL someone can visit to find out more information
(default ``None``). Unicode characters are percent-encoded.
href_text (str): If href is given, use this as the friendly
title/description for the link (default 'API documentation
for this error').
code (int): An internal code that customers can reference in their
support request or to help them when searching for knowledge
base articles related to this error (default ``None``).
"""
def __init__(self, media_type: str, **kwargs: HTTPErrorKeywordArguments) -> None:
super().__init__(
title='Invalid {0}'.format(media_type),
description='Could not parse an empty {0} body'.format(media_type),
**kwargs, # type: ignore[arg-type]
)
| MediaNotFoundError |
python | tensorflow__tensorflow | tensorflow/python/distribute/tpu_values.py | {
"start": 5529,
"end": 8652
} | class ____(TPUVariableMixin, values.DistributedVariable):
"""DistributedVariable subclass for TPUStrategy."""
def assign_sub(self, value, use_locking=False, name=None, read_value=True):
if values_util.is_saving_non_distributed():
return self._primary.assign_sub(value, use_locking, name, read_value)
return self._policy.assign_sub(
self, value, use_locking=use_locking, name=name, read_value=read_value)
def assign_add(self, value, use_locking=False, name=None, read_value=True):
if values_util.is_saving_non_distributed():
return self._primary.assign_add(value, use_locking, name, read_value)
return self._policy.assign_add(
self, value, use_locking=use_locking, name=name, read_value=read_value)
def assign(self, value, use_locking=False, name=None, read_value=True):
if values_util.is_saving_non_distributed():
return self._primary.assign(value, use_locking, name, read_value)
return self._policy.assign(
self, value, use_locking=use_locking, name=name, read_value=read_value)
def scatter_sub(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_sub(sparse_delta, use_locking, name)
return self._policy.scatter_sub(
self, sparse_delta, use_locking=use_locking, name=name)
def scatter_add(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_add(sparse_delta, use_locking, name)
return self._policy.scatter_add(
self, sparse_delta, use_locking=use_locking, name=name)
def scatter_mul(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_mul(sparse_delta, use_locking, name)
return self._policy.scatter_mul(
self, sparse_delta, use_locking=use_locking, name=name)
def scatter_div(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_div(sparse_delta, use_locking, name)
return self._policy.scatter_div(
self, sparse_delta, use_locking=use_locking, name=name)
def scatter_min(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_min(sparse_delta, use_locking, name)
return self._policy.scatter_min(
self, sparse_delta, use_locking=use_locking, name=name)
def scatter_max(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_max(sparse_delta, use_locking, name)
return self._policy.scatter_max(
self, sparse_delta, use_locking=use_locking, name=name)
def scatter_update(self, sparse_delta, use_locking=False, name=None):
if values_util.is_saving_non_distributed():
return self._primary.scatter_update(sparse_delta, use_locking, name)
return self._policy.scatter_update(
self, sparse_delta, use_locking=use_locking, name=name)
| TPUDistributedVariable |
python | numba__numba | numba/tests/test_listimpl.py | {
"start": 5811,
"end": 16147
} | class ____(TestCase):
def setUp(self):
"""Bind to the c_helper library and provide the ctypes wrapper.
"""
list_t = ctypes.c_void_p
iter_t = ctypes.c_void_p
def wrap(name, restype, argtypes=()):
proto = ctypes.CFUNCTYPE(restype, *argtypes)
return proto(_helperlib.c_helpers[name])
# numba_test_list()
self.numba_test_list = wrap(
'test_list',
ctypes.c_int,
)
# numba_list_new(NB_List *l, Py_ssize_t item_size, Py_ssize_t allocated)
self.numba_list_new = wrap(
'list_new',
ctypes.c_int,
[ctypes.POINTER(list_t), ctypes.c_ssize_t, ctypes.c_ssize_t],
)
# numba_list_free(NB_List *l)
self.numba_list_free = wrap(
'list_free',
None,
[list_t],
)
# numba_list_length(NB_List *l)
self.numba_list_length = wrap(
'list_length',
ctypes.c_int,
[list_t],
)
# numba_list_allocated(NB_List *l)
self.numba_list_allocated = wrap(
'list_allocated',
ctypes.c_int,
[list_t],
)
# numba_list_is_mutable(NB_List *lp)
self.numba_list_is_mutable = wrap(
'list_is_mutable',
ctypes.c_int,
[list_t],
)
# numba_list_set_is_mutable(NB_List *lp, int is_mutable)
self.numba_list_set_is_mutable = wrap(
'list_set_is_mutable',
None,
[list_t, ctypes.c_int],
)
# numba_list_setitem(NB_List *l, Py_ssize_t i, const char *item)
self.numba_list_setitem = wrap(
'list_setitem',
ctypes.c_int,
[list_t, ctypes.c_ssize_t, ctypes.c_char_p],
)
# numba_list_append(NB_List *l, const char *item)
self.numba_list_append = wrap(
'list_append',
ctypes.c_int,
[list_t, ctypes.c_char_p],
)
# numba_list_getitem(NB_List *l, Py_ssize_t i, char *out)
self.numba_list_getitem = wrap(
'list_getitem',
ctypes.c_int,
[list_t, ctypes.c_ssize_t, ctypes.c_char_p],
)
# numba_list_delitem(NB_List *l, Py_ssize_t i)
self.numba_list_delitem = wrap(
'list_delitem',
ctypes.c_int,
[list_t, ctypes.c_ssize_t],
)
# numba_list_delete_slice(NB_List *l,
# Py_ssize_t start,
# Py_ssize_t stop,
# Py_ssize_t step)
self.numba_list_delete_slice = wrap(
'list_delete_slice',
ctypes.c_int,
[list_t, ctypes.c_ssize_t, ctypes.c_ssize_t, ctypes.c_ssize_t],
)
# numba_list_iter_sizeof()
self.numba_list_iter_sizeof = wrap(
'list_iter_sizeof',
ctypes.c_size_t,
)
# numba_list_iter(NB_ListIter *it, NB_List *l)
self.numba_list_iter = wrap(
'list_iter',
None,
[
iter_t,
list_t,
],
)
# numba_list_iter_next(NB_ListIter *it, const char **item_ptr)
self.numba_list_iter_next = wrap(
'list_iter_next',
ctypes.c_int,
[
iter_t, # it
ctypes.POINTER(ctypes.c_void_p), # item_ptr
],
)
def test_simple_c_test(self):
# Runs the basic test in C.
ret = self.numba_test_list()
self.assertEqual(ret, 0)
def test_length(self):
l = List(self, 8, 0)
self.assertEqual(len(l), 0)
def test_allocation(self):
for i in range(16):
l = List(self, 8, i)
self.assertEqual(len(l), 0)
self.assertEqual(l.allocated, i)
def test_append_get_string(self):
l = List(self, 8, 1)
l.append(b"abcdefgh")
self.assertEqual(len(l), 1)
r = l[0]
self.assertEqual(r, b"abcdefgh")
def test_append_get_int(self):
l = List(self, 8, 1)
l.append(struct.pack("q", 1))
self.assertEqual(len(l), 1)
r = struct.unpack("q", l[0])[0]
self.assertEqual(r, 1)
def test_append_get_string_realloc(self):
l = List(self, 8, 1)
l.append(b"abcdefgh")
self.assertEqual(len(l), 1)
l.append(b"hijklmno")
self.assertEqual(len(l), 2)
r = l[1]
self.assertEqual(r, b"hijklmno")
def test_set_item_getitem_index_error(self):
l = List(self, 8, 0)
with self.assertRaises(IndexError):
l[0]
with self.assertRaises(IndexError):
l[0] = b"abcdefgh"
def test_iter(self):
l = List(self, 1, 0)
values = [b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h']
for i in values:
l.append(i)
received = []
for j in l:
received.append(j)
self.assertEqual(values, received)
def test_pop(self):
l = List(self, 1, 0)
values = [b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h']
for i in values:
l.append(i)
self.assertEqual(len(l), 8)
received = l.pop()
self.assertEqual(b'h', received)
self.assertEqual(len(l), 7)
received = [j for j in l]
self.assertEqual(received, values[:-1])
received = l.pop(0)
self.assertEqual(b'a', received)
self.assertEqual(len(l), 6)
received = l.pop(2)
self.assertEqual(b'd', received)
self.assertEqual(len(l), 5)
expected = [b'b', b'c', b'e', b'f', b'g']
received = [j for j in l]
self.assertEqual(received, expected)
def test_pop_index_error(self):
l = List(self, 8, 0)
with self.assertRaises(IndexError):
l.pop()
def test_pop_byte(self):
l = List(self, 4, 0)
values = [b'aaaa', b'bbbb', b'cccc', b'dddd',
b'eeee', b'ffff', b'gggg', b'hhhhh']
for i in values:
l.append(i)
self.assertEqual(len(l), 8)
received = l.pop()
self.assertEqual(b'hhhh', received)
self.assertEqual(len(l), 7)
received = [j for j in l]
self.assertEqual(received, values[:-1])
received = l.pop(0)
self.assertEqual(b'aaaa', received)
self.assertEqual(len(l), 6)
received = l.pop(2)
self.assertEqual(b'dddd', received)
self.assertEqual(len(l), 5)
expected = [b'bbbb', b'cccc', b'eeee', b'ffff', b'gggg']
received = [j for j in l]
self.assertEqual(received, expected)
def test_delitem(self):
l = List(self, 1, 0)
values = [b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h']
for i in values:
l.append(i)
self.assertEqual(len(l), 8)
# delete first item
del l[0]
self.assertEqual(len(l), 7)
self.assertEqual(list(l), values[1:])
# delete last item
del l[-1]
self.assertEqual(len(l), 6)
self.assertEqual(list(l), values[1:-1])
# delete item from middle
del l[2]
self.assertEqual(len(l), 5)
self.assertEqual(list(l), [b'b', b'c', b'e', b'f', b'g'])
def test_delete_slice(self):
l = List(self, 1, 0)
values = [b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h']
for i in values:
l.append(i)
self.assertEqual(len(l), 8)
# delete every second item
# no slice default normalization here, be explicit about start anb stop
del l[0:8:2]
self.assertEqual(len(l), 4)
self.assertEqual(list(l), values[1:8:2])
# delete first item
del l[0:1:1]
self.assertEqual(len(l), 3)
self.assertEqual(list(l), [b'd', b'f', b'h'])
# delete last item
del l[2:3:1]
self.assertEqual(len(l), 2)
self.assertEqual(list(l), [b'd', b'f'])
# delete all left items
del l[0:2:1]
self.assertEqual(len(l), 0)
self.assertEqual(list(l), [])
def check_sizing(self, item_size, nmax):
# Helper to verify different item_sizes
l = List(self, item_size, 0)
def make_item(v):
tmp = "{:0{}}".format(nmax - v - 1, item_size).encode("latin-1")
return tmp[:item_size]
for i in range(nmax):
l.append(make_item(i))
self.assertEqual(len(l), nmax)
for i in range(nmax):
self.assertEqual(l[i], make_item(i))
def test_sizing(self):
# Check different sizes of the key & value.
for i in range(1, 16):
self.check_sizing(item_size=i, nmax=2**i)
def test_mutability(self):
# setup and populate a singleton
l = List(self, 8, 1)
one = struct.pack("q", 1)
l.append(one)
self.assertTrue(l.is_mutable)
self.assertEqual(len(l), 1)
r = struct.unpack("q", l[0])[0]
self.assertEqual(r, 1)
# set to immutable and test guards
l.set_immutable()
self.assertFalse(l.is_mutable)
# append
with self.assertRaises(ValueError) as raises:
l.append(one)
self.assertIn("list is immutable", str(raises.exception))
# setitem
with self.assertRaises(ValueError) as raises:
l[0] = one
self.assertIn("list is immutable", str(raises.exception))
# pop
with self.assertRaises(ValueError) as raises:
l.pop()
self.assertIn("list is immutable", str(raises.exception))
# delitem with index
with self.assertRaises(ValueError) as raises:
del l[0]
self.assertIn("list is immutable", str(raises.exception))
# delitem with slice
with self.assertRaises(ValueError) as raises:
del l[0:1:1]
self.assertIn("list is immutable", str(raises.exception))
l.set_mutable()
# check that nothing has changed
self.assertTrue(l.is_mutable)
self.assertEqual(len(l), 1)
r = struct.unpack("q", l[0])[0]
self.assertEqual(r, 1)
| TestListImpl |
python | wandb__wandb | wandb/integration/torch/wandb_torch.py | {
"start": 11148,
"end": 21526
} | class ____(wandb.data_types.Graph):
def __init__(self):
super().__init__("torch")
self._graph_hooks = set()
@classmethod
def hook_torch(cls, model, criterion=None, graph_idx=0):
wandb.termlog("logging graph, to disable use `wandb.watch(log_graph=False)`")
graph = TorchGraph()
graph.hook_torch_modules(model, criterion, graph_idx=graph_idx)
return graph
def create_forward_hook(self, name, graph_idx):
graph = self
def after_forward_hook(module, input, output):
if id(module) not in self._graph_hooks:
# hook already processed -> noop
return
if not isinstance(output, tuple):
output = (output,)
parameters = [
(pname, list(param.size()))
for pname, param in module.named_parameters()
]
node = Node(
id=id(module),
name=name,
class_name=str(module),
output_shape=nested_shape(output),
parameters=parameters,
num_parameters=[reduce(mul, size, 1) for (pname, size) in parameters],
)
graph.nodes_by_id[id(module)] = node
for param in module.parameters():
graph.nodes_by_id[id(param)] = node
graph.add_node(node)
if not graph.criterion_passed:
if hasattr(output[0], "grad_fn"):
graph.criterion = output[0].grad_fn
elif (
isinstance(output[0], list)
and output[0]
and hasattr(output[0][0], "grad_fn")
):
graph.criterion = output[0][0].grad_fn
# hook has been processed
self._graph_hooks -= {id(module)}
if not self._graph_hooks:
# we went through the entire graph
wandb.run.summary[f"graph_{graph_idx}"] = self
return after_forward_hook
def hook_torch_modules(
self, module, criterion=None, prefix=None, graph_idx=0, parent=None
):
torch = util.get_module("torch", "Could not import torch")
layers = 0
graph = self
if hasattr(module, "_wandb_watch_called") and module._wandb_watch_called:
raise ValueError(
"You can only call `wandb.watch` once per model. Pass a new instance of the model if you need to call wandb.watch again in your code."
)
module._wandb_watch_called = True
if criterion:
graph.criterion = criterion
graph.criterion_passed = True
for name, sub_module in module.named_children():
name = name or str(layers)
if prefix:
name = prefix + "." + name
layers += 1
if not isinstance(sub_module, torch.nn.Module):
# TODO: Why does this happen?
break
# Trying to support torch >0.3 making this code complicated
# We want a list of types that we should recurse into
# Torch 0.3 uses containers
# 0.4 has ModuleList
# 0.4.1 has ModuleDict
module_types = [
getattr(torch.nn, module_classname)
for module_classname in (
"Container",
"Sequential",
"ModuleList",
"ModuleDict",
)
if hasattr(torch.nn, module_classname)
]
if parent is None:
parent = module
if isinstance(sub_module, tuple(module_types)):
self.hook_torch_modules(sub_module, prefix=name, parent=parent)
else:
self._graph_hooks |= {id(sub_module)}
try:
graph_hook = sub_module.register_forward_hook(
self.create_forward_hook(name, graph_idx)
)
wandb.run._torch._hook_handles[
"topology/" + str(id(graph_hook))
] = graph_hook
if not hasattr(parent, "_wandb_hook_names"):
# should never happen but let's be extra safe
parent._wandb_hook_names = []
parent._wandb_hook_names.append("topology/" + str(id(graph_hook)))
except RuntimeError as e:
wandb.termwarn(
f"Trying to register forward_hook failed ({e}) - skipping graph tracking.",
repeat=False,
)
@classmethod
def from_torch_layers(cls, module_graph, variable):
"""Recover something like neural net layers from PyTorch Module's and the compute graph from a Variable.
Example output for a multi-layer RNN. We confusingly assign shared embedding values
to the encoder, but ordered next to the decoder.
rnns.0.linear.module.weight_raw rnns.0
rnns.0.linear.module.bias rnns.0
rnns.1.linear.module.weight_raw rnns.1
rnns.1.linear.module.bias rnns.1
rnns.2.linear.module.weight_raw rnns.2
rnns.2.linear.module.bias rnns.2
rnns.3.linear.module.weight_raw rnns.3
rnns.3.linear.module.bias rnns.3
decoder.weight encoder
decoder.bias decoder
"""
# TODO: We're currently not using this, but I left it here in case we want to resurrect! - CVP
torch = util.get_module("torch", "Could not import torch")
module_nodes_by_hash = {id(n): n for n in module_graph.nodes}
module_parameter_nodes = [
n for n in module_graph.nodes if isinstance(n.obj, torch.nn.Parameter)
]
names_by_pid = {id(n.obj): n.name for n in module_parameter_nodes}
reachable_param_nodes = module_graph[0].reachable_descendents()
reachable_params = {}
module_reachable_params = {}
names = {}
for pid, reachable_nodes in reachable_param_nodes.items():
node = module_nodes_by_hash[pid]
if not isinstance(node.obj, torch.nn.Module):
continue
module = node.obj
reachable_params = {} # by object id
module_reachable_params[id(module)] = reachable_params
names[node.name] = set()
for reachable_hash in reachable_nodes:
reachable = module_nodes_by_hash[reachable_hash]
if isinstance(reachable.obj, torch.nn.Parameter):
param = reachable.obj
reachable_params[id(param)] = param
names[node.name].add(names_by_pid[id(param)])
# we look for correspondences between sets of parameters used in subtrees of the
# computation graph and sets of parameters contained in subtrees of the module
# graph
node_depths = {id(n): d for n, d in module_graph[0].descendent_bfs()}
parameter_module_names = {}
parameter_modules = {}
for param_node in (
n for n in module_graph.nodes if isinstance(n.obj, torch.nn.Parameter)
):
pid = id(param_node.obj)
best_node = None
best_depth = None
best_reachable_params = None
for node in module_graph.nodes:
if not isinstance(node.obj, torch.nn.Module):
continue
module = node.obj
reachable_params = module_reachable_params[id(module)]
if pid in reachable_params:
depth = node_depths[id(node)]
if best_node is None or (len(reachable_params), depth) <= (
len(best_reachable_params),
best_depth,
):
best_node = node
best_depth = depth
best_reachable_params = reachable_params
parameter_modules[pid] = best_node
parameter_module_names[param_node.name] = best_node.name
# contains all parameters but only a minimal set of modules necessary
# to contain them (and which ideally correspond to conceptual layers)
reduced_module_graph = cls()
rmg_ids = itertools.count()
rmg_root = Node(id=next(rmg_ids), node=module_graph[0])
reduced_module_graph.add_node(rmg_root)
reduced_module_graph.root = rmg_root
rmg_nodes_by_pid = {}
module_nodes_by_pid = {id(n.obj): n for n in module_graph.nodes}
compute_graph, compute_node_vars = cls.from_torch_compute_graph(variable)
for node, _ in reversed(list(compute_graph[0].ancestor_bfs())):
param = compute_node_vars.get(node.id)
pid = id(param)
if not isinstance(param, torch.nn.Parameter):
continue
if pid not in module_nodes_by_pid:
# not all Parameters that occur in the compute graph come from the Module graph
continue
# add the nodes in the order we want to display them on the frontend
mid = id(parameter_modules[pid].obj)
if mid in rmg_nodes_by_pid:
rmg_module = rmg_nodes_by_pid[mid]
else:
rmg_module = rmg_nodes_by_pid[mid] = Node(
id=next(rmg_ids), node=module_nodes_by_pid[mid]
)
reduced_module_graph.add_node(rmg_module)
reduced_module_graph.add_edge(rmg_root, rmg_module)
rmg_param = Node(id=next(rmg_ids), node=module_nodes_by_pid[pid])
rmg_nodes_by_pid[pid] = rmg_param
reduced_module_graph.add_node(rmg_param)
reduced_module_graph.add_edge(rmg_module, rmg_param)
return reduced_module_graph
@classmethod
def node_from_module(cls, nid, module):
numpy = util.get_module("numpy", "Could not import numpy")
node = wandb.Node()
node.id = nid
node.child_parameters = 0
for parameter in module.parameters():
node.child_parameters += numpy.prod(parameter.size())
node.class_name = type(module).__name__
return node
| TorchGraph |
python | django__django | tests/datatypes/models.py | {
"start": 184,
"end": 585
} | class ____(models.Model):
name = models.CharField(max_length=100)
is_frosted = models.BooleanField(default=False)
has_sprinkles = models.BooleanField(null=True)
baked_date = models.DateField(null=True)
baked_time = models.TimeField(null=True)
consumed_at = models.DateTimeField(null=True)
review = models.TextField()
class Meta:
ordering = ("consumed_at",)
| Donut |
python | davidhalter__jedi | jedi/inference/value/module.py | {
"start": 639,
"end": 1296
} | class ____(AbstractNameDefinition):
"""
For module attributes like __file__, __str__ and so on.
"""
api_type = 'instance'
def __init__(self, parent_module, string_name, string_value=None):
self.parent_context = parent_module
self.string_name = string_name
self._string_value = string_value
def infer(self):
if self._string_value is not None:
s = self._string_value
return ValueSet([
create_simple_object(self.parent_context.inference_state, s)
])
return compiled.get_string_value_set(self.parent_context.inference_state)
| _ModuleAttributeName |
python | astropy__astropy | astropy/modeling/polynomial.py | {
"start": 5850,
"end": 13301
} | class ____(PolynomialBase):
"""
This is a base class for the 2D Chebyshev and Legendre models.
The polynomials implemented here require a maximum degree in x and y.
For explanation of ``x_domain``, ``y_domain``, ```x_window`` and ```y_window``
see :ref:`Notes regarding usage of domain and window <astropy:domain-window-note>`.
Parameters
----------
x_degree : int
degree in x
y_degree : int
degree in y
x_domain : tuple or None, optional
domain of the x independent variable
x_window : tuple or None, optional
range of the x independent variable
y_domain : tuple or None, optional
domain of the y independent variable
y_window : tuple or None, optional
range of the y independent variable
**params : dict
{keyword: value} pairs, representing {parameter_name: value}
"""
n_inputs = 2
n_outputs = 1
def __init__(
self,
x_degree,
y_degree,
x_domain=None,
x_window=None,
y_domain=None,
y_window=None,
n_models=None,
model_set_axis=None,
name=None,
meta=None,
**params,
):
self.x_degree = x_degree
self.y_degree = y_degree
self._order = self.get_num_coeff()
# Set the ``x/y_domain`` and ``x/y_wndow`` attributes in subclasses.
self._default_domain_window = {
"x_window": (-1, 1),
"y_window": (-1, 1),
"x_domain": None,
"y_domain": None,
}
self.x_window = x_window or self._default_domain_window["x_window"]
self.y_window = y_window or self._default_domain_window["y_window"]
self.x_domain = x_domain
self.y_domain = y_domain
self._param_names = self._generate_coeff_names()
if n_models:
if model_set_axis is None:
model_set_axis = 0
minshape = (1,) * model_set_axis + (n_models,)
else:
minshape = ()
for param_name in self._param_names:
self._parameters_[param_name] = Parameter(
param_name, default=np.zeros(minshape)
)
super().__init__(
n_models=n_models,
model_set_axis=model_set_axis,
name=name,
meta=meta,
**params,
)
@property
def x_domain(self):
return self._x_domain
@x_domain.setter
def x_domain(self, val):
self._x_domain = _validate_domain_window(val)
@property
def y_domain(self):
return self._y_domain
@y_domain.setter
def y_domain(self, val):
self._y_domain = _validate_domain_window(val)
@property
def x_window(self):
return self._x_window
@x_window.setter
def x_window(self, val):
self._x_window = _validate_domain_window(val)
@property
def y_window(self):
return self._y_window
@y_window.setter
def y_window(self, val):
self._y_window = _validate_domain_window(val)
def __repr__(self):
return self._format_repr(
[self.x_degree, self.y_degree],
kwargs={
"x_domain": self.x_domain,
"y_domain": self.y_domain,
"x_window": self.x_window,
"y_window": self.y_window,
},
defaults=self._default_domain_window,
)
def __str__(self):
return self._format_str(
[
("X_Degree", self.x_degree),
("Y_Degree", self.y_degree),
("X_Domain", self.x_domain),
("Y_Domain", self.y_domain),
("X_Window", self.x_window),
("Y_Window", self.y_window),
],
self._default_domain_window,
)
def get_num_coeff(self):
"""
Determine how many coefficients are needed.
Returns
-------
numc : int
number of coefficients
"""
if self.x_degree < 0 or self.y_degree < 0:
raise ValueError("Degree of polynomial must be positive or null")
return (self.x_degree + 1) * (self.y_degree + 1)
def _invlex(self):
# TODO: This is a very slow way to do this; fix it and related methods
# like _alpha
c = []
xvar = np.arange(self.x_degree + 1)
yvar = np.arange(self.y_degree + 1)
for j in yvar:
for i in xvar:
c.append((i, j))
return np.array(c[::-1])
def invlex_coeff(self, coeffs):
invlex_coeffs = []
xvar = np.arange(self.x_degree + 1)
yvar = np.arange(self.y_degree + 1)
for j in yvar:
for i in xvar:
name = f"c{i}_{j}"
coeff = coeffs[self.param_names.index(name)]
invlex_coeffs.append(coeff)
return np.array(invlex_coeffs[::-1])
def _alpha(self):
invlexdeg = self._invlex()
invlexdeg[:, 1] = invlexdeg[:, 1] + self.x_degree + 1
nx = self.x_degree + 1
ny = self.y_degree + 1
alpha = np.zeros((ny * nx + 3, ny + nx))
for n in range(len(invlexdeg)):
alpha[n][invlexdeg[n]] = [1, 1]
alpha[-2, 0] = 1
alpha[-3, nx] = 1
return alpha
def imhorner(self, x, y, coeff):
_coeff = list(coeff)
_coeff.extend([0, 0, 0])
alpha = self._alpha()
r0 = _coeff[0]
nalpha = len(alpha)
karr = np.diff(alpha, axis=0)
kfunc = self._fcache(x, y)
x_terms = self.x_degree + 1
y_terms = self.y_degree + 1
nterms = x_terms + y_terms
for n in range(1, nterms + 1 + 3):
setattr(self, "r" + str(n), 0.0)
for n in range(1, nalpha):
k = karr[n - 1].nonzero()[0].max() + 1
rsum = 0
for i in range(1, k + 1):
rsum = rsum + getattr(self, "r" + str(i))
val = kfunc[k - 1] * (r0 + rsum)
setattr(self, "r" + str(k), val)
r0 = _coeff[n]
for i in range(1, k):
setattr(self, "r" + str(i), 0.0)
result = r0
for i in range(1, nterms + 1 + 3):
result = result + getattr(self, "r" + str(i))
return result
def _generate_coeff_names(self):
names = []
for j in range(self.y_degree + 1):
for i in range(self.x_degree + 1):
names.append(f"c{i}_{j}")
return tuple(names)
def _fcache(self, x, y):
"""
Computation and store the individual functions.
To be implemented by subclasses"
"""
raise NotImplementedError("Subclasses should implement this")
def evaluate(self, x, y, *coeffs):
if self.x_domain is not None:
x = poly_map_domain(x, self.x_domain, self.x_window)
if self.y_domain is not None:
y = poly_map_domain(y, self.y_domain, self.y_window)
invcoeff = self.invlex_coeff(coeffs)
return self.imhorner(x, y, invcoeff)
def prepare_inputs(self, x, y, **kwargs):
inputs, broadcasted_shapes = super().prepare_inputs(x, y, **kwargs)
x, y = inputs
if x.shape != y.shape:
raise ValueError("Expected input arrays to have the same shape")
return (x, y), broadcasted_shapes
| OrthoPolynomialBase |
python | scipy__scipy | scipy/optimize/tests/test_slsqp.py | {
"start": 378,
"end": 1003
} | class ____:
"""pass a custom callback function
This makes sure it's being used.
"""
def __init__(self):
self.been_called = False
self.ncalls = 0
def __call__(self, x):
assert not isinstance(x, OptimizeResult)
self.been_called = True
self.ncalls += 1
def callback2(self, intermediate_result):
assert isinstance(intermediate_result, OptimizeResult)
self.been_called = True
self.ncalls += 1
def callback3(self, intermediate_result):
assert isinstance(intermediate_result, OptimizeResult)
raise StopIteration
| MyCallBack |
python | PyCQA__pylint | tests/functional/o/overridden_final_method_py38.py | {
"start": 178,
"end": 241
} | class ____:
@final
def my_method(self):
pass
| Base |
python | tensorflow__tensorflow | tensorflow/python/autograph/converters/control_flow_test.py | {
"start": 2021,
"end": 3497
} | class ____(ControlFlowTestBase):
def test_basic(self):
def f(n):
i = 0
j = 0
s = 0
while i < n:
while j < i:
j += 3
u = i + j # 'u' is not defined within the inner loop
s += u
i += 1
j = 0
return s, i, j, n
self.assertTransformedResult(f, constant_op.constant(5),
(25, 5, 0, 5))
def test_mixed_globals_nonglobals(self):
def f(n):
global for_mixed_globals_nonglobals
i = 0
j = 0
for_mixed_globals_nonglobals = 0
while i < n:
while j < i:
j += 3
u = i + j # 'u' is not defined within the inner loop
for_mixed_globals_nonglobals += u
i += 1
j = 0
return for_mixed_globals_nonglobals, i, j, n
self.assertTransformedResult(f, constant_op.constant(5),
(25, 5, 0, 5))
def test_composite_state_complex(self):
class TestClassX(object):
def __init__(self, x):
self.x = x
class TestClassY(object):
def __init__(self, y):
self.y = y
def f(n):
tc = TestClassX(TestClassY({'z': TestClassX(n)}))
if n > 0:
while n > 0:
if n < 2:
tc.x.y['z'].x += 1
n -= 1
return n, tc
tr = self.transform(f, control_flow)
n, tc = tr(constant_op.constant(5))
self.assertValuesEqual((n, tc.x.y['z'].x), (0, 6))
| NestedControlFlowTest |
python | django__django | django/tasks/base.py | {
"start": 7527,
"end": 7653
} | class ____:
task_result: TaskResult
@property
def attempt(self):
return self.task_result.attempts
| TaskContext |
python | PrefectHQ__prefect | src/integrations/prefect-github/prefect_github/schemas/graphql_schema.py | {
"start": 1059904,
"end": 1060072
} | class ____(sgqlc.types.Union):
"""
See source code for more info.
"""
__schema__ = graphql_schema
__types__ = (Organization, User)
| OrganizationOrUser |
python | walkccc__LeetCode | solutions/337. House Robber III/337.py | {
"start": 0,
"end": 407
} | class ____:
def rob(self, root: TreeNode | None) -> int:
def robOrNot(root: TreeNode | None) -> tuple:
if not root:
return (0, 0)
robLeft, notRobLeft = robOrNot(root.left)
robRight, notRobRight = robOrNot(root.right)
return (root.val + notRobLeft + notRobRight,
max(robLeft, notRobLeft) + max(robRight, notRobRight))
return max(robOrNot(root))
| Solution |
python | django__django | tests/many_to_one/models.py | {
"start": 2091,
"end": 2218
} | class ____(models.Model):
name = models.CharField(max_length=20)
parent = models.ForeignKey(Parent, models.CASCADE)
| Child |
python | pydantic__pydantic | tests/mypy/outputs/mypy-plugin_ini/plugin_strict_fields.py | {
"start": 286,
"end": 707
} | class ____(BaseModel):
model_config = {'strict': True}
a: int
b: int = Field(strict=True)
c: int = Field(strict=False)
# expected error: a, b
ModelStrictMode(a='1', b='2', c='3')
# MYPY: error: Argument "a" to "ModelStrictMode" has incompatible type "str"; expected "int" [arg-type]
# MYPY: error: Argument "b" to "ModelStrictMode" has incompatible type "str"; expected "int" [arg-type]
| ModelStrictMode |
python | django__django | tests/asgi/tests.py | {
"start": 1398,
"end": 34539
} | class ____(SimpleTestCase):
async_request_factory = AsyncRequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
self.addCleanup(request_started.connect, close_old_connections)
async def test_get_asgi_application(self):
"""
get_asgi_application() returns a functioning ASGI callable.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"12"),
(b"Content-Type", b"text/html; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Allow response.close() to finish.
await communicator.wait()
async def test_asgi_cookies(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/cookie/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertIn((b"Set-Cookie", b"key=value; Path=/"), response_start["headers"])
# Allow response.close() to finish.
await communicator.wait()
# Python's file API is not async compatible. A third-party library such
# as https://github.com/Tinche/aiofiles allows passing the file to
# FileResponse as an async iterator. With a sync iterator
# StreamingHTTPResponse triggers a warning when iterating the file.
# assertWarnsMessage is not async compatible, so ignore_warnings for the
# test.
@ignore_warnings(module="django.http.response")
async def test_file_response(self):
"""
Makes sure that FileResponse works over ASGI.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/file/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
with open(test_filename, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
with captured_stderr():
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
headers = response_start["headers"]
self.assertEqual(len(headers), 3)
expected_headers = {
b"Content-Length": str(len(test_file_contents)).encode("ascii"),
b"Content-Type": b"text/x-python",
b"Content-Disposition": b'inline; filename="urls.py"',
}
for key, value in headers:
try:
self.assertEqual(value, expected_headers[key])
except AssertionError:
# Windows registry may not be configured with correct
# mimetypes.
if sys.platform == "win32" and key == b"Content-Type":
self.assertEqual(value, b"text/plain")
else:
raise
# Warning ignored here.
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
@modify_settings(INSTALLED_APPS={"append": "django.contrib.staticfiles"})
@override_settings(
STATIC_URL="static/",
STATIC_ROOT=TEST_STATIC_ROOT,
STATICFILES_DIRS=[TEST_STATIC_ROOT],
STATICFILES_FINDERS=[
"django.contrib.staticfiles.finders.FileSystemFinder",
],
)
async def test_static_file_response(self):
application = ASGIStaticFilesHandler(get_asgi_application())
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/static/file.txt")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
file_path = TEST_STATIC_ROOT / "file.txt"
with open(file_path, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
stat = file_path.stat()
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", str(len(test_file_contents)).encode("ascii")),
(b"Content-Type", b"text/plain"),
(b"Content-Disposition", b'inline; filename="file.txt"'),
(b"Last-Modified", http_date(stat.st_mtime).encode("ascii")),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
async def test_headers(self):
application = get_asgi_application()
communicator = ApplicationCommunicator(
application,
self.async_request_factory._base_scope(
path="/meta/",
headers=[
[b"content-type", b"text/plain; charset=utf-8"],
[b"content-length", b"77"],
[b"referer", b"Scotland"],
[b"referer", b"Wales"],
],
),
)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"19"),
(b"Content-Type", b"text/plain; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"From Scotland,Wales")
# Allow response.close() to finish
await communicator.wait()
async def test_post_body(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(
method="POST",
path="/post/",
query_string="echo=1",
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request", "body": b"Echo!"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Echo!")
    async def test_create_request_error(self):
        """
        When request creation itself raises (here RequestDataTooBig), the
        handler still dispatches request_finished exactly once, and from a
        worker thread rather than the event-loop thread.
        """
        # Track request_finished signal.
        signal_handler = SignalHandler()
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)

        # Request class that always fails creation with RequestDataTooBig.
        class TestASGIRequest(ASGIRequest):
            def __init__(self, scope, body_file):
                super().__init__(scope, body_file)
                raise RequestDataTooBig()

        # Handler to use the custom request class.
        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest

        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/not-important/")
        communicator = ApplicationCommunicator(application, scope)
        # Initiate request.
        await communicator.send_input({"type": "http.request"})
        # Give response.close() time to finish.
        await communicator.wait()
        self.assertEqual(len(signal_handler.calls), 1)
        # The signal must not have run on the event-loop thread.
        self.assertNotEqual(
            signal_handler.calls[0]["thread"], threading.current_thread()
        )
async def test_cancel_post_request_with_sync_processing(self):
"""
The request.body object should be available and readable in view
code, even if the ASGIHandler cancels processing part way through.
"""
loop = asyncio.get_event_loop()
# Events to monitor the view processing from the parent test code.
view_started_event = asyncio.Event()
view_finished_event = asyncio.Event()
# Record received request body or exceptions raised in the test view
outcome = []
# This view will run in a new thread because it is wrapped in
# sync_to_async. The view consumes the POST body data after a short
# delay. The test will cancel the request using http.disconnect during
# the delay, but because this is a sync view the code runs to
# completion. There should be no exceptions raised inside the view
# code.
@csrf_exempt
@sync_to_async
def post_view(request):
try:
loop.call_soon_threadsafe(view_started_event.set)
time.sleep(0.1)
# Do something to read request.body after pause
outcome.append({"request_body": request.body})
return HttpResponse("ok")
except Exception as e:
outcome.append({"exception": e})
finally:
loop.call_soon_threadsafe(view_finished_event.set)
# Request class to use the view.
class TestASGIRequest(ASGIRequest):
urlconf = (path("post/", post_view),)
# Handler to use request class.
class TestASGIHandler(ASGIHandler):
request_class = TestASGIRequest
application = TestASGIHandler()
scope = self.async_request_factory._base_scope(
method="POST",
path="/post/",
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request", "body": b"Body data!"})
# Wait until the view code has started, then send http.disconnect.
await view_started_event.wait()
await communicator.send_input({"type": "http.disconnect"})
# Wait until view code has finished.
await view_finished_event.wait()
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
self.assertEqual(outcome, [{"request_body": b"Body data!"}])
async def test_untouched_request_body_gets_closed(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(method="POST", path="/post/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 204)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"")
# Allow response.close() to finish
await communicator.wait()
async def test_get_query_string(self):
application = get_asgi_application()
for query_string in (b"name=Andrew", "name=Andrew"):
with self.subTest(query_string=query_string):
scope = self.async_request_factory._base_scope(
path="/",
query_string=query_string,
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello Andrew!")
# Allow response.close() to finish
await communicator.wait()
async def test_disconnect(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.disconnect"})
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
    async def test_disconnect_both_return(self):
        """
        A disconnect that arrives just as the response finishes lets both the
        disconnect listener and the response-sending task run to completion.
        The ordering of awaits below is deliberate; see inline comments.
        """
        # Force both the disconnect listener and the task that sends the
        # response to finish at the same time.
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request", "body": b"some body"})
        # Fetch response headers (this yields to asyncio and causes
        # ASGHandler.send_response() to dump the body of the response in the
        # queue).
        await communicator.receive_output()
        # Fetch response body (there's already some data queued up, so this
        # doesn't actually yield to the event loop, it just succeeds
        # instantly).
        await communicator.receive_output()
        # Send disconnect at the same time that response finishes (this just
        # puts some info in a queue, it doesn't have to yield to the event
        # loop).
        await communicator.send_input({"type": "http.disconnect"})
        # Waiting for the communicator _does_ yield to the event loop, since
        # ASGIHandler.send_response() is still waiting to do response.close().
        # It so happens that there are enough remaining yield points in both
        # tasks that they both finish while the loop is running.
        await communicator.wait()
async def test_disconnect_with_body(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request", "body": b"some body"})
await communicator.send_input({"type": "http.disconnect"})
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
async def test_assert_in_listen_for_disconnect(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
await communicator.send_input({"type": "http.not_a_real_message"})
msg = "Invalid ASGI message after request body: http.not_a_real_message"
with self.assertRaisesMessage(AssertionError, msg):
await communicator.wait()
async def test_delayed_disconnect_with_body(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/delayed_hello/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request", "body": b"some body"})
await communicator.send_input({"type": "http.disconnect"})
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
async def test_wrong_connection_type(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", type="other")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
msg = "Django can only handle ASGI/HTTP connections, not other."
with self.assertRaisesMessage(ValueError, msg):
await communicator.receive_output()
async def test_non_unicode_query_string(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", query_string=b"\xff")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 400)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"")
    async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
        """
        request_started and request_finished are both dispatched for one
        request, and on the same worker thread (AsyncToSync is
        thread-sensitive).
        """
        # Track request_started and request_finished signals.
        signal_handler = SignalHandler()
        request_started.connect(signal_handler)
        self.addCleanup(request_started.disconnect, signal_handler)
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)
        # Perform a basic request.
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        # Give response.close() time to finish.
        await communicator.wait()
        # AsyncToSync should have executed the signals in the same thread.
        self.assertEqual(len(signal_handler.calls), 2)
        request_started_call, request_finished_call = signal_handler.calls
        self.assertEqual(
            request_started_call["thread"], request_finished_call["thread"]
        )
    async def test_concurrent_async_uses_multiple_thread_pools(self):
        """
        Two concurrent requests are scheduled on distinct worker threads.
        The /wait/ view blocks on a shared barrier, so the test only passes
        if both requests run at the same time.
        """
        sync_waiter.active_threads.clear()
        # Send 2 requests concurrently
        application = get_asgi_application()
        scope = self.async_request_factory._base_scope(path="/wait/")
        communicators = []
        for _ in range(2):
            communicators.append(ApplicationCommunicator(application, scope))
            await communicators[-1].send_input({"type": "http.request"})
        # Each request must complete with a status code of 200
        # If requests aren't scheduled concurrently, the barrier in the
        # sync_wait view will time out, resulting in a 500 status code.
        for communicator in communicators:
            response_start = await communicator.receive_output()
            self.assertEqual(response_start["type"], "http.response.start")
            self.assertEqual(response_start["status"], 200)
            response_body = await communicator.receive_output()
            self.assertEqual(response_body["type"], "http.response.body")
            self.assertEqual(response_body["body"], b"Hello World!")
            # Give response.close() time to finish.
            await communicator.wait()
        # The requests should have scheduled on different threads. Note
        # active_threads is a set (a thread can only appear once), therefore
        # length is a sufficient check.
        self.assertEqual(len(sync_waiter.active_threads), 2)
        sync_waiter.active_threads.clear()
    async def test_asyncio_cancel_error(self):
        """
        An http.disconnect while an async view is awaiting cancels the view
        (CancelledError) and suppresses the response; an uninterrupted
        request is not cancelled. request_finished fires exactly once in
        both cases, from a worker thread.
        """
        view_started = asyncio.Event()
        # Flag to check if the view was cancelled.
        view_did_cancel = False
        # Track request_finished signal.
        signal_handler = SignalHandler()
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)

        # A view that will listen for the cancelled error.
        async def view(request):
            nonlocal view_did_cancel
            view_started.set()
            try:
                await asyncio.sleep(0.1)
                return HttpResponse("Hello World!")
            except asyncio.CancelledError:
                # Set the flag.
                view_did_cancel = True
                raise

        # Request class to use the view.
        class TestASGIRequest(ASGIRequest):
            urlconf = (path("cancel/", view),)

        # Handler to use request class.
        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest

        # Request cycle should complete since no disconnect was sent.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        # Give response.close() time to finish.
        await communicator.wait()
        self.assertIs(view_did_cancel, False)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
        view_started.clear()

        # Request cycle with a disconnect before the view can respond.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Let the view actually start.
        await view_started.wait()
        # Disconnect the client.
        await communicator.send_input({"type": "http.disconnect"})
        # The handler should not send a response.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
        await communicator.wait()
        self.assertIs(view_did_cancel, True)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
    async def test_asyncio_streaming_cancel_error(self):
        """
        A disconnect mid-stream cancels the async streaming generator
        (CancelledError) and suppresses the rest of the response; without a
        disconnect the stream completes. request_finished fires once either
        way, from a worker thread.
        """
        # Similar to test_asyncio_cancel_error(), but during a streaming
        # response.
        view_did_cancel = False
        # Track request_finished signals.
        signal_handler = SignalHandler()
        request_finished.connect(signal_handler)
        self.addCleanup(request_finished.disconnect, signal_handler)

        async def streaming_response():
            nonlocal view_did_cancel
            try:
                await asyncio.sleep(0.2)
                yield b"Hello World!"
            except asyncio.CancelledError:
                # Set the flag.
                view_did_cancel = True
                raise

        async def view(request):
            return StreamingHttpResponse(streaming_response())

        class TestASGIRequest(ASGIRequest):
            urlconf = (path("cancel/", view),)

        class TestASGIHandler(ASGIHandler):
            request_class = TestASGIRequest

        # With no disconnect, the request cycle should complete in the same
        # manner as the non-streaming response.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        response_body = await communicator.receive_output()
        self.assertEqual(response_body["type"], "http.response.body")
        self.assertEqual(response_body["body"], b"Hello World!")
        await communicator.wait()
        self.assertIs(view_did_cancel, False)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})

        # Request cycle with a disconnect.
        application = TestASGIHandler()
        scope = self.async_request_factory._base_scope(path="/cancel/")
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        response_start = await communicator.receive_output()
        # Fetch the start of response so streaming can begin
        self.assertEqual(response_start["type"], "http.response.start")
        self.assertEqual(response_start["status"], 200)
        await asyncio.sleep(0.1)
        # Now disconnect the client.
        await communicator.send_input({"type": "http.disconnect"})
        # This time the handler should not send a response.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output()
        await communicator.wait()
        self.assertIs(view_did_cancel, True)
        # Exactly one call to request_finished handler.
        self.assertEqual(len(signal_handler.calls), 1)
        handler_call = signal_handler.calls.pop()
        # It was NOT on the async thread.
        self.assertNotEqual(handler_call["thread"], threading.current_thread())
        # The signal sender is the handler class.
        self.assertEqual(handler_call["kwargs"], {"sender": TestASGIHandler})
    async def test_streaming(self):
        """
        A streaming response delivers each chunk as a separate
        http.response.body message, then stops producing output.
        """
        scope = self.async_request_factory._base_scope(
            path="/streaming/", query_string=b"sleep=0.001"
        )
        application = get_asgi_application()
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        # Fetch http.response.start.
        await communicator.receive_output(timeout=1)
        # Fetch the 'first' and 'last'.
        first_response = await communicator.receive_output(timeout=1)
        self.assertEqual(first_response["body"], b"first\n")
        second_response = await communicator.receive_output(timeout=1)
        self.assertEqual(second_response["body"], b"last\n")
        # Fetch the rest of the response so that coroutines are cleaned up.
        await communicator.receive_output(timeout=1)
        # No further messages after the final body chunk.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output(timeout=1)
    async def test_streaming_disconnect(self):
        """
        Disconnecting between chunks of a streaming response stops the
        stream: the remaining chunk is never sent.
        """
        scope = self.async_request_factory._base_scope(
            path="/streaming/", query_string=b"sleep=0.1"
        )
        application = get_asgi_application()
        communicator = ApplicationCommunicator(application, scope)
        await communicator.send_input({"type": "http.request"})
        await communicator.receive_output(timeout=1)
        first_response = await communicator.receive_output(timeout=1)
        self.assertEqual(first_response["body"], b"first\n")
        # Disconnect the client.
        await communicator.send_input({"type": "http.disconnect"})
        # 'last\n' isn't sent.
        with self.assertRaises(asyncio.TimeoutError):
            await communicator.receive_output(timeout=0.2)
    async def test_read_body_thread(self):
        """
        Write runs on correct thread depending on rollover.

        While the spooled body buffer stays in memory, writes happen on the
        event-loop thread; once the buffer rolls over to disk, writes are
        offloaded to a threadpool thread.
        """
        handler = ASGIHandler()
        loop_thread = threading.current_thread()
        called_threads = []

        # Wrap temp_file.write to record which thread performs each write.
        def write_wrapper(data):
            called_threads.append(threading.current_thread())
            return original_write(data)

        # In-memory write (no rollover expected).
        in_memory_chunks = [
            {"type": "http.request", "body": b"small", "more_body": False}
        ]

        async def receive():
            return in_memory_chunks.pop(0)

        with tempfile.SpooledTemporaryFile(max_size=1024, mode="w+b") as temp_file:
            original_write = temp_file.write
            with (
                patch(
                    "django.core.handlers.asgi.tempfile.SpooledTemporaryFile",
                    return_value=temp_file,
                ),
                patch.object(temp_file, "write", side_effect=write_wrapper),
            ):
                await handler.read_body(receive)
        # Write was called in the event loop thread.
        self.assertIn(loop_thread, called_threads)
        # Clear thread log before next test.
        called_threads.clear()

        # Rollover to disk (write should occur in a threadpool thread).
        rolled_chunks = [
            {"type": "http.request", "body": b"A" * 16, "more_body": True},
            {"type": "http.request", "body": b"B" * 16, "more_body": False},
        ]

        async def receive_rolled():
            return rolled_chunks.pop(0)

        with (
            override_settings(FILE_UPLOAD_MAX_MEMORY_SIZE=10),
            tempfile.SpooledTemporaryFile(max_size=10, mode="w+b") as temp_file,
        ):
            original_write = temp_file.write
            # roll_over force in handlers.
            with (
                patch(
                    "django.core.handlers.asgi.tempfile.SpooledTemporaryFile",
                    return_value=temp_file,
                ),
                patch.object(temp_file, "write", side_effect=write_wrapper),
            ):
                await handler.read_body(receive_rolled)
        # The second write should have rolled over to disk.
        self.assertTrue(any(t != loop_thread for t in called_threads))
    def test_multiple_cookie_headers_http2(self):
        """
        Multiple ``cookie`` headers (as HTTP/2 allows) are joined with '; '
        into a single HTTP_COOKIE value and parsed into COOKIES, with or
        without trailing semicolons on each header.
        """
        test_cases = [
            {
                "label": "RFC-compliant headers (no semicolon)",
                "headers": [
                    (b"cookie", b"a=abc"),
                    (b"cookie", b"b=def"),
                    (b"cookie", b"c=ghi"),
                ],
            },
            {
                # Some clients may send cookies with trailing semicolons.
                "label": "Headers with trailing semicolons",
                "headers": [
                    (b"cookie", b"a=abc;"),
                    (b"cookie", b"b=def;"),
                    (b"cookie", b"c=ghi;"),
                ],
            },
        ]
        for case in test_cases:
            with self.subTest(case["label"]):
                scope = self.async_request_factory._base_scope(
                    path="/", http_version="2.0"
                )
                scope["headers"] = case["headers"]
                request = ASGIRequest(scope, None)
                self.assertEqual(request.META["HTTP_COOKIE"], "a=abc; b=def; c=ghi")
                self.assertEqual(request.COOKIES, {"a": "abc", "b": "def", "c": "ghi"})
| ASGITest |
python | sphinx-doc__sphinx | sphinx/util/display.py | {
"start": 2019,
"end": 3125
} | class ____:
def __init__(self, message: str, *, nonl: bool = True) -> None:
self.message = message
self.nonl = nonl
def __enter__(self) -> None:
logger.info(bold(self.message + '... '), nonl=self.nonl)
def __exit__(
self,
typ: type[BaseException] | None,
val: BaseException | None,
tb: TracebackType | None,
) -> bool:
prefix = '' if self.nonl else bold(self.message + ': ')
if isinstance(val, SkipProgressMessage):
logger.info(prefix + __('skipped')) # NoQA: G003
if val.args:
logger.info(*val.args)
return True
elif val:
logger.info(prefix + __('failed')) # NoQA: G003
else:
logger.info(prefix + __('done')) # NoQA: G003
return False
def __call__(self, f: Callable[P, R]) -> Callable[P, R]:
@functools.wraps(f)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: # type: ignore[return]
with self:
return f(*args, **kwargs)
return wrapper
| progress_message |
python | airbytehq__airbyte | airbyte-integrations/bases/connector-acceptance-test/connector_acceptance_test/config.py | {
"start": 4348,
"end": 4634
} | class ____(BaseConfig):
name: str
bypass_reason: Optional[str] = Field(default=None, description="Reason why this stream is considered empty.")
def __hash__(self): # make it hashable
return hash((type(self),) + tuple(self.__dict__.values()))
| EmptyStreamConfiguration |
python | ray-project__ray | doc/source/ray-core/doc_code/cgraph_quickstart.py | {
"start": 2455,
"end": 3074
} | class ____:
def echo(self, msg):
return msg
actor = EchoActor.remote()
with ray.dag.InputNode() as inp:
dag = actor.echo.bind(inp)
cdag = dag.experimental_compile(enable_asyncio=True)
# __cgraph_async_compile_end__
# __cgraph_async_execute_start__
import asyncio
async def async_method(i):
fut = await cdag.execute_async(i)
result = await fut
assert result == i
loop = asyncio.get_event_loop()
loop.run_until_complete(async_method(42))
# __cgraph_async_execute_end__
cdag.teardown()
# __cgraph_actor_death_start__
from ray.dag import InputNode, MultiOutputNode
@ray.remote
| EchoActor |
python | nedbat__coveragepy | coverage/plugin_support.py | {
"start": 8458,
"end": 10444
} | class ____(FileReporter):
"""A debugging `FileReporter`."""
def __init__(self, filename: str, reporter: FileReporter, debug: LabelledDebug) -> None:
super().__init__(filename)
self.reporter = reporter
self.debug = debug
def relative_filename(self) -> str:
ret = self.reporter.relative_filename()
self.debug.write(f"relative_filename() --> {ret!r}")
return ret
def lines(self) -> set[TLineNo]:
ret = self.reporter.lines()
self.debug.write(f"lines() --> {ret!r}")
return ret
def excluded_lines(self) -> set[TLineNo]:
ret = self.reporter.excluded_lines()
self.debug.write(f"excluded_lines() --> {ret!r}")
return ret
def translate_lines(self, lines: Iterable[TLineNo]) -> set[TLineNo]:
ret = self.reporter.translate_lines(lines)
self.debug.write(f"translate_lines({lines!r}) --> {ret!r}")
return ret
def translate_arcs(self, arcs: Iterable[TArc]) -> set[TArc]:
ret = self.reporter.translate_arcs(arcs)
self.debug.write(f"translate_arcs({arcs!r}) --> {ret!r}")
return ret
def no_branch_lines(self) -> set[TLineNo]:
ret = self.reporter.no_branch_lines()
self.debug.write(f"no_branch_lines() --> {ret!r}")
return ret
def exit_counts(self) -> dict[TLineNo, int]:
ret = self.reporter.exit_counts()
self.debug.write(f"exit_counts() --> {ret!r}")
return ret
def arcs(self) -> set[TArc]:
ret = self.reporter.arcs()
self.debug.write(f"arcs() --> {ret!r}")
return ret
def source(self) -> str:
ret = self.reporter.source()
self.debug.write(f"source() --> {len(ret)} chars")
return ret
def source_token_lines(self) -> TSourceTokenLines:
ret = list(self.reporter.source_token_lines())
self.debug.write(f"source_token_lines() --> {len(ret)} tokens")
return ret
| DebugFileReporterWrapper |
python | sympy__sympy | sympy/physics/quantum/hilbert.py | {
"start": 13022,
"end": 16499
} | class ____(HilbertSpace):
"""A direct sum of Hilbert spaces [1]_.
This class uses the ``+`` operator to represent direct sums between
different Hilbert spaces.
A ``DirectSumHilbertSpace`` object takes in an arbitrary number of
``HilbertSpace`` objects as its arguments. Also, addition of
``HilbertSpace`` objects will automatically return a direct sum object.
Examples
========
>>> from sympy.physics.quantum.hilbert import ComplexSpace, FockSpace
>>> c = ComplexSpace(2)
>>> f = FockSpace()
>>> hs = c+f
>>> hs
C(2)+F
>>> hs.dimension
oo
>>> list(hs.spaces)
[C(2), F]
References
==========
.. [1] https://en.wikipedia.org/wiki/Hilbert_space#Direct_sums
"""
def __new__(cls, *args):
r = cls.eval(args)
if isinstance(r, Basic):
return r
obj = Basic.__new__(cls, *args)
return obj
@classmethod
def eval(cls, args):
"""Evaluates the direct product."""
new_args = []
recall = False
#flatten arguments
for arg in args:
if isinstance(arg, DirectSumHilbertSpace):
new_args.extend(arg.args)
recall = True
elif isinstance(arg, HilbertSpace):
new_args.append(arg)
else:
raise TypeError('Hilbert spaces can only be summed with other \
Hilbert spaces: %r' % arg)
if recall:
return DirectSumHilbertSpace(*new_args)
else:
return None
@property
def dimension(self):
arg_list = [arg.dimension for arg in self.args]
if S.Infinity in arg_list:
return S.Infinity
else:
return reduce(lambda x, y: x + y, arg_list)
@property
def spaces(self):
"""A tuple of the Hilbert spaces in this direct sum."""
return self.args
def _sympyrepr(self, printer, *args):
spaces_reprs = [printer._print(arg, *args) for arg in self.args]
return "DirectSumHilbertSpace(%s)" % ','.join(spaces_reprs)
def _sympystr(self, printer, *args):
spaces_strs = [printer._print(arg, *args) for arg in self.args]
return '+'.join(spaces_strs)
def _pretty(self, printer, *args):
length = len(self.args)
pform = printer._print('', *args)
for i in range(length):
next_pform = printer._print(self.args[i], *args)
if isinstance(self.args[i], (DirectSumHilbertSpace,
TensorProductHilbertSpace)):
next_pform = prettyForm(
*next_pform.parens(left='(', right=')')
)
pform = prettyForm(*pform.right(next_pform))
if i != length - 1:
if printer._use_unicode:
pform = prettyForm(*pform.right(' \N{CIRCLED PLUS} '))
else:
pform = prettyForm(*pform.right(' + '))
return pform
def _latex(self, printer, *args):
length = len(self.args)
s = ''
for i in range(length):
arg_s = printer._print(self.args[i], *args)
if isinstance(self.args[i], (DirectSumHilbertSpace,
TensorProductHilbertSpace)):
arg_s = r'\left(%s\right)' % arg_s
s = s + arg_s
if i != length - 1:
s = s + r'\oplus '
return s
| DirectSumHilbertSpace |
python | airbytehq__airbyte | airbyte-integrations/connectors/source-shopify/source_shopify/shopify_graphql/bulk/query.py | {
"start": 7085,
"end": 11580
} | class ____(ShopifyBulkQuery):
"""
Only 2 lvl nesting is available: https://shopify.dev/docs/api/usage/bulk-operations/queries#operation-restrictions
Output example to BULK query `customers.metafields` with `filter query` by `updated_at` sorted `ASC`:
{
<Type>(
query: "updated_at:>='2023-04-13' AND updated_at:<='2023-12-01'"
sortKey: UPDATED_AT
) {
edges {
node {
__typename
id
metafields {
edges {
node {
__typename
id
namespace
value
key
description
createdAt
updatedAt
type
}
}
}
}
}
}
}
"""
sort_key = "UPDATED_AT"
record_composition = {"new_record": "Metafield"}
metafield_fields: List[Field] = [
"__typename",
"id",
"namespace",
"value",
"key",
"description",
"createdAt",
"updatedAt",
"type",
]
@property
def query_name(self) -> str:
if isinstance(self.type.value, list):
return self.type.value[0]
elif isinstance(self.type.value, str):
return self.type.value
@property
@abstractmethod
def type(self) -> MetafieldType:
"""
Defines the Metafield type to fetch, see `MetafieldType` for more info.
"""
def get_edge_node(self, name: str, fields: Union[List[str], List[Field], str]) -> Field:
"""
Defines the edge of the graph and it's fields to select for Shopify BULK Operaion.
https://shopify.dev/docs/api/usage/bulk-operations/queries#the-jsonl-data-format
"""
return Field(name=name, fields=[Field(name="edges", fields=[Field(name="node", fields=fields)])])
@property
def query_nodes(self) -> List[Field]:
"""
List of available fields:
https://shopify.dev/docs/api/admin-graphql/unstable/objects/Metafield
"""
nodes = super().query_nodes
# define metafield node
metafield_node = self.get_edge_node("metafields", self.metafield_fields)
if isinstance(self.type.value, list):
nodes = [*nodes, self.get_edge_node(self.type.value[1], [*nodes, metafield_node])]
elif isinstance(self.type.value, str):
nodes = [*nodes, metafield_node]
nodes = self.inject_parent_cursor_field(nodes)
return nodes
def _process_metafield(self, record: MutableMapping[str, Any]) -> MutableMapping[str, Any]:
# resolve parent id from `str` to `int`
record["owner_id"] = self.tools.resolve_str_id(record.get(BULK_PARENT_KEY))
# add `owner_resource` field
record["owner_resource"] = self.tools.camel_to_snake(record.get(BULK_PARENT_KEY, "").split("/")[3])
# remove `__parentId` from record
record.pop(BULK_PARENT_KEY, None)
# convert dates from ISO-8601 to RFC-3339
record["createdAt"] = self.tools.from_iso8601_to_rfc3339(record, "createdAt")
record["updatedAt"] = self.tools.from_iso8601_to_rfc3339(record, "updatedAt")
record = self.tools.fields_names_to_snake_case(record)
return record
def _process_components(self, entity: List[dict]) -> Iterable[MutableMapping[str, Any]]:
for item in entity:
# resolve the id from string
item["admin_graphql_api_id"] = item.get("id")
item["id"] = self.tools.resolve_str_id(item.get("id"))
yield self._process_metafield(item)
def record_process_components(self, record: MutableMapping[str, Any]) -> Iterable[MutableMapping[str, Any]]:
# get the joined record components collected for the record
record_components = record.get("record_components", {})
# process record components
if not record_components:
yield self._process_metafield(record)
else:
metafields = record_components.get("Metafield", [])
if len(metafields) > 0:
yield from self._process_components(metafields)
| Metafield |
python | davidhalter__jedi | test/static_analysis/attribute_error.py | {
"start": 0,
"end": 1428
} | class ____():
class_attr = ''
def __init__(self, input):
self.instance_attr = 3
self.input = input
def f(self):
#! 12 attribute-error
return self.not_existing
def undefined_object(self, obj):
"""
Uses an arbitrary object and performs an operation on it, shouldn't
be a problem.
"""
obj.arbitrary_lookup
def defined_lookup(self, obj):
"""
`obj` is defined by a call into this function.
"""
obj.upper
#! 4 attribute-error
obj.arbitrary_lookup
#! 13 name-error
class_attr = a
Cls(1).defined_lookup('')
c = Cls(1)
c.class_attr
Cls.class_attr
#! 4 attribute-error
Cls.class_attr_error
c.instance_attr
#! 2 attribute-error
c.instance_attr_error
c.something = None
#! 12 name-error
something = a
something
# -----------------
# Unused array variables should still raise attribute errors.
# -----------------
# should not raise anything.
for loop_variable in [1, 2]:
#! 4 name-error
x = undefined
loop_variable
#! 28 name-error
for loop_variable in [1, 2, undefined]:
pass
#! 7 attribute-error
[1, ''.undefined_attr]
def return_one(something):
return 1
#! 14 attribute-error
return_one(''.undefined_attribute)
#! 12 name-error
[r for r in undefined]
#! 1 name-error
[undefined for r in [1, 2]]
[r for r in [1, 2]]
# some random error that showed up
| Cls |
python | great-expectations__great_expectations | great_expectations/execution_engine/partition_and_sample/data_sampler.py | {
"start": 266,
"end": 3457
} | class ____(abc.ABC): # noqa: B024 # abstract-base-class-without-abstract-method
"""Abstract base class containing methods for sampling data accessible via Execution Engines."""
def get_sampler_method(self, sampler_method_name: str) -> Callable:
"""Get the appropriate sampler method from the method name.
Args:
sampler_method_name: name of the sampler to retrieve.
Returns:
sampler method.
"""
sampler_method_name = self._get_sampler_method_name(sampler_method_name)
return getattr(self, sampler_method_name)
def _get_sampler_method_name(self, sampler_method_name: str) -> str:
"""Accept sampler methods with or without starting with `_`.
Args:
sampler_method_name: sampler name starting with or without preceding `_`.
Returns:
sampler method name stripped of preceding underscore.
"""
if sampler_method_name.startswith("_"):
return sampler_method_name[1:]
else:
return sampler_method_name
def verify_batch_spec_sampling_kwargs_exists(self, batch_spec: BatchSpec) -> None:
"""Verify that sampling_kwargs key exists in batch_spec or raise error.
Args:
batch_spec: Can contain sampling_kwargs.
Returns:
None
Raises:
SamplerError
"""
if batch_spec.get("sampling_kwargs") is None:
raise gx_exceptions.SamplerError( # noqa: TRY003 # FIXME CoP
"Please make sure to provide sampling_kwargs in addition to your sampling_method."
)
def verify_batch_spec_sampling_kwargs_key_exists(self, key: str, batch_spec: BatchSpec) -> None:
"""Verify that a key within sampling_kwargs exists in batch_spec or raise error.
Args:
batch_spec: Can contain sampling_kwargs with nested keys.
Returns:
None
Raises:
SamplerError
"""
if batch_spec["sampling_kwargs"].get(key) is None:
raise gx_exceptions.SamplerError( # noqa: TRY003 # FIXME CoP
f"Please make sure to provide the {key} key in sampling_kwargs in addition to your sampling_method." # noqa: E501 # FIXME CoP
)
@staticmethod
def get_sampling_kwargs_value_or_default(
batch_spec: BatchSpec,
sampling_kwargs_key: str,
default_value: Optional[T] = None,
) -> T | Any:
"""Get value from batch_spec or default if provided and key doesn't exist.
Args:
batch_spec: BatchSpec to retrieve value from.
sampling_kwargs_key: key for value to retrieve.
default_value: value to return if key doesn't exist.
Returns:
Value from batch_spec corresponding to key or default_value if key doesn't exist.
"""
if "sampling_kwargs" in batch_spec:
if sampling_kwargs_key in batch_spec["sampling_kwargs"]:
return batch_spec["sampling_kwargs"][sampling_kwargs_key]
else:
return default_value
else:
return default_value
| DataSampler |
python | huggingface__transformers | tests/models/perception_lm/test_image_processing_perception_lm.py | {
"start": 3596,
"end": 10100
} | class ____(ImageProcessingTestMixin, unittest.TestCase):
fast_image_processing_class = PerceptionLMImageProcessorFast if is_torchvision_available() else None
test_slow_image_processor = False
def setUp(self):
super().setUp()
self.image_processor_tester = PerceptionLMImageProcessingTester(self)
@property
# Copied from tests.models.clip.test_image_processing_clip.CLIPImageProcessingTest.image_processor_dict
def image_processor_dict(self):
return self.image_processor_tester.prepare_image_processor_dict()
def test_image_processor_properties(self):
for image_processing_class in self.image_processor_list:
image_processing = image_processing_class(**self.image_processor_dict)
self.assertTrue(hasattr(image_processing, "do_resize"))
self.assertTrue(hasattr(image_processing, "tile_size"))
self.assertTrue(hasattr(image_processing, "do_normalize"))
self.assertTrue(hasattr(image_processing, "image_mean"))
self.assertTrue(hasattr(image_processing, "image_std"))
self.assertTrue(hasattr(image_processing, "do_convert_rgb"))
self.assertTrue(hasattr(image_processing, "max_num_tiles"))
self.assertTrue(hasattr(image_processing, "vision_input_type"))
def test_image_processor_from_dict_with_kwargs(self):
for image_processing_class in self.image_processor_list:
image_processor = image_processing_class.from_dict(self.image_processor_dict)
self.assertEqual(image_processor.tile_size, 16)
self.assertEqual(image_processor.max_num_tiles, 4)
self.assertEqual(image_processor.vision_input_type, "thumb+tile")
image_processor = image_processing_class.from_dict(
self.image_processor_dict, tile_size=42, max_num_tiles=9
)
self.assertEqual(image_processor.tile_size, 42)
self.assertEqual(image_processor.max_num_tiles, 9)
self.assertEqual(image_processor.vision_input_type, "thumb+tile")
def test_call_pil(self):
for image_processing_class in self.image_processor_list:
# Initialize image_processing
image_processing = image_processing_class(**self.image_processor_dict)
# create random PIL images
image_inputs = self.image_processor_tester.prepare_image_inputs(equal_resolution=True)
for image in image_inputs:
self.assertIsInstance(image, Image.Image)
# Test not batched input
encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
expected_output_image_shape = (1, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
# Test batched
encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
expected_output_image_shape = (7, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
def test_call_numpy(self):
for image_processing_class in self.image_processor_list:
# Initialize image_processing
image_processing = image_processing_class(**self.image_processor_dict)
# create random numpy tensors
image_inputs = self.image_processor_tester.prepare_image_inputs(equal_resolution=True, numpify=True)
for image in image_inputs:
self.assertIsInstance(image, np.ndarray)
# Test not batched input
encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
expected_output_image_shape = (1, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
# Test batched
encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
expected_output_image_shape = (7, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
def test_call_pytorch(self):
for image_processing_class in self.image_processor_list:
# Initialize image_processing
image_processing = image_processing_class(**self.image_processor_dict)
# create random PyTorch tensors
image_inputs = self.image_processor_tester.prepare_image_inputs(equal_resolution=True, torchify=True)
for image in image_inputs:
self.assertIsInstance(image, torch.Tensor)
# Test not batched input
encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
expected_output_image_shape = (1, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
# Test batched
encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
expected_output_image_shape = (7, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
@unittest.skip(reason="PerceptionLMImageProcessor doesn't treat 4 channel PIL and numpy consistently yet")
def test_call_numpy_4_channels(self):
pass
def test_nested_input(self):
for image_processing_class in self.image_processor_list:
image_processing = image_processing_class(**self.image_processor_dict)
image_inputs = self.image_processor_tester.prepare_image_inputs(equal_resolution=True)
# Test batched as a list of images
encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
expected_output_image_shape = (7, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images.shape), expected_output_image_shape)
# Test batched as a nested list of images, where each sublist is one batch
image_inputs_nested = [image_inputs[:3], image_inputs[3:]]
encoded_images_nested = image_processing(image_inputs_nested, return_tensors="pt").pixel_values
expected_output_image_shape = (7, 5, 3, 16, 16)
self.assertEqual(tuple(encoded_images_nested.shape), expected_output_image_shape)
# Image processor should return same pixel values, independently of ipnut format
self.assertTrue((encoded_images_nested == encoded_images).all())
| PerceptionLMImageProcessingTest |
python | kamyu104__LeetCode-Solutions | Python/collecting-chocolates.py | {
"start": 1519,
"end": 2475
} | class ____(object):
def minCost(self, nums, x):
"""
:type nums: List[int]
:type x: int
:rtype: int
"""
def cost(k):
w = k+1
result = x*k
dq = collections.deque()
for i in xrange(len(nums)+w-1):
if dq and i-dq[0] == w:
dq.popleft()
while dq and nums[dq[-1]%len(nums)] >= nums[i%len(nums)]:
dq.pop()
dq.append(i)
if i >= w-1:
result += nums[dq[0]%len(nums)]
return result
def check(x):
return cost(x) <= cost(x+1)
left, right = 0, len(nums)
while left <= right:
mid = left + (right-left)//2
if check(mid):
right = mid-1
else:
left = mid+1
return cost(left)
# Time: O(n^2)
# Space: O(n)
# brute force
| Solution2 |
python | catalyst-team__catalyst | catalyst/contrib/schedulers/base.py | {
"start": 790,
"end": 928
} | class ____(BaseScheduler, ABC):
"""@TODO: Docs. Contribution is welcome."""
__all__ = ["BaseScheduler", "BatchScheduler"]
| BatchScheduler |
python | pypa__warehouse | tests/unit/manage/views/test_organizations.py | {
"start": 120210,
"end": 124682
} | class ____:
def test_get(self, db_request, user_service):
organization = OrganizationFactory.create()
older_event = OrganizationEventFactory.create(
source=organization,
tag="fake:event",
time=datetime.datetime(2017, 2, 5, 17, 18, 18, 462_634),
)
newer_event = OrganizationEventFactory.create(
source=organization,
tag="fake:event",
time=datetime.datetime(2018, 2, 5, 17, 18, 18, 462_634),
)
assert org_views.manage_organization_history(organization, db_request) == {
"events": [newer_event, older_event],
"get_user": user_service.get_user,
"organization": organization,
}
def test_raises_400_with_pagenum_type_str(self, monkeypatch, db_request):
params = MultiDict({"page": "abc"})
db_request.params = params
events_query = pretend.stub()
db_request.events_query = pretend.stub(
events_query=lambda *a, **kw: events_query
)
page_obj = pretend.stub(page_count=10, item_count=1000)
page_cls = pretend.call_recorder(lambda *a, **kw: page_obj)
monkeypatch.setattr(views, "SQLAlchemyORMPage", page_cls)
url_maker = pretend.stub()
url_maker_factory = pretend.call_recorder(lambda request: url_maker)
monkeypatch.setattr(views, "paginate_url_factory", url_maker_factory)
organization = OrganizationFactory.create()
with pytest.raises(HTTPBadRequest):
org_views.manage_organization_history(organization, db_request)
assert page_cls.calls == []
def test_first_page(self, db_request, user_service):
page_number = 1
params = MultiDict({"page": page_number})
db_request.params = params
organization = OrganizationFactory.create()
items_per_page = 25
total_items = items_per_page + 2
OrganizationEventFactory.create_batch(
total_items, source=organization, tag="fake:event"
)
events_query = (
db_request.db.query(Organization.Event)
.join(Organization.Event.source)
.filter(Organization.Event.source_id == organization.id)
.order_by(Organization.Event.time.desc())
)
events_page = SQLAlchemyORMPage(
events_query,
page=page_number,
items_per_page=items_per_page,
item_count=total_items,
url_maker=paginate_url_factory(db_request),
)
assert org_views.manage_organization_history(organization, db_request) == {
"events": events_page,
"get_user": user_service.get_user,
"organization": organization,
}
def test_last_page(self, db_request, user_service):
page_number = 2
params = MultiDict({"page": page_number})
db_request.params = params
organization = OrganizationFactory.create()
items_per_page = 25
total_items = items_per_page + 2
OrganizationEventFactory.create_batch(
total_items, source=organization, tag="fake:event"
)
events_query = (
db_request.db.query(Organization.Event)
.join(Organization.Event.source)
.filter(Organization.Event.source_id == organization.id)
.order_by(Organization.Event.time.desc())
)
events_page = SQLAlchemyORMPage(
events_query,
page=page_number,
items_per_page=items_per_page,
item_count=total_items,
url_maker=paginate_url_factory(db_request),
)
assert org_views.manage_organization_history(organization, db_request) == {
"events": events_page,
"get_user": user_service.get_user,
"organization": organization,
}
def test_raises_404_with_out_of_range_page(self, db_request):
page_number = 3
params = MultiDict({"page": page_number})
db_request.params = params
organization = OrganizationFactory.create()
items_per_page = 25
total_items = items_per_page + 2
OrganizationEventFactory.create_batch(
total_items, source=organization, tag="fake:event"
)
with pytest.raises(HTTPNotFound):
assert org_views.manage_organization_history(organization, db_request)
| TestManageOrganizationHistory |
python | keon__algorithms | algorithms/map/randomized_set.py | {
"start": 367,
"end": 1501
} | class ____:
def __init__(self):
self.nums = []
self.idxs = {}
def insert(self, val):
if val not in self.idxs:
self.nums.append(val)
self.idxs[val] = len(self.nums)-1
return True
return False
def remove(self, val):
if val in self.idxs:
idx, last = self.idxs[val], self.nums[-1]
self.nums[idx], self.idxs[last] = last, idx
self.nums.pop()
self.idxs.pop(val, 0)
return True
return False
def get_random(self):
idx = random.randint(0, len(self.nums)-1)
return self.nums[idx]
if __name__ == "__main__":
rs = RandomizedSet()
print("insert 1: ", rs.insert(1))
print("insert 2: ", rs.insert(2))
print("insert 3: ", rs.insert(3))
print("insert 4: ", rs.insert(4))
print("remove 3: ", rs.remove(3))
print("remove 3: ", rs.remove(3))
print("remove 1: ", rs.remove(1))
print("random: ", rs.get_random())
print("random: ", rs.get_random())
print("random: ", rs.get_random())
print("random: ", rs.get_random())
| RandomizedSet |
python | airbytehq__airbyte | airbyte-integrations/connectors/source-stripe/unit_tests/integration/test_reviews.py | {
"start": 9107,
"end": 14659
} | class ____(TestCase):
@HttpMocker()
def test_given_no_state_when_read_then_use_reviews_endpoint(self, http_mocker: HttpMocker) -> None:
cursor_value = int(_A_START_DATE.timestamp()) + 1
http_mocker.get(
_reviews_request().with_created_gte(_A_START_DATE).with_created_lte(_NOW).with_limit(100).build(),
_reviews_response().with_record(_a_review().with_cursor(cursor_value)).build(),
)
output = self._read(_config().with_start_date(_A_START_DATE), _NO_STATE)
most_recent_state = output.most_recent_state
assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME)
assert most_recent_state.stream_state.updated == str(cursor_value)
@HttpMocker()
def test_given_state_when_read_then_query_events_using_types_and_state_value_plus_1(self, http_mocker: HttpMocker) -> None:
start_date = _NOW - timedelta(days=40)
state_datetime = _NOW - timedelta(days=5)
cursor_value = int(state_datetime.timestamp()) + 1
http_mocker.get(
_events_request().with_created_gte(state_datetime).with_created_lte(_NOW).with_limit(100).with_types(_EVENT_TYPES).build(),
_events_response().with_record(_an_event().with_cursor(cursor_value).with_field(_DATA_FIELD, _a_review().build())).build(),
)
output = self._read(
_config().with_start_date(start_date),
StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(),
)
most_recent_state = output.most_recent_state
assert most_recent_state.stream_descriptor == StreamDescriptor(name=_STREAM_NAME)
assert most_recent_state.stream_state.updated == str(cursor_value)
@HttpMocker()
def test_given_state_and_pagination_when_read_then_return_records(self, http_mocker: HttpMocker) -> None:
state_datetime = _NOW - timedelta(days=5)
http_mocker.get(
_events_request().with_created_gte(state_datetime).with_created_lte(_NOW).with_limit(100).with_types(_EVENT_TYPES).build(),
_events_response()
.with_pagination()
.with_record(_an_event().with_id("last_record_id_from_first_page").with_field(_DATA_FIELD, _a_review().build()))
.build(),
)
http_mocker.get(
_events_request()
.with_starting_after("last_record_id_from_first_page")
.with_created_gte(state_datetime)
.with_created_lte(_NOW)
.with_limit(100)
.with_types(_EVENT_TYPES)
.build(),
_events_response().with_record(self._a_review_event()).build(),
)
output = self._read(
_config(),
StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(),
)
assert len(output.records) == 2
@HttpMocker()
def test_given_state_and_small_slice_range_when_read_then_perform_multiple_queries(self, http_mocker: HttpMocker) -> None:
state_datetime = _NOW - timedelta(days=5)
slice_range = timedelta(days=3)
slice_datetime = state_datetime + slice_range
http_mocker.get(
_events_request()
.with_created_gte(state_datetime)
.with_created_lte(slice_datetime - _AVOIDING_INCLUSIVE_BOUNDARIES)
.with_limit(100)
.with_types(_EVENT_TYPES)
.build(),
_events_response().with_record(self._a_review_event()).build(),
)
http_mocker.get(
_events_request().with_created_gte(slice_datetime).with_created_lte(_NOW).with_limit(100).with_types(_EVENT_TYPES).build(),
_events_response().with_record(self._a_review_event()).with_record(self._a_review_event()).build(),
)
output = self._read(
_config().with_start_date(_NOW - timedelta(days=30)).with_slice_range_in_days(slice_range.days),
StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_datetime.timestamp())}).build(),
)
assert len(output.records) == 3
@HttpMocker()
def test_given_state_earlier_than_30_days_when_read_then_query_events_using_types_and_event_lower_boundary(
self, http_mocker: HttpMocker
) -> None:
# this seems odd as we would miss some data between start_date and events_lower_boundary. In that case, we should hit the
# reviews endpoint
start_date = _NOW - timedelta(days=40)
state_value = _NOW - timedelta(days=39)
events_lower_boundary = _NOW - timedelta(days=30)
http_mocker.get(
_events_request()
.with_created_gte(events_lower_boundary)
.with_created_lte(_NOW)
.with_limit(100)
.with_types(_EVENT_TYPES)
.build(),
_events_response().with_record(self._a_review_event()).build(),
)
self._read(
_config().with_start_date(start_date),
StateBuilder().with_stream_state(_STREAM_NAME, {"updated": int(state_value.timestamp())}).build(),
)
# request matched http_mocker
def _a_review_event(self) -> RecordBuilder:
return _an_event().with_field(_DATA_FIELD, _a_review().build())
def _read(self, config: ConfigBuilder, state: Optional[Dict[str, Any]], expecting_exception: bool = False) -> EntrypointOutput:
return _read(config, SyncMode.incremental, state, expecting_exception)
| IncrementalTest |
python | PrefectHQ__prefect | src/integrations/prefect-docker/prefect_docker/worker.py | {
"start": 14625,
"end": 14775
} | class ____(BaseWorkerResult):
"""Contains information about a completed Docker container"""
P = ParamSpec("P")
R = TypeVar("R")
| DockerWorkerResult |
python | requests__requests-oauthlib | requests_oauthlib/oauth2_auth.py | {
"start": 158,
"end": 1508
} | class ____(AuthBase):
"""Adds proof of authorization (OAuth2 token) to the request."""
def __init__(self, client_id=None, client=None, token=None):
"""Construct a new OAuth 2 authorization object.
:param client_id: Client id obtained during registration
:param client: :class:`oauthlib.oauth2.Client` to be used. Default is
WebApplicationClient which is useful for any
hosted application but not mobile or desktop.
:param token: Token dictionary, must include access_token
and token_type.
"""
self._client = client or WebApplicationClient(client_id, token=token)
if token:
for k, v in token.items():
setattr(self._client, k, v)
def __call__(self, r):
"""Append an OAuth 2 token to the request.
Note that currently HTTPS is required for all requests. There may be
a token type that allows for plain HTTP in the future and then this
should be updated to allow plain HTTP on a white list basis.
"""
if not is_secure_transport(r.url):
raise InsecureTransportError()
r.url, r.headers, r.body = self._client.add_token(
r.url, http_method=r.method, body=r.body, headers=r.headers
)
return r
| OAuth2 |
python | has2k1__plotnine | doc/_renderer.py | {
"start": 915,
"end": 1146
} | class ____(QRenderer):
pass
exclude_parameters(
{
"plotnine.scale_color_hue": ("s", "color_space"),
}
)
summary_name_lookup = {
"Beside": f"{Code('|')} Beside",
"Stack": f"{Code('/')} Stack",
}
| Renderer |
python | pytorch__pytorch | torch/jit/__init__.py | {
"start": 6756,
"end": 8366
} | class ____:
"""
Give errors if not all nodes have been fused in inference, or symbolically differentiated in training.
Example:
Forcing fusion of additions.
.. code-block:: python
@torch.jit.script
def foo(x):
with torch.jit.strict_fusion():
return x + x + x
"""
def __init__(self) -> None:
if not torch._jit_internal.is_scripting():
warnings.warn("Only works in script mode", stacklevel=2)
def __enter__(self):
pass
def __exit__(self, type: Any, value: Any, tb: Any) -> None:
pass
# Context manager for globally hiding source ranges when printing graphs.
# Note that these functions are exposed to Python as static members of the
# Graph class, so mypy checks need to be skipped.
@contextmanager
def _hide_source_ranges() -> Iterator[None]:
old_enable_source_ranges = torch._C.Graph.global_print_source_ranges # type: ignore[attr-defined]
try:
torch._C.Graph.set_global_print_source_ranges(False) # type: ignore[attr-defined]
yield
finally:
torch._C.Graph.set_global_print_source_ranges(old_enable_source_ranges) # type: ignore[attr-defined]
def enable_onednn_fusion(enabled: bool) -> None:
"""Enable or disables onednn JIT fusion based on the parameter `enabled`."""
torch._C._jit_set_llga_enabled(enabled)
def onednn_fusion_enabled():
"""Return whether onednn JIT fusion is enabled."""
return torch._C._jit_llga_enabled()
del Any
if not torch._C._jit_init():
raise RuntimeError("JIT initialization failed")
| strict_fusion |
python | allegroai__clearml | clearml/backend_api/services/v2_23/datasets.py | {
"start": 15540,
"end": 21552
} | class ____(NonStrictDataModel):
"""
:param id: Source unique ID within this DatasetVersion
:type id: str
:param uri: Source data URI
:type uri: str
:param content_type: Content type (e.g. 'image/jpeg', 'image/png')
:type content_type: str
:param width: Width in pixels
:type width: int
:param height: Height in pixels
:type height: int
:param timestamp: Timestamp in the source data (for video content. for images,
this value should be 0)
:type timestamp: int
:param masks:
:type masks: Sequence[Mask]
:param preview:
:type preview: Preview
:param meta: Additional metadata dictionary for the source
:type meta: dict
"""
_schema = {
"properties": {
"content_type": {
"description": "Content type (e.g. 'image/jpeg', 'image/png')",
"type": "string",
},
"height": {"description": "Height in pixels", "type": "integer"},
"id": {
"description": "Source unique ID within this DatasetVersion",
"type": "string",
},
"masks": {"items": {"$ref": "#/definitions/mask"}, "type": "array"},
"meta": {
"additionalProperties": True,
"description": "Additional metadata dictionary for the source",
"type": "object",
},
"preview": {"$ref": "#/definitions/preview"},
"timestamp": {
"default": 0,
"description": "Timestamp in the source data (for video content. for images, this value should be 0)",
"type": "integer",
},
"uri": {"description": "Source data URI", "type": "string"},
"width": {"description": "Width in pixels", "type": "integer"},
},
"required": ["id", "uri"],
"type": "object",
}
def __init__(
self,
id,
uri,
content_type=None,
width=None,
height=None,
timestamp=0,
masks=None,
preview=None,
meta=None,
**kwargs
):
super(Source, self).__init__(**kwargs)
self.id = id
self.uri = uri
self.content_type = content_type
self.width = width
self.height = height
self.timestamp = timestamp
self.masks = masks
self.preview = preview
self.meta = meta
@schema_property("id")
def id(self):
return self._property_id
@id.setter
def id(self, value):
if value is None:
self._property_id = None
return
self.assert_isinstance(value, "id", six.string_types)
self._property_id = value
@schema_property("uri")
def uri(self):
return self._property_uri
@uri.setter
def uri(self, value):
if value is None:
self._property_uri = None
return
self.assert_isinstance(value, "uri", six.string_types)
self._property_uri = value
@schema_property("content_type")
def content_type(self):
return self._property_content_type
@content_type.setter
def content_type(self, value):
if value is None:
self._property_content_type = None
return
self.assert_isinstance(value, "content_type", six.string_types)
self._property_content_type = value
@schema_property("width")
def width(self):
return self._property_width
@width.setter
def width(self, value):
if value is None:
self._property_width = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "width", six.integer_types)
self._property_width = value
@schema_property("height")
def height(self):
return self._property_height
@height.setter
def height(self, value):
if value is None:
self._property_height = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "height", six.integer_types)
self._property_height = value
@schema_property("timestamp")
def timestamp(self):
return self._property_timestamp
@timestamp.setter
def timestamp(self, value):
if value is None:
self._property_timestamp = None
return
if isinstance(value, float) and value.is_integer():
value = int(value)
self.assert_isinstance(value, "timestamp", six.integer_types)
self._property_timestamp = value
@schema_property("masks")
def masks(self):
return self._property_masks
@masks.setter
def masks(self, value):
if value is None:
self._property_masks = None
return
self.assert_isinstance(value, "masks", (list, tuple))
if any(isinstance(v, dict) for v in value):
value = [Mask.from_dict(v) if isinstance(v, dict) else v for v in value]
else:
self.assert_isinstance(value, "masks", Mask, is_array=True)
self._property_masks = value
@schema_property("preview")
def preview(self):
return self._property_preview
@preview.setter
def preview(self, value):
if value is None:
self._property_preview = None
return
if isinstance(value, dict):
value = Preview.from_dict(value)
else:
self.assert_isinstance(value, "preview", Preview)
self._property_preview = value
@schema_property("meta")
def meta(self):
return self._property_meta
@meta.setter
def meta(self, value):
if value is None:
self._property_meta = None
return
self.assert_isinstance(value, "meta", (dict,))
self._property_meta = value
| Source |
python | getsentry__sentry | tests/sentry/integrations/api/endpoints/test_organization_code_mapping_details.py | {
"start": 307,
"end": 5082
} | class ____(APITestCase):
endpoint = "sentry-api-0-organization-code-mapping-details"
def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.user2 = self.create_user("nisanthan@sentry.io", is_superuser=False)
self.org = self.create_organization(owner=self.user, name="baz")
self.org.flags.allow_joinleave = False
self.org.save()
self.team = self.create_team(organization=self.org, name="Mariachi Band")
self.team2 = self.create_team(
organization=self.org,
name="Ecosystem",
)
self.create_member(
organization=self.org,
user=self.user2,
has_global_access=False,
teams=[self.team, self.team2],
)
self.project = self.create_project(organization=self.org, teams=[self.team], name="Bengal")
self.project2 = self.create_project(organization=self.org, teams=[self.team2], name="Tiger")
self.integration, self.org_integration = self.create_provider_integration_for(
self.org, self.user, provider="github", name="Example", external_id="abcd"
)
self.repo = Repository.objects.create(
name="example", organization_id=self.org.id, integration_id=self.integration.id
)
self.config = RepositoryProjectPathConfig.objects.create(
repository_id=self.repo.id,
project_id=self.project.id,
organization_integration_id=self.org_integration.id,
integration_id=self.org_integration.integration_id,
organization_id=self.org_integration.organization_id,
stack_root="/stack/root",
source_root="/source/root",
default_branch="master",
)
self.url = reverse(
self.endpoint,
args=[self.org.slug, self.config.id],
)
def make_put(self, data):
# reconstruct the original object
config_data = serialize(self.config, self.user)
return self.client.put(
self.url,
{**config_data, **data, "repositoryId": self.repo.id},
)
def test_non_project_member_permissions(self) -> None:
non_member = self.create_user()
non_member_om = self.create_member(organization=self.org, user=non_member)
self.login_as(user=non_member)
response = self.make_put({"sourceRoot": "newRoot"})
assert response.status_code == status.HTTP_403_FORBIDDEN
response = self.client.delete(self.url)
assert response.status_code == status.HTTP_403_FORBIDDEN
self.create_team_membership(team=self.team, member=non_member_om)
response = self.make_put({"projectId": self.project2.id, "sourceRoot": "newRoot"})
assert response.status_code == status.HTTP_403_FORBIDDEN
response = self.make_put({"sourceRoot": "newRoot"})
assert response.status_code == status.HTTP_200_OK
# Needed for DELETE on OrganizationIntegrationsLoosePermission
non_member_om.update(role="admin")
response = self.client.delete(self.url)
assert response.status_code == status.HTTP_204_NO_CONTENT
def test_basic_delete(self) -> None:
resp = self.client.delete(self.url)
assert resp.status_code == 204
assert not RepositoryProjectPathConfig.objects.filter(id=str(self.config.id)).exists()
def test_basic_edit(self) -> None:
resp = self.make_put({"sourceRoot": "newRoot"})
assert resp.status_code == 200
assert resp.data["id"] == str(self.config.id)
assert resp.data["sourceRoot"] == "newRoot"
def test_basic_edit_from_member_permissions(self) -> None:
self.login_as(user=self.user2)
resp = self.make_put({"sourceRoot": "newRoot"})
assert resp.status_code == 200
def test_delete_with_existing_codeowners(self) -> None:
self.create_codeowners(project=self.project, code_mapping=self.config)
resp = self.client.delete(self.url)
assert resp.status_code == 409
assert (
resp.data
== "Cannot delete Code Mapping. Must delete Code Owner that uses this mapping first."
)
assert RepositoryProjectPathConfig.objects.filter(id=str(self.config.id)).exists()
def test_delete_another_orgs_code_mapping(self) -> None:
invalid_user = self.create_user()
invalid_organization = self.create_organization(owner=invalid_user)
self.login_as(user=invalid_user)
url = reverse(
self.endpoint,
args=[invalid_organization.slug, self.config.id],
)
resp = self.client.delete(url)
assert resp.status_code == 404
| OrganizationCodeMappingDetailsTest |
python | cython__cython | Cython/Compiler/Nodes.py | {
"start": 44948,
"end": 45204
} | class ____(Node):
# Abstract base class for C base type nodes.
#
# Processing during analyse_declarations phase:
#
# analyse
# Returns the type.
def analyse_as_type(self, env):
return self.analyse(env)
| CBaseTypeNode |
python | tensorflow__tensorflow | tensorflow/python/kernel_tests/linalg/self_adjoint_eig_op_test.py | {
"start": 1509,
"end": 6560
} | class ____(test.TestCase):
@test_util.run_deprecated_v1
def testWrongDimensions(self):
# The input to self_adjoint_eig should be a tensor of
# at least rank 2.
scalar = constant_op.constant(1.)
with self.assertRaises(ValueError):
linalg_ops.self_adjoint_eig(scalar)
vector = constant_op.constant([1., 2.])
with self.assertRaises(ValueError):
linalg_ops.self_adjoint_eig(vector)
@test_util.run_deprecated_v1
def testConcurrentExecutesWithoutError(self):
all_ops = []
with self.session():
for compute_v_ in True, False:
matrix1 = random_ops.random_normal([5, 5], seed=42)
matrix2 = random_ops.random_normal([5, 5], seed=42)
if compute_v_:
e1, v1 = linalg_ops.self_adjoint_eig(matrix1)
e2, v2 = linalg_ops.self_adjoint_eig(matrix2)
all_ops += [e1, v1, e2, v2]
else:
e1 = linalg_ops.self_adjoint_eigvals(matrix1)
e2 = linalg_ops.self_adjoint_eigvals(matrix2)
all_ops += [e1, e2]
val = self.evaluate(all_ops)
self.assertAllEqual(val[0], val[2])
# The algorithm is slightly different for compute_v being True and False,
# so require approximate equality only here.
self.assertAllClose(val[2], val[4])
self.assertAllEqual(val[4], val[5])
self.assertAllEqual(val[1], val[3])
def testMatrixThatFailsWhenFlushingDenormsToZero(self):
# Test a 32x32 matrix which is known to fail if denorm floats are flushed to
# zero.
matrix = np.genfromtxt(
test.test_src_dir_path(
"python/kernel_tests/linalg/testdata/"
"self_adjoint_eig_fail_if_denorms_flushed.txt")).astype(np.float32)
self.assertEqual(matrix.shape, (32, 32))
matrix_tensor = constant_op.constant(matrix)
with self.session():
(e, v) = self.evaluate(linalg_ops.self_adjoint_eig(matrix_tensor))
self.assertEqual(e.size, 32)
self.assertAllClose(
np.matmul(v, v.transpose()), np.eye(32, dtype=np.float32), atol=2e-3)
self.assertAllClose(matrix,
np.matmul(np.matmul(v, np.diag(e)), v.transpose()))
def SortEigenDecomposition(e, v):
if v.ndim < 2:
return e, v
else:
perm = np.argsort(e, -1)
return np.take(e, perm, -1), np.take(v, perm, -1)
def EquilibrateEigenVectorPhases(x, y):
"""Equilibrate the phase of the Eigenvectors in the columns of `x` and `y`.
Eigenvectors are only unique up to an arbitrary phase. This function rotates x
such that it matches y. Precondition: The columns of x and y differ by a
multiplicative complex phase factor only.
Args:
x: `np.ndarray` with Eigenvectors
y: `np.ndarray` with Eigenvectors
Returns:
`np.ndarray` containing an equilibrated version of x.
"""
phases = np.sum(np.conj(x) * y, -2, keepdims=True)
phases /= np.abs(phases)
return phases * x
def _GetSelfAdjointEigTest(dtype_, shape_, compute_v_):
def CompareEigenVectors(self, x, y, tol):
x = EquilibrateEigenVectorPhases(x, y)
self.assertAllClose(x, y, atol=tol)
def CompareEigenDecompositions(self, x_e, x_v, y_e, y_v, tol):
num_batches = int(np.prod(x_e.shape[:-1]))
n = x_e.shape[-1]
x_e = np.reshape(x_e, [num_batches] + [n])
x_v = np.reshape(x_v, [num_batches] + [n, n])
y_e = np.reshape(y_e, [num_batches] + [n])
y_v = np.reshape(y_v, [num_batches] + [n, n])
for i in range(num_batches):
x_ei, x_vi = SortEigenDecomposition(x_e[i, :], x_v[i, :, :])
y_ei, y_vi = SortEigenDecomposition(y_e[i, :], y_v[i, :, :])
self.assertAllClose(x_ei, y_ei, atol=tol, rtol=tol)
CompareEigenVectors(self, x_vi, y_vi, tol)
def Test(self):
np.random.seed(1)
n = shape_[-1]
batch_shape = shape_[:-2]
np_dtype = dtype_.as_numpy_dtype
a = np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
if dtype_.is_complex:
a += 1j * np.random.uniform(
low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
a += np.conj(a.T)
a = np.tile(a, batch_shape + (1, 1))
if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
atol = 1e-4
else:
atol = 1e-12
np_e, np_v = np.linalg.eigh(a)
with self.session():
if compute_v_:
tf_e, tf_v = linalg_ops.self_adjoint_eig(constant_op.constant(a))
# Check that V*diag(E)*V^T is close to A.
a_ev = test_util.matmul_without_tf32(
test_util.matmul_without_tf32(tf_v, array_ops.matrix_diag(tf_e)),
tf_v,
adjoint_b=True)
self.assertAllClose(self.evaluate(a_ev), a, atol=atol)
# Compare to numpy.linalg.eigh.
CompareEigenDecompositions(self, np_e, np_v, self.evaluate(tf_e),
self.evaluate(tf_v), atol)
else:
tf_e = linalg_ops.self_adjoint_eigvals(constant_op.constant(a))
self.assertAllClose(
np.sort(np_e, -1), np.sort(self.evaluate(tf_e), -1), atol=atol)
return Test
| SelfAdjointEigTest |
python | ray-project__ray | python/ray/train/v2/tests/test_thread_runner.py | {
"start": 285,
"end": 5189
} | class ____(ThreadRunner):
def join(self):
"""Join both the target thread and the monitor thread.
Do not include this with the main ThreadRunner class because:
* It is tricky to avoid hangs when nested threads raise errors
* We don't need to join in that case since the controller will see the
error and shut down the worker
"""
if self._monitor_thread is None or self._thread is None:
raise RuntimeError("Must call `run` before trying to `join`.")
self._monitor_thread.join()
self._thread.join()
return self.get_return_value()
@pytest.fixture()
def thread_runner():
return ThreadRunnerWithJoin()
def test_successful_return(thread_runner):
"""Checks that a value can been successfully returned from the target function."""
def target():
return 42
thread_runner.run(target)
assert thread_runner.join() == 42
assert thread_runner.get_return_value() == 42
assert not thread_runner.is_running()
assert thread_runner.get_error() is None
def test_error(thread_runner):
"""Checks that an exception can be captured from the target function.
This test also checks that the traceback string only includes the frames
from the user function (train_func) and not the wrapper frames.
"""
original_monitor_target = thread_runner._monitor_target
monitor_event = threading.Event()
def monitor_target_patch():
monitor_event.wait()
original_monitor_target()
thread_runner._monitor_target = monitor_target_patch
def wrapped_train_func():
def train_fn_with_final_checkpoint_flush():
def train_func():
raise ValueError
train_func()
train_fn_with_final_checkpoint_flush()
thread_runner.run(wrapped_train_func)
assert thread_runner.is_running() and thread_runner.get_error() is None
monitor_event.set()
assert not thread_runner.join()
assert thread_runner.get_return_value() is None
assert not thread_runner.is_running()
error = thread_runner.get_error()
assert isinstance(error, UserExceptionWithTraceback)
assert isinstance(error._base_exc, ValueError)
print(error._traceback_str)
assert "_run_target" not in error._traceback_str
assert "wrapped_train_func" not in error._traceback_str
assert "train_fn_with_final_checkpoint_flush" not in error._traceback_str
assert "train_func" in error._traceback_str
def test_nested_thread_error(thread_runner):
"""Checks that we capture exceptions from threads kicked off by target function."""
original_monitor_target = thread_runner._monitor_target
monitor_event = threading.Event()
def monitor_target_patch():
monitor_event.wait()
original_monitor_target()
thread_runner._monitor_target = monitor_target_patch
target_event = threading.Event()
def target():
def nested():
try:
raise ValueError
except ValueError as e:
thread_runner.get_exception_queue().put(
construct_user_exception_with_traceback(e)
)
thread = threading.Thread(target=nested)
thread.start()
thread.join()
target_event.set()
thread_runner.run(target)
target_event.wait()
# While the monitor thread is processing the exception,
# the thread runner is still considered running.
assert thread_runner.is_running() and thread_runner.get_error() is None
# Unblock the monitor thread.
monitor_event.set()
assert not thread_runner.join()
assert thread_runner.get_return_value() is None
assert not thread_runner.is_running()
error = thread_runner.get_error()
assert isinstance(error, UserExceptionWithTraceback)
assert isinstance(error._base_exc, ValueError)
def test_running(thread_runner, tmp_path):
"""Checks that the running status can be queried."""
running_marker = tmp_path.joinpath("running")
running_marker.touch()
def target():
while running_marker.exists():
time.sleep(0.01)
thread_runner.run(target)
assert thread_runner.is_running()
# Let the training thread exit.
running_marker.unlink()
thread_runner.join()
assert not thread_runner.is_running()
def test_join_before_run_exception(thread_runner):
"""Checks that an error is raised if `join` is called before `run`."""
with pytest.raises(RuntimeError):
thread_runner.join()
def test_run_twice_exception(thread_runner):
"""Checks that an error is raised if `run` is called twice."""
thread_runner.run(lambda: None)
with pytest.raises(RuntimeError):
thread_runner.run(lambda: None)
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", "-x", __file__]))
| ThreadRunnerWithJoin |
python | readthedocs__readthedocs.org | readthedocs/projects/views/private.py | {
"start": 10295,
"end": 10464
} | class ____(ProjectVersionEditMixin, CreateView):
success_message = _("Version created")
template_name = "projects/project_version_detail.html"
| ProjectVersionCreate |
python | Textualize__textual | src/textual/logging.py | {
"start": 342,
"end": 1187
} | class ____(Handler):
"""A Logging handler for Textual apps."""
def __init__(self, stderr: bool = True, stdout: bool = False) -> None:
"""Initialize a Textual logging handler.
Args:
stderr: Log to stderr when there is no active app.
stdout: Log to stdout when there is no active app.
"""
super().__init__()
self._stderr = stderr
self._stdout = stdout
def emit(self, record: LogRecord) -> None:
"""Invoked by logging."""
message = self.format(record)
try:
app = active_app.get()
except LookupError:
if self._stderr:
print(message, file=sys.stderr)
elif self._stdout:
print(message, file=sys.stdout)
else:
app.log.logging(message)
| TextualHandler |
python | getsentry__sentry | tests/sentry/users/api/endpoints/test_userroles_index.py | {
"start": 1494,
"end": 2046
} | class ____(UserRolesTest):
method = "POST"
def test_simple(self) -> None:
resp = self.get_response(name="test-role", permissions=["users.admin"])
assert resp.status_code == 201
assert resp.data["name"] == "test-role"
role = UserRole.objects.get(name="test-role")
assert role.permissions == ["users.admin"]
def test_already_exists(self) -> None:
UserRole.objects.create(name="test-role")
resp = self.get_response(name="test-role")
assert resp.status_code == 410
| UserRolesPostTest |
python | celery__celery | t/unit/app/test_backends.py | {
"start": 2494,
"end": 4510
} | class ____:
@pytest.mark.parametrize('url,expect_cls', [
('cache+memory://', CacheBackend),
])
def test_get_backend_aliases(self, url, expect_cls, app):
backend, url = backends.by_url(url, app.loader)
assert isinstance(backend(app=app, url=url), expect_cls)
def test_unknown_backend(self, app):
with pytest.raises(ImportError):
backends.by_name('fasodaopjeqijwqe', app.loader)
def test_backend_by_url(self, app, url='redis://localhost/1'):
from celery.backends.redis import RedisBackend
backend, url_ = backends.by_url(url, app.loader)
assert backend is RedisBackend
assert url_ == url
def test_sym_raises_ValuError(self, app):
with patch('celery.app.backends.symbol_by_name') as sbn:
sbn.side_effect = ValueError()
with pytest.raises(ImproperlyConfigured):
backends.by_name('xxx.xxx:foo', app.loader)
def test_backend_can_not_be_module(self, app):
with pytest.raises(ImproperlyConfigured):
backends.by_name(pytest, app.loader)
@pytest.mark.celery(
result_backend=f'{CachedBackendWithTreadTrucking.__module__}.'
f'{CachedBackendWithTreadTrucking.__qualname__}'
f'+memory://')
def test_backend_thread_safety(self):
@self.app.task
def dummy_add_task(x, y):
return x + y
with embed_worker(app=self.app, pool='threads'):
result = dummy_add_task.delay(6, 9)
assert result.get(timeout=10) == 15
call_stats = CachedBackendWithTreadTrucking.test_call_stats
# check that backend instance is used without same thread
for backend_call_stats in call_stats.values():
thread_ids = set()
for call_stat in backend_call_stats:
thread_ids.add(call_stat['thread_id'])
assert len(thread_ids) <= 1, \
"The same celery backend instance is used by multiple threads"
| test_backends |
python | huggingface__transformers | src/transformers/models/mgp_str/configuration_mgp_str.py | {
"start": 784,
"end": 5810
} | class ____(PreTrainedConfig):
r"""
This is the configuration class to store the configuration of an [`MgpstrModel`]. It is used to instantiate an
MGP-STR model according to the specified arguments, defining the model architecture. Instantiating a configuration
with the defaults will yield a similar configuration to that of the MGP-STR
[alibaba-damo/mgp-str-base](https://huggingface.co/alibaba-damo/mgp-str-base) architecture.
Configuration objects inherit from [`PreTrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PreTrainedConfig`] for more information.
Args:
image_size (`list[int]`, *optional*, defaults to `[32, 128]`):
The size (resolution) of each image.
patch_size (`int`, *optional*, defaults to 4):
The size (resolution) of each patch.
num_channels (`int`, *optional*, defaults to 3):
The number of input channels.
max_token_length (`int`, *optional*, defaults to 27):
The max number of output tokens.
num_character_labels (`int`, *optional*, defaults to 38):
The number of classes for character head .
num_bpe_labels (`int`, *optional*, defaults to 50257):
The number of classes for bpe head .
num_wordpiece_labels (`int`, *optional*, defaults to 30522):
The number of classes for wordpiece head .
hidden_size (`int`, *optional*, defaults to 768):
The embedding dimension.
num_hidden_layers (`int`, *optional*, defaults to 12):
Number of hidden layers in the Transformer encoder.
num_attention_heads (`int`, *optional*, defaults to 12):
Number of attention heads for each attention layer in the Transformer encoder.
mlp_ratio (`float`, *optional*, defaults to 4.0):
The ratio of mlp hidden dim to embedding dim.
qkv_bias (`bool`, *optional*, defaults to `True`):
Whether to add a bias to the queries, keys and values.
distilled (`bool`, *optional*, defaults to `False`):
Model includes a distillation token and head as in DeiT models.
layer_norm_eps (`float`, *optional*, defaults to 1e-05):
The epsilon used by the layer normalization layers.
drop_rate (`float`, *optional*, defaults to 0.0):
The dropout probability for all fully connected layers in the embeddings, encoder.
attn_drop_rate (`float`, *optional*, defaults to 0.0):
The dropout ratio for the attention probabilities.
drop_path_rate (`float`, *optional*, defaults to 0.0):
The stochastic depth rate.
output_a3_attentions (`bool`, *optional*, defaults to `False`):
Whether or not the model should returns A^3 module attentions.
initializer_range (`float`, *optional*, defaults to 0.02):
The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
Example:
```python
>>> from transformers import MgpstrConfig, MgpstrForSceneTextRecognition
>>> # Initializing a Mgpstr mgp-str-base style configuration
>>> configuration = MgpstrConfig()
>>> # Initializing a model (with random weights) from the mgp-str-base style configuration
>>> model = MgpstrForSceneTextRecognition(configuration)
>>> # Accessing the model configuration
>>> configuration = model.config
```"""
model_type = "mgp-str"
def __init__(
self,
image_size=[32, 128],
patch_size=4,
num_channels=3,
max_token_length=27,
num_character_labels=38,
num_bpe_labels=50257,
num_wordpiece_labels=30522,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
mlp_ratio=4.0,
qkv_bias=True,
distilled=False,
layer_norm_eps=1e-5,
drop_rate=0.0,
attn_drop_rate=0.0,
drop_path_rate=0.0,
output_a3_attentions=False,
initializer_range=0.02,
**kwargs,
):
super().__init__(**kwargs)
self.image_size = image_size
self.patch_size = patch_size
self.num_channels = num_channels
self.max_token_length = max_token_length
self.num_character_labels = num_character_labels
self.num_bpe_labels = num_bpe_labels
self.num_wordpiece_labels = num_wordpiece_labels
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.mlp_ratio = mlp_ratio
self.distilled = distilled
self.layer_norm_eps = layer_norm_eps
self.drop_rate = drop_rate
self.qkv_bias = qkv_bias
self.attn_drop_rate = attn_drop_rate
self.drop_path_rate = drop_path_rate
self.output_a3_attentions = output_a3_attentions
self.initializer_range = initializer_range
__all__ = ["MgpstrConfig"]
| MgpstrConfig |
python | pypa__warehouse | tests/unit/macaroons/test_security_policy.py | {
"start": 1756,
"end": 12678
} | class ____:
def test_verify(self):
assert verifyClass(
ISecurityPolicy,
security_policy.MacaroonSecurityPolicy,
)
def test_noops(self):
policy = security_policy.MacaroonSecurityPolicy()
with pytest.raises(NotImplementedError):
policy.authenticated_userid(pretend.stub())
def test_forget_and_remember(self):
policy = security_policy.MacaroonSecurityPolicy()
assert policy.forget(pretend.stub()) == []
assert policy.remember(pretend.stub(), pretend.stub()) == []
def test_identity_no_http_macaroon(self, monkeypatch):
policy = security_policy.MacaroonSecurityPolicy()
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
extract_http_macaroon = pretend.call_recorder(lambda r: None)
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", extract_http_macaroon
)
request = pretend.stub(
add_response_callback=pretend.call_recorder(lambda cb: None)
)
assert policy.identity(request) is None
assert extract_http_macaroon.calls == [pretend.call(request)]
assert add_vary_cb.calls == [pretend.call("Authorization")]
assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_identity_no_db_macaroon(self, monkeypatch):
policy = security_policy.MacaroonSecurityPolicy()
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
raw_macaroon = pretend.stub()
extract_http_macaroon = pretend.call_recorder(lambda r: raw_macaroon)
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", extract_http_macaroon
)
macaroon_service = pretend.stub(
find_from_raw=pretend.call_recorder(pretend.raiser(InvalidMacaroonError)),
)
request = pretend.stub(
add_response_callback=pretend.call_recorder(lambda cb: None),
find_service=pretend.call_recorder(lambda iface, **kw: macaroon_service),
)
assert policy.identity(request) is None
assert extract_http_macaroon.calls == [pretend.call(request)]
assert request.find_service.calls == [
pretend.call(IMacaroonService, context=None),
]
assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)]
assert add_vary_cb.calls == [pretend.call("Authorization")]
assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_identity_disabled_user(self, monkeypatch):
policy = security_policy.MacaroonSecurityPolicy()
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
raw_macaroon = pretend.stub()
extract_http_macaroon = pretend.call_recorder(lambda r: raw_macaroon)
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", extract_http_macaroon
)
user = pretend.stub(id="deadbeef-dead-beef-deadbeef-dead")
macaroon = pretend.stub(user=user, oidc_publisher=None)
macaroon_service = pretend.stub(
find_from_raw=pretend.call_recorder(lambda rm: macaroon),
)
user_service = pretend.stub(
is_disabled=pretend.call_recorder(lambda user_id: (True, Exception)),
)
request = pretend.stub(
add_response_callback=pretend.call_recorder(lambda cb: None),
find_service=pretend.call_recorder(
lambda iface, **kw: {
IMacaroonService: macaroon_service,
IUserService: user_service,
}[iface]
),
)
assert policy.identity(request) is None
assert extract_http_macaroon.calls == [pretend.call(request)]
assert request.find_service.calls == [
pretend.call(IMacaroonService, context=None),
pretend.call(IUserService, context=None),
]
assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)]
assert user_service.is_disabled.calls == [
pretend.call("deadbeef-dead-beef-deadbeef-dead")
]
assert add_vary_cb.calls == [pretend.call("Authorization")]
assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_identity_user(self, monkeypatch):
policy = security_policy.MacaroonSecurityPolicy()
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
raw_macaroon = pretend.stub()
extract_http_macaroon = pretend.call_recorder(lambda r: raw_macaroon)
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", extract_http_macaroon
)
user = pretend.stub(id="deadbeef-dead-beef-deadbeef-dead")
macaroon = pretend.stub(user=user, oidc_publisher=None)
macaroon_service = pretend.stub(
find_from_raw=pretend.call_recorder(lambda rm: macaroon),
)
user_service = pretend.stub(
is_disabled=pretend.call_recorder(lambda user_id: (False, Exception)),
)
request = pretend.stub(
add_response_callback=pretend.call_recorder(lambda cb: None),
find_service=pretend.call_recorder(
lambda iface, **kw: {
IMacaroonService: macaroon_service,
IUserService: user_service,
}[iface]
),
)
assert policy.identity(request) == UserContext(user, macaroon)
assert extract_http_macaroon.calls == [pretend.call(request)]
assert request.find_service.calls == [
pretend.call(IMacaroonService, context=None),
pretend.call(IUserService, context=None),
]
assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)]
assert user_service.is_disabled.calls == [
pretend.call("deadbeef-dead-beef-deadbeef-dead")
]
assert add_vary_cb.calls == [pretend.call("Authorization")]
assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_identity_oidc_publisher(self, monkeypatch):
policy = security_policy.MacaroonSecurityPolicy()
vary_cb = pretend.stub()
add_vary_cb = pretend.call_recorder(lambda *v: vary_cb)
monkeypatch.setattr(security_policy, "add_vary_callback", add_vary_cb)
raw_macaroon = pretend.stub()
extract_http_macaroon = pretend.call_recorder(lambda r: raw_macaroon)
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", extract_http_macaroon
)
oidc_publisher = pretend.stub()
oidc_additional = {"oidc": {"foo": "bar"}}
macaroon = pretend.stub(
user=None, oidc_publisher=oidc_publisher, additional=oidc_additional
)
macaroon_service = pretend.stub(
find_from_raw=pretend.call_recorder(lambda rm: macaroon),
)
request = pretend.stub(
add_response_callback=pretend.call_recorder(lambda cb: None),
find_service=pretend.call_recorder(lambda iface, **kw: macaroon_service),
)
identity = policy.identity(request)
assert identity
assert identity.publisher is oidc_publisher
assert identity == PublisherTokenContext(
oidc_publisher, SignedClaims(oidc_additional["oidc"])
)
assert extract_http_macaroon.calls == [pretend.call(request)]
assert request.find_service.calls == [
pretend.call(IMacaroonService, context=None),
pretend.call(IUserService, context=None),
]
assert macaroon_service.find_from_raw.calls == [pretend.call(raw_macaroon)]
assert add_vary_cb.calls == [pretend.call("Authorization")]
assert request.add_response_callback.calls == [pretend.call(vary_cb)]
def test_permits_invalid_macaroon(self, monkeypatch):
macaroon_service = pretend.stub(
verify=pretend.raiser(InvalidMacaroonError("foo"))
)
request = pretend.stub(
find_service=pretend.call_recorder(lambda interface, **kw: macaroon_service)
)
_extract_http_macaroon = pretend.call_recorder(lambda r: "not a real macaroon")
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", _extract_http_macaroon
)
policy = security_policy.MacaroonSecurityPolicy()
result = policy.permits(request, pretend.stub(), Permissions.ProjectsUpload)
assert result == Denied("")
assert result.s == "Invalid API Token: foo"
@pytest.mark.parametrize(
("principals", "expected"), [(["user:5"], True), (["user:1"], False)]
)
def test_permits_valid_macaroon(self, monkeypatch, principals, expected):
macaroon_service = pretend.stub(
verify=pretend.call_recorder(lambda *a: pretend.stub())
)
request = pretend.stub(
identity=pretend.stub(__principals__=lambda: principals),
find_service=pretend.call_recorder(
lambda interface, **kw: macaroon_service
),
)
_extract_http_macaroon = pretend.call_recorder(lambda r: "not a real macaroon")
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", _extract_http_macaroon
)
context = pretend.stub(
__acl__=[(Allow, "user:5", [Permissions.ProjectsUpload])]
)
policy = security_policy.MacaroonSecurityPolicy()
result = policy.permits(request, context, Permissions.ProjectsUpload)
assert bool(result) == expected
@pytest.mark.parametrize(
"invalid_permission",
[Permissions.AccountManage, Permissions.ProjectsWrite, "nonexistent"],
)
def test_denies_valid_macaroon_for_incorrect_permission(
self, monkeypatch, invalid_permission
):
_extract_http_macaroon = pretend.call_recorder(lambda r: "not a real macaroon")
monkeypatch.setattr(
security_policy, "_extract_http_macaroon", _extract_http_macaroon
)
policy = security_policy.MacaroonSecurityPolicy()
result = policy.permits(pretend.stub(), pretend.stub(), invalid_permission)
assert result == Denied("")
assert result.s == (
f"API tokens are not valid for permission: {invalid_permission}!"
)
| TestMacaroonSecurityPolicy |
python | hynek__structlog | tests/processors/test_processors.py | {
"start": 5952,
"end": 6966
} | class ____:
def test_removes_stack_info(self, sir):
"""
The `stack_info` key is removed from `event_dict`.
"""
ed = sir(None, None, {"stack_info": True})
assert "stack_info" not in ed
def test_adds_stack_if_asked(self, sir):
"""
If `stack_info` is true, `stack` is added.
"""
ed = sir(None, None, {"stack_info": True})
assert "stack" in ed
def test_renders_correct_stack(self, sir):
"""
The rendered stack is correct.
"""
ed = sir(None, None, {"stack_info": True})
assert 'ed = sir(None, None, {"stack_info": True})' in ed["stack"]
def test_additional_ignores(self):
"""
Filtering of names works.
"""
sir = StackInfoRenderer(["tests.additional_frame"])
ed = additional_frame(
functools.partial(sir, None, None, {"stack_info": True})
)
assert "additional_frame.py" not in ed["stack"]
| TestStackInfoRenderer |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.