text
stringlengths 1
22.8M
|
|---|
```shell
Bash history reverse search
Aliasing ssh connections
Get to know your commands with `type`
Conditional command execution
(`&&` operator)
Keep useful commands in your shell history with tags
```
|
```objective-c
/* GIO - GLib Input, Output and Streaming Library
*
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
*
* You should have received a copy of the GNU Lesser General
*
* Author: Alexander Larsson <alexl@redhat.com>
*/
#ifndef __G_VOLUMEPRIV_H__
#define __G_VOLUMEPRIV_H__

#include <gio/gvolume.h>

G_BEGIN_DECLS

/* Private GIO helper (leading underscore): presumably resolves the GMount
 * mounted at @mount_path, honouring @cancellable — implementation lives
 * elsewhere; confirm semantics against the defining .c file. */
GMount *_g_mount_get_for_mount_path (const char *mount_path,
                                     GCancellable *cancellable);

G_END_DECLS

#endif /* __G_VOLUMEPRIV_H__ */
```
|
```python
import json
from copy import deepcopy
from typing import Any, Dict, List, Tuple
from samtranslator.metrics.method_decorator import cw_timer
from samtranslator.model.exceptions import InvalidEventException, InvalidResourceException
from samtranslator.model.iam import IAMRole, IAMRolePolicies
from samtranslator.model.intrinsics import fnJoin, is_intrinsic
from samtranslator.model.resource_policies import ResourcePolicies
from samtranslator.model.role_utils import construct_role_for_resource
from samtranslator.model.s3_utils.uri_parser import parse_s3_uri
from samtranslator.model.stepfunctions.resources import (
StepFunctionsStateMachine,
StepFunctionsStateMachineAlias,
StepFunctionsStateMachineVersion,
)
from samtranslator.model.tags.resource_tagging import get_tag_list
from samtranslator.model.xray_utils import get_xray_managed_policy_name
from samtranslator.utils.cfn_dynamic_references import is_dynamic_reference
class StateMachineGenerator:
    """Generates the CloudFormation resources for a SAM State Machine:
    the Step Functions state machine itself plus, as configured, its
    execution role, an auto-published version/alias pair, and the
    resources for its event sources."""

    # Tag key/value stamped on every generated state machine marking it as SAM-created.
    _SAM_KEY = "stateMachine:createdBy"
    _SAM_VALUE = "SAM"
    # printf-style templates used to build DefinitionSubstitutions entries;
    # the key template is the ${...} placeholder injected into the definition.
    _SUBSTITUTION_NAME_TEMPLATE = "definition_substitution_%s"
    _SUBSTITUTION_KEY_TEMPLATE = "${definition_substitution_%s}"
    # Error message used when both 'Role' and 'Policies' are specified.
    SFN_INVALID_PROPERTY_BOTH_ROLE_POLICY = (
        "Specify either 'Role' or 'Policies' (but not both at the same time) or neither of them"
    )

    def __init__(  # type: ignore[no-untyped-def] # noqa: PLR0913
        self,
        logical_id,
        depends_on,
        managed_policy_map,
        intrinsics_resolver,
        definition,
        definition_uri,
        logging,
        name,
        policies,
        permissions_boundary,
        definition_substitutions,
        role,
        state_machine_type,
        tracing,
        events,
        event_resources,
        event_resolver,
        role_path=None,
        tags=None,
        resource_attributes=None,
        passthrough_resource_attributes=None,
        get_managed_policy_map=None,
        auto_publish_alias=None,
        deployment_preference=None,
        use_alias_as_event_target=None,
    ):
        """
        Constructs a State Machine Generator class that generates a State Machine resource

        :param logical_id: Logical id of the SAM State Machine Resource
        :param depends_on: Any resources that need to be depended on
        :param managed_policy_map: Map of managed policy names to the ARNs
        :param intrinsics_resolver: Instance of the resolver that knows how to resolve parameter references
        :param definition: State Machine definition
        :param definition_uri: URI to State Machine definition
        :param logging: Logging configuration for the State Machine
        :param name: Name of the State Machine resource
        :param policies: Policies attached to the execution role
        :param permissions_boundary: The ARN of the policy used to set the permissions boundary for the role
        :param definition_substitutions: Variable-to-value mappings to be replaced in the State Machine definition
        :param role: Role ARN to use for the execution role
        :param role_path: The path of the execution role
        :param state_machine_type: Type of the State Machine
        :param tracing: Tracing configuration for the State Machine
        :param events: List of event sources for the State Machine
        :param event_resources: Event resources to link
        :param event_resolver: Resolver that maps Event types to Event classes
        :param tags: Tags to be associated with the State Machine resource
        :param resource_attributes: Resource attributes to add to the State Machine resource
        :param passthrough_resource_attributes: Attributes such as `Condition` that are added to derived resources
        :param auto_publish_alias: Name of the state machine alias to automatically create and update
        :param deployment_preference: Settings to enable gradual state machine deployments
        :param use_alias_as_event_target: Whether to use the state machine alias as the event target
        """
        self.logical_id = logical_id
        self.depends_on = depends_on
        self.managed_policy_map = managed_policy_map
        self.intrinsics_resolver = intrinsics_resolver
        self.passthrough_resource_attributes = passthrough_resource_attributes
        self.resource_attributes = resource_attributes
        self.definition = definition
        self.definition_uri = definition_uri
        self.name = name
        self.logging = logging
        self.policies = policies
        self.permissions_boundary = permissions_boundary
        self.definition_substitutions = definition_substitutions
        self.role = role
        self.role_path = role_path
        self.type = state_machine_type
        self.tracing = tracing
        self.events = events
        self.event_resources = event_resources
        self.event_resolver = event_resolver
        self.tags = tags
        # The state machine resource is created up front; its properties are
        # populated later by to_cloudformation().
        self.state_machine = StepFunctionsStateMachine(
            logical_id, depends_on=depends_on, attributes=resource_attributes
        )
        # Counter used by _generate_substitution() to mint unique substitution names.
        self.substitution_counter = 1
        self.get_managed_policy_map = get_managed_policy_map
        self.auto_publish_alias = auto_publish_alias
        self.deployment_preference = deployment_preference
        self.use_alias_as_event_target = use_alias_as_event_target

    @cw_timer(prefix="Generator", name="StateMachine")
    def to_cloudformation(self):  # type: ignore[no-untyped-def]
        """
        Constructs and returns the State Machine resource and any additional resources associated with it.

        :returns: a list of resources including the State Machine resource.
        :rtype: list

        :raises InvalidResourceException: if both or neither of 'Definition' and
            'DefinitionUri' are given, or if both 'Role' and 'Policies' are given.
        """
        resources: List[Any] = [self.state_machine]

        # Defaulting to {} will add the DefinitionSubstitutions field on the transform output even when it is not relevant
        if self.definition_substitutions:
            self.state_machine.DefinitionSubstitutions = self.definition_substitutions

        # Exactly one of 'Definition' (inline) or 'DefinitionUri' (S3 object) must be provided.
        if self.definition and self.definition_uri:
            raise InvalidResourceException(
                self.logical_id, "Specify either 'Definition' or 'DefinitionUri' property and not both."
            )
        if self.definition:
            # Deep-copy so intrinsics in the user-supplied definition are not mutated in place.
            processed_definition = deepcopy(self.definition)

            substitutions = self._replace_dynamic_values_with_substitutions(processed_definition)  # type: ignore[no-untyped-call]
            if len(substitutions) > 0:
                if self.state_machine.DefinitionSubstitutions:
                    self.state_machine.DefinitionSubstitutions.update(substitutions)
                else:
                    self.state_machine.DefinitionSubstitutions = substitutions
            self.state_machine.DefinitionString = self._build_definition_string(processed_definition)  # type: ignore[no-untyped-call]
        elif self.definition_uri:
            self.state_machine.DefinitionS3Location = self._construct_definition_uri()
        else:
            raise InvalidResourceException(
                self.logical_id, "Either 'Definition' or 'DefinitionUri' property must be specified."
            )

        # 'Role' and 'Policies' are mutually exclusive; when neither is given,
        # a default execution role is generated from an empty policy list.
        if self.role and self.policies:
            raise InvalidResourceException(self.logical_id, self.SFN_INVALID_PROPERTY_BOTH_ROLE_POLICY)
        if self.role:
            self.state_machine.RoleArn = self.role
        else:
            if not self.policies:
                self.policies = []
            execution_role = self._construct_role()
            self.state_machine.RoleArn = execution_role.get_runtime_attr("arn")
            resources.append(execution_role)

        self.state_machine.StateMachineName = self.name
        self.state_machine.StateMachineType = self.type
        self.state_machine.LoggingConfiguration = self.logging
        self.state_machine.TracingConfiguration = self.tracing
        self.state_machine.Tags = self._construct_tag_list()

        # Version/alias resources must be generated before event resources so that
        # self.state_machine_alias exists when used as an event target below.
        managed_traffic_shifting_resources = self._generate_managed_traffic_shifting_resources()
        resources.extend(managed_traffic_shifting_resources)

        event_resources = self._generate_event_resources()
        resources.extend(event_resources)

        return resources

    def _construct_definition_uri(self) -> Dict[str, Any]:
        """
        Constructs the State Machine's `DefinitionS3Location` property from the SAM
        State Machine's DefinitionUri property.

        :returns: a DefinitionUri dict, containing the S3 Bucket, Key, and Version of the State Machine definition.
        :rtype: dict
        :raises InvalidResourceException: if DefinitionUri is a dict without Bucket/Key,
            or a string that is not a valid s3:// URI.
        """
        if isinstance(self.definition_uri, dict):
            if not self.definition_uri.get("Bucket", None) or not self.definition_uri.get("Key", None):
                # DefinitionUri is a dictionary but does not contain Bucket or Key property
                raise InvalidResourceException(
                    self.logical_id, "'DefinitionUri' requires Bucket and Key properties to be specified."
                )
            s3_pointer = self.definition_uri
        else:
            # DefinitionUri is a string
            parsed_s3_pointer = parse_s3_uri(self.definition_uri)
            if parsed_s3_pointer is None:
                raise InvalidResourceException(
                    self.logical_id,
                    "'DefinitionUri' is not a valid S3 Uri of the form "
                    "'s3://bucket/key' with optional versionId query parameter.",
                )
            s3_pointer = parsed_s3_pointer

        definition_s3 = {"Bucket": s3_pointer["Bucket"], "Key": s3_pointer["Key"]}
        # Version is optional; only emit it when the URI carried one.
        if "Version" in s3_pointer:
            definition_s3["Version"] = s3_pointer["Version"]
        return definition_s3

    def _build_definition_string(self, definition_dict):  # type: ignore[no-untyped-def]
        """
        Builds a CloudFormation definition string from a definition dictionary. The definition string constructed is
        a Fn::Join intrinsic function to make it readable.

        :param definition_dict: State machine definition as a dictionary
        :returns: the state machine definition.
        :rtype: dict
        """
        # Indenting and then splitting the JSON-encoded string for readability of the state machine definition in the CloudFormation translated resource.
        # Separators are passed explicitly to maintain trailing whitespace consistency across Py2 and Py3
        definition_lines = json.dumps(definition_dict, sort_keys=True, indent=4, separators=(",", ": ")).split("\n")
        return fnJoin("\n", definition_lines)

    def _construct_role(self) -> IAMRole:
        """
        Constructs a State Machine execution role based on this SAM State Machine's Policies property.

        :returns: the generated IAM Role
        :rtype: model.iam.IAMRole
        """
        # Copy so appending the X-Ray policy does not mutate self.policies.
        policies = self.policies[:]
        if self.tracing and self.tracing.get("Enabled") is True:
            policies.append(get_xray_managed_policy_name())

        state_machine_policies = ResourcePolicies(
            {"Policies": policies},
            # No support for policy templates in the "core"
            policy_template_processor=None,
        )

        return construct_role_for_resource(
            resource_logical_id=self.logical_id,
            role_path=self.role_path,
            attributes=self.passthrough_resource_attributes,
            managed_policy_map=self.managed_policy_map,
            assume_role_policy_document=IAMRolePolicies.stepfunctions_assume_role_policy(),
            resource_policies=state_machine_policies,
            tags=self._construct_tag_list(),
            permissions_boundary=self.permissions_boundary,
            get_managed_policy_map=self.get_managed_policy_map,
        )

    def _construct_tag_list(self) -> List[Dict[str, Any]]:
        """
        Transforms the SAM defined Tags into the form CloudFormation is expecting.

        :returns: List of Tag Dictionaries (the SAM "createdBy" tag followed by user tags)
        :rtype: list
        """
        sam_tag = {self._SAM_KEY: self._SAM_VALUE}
        return get_tag_list(sam_tag) + get_tag_list(self.tags)

    def _construct_version(self) -> StepFunctionsStateMachineVersion:
        """Constructs a state machine version resource that will be auto-published when the revision id of the state machine changes.

        :return: Step Functions state machine version resource
        """
        # Unlike Lambda function versions, state machine versions do not need a hash suffix because
        # they are always replaced when their corresponding state machine is updated.
        # I.e. A SAM StateMachine resource will never have multiple version resources at the same time.
        logical_id = f"{self.logical_id}Version"

        attributes = self.passthrough_resource_attributes.copy()
        # Both UpdateReplacePolicy and DeletionPolicy are needed to protect previous version from deletion
        # to ensure gradual deployment works.
        if "DeletionPolicy" not in attributes:
            attributes["DeletionPolicy"] = "Retain"
        if "UpdateReplacePolicy" not in attributes:
            attributes["UpdateReplacePolicy"] = "Retain"

        state_machine_version = StepFunctionsStateMachineVersion(logical_id=logical_id, attributes=attributes)
        state_machine_version.StateMachineArn = self.state_machine.get_runtime_attr("arn")
        state_machine_version.StateMachineRevisionId = self.state_machine.get_runtime_attr("state_machine_revision_id")
        return state_machine_version

    def _construct_alias(self, version: StepFunctionsStateMachineVersion) -> StepFunctionsStateMachineAlias:
        """Constructs a state machine alias resource pointing to the given state machine version.

        :param version: the version resource the alias routes traffic to
        :return: Step Functions state machine alias resource
        """
        logical_id = f"{self.logical_id}Alias{self.auto_publish_alias}"
        attributes = self.passthrough_resource_attributes

        state_machine_alias = StepFunctionsStateMachineAlias(logical_id=logical_id, attributes=attributes)
        state_machine_alias.Name = self.auto_publish_alias

        state_machine_version_arn = version.get_runtime_attr("arn")
        # Default to ALL_AT_ONCE when no DeploymentPreference was provided; either way,
        # the preference is always pointed at the newly created version's ARN.
        deployment_preference = {}
        if self.deployment_preference:
            deployment_preference = self.deployment_preference
        else:
            deployment_preference["Type"] = "ALL_AT_ONCE"
        deployment_preference["StateMachineVersionArn"] = state_machine_version_arn
        state_machine_alias.DeploymentPreference = deployment_preference

        # Remember the alias so _generate_event_resources() can target it.
        self.state_machine_alias = state_machine_alias
        return state_machine_alias

    def _generate_managed_traffic_shifting_resources(
        self,
    ) -> List[Any]:
        """Generates and returns the version and alias resources associated with this state machine's managed traffic shifting.

        :returns: a list containing the state machine's version and alias resources
            (empty when neither AutoPublishAlias nor DeploymentPreference is set)
        :rtype: list
        :raises InvalidResourceException: if 'UseAliasAsEventTarget' or
            'DeploymentPreference' is set without 'AutoPublishAlias'
        """
        if not self.auto_publish_alias and self.use_alias_as_event_target:
            raise InvalidResourceException(
                self.logical_id, "'UseAliasAsEventTarget' requires 'AutoPublishAlias' property to be specified."
            )
        if not self.auto_publish_alias and not self.deployment_preference:
            return []
        if not self.auto_publish_alias and self.deployment_preference:
            raise InvalidResourceException(
                self.logical_id, "'DeploymentPreference' requires 'AutoPublishAlias' property to be specified."
            )

        state_machine_version = self._construct_version()
        return [state_machine_version, self._construct_alias(state_machine_version)]

    def _generate_event_resources(self) -> List[Dict[str, Any]]:
        """Generates and returns the resources associated with this state machine's event sources.

        :returns: a list containing the state machine's event resources
        :rtype: list
        :raises InvalidEventException: if an event's type cannot be resolved or its
            dict cannot be converted into an event source object
        """
        resources = []
        if self.events:
            for logical_id, event_dict in self.events.items():
                kwargs = {
                    "intrinsics_resolver": self.intrinsics_resolver,
                    "permissions_boundary": self.permissions_boundary,
                }
                try:
                    eventsource = self.event_resolver.resolve_resource_type(event_dict).from_dict(
                        self.state_machine.logical_id + logical_id, event_dict, logical_id
                    )
                    for name, resource in self.event_resources[logical_id].items():
                        kwargs[name] = resource
                except (TypeError, AttributeError) as e:
                    raise InvalidEventException(logical_id, str(e)) from e

                # Target the alias when requested (it exists by this point —
                # see the ordering in to_cloudformation), otherwise the state machine.
                target_resource = (
                    (self.state_machine_alias or self.state_machine)
                    if self.use_alias_as_event_target
                    else self.state_machine
                )
                resources += eventsource.to_cloudformation(resource=target_resource, **kwargs)

        return resources

    def _replace_dynamic_values_with_substitutions(self, _input):  # type: ignore[no-untyped-def]
        """
        Replaces the CloudFormation intrinsic functions and dynamic references within the input with substitutions.

        :param _input: Input dictionary in which the dynamic values need to be replaced with substitutions
            (mutated in place)
        :returns: Map of substitution name to the dynamic value it replaced
        :rtype: dict
        """
        substitution_map = {}
        for path in self._get_paths_to_intrinsics(_input):  # type: ignore[no-untyped-call]
            # Walk to the parent container of the dynamic value, then swap the
            # value for a ${...} placeholder and record the mapping.
            location = _input
            for step in path[:-1]:
                location = location[step]
            sub_name, sub_key = self._generate_substitution()
            substitution_map[sub_name] = location[path[-1]]
            location[path[-1]] = sub_key
        return substitution_map

    def _get_paths_to_intrinsics(self, _input, path=None):  # type: ignore[no-untyped-def]
        """
        Returns all paths to dynamic values within a dictionary

        :param _input: Input dictionary to find paths to dynamic values in
        :param path: Optional list to keep track of the path to the input dictionary
        :returns list: List of keys that defines the path to a dynamic value within the input dictionary
        """
        if path is None:
            path = []
        dynamic_value_paths = []  # type: ignore[var-annotated]
        if isinstance(_input, dict):
            iterator = _input.items()
        elif isinstance(_input, list):
            iterator = enumerate(_input)  # type: ignore[assignment]
        else:
            return dynamic_value_paths

        # Sort by key/index so substitution numbering is deterministic across runs.
        for key, value in sorted(iterator, key=lambda item: item[0]):  # type: ignore[no-any-return]
            if is_intrinsic(value) or is_dynamic_reference(value):
                dynamic_value_paths.append([*path, key])
            elif isinstance(value, (dict, list)):
                dynamic_value_paths.extend(self._get_paths_to_intrinsics(value, [*path, key]))  # type: ignore[no-untyped-call]

        return dynamic_value_paths

    def _generate_substitution(self) -> Tuple[str, str]:
        """
        Generates a name and key for a new substitution.

        :returns: Substitution name and key
        :rtype: string, string
        """
        substitution_name = self._SUBSTITUTION_NAME_TEMPLATE % self.substitution_counter
        substitution_key = self._SUBSTITUTION_KEY_TEMPLATE % self.substitution_counter
        self.substitution_counter += 1
        return substitution_name, substitution_key
```
|
Tyendinaga (Mohawk) Airport is a registered aerodrome that is open to the public and caters mainly to general aviation. The aerodrome is located in Tyendinaga Mohawk Territory, southwest of Tyendinaga, Ontario, Canada, north of the Bay of Quinte between Kingston and Belleville.
History
Originally known as Deseronto Airport, the field opened in 1917 as a training school for pilots during World War I. During World War II, it hosted the No. 1 Instrument Navigation School for the British Commonwealth Air Training Plan, providing advanced instrument-navigation training to air crews. During this time, the airport was also used as the primary relief landing field for the Central Flying School, based out of RCAF Station Trenton.
The aerodrome is currently the site of the First Nations Technical Institute and the First Peoples' Aviation Technology – Flight Training Program.
Historical aerodrome information
In approximately 1942 the aerodrome was listed as "RCAF Aerodrome - Mohawk, Ontario", with no variation or elevation listed. The field was listed as "all hard surfaced" and had three runways listed as follows:
References
External links
Airport home page
Page about this airport on COPA's Places to Fly airport directory
Registered aerodromes in Ontario
Airports of the British Commonwealth Air Training Plan
Military airbases in Ontario
Military history of Ontario
|
Yūka Sugai is a Japanese tarento, actress, radio personality and former idol. She is the former captain of the idol group Sakurazaka46, represented by Sony Music Records. She is also the former Special Ambassador for the Japan Equestrian Federation.
Career
Sakurazaka46
On August 21, 2015, Sugai along with 22 other members were announced for the newly created idol group, Toriizaka46, renamed shortly after into Keyakizaka46 and again into Sakurazaka46 in 2020. Her nicknames include and , the latter referring to her persona.
Sugai made her musical debut with Keyakizaka46's first single, Silent Majority. As of February 2019, she has participated in all eight of Keyakizaka46's singles. She is also part of the subgroup along with Risa Watanabe, Akane Moriya, Manaka Shida, and Rika Watanabe. As part of the subunit, Sugai made center appearances in three songs: "Aozora Chigau" in "Sekai ni wa Ai Shika Nai", "Wareta Sumaho" in "Fukyōwaon", and "Namiuchigiwa o Hashiranai ka?" in "Kaze ni Fukarete mo". Additionally, Sugai made two song appearances as part of Sakamichi AKB (坂道AKB), a group made of various AKB48 and Sakamichi members. She will also be included in Sakamichi AKB's next song.
During a handshake event on January 21, 2017, Sugai was named the captain of Keyakizaka46. Akane Moriya was named co-captain on the same day.
On April 11, 2018, Kodansha announced that they will be publishing a photo-book featuring Sugai that was photographed in Paris. She became the fourth member in Keyakizaka46 to have a photobook, behind Neru Nagahama, Yui Imaizumi, and Rika Watanabe. The photo-book, titled Fiancé (フィアンセ), was the bestselling photo-book in Japan for three weeks in a row.
On August 22, 2022, Sugai announced that she will be leaving Sakurazaka46 after the Tokyo Dome performances on November 8 and 9, the final leg of the national tour that will begin in September that year. Second generation member Rina Matsuda succeeded her as captain.
Equestrianism
Sugai started horseback riding in 5th grade. She has won numerous awards in various junior national competitions, particularly in dressage. She often discusses horse riding in her appearances with Sakurazaka46, which contributes to her established persona in the group.
On June 7, 2017, Sugai was appointed the Special Ambassador for the , in order to broaden the appeal of the sport. She left the position in the end of 2021.
Awards
The table below lists Sugai's top three finishes in equestrian competitions recognized by the Japan Equestrian Federation.
Post-Sakurazaka46
On November 24, 2022, Sugai opened a Twitter account.
Her first post-Sakurazaka46 activity was an acting role in the stage play Neo Bakumatsu Junjoden, which ran from January 28 to February 19, 2023.
On February 12, 2023, Sugai's official website was launched. On the same day, she was announced as a host for the horseback racing program Keiba Beat .
On March 30, 2023, Sugai made her debut as a radio personality, hosting her own radio show Yūka Sugai's #KyouMoOshiToGanbariki .
Personal life
Sugai was born on November 29, 1995, in Tokyo, Japan. As a child, she had 11 years of ballet experience.
As of March 2018, Sugai has graduated from university. She was a member and then manager of the equestrian club.
Discography
Keyakizaka46
Sakamichi AKB
References
External links
Official Website (since February 12, 2023)
(since January 11, 2022)
(since November 24, 2022)
1995 births
Living people
Japanese actresses
Japanese female equestrians
Japanese idols
Keyakizaka46 members
Musicians from Tokyo
Sakurazaka46 members
|
Spencer Lovejoy (born 14 May 1998 in Connecticut) is an American professional squash player. As of October 2021, he was ranked number 69 in the world. He has competed in multiple PSA professional tournaments and earned his first PSA title at the Mississauga Open in 2019. He played for the Yale Bulldogs men's squash team playing in the #1 seat all 4 years of his college career. He was a four-time First-Team All-American, winner of the Skillman Award for Sportsmanship in 2020, and named team captain his senior year.
References
1998 births
Living people
American male squash players
Yale Bulldogs men's squash players
|
```javascript
Explicit setting of `this` using `call` and `apply` methods
Difference between **.call** and **.apply** methods
IIFE pattern
Method chaining
Move cursor at the end of text input
```
|
```python
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for printing.py."""
import StringIO
import optparse
import sys
import time
import unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.common.system import logtesting
from webkitpy.layout_tests import port
from webkitpy.layout_tests.controllers import manager
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models import test_results
from webkitpy.layout_tests.views import printing
def get_options(args):
    """Parse *args* against the standard printing options.

    Returns the (options, leftover_args) pair produced by optparse.
    """
    parser = optparse.OptionParser(option_list=printing.print_options())
    return parser.parse_args(args)
class TestUtilityFunctions(unittest.TestCase):
    """Tests for the module-level helpers in printing.py."""

    def test_print_options(self):
        # Parsing an empty argument list should still yield an options object.
        options, _ = get_options([])
        self.assertIsNotNone(options)
class FakeRunResults(object):
    """Minimal stand-in for a run-results object, exposing only the fields
    the printer code reads (counts, per-shard results, and total run time)."""

    def __init__(self, total=1, expected=1, unexpected=0, fake_results=None):
        shards = list(fake_results) if fake_results else []
        self.total = total
        self.expected = expected
        self.expected_failures = 0
        self.unexpected = unexpected
        self.expected_skips = 0
        # Index each fake shard by name so tests can look results up directly.
        self.results_by_name = {shard.shard_name: shard for shard in shards}
        # Overall run time: sum of shard times plus one second of overhead.
        self.run_time = sum(shard.total_run_time for shard in shards) + 1
class FakeShard(object):
    """Minimal stand-in for a test shard: a name plus its total run time."""

    def __init__(self, shard_name, total_run_time):
        self.total_run_time = total_run_time
        self.shard_name = shard_name
class Testprinter(unittest.TestCase):
def assertEmpty(self, stream):
self.assertFalse(stream.getvalue())
def assertNotEmpty(self, stream):
self.assertTrue(stream.getvalue())
def assertWritten(self, stream, contents):
self.assertEqual(stream.buflist, contents)
def reset(self, stream):
stream.buflist = []
stream.buf = ''
def get_printer(self, args=None):
args = args or []
printing_options = printing.print_options()
option_parser = optparse.OptionParser(option_list=printing_options)
options, args = option_parser.parse_args(args)
host = MockHost()
self._port = host.port_factory.get('test', options)
nproc = 2
regular_output = StringIO.StringIO()
printer = printing.Printer(self._port, options, regular_output)
return printer, regular_output
def get_result(self, test_name, result_type=test_expectations.PASS, run_time=0):
failures = []
if result_type == test_expectations.TIMEOUT:
failures = [test_failures.FailureTimeout()]
elif result_type == test_expectations.CRASH:
failures = [test_failures.FailureCrash()]
return test_results.TestResult(test_name, failures=failures, test_run_time=run_time)
def test_configure_and_cleanup(self):
# This test verifies that calling cleanup repeatedly and deleting
# the object is safe.
printer, err = self.get_printer()
printer.cleanup()
printer.cleanup()
printer = None
def test_print_config(self):
printer, err = self.get_printer()
# FIXME: it's lame that i have to set these options directly.
printer._options.pixel_tests = True
printer._options.new_baseline = True
printer._options.time_out_ms = 6000
printer._options.slow_time_out_ms = 12000
printer.print_config('/tmp')
self.assertIn("Using port 'test-mac-leopard'", err.getvalue())
self.assertIn('Test configuration: <leopard, x86, release>', err.getvalue())
self.assertIn('View the test results at file:///tmp', err.getvalue())
self.assertIn('View the archived results dashboard at file:///tmp', err.getvalue())
self.assertIn('Baseline search path: test-mac-leopard -> test-mac-snowleopard -> generic', err.getvalue())
self.assertIn('Using Release build', err.getvalue())
self.assertIn('Pixel tests enabled', err.getvalue())
self.assertIn('Command line:', err.getvalue())
self.assertIn('Regular timeout: ', err.getvalue())
self.reset(err)
printer._options.quiet = True
printer.print_config('/tmp')
self.assertNotIn('Baseline search path: test-mac-leopard -> test-mac-snowleopard -> generic', err.getvalue())
def test_print_directory_timings(self):
printer, err = self.get_printer()
printer._options.debug_rwt_logging = True
run_results = FakeRunResults()
run_results.results_by_name = {
"slowShard": FakeShard("slowShard", 16),
"borderlineShard": FakeShard("borderlineShard", 15),
"fastShard": FakeShard("fastShard", 1),
}
printer._print_directory_timings(run_results)
self.assertWritten(err, ['Time to process slowest subdirectories:\n', ' slowShard took 16.0 seconds to run 1 tests.\n', '\n'])
printer, err = self.get_printer()
printer._options.debug_rwt_logging = True
run_results.results_by_name = {
"borderlineShard": FakeShard("borderlineShard", 15),
"fastShard": FakeShard("fastShard", 1),
}
printer._print_directory_timings(run_results)
self.assertWritten(err, [])
def test_print_one_line_summary(self):
def run_test(total, exp, unexp, shards, result):
printer, err = self.get_printer(['--timing'] if shards else None)
fake_results = FakeRunResults(total, exp, unexp, shards)
total_time = fake_results.run_time + 1
printer._print_one_line_summary(total_time, fake_results)
self.assertWritten(err, result)
# Without times:
run_test(1, 1, 0, [], ["The test ran as expected.\n", "\n"])
run_test(2, 1, 1, [], ["\n", "1 test ran as expected, 1 didn't:\n", "\n"])
run_test(3, 2, 1, [], ["\n", "2 tests ran as expected, 1 didn't:\n", "\n"])
run_test(3, 2, 0, [], ["\n", "2 tests ran as expected (1 didn't run).\n", "\n"])
# With times:
fake_shards = [FakeShard("foo", 1), FakeShard("bar", 2)]
run_test(1, 1, 0, fake_shards, ["The test ran as expected in 5.00s (2.00s in rwt, 1x).\n", "\n"])
run_test(2, 1, 1, fake_shards, ["\n", "1 test ran as expected, 1 didn't in 5.00s (2.00s in rwt, 1x):\n", "\n"])
run_test(3, 2, 1, fake_shards, ["\n", "2 tests ran as expected, 1 didn't in 5.00s (2.00s in rwt, 1x):\n", "\n"])
run_test(3, 2, 0, fake_shards, ["\n", "2 tests ran as expected (1 didn't run) in 5.00s (2.00s in rwt, 1x).\n", "\n"])
def test_test_status_line(self):
printer, _ = self.get_printer()
printer._meter.number_of_columns = lambda: 80
actual = printer._test_status_line('fast/dom/HTMLFormElement/associated-elements-after-index-assertion-fail1.html', ' passed')
self.assertEqual(80, len(actual))
self.assertEqual(actual, '[0/0] fast/dom/HTMLFormElement/associa...after-index-assertion-fail1.html passed')
printer._meter.number_of_columns = lambda: 89
actual = printer._test_status_line('fast/dom/HTMLFormElement/associated-elements-after-index-assertion-fail1.html', ' passed')
self.assertEqual(89, len(actual))
self.assertEqual(actual, '[0/0] fast/dom/HTMLFormElement/associated-...ents-after-index-assertion-fail1.html passed')
printer._meter.number_of_columns = lambda: sys.maxint
actual = printer._test_status_line('fast/dom/HTMLFormElement/associated-elements-after-index-assertion-fail1.html', ' passed')
self.assertEqual(90, len(actual))
self.assertEqual(actual, '[0/0] fast/dom/HTMLFormElement/associated-elements-after-index-assertion-fail1.html passed')
printer._meter.number_of_columns = lambda: 18
actual = printer._test_status_line('fast/dom/HTMLFormElement/associated-elements-after-index-assertion-fail1.html', ' passed')
self.assertEqual(18, len(actual))
self.assertEqual(actual, '[0/0] f...l passed')
printer._meter.number_of_columns = lambda: 10
actual = printer._test_status_line('fast/dom/HTMLFormElement/associated-elements-after-index-assertion-fail1.html', ' passed')
self.assertEqual(actual, '[0/0] associated-elements-after-index-assertion-fail1.html passed')
def test_details(self):
printer, err = self.get_printer(['--details'])
result = self.get_result('passes/image.html')
printer.print_started_test('passes/image.html')
printer.print_finished_test(result, expected=False, exp_str='', got_str='')
self.assertNotEmpty(err)
def test_print_found(self):
printer, err = self.get_printer()
printer.print_found(100, 10, 1, 1)
self.assertWritten(err, ["Found 100 tests; running 10, skipping 90.\n"])
self.reset(err)
printer.print_found(100, 10, 2, 3)
self.assertWritten(err, ["Found 100 tests; running 10 (6 times each: --repeat-each=2 --iterations=3), skipping 90.\n"])
def test_debug_rwt_logging_is_throttled(self):
    """With --debug-rwt-logging, per-test start lines are throttled."""
    printer, err = self.get_printer(['--debug-rwt-logging'])
    for name in ('passes/image.html', 'passes/text.html'):
        printer.print_started_test(name)
        printer.print_finished_test(self.get_result(name), expected=True, exp_str='', got_str='')
    # Only the first test's start should be printed.
    written = err.buflist
    self.assertEqual(1, len(written))
    self.assertTrue(written[0].endswith('passes/image.html\n'))
```
|
Global Goal: Unite for Our Future was a virtual event held on June 27, 2020. Created by Global Citizen and the European Commission, it consisted of a summit and a concert featuring different personalities aimed to highlight the disproportionate impact of the COVID-19 pandemic on marginalized communities.
The event was produced in collaboration with companies such as Live Nation Entertainment, The Lede Company, and Roc Nation, and also by individuals such as Michele Anthony of the Universal Music Group, Declan Kelly of Teneo, Scooter Braun (with his company SB Projects), Adam Leber (on behalf of Maverick), and Derrick Johnson (on behalf of the NAACP).
Summit
The summit, titled Global Goal: Unite for Our Future—The Summit, featured panel discussions and interviews with world leaders, corporations and philanthropists as they announced new commitments to help develop equitable distribution of COVID-19 tests, treatments and vaccines, as well as rebuild communities devastated by the pandemic. It was produced by Michael Dempsey and hosted by journalists Katie Couric, Mallika Kapur, Morgan Radford, Isha Sesay, and Keir Simmons.
Participants
Concert
The concert, titled Global Goal: Unite for Our Future—The Concert, was hosted by actor Dwayne Johnson. It was a worldwide music and entertainment special claiming to celebrate the commitments made due to the actions Global Citizens have taken.
Performers
Additional performers
Chloe x Halle
Christine and the Queens
J’Nai Bridges with Gustavo Dudamel, the Los Angeles Philharmonic and Youth Orchestra Los Angeles
For Love Choir
Appearances
Chris Rock
Hugh Jackman
Kerry Washington
Charlize Theron
Forest Whitaker
David Beckham
Salma Hayek
Billy Porter
Diane Kruger
Antoni Porowski
Ken Jeong
Naomi Campbell
Nikolaj Coster-Waldau
Olivia Colman
Broadcast
The event was broadcast in the United States on NBC, MSNBC, Bloomberg Television, iHeartRadio, Sirius XM, and InsightTV.
International broadcasters
Africa: 1Magic, BET International, Bloomberg Television, Canal+ Afrique, Comedy Central, InsightTV, MTV Africa, and MTV Base
: Telefe
Asia: Comedy Central, MTV, and Paramount Channel
: MTV, Network 10, and Nine Network
: MTV and Pickx Live
: Multishow, Comedy Central, MTV, MTV Hits, and Paramount Channel
: CBC, Ici Radio-Canada Télé, CTV, Citytv, Global, MTV, and InsightTV
: Chilevisión
: Paramount Network and VH1
Europe: Bloomberg Television, InsightTV, and MTV
: Paramount Network
: CStar and MTV
: Das Erste, InsightTV, MTV Germany, and One
: MTV
: Star Movies and Star World
: MTV and MTV Israel
: InsightTV
: MTV and MTV Music
: Fuji TV, MTV Japan, and Tokyo Broadcasting System Television
Latin America: Comedy Central, MTV, MTV Hits, Paramount Channel, VH1, VH1 HD, VH1 MegaHits, Sony Channel, and TNT
: Azteca Uno
: MTV
: MTV Polska
: MTV
: MTV
: InsightTV
: Mediacorp Channel 5
: SABC 3
: InsightTV
: Barça TV, MTV, and RTVE
: Paramount Network
: MTV
: InsightTV
: MTV Music
: MTV
Online streaming
The special was also available on several digital platforms worldwide such as Apple Music, Apple TV, Beats 1, Deezer, Facebook, Tidal, Twitch, Twitter, Yahoo!, and YouTube. It was also streamed live on the Facebook pages of Brut, LADbible, and Vogue Paris.
Production
The producers aimed for each musical performance to have a unique setting.
Impact
The special raised $6.9 billion from 41 countries towards a COVID-19 vaccine.
References
External links
COVID-19 pandemic benefit concerts
2020 concerts
2020 television specials
June 2020 events in the United States
Music television specials
Cultural responses to the COVID-19 pandemic
Simulcasts
Television shows about the COVID-19 pandemic
Benefit concerts
|
Hiram G. Andrews (September 12, 1876 – March 1968) was a Speaker of the Pennsylvania House of Representatives.
Andrews was first elected to the Pennsylvania House of Representatives in 1933 and served through 1936. He was reelected in 1939 and completed his two-year term in 1940. He was reelected for a third nonconsecutive tenure in 1945 and served in the Pennsylvania House through 1962, for a total of 24 years.
There is a college named after him called the Hiram G. Andrews Center in Johnstown.
Andrews was from Johnstown, Cambria County, Pennsylvania.
1876 births
1968 deaths
Members of the Pennsylvania House of Representatives
Speakers of the Pennsylvania House of Representatives
|
```shell
Test disk speed with `dd`
Changing the `/tmp` cleanup frequency
Fixing `locale` issues in Debian systems
Get hardware stack details with `lspci`
Cancel a system shutdown
```
|
```smalltalk
Package { #name : 'Fuel-Core-Tests' }
```
|
```smalltalk
using System.Linq.Expressions;
namespace Chloe.Query
{
    /// <summary>
    /// Maps lambda parameters to the object models they are bound to within
    /// the current query scope. Plain dictionary plus cloning helpers.
    /// </summary>
    public class ScopeParameterDictionary : Dictionary<ParameterExpression, IObjectModel>
    {
        public ScopeParameterDictionary()
        {
        }
        public ScopeParameterDictionary(int capacity) : base(capacity)
        {
        }
        /// <summary>
        /// Looks up the model bound to <paramref name="parameter"/>; throws
        /// when the parameter is not in scope.
        /// </summary>
        public IObjectModel Get(ParameterExpression parameter)
        {
            IObjectModel boundModel;
            bool found = this.TryGetValue(parameter, out boundModel);
            if (!found)
                throw new Exception("Can not find the ParameterExpression");
            return boundModel;
        }
        /// <summary>Shallow copy with the same capacity as this instance.</summary>
        public ScopeParameterDictionary Clone()
        {
            return this.Clone(this.Count);
        }
        /// <summary>Shallow copy with the requested initial capacity.</summary>
        public ScopeParameterDictionary Clone(int capacity)
        {
            ScopeParameterDictionary copy = new ScopeParameterDictionary(capacity);
            foreach (var entry in this)
                copy.Add(entry.Key, entry.Value);
            return copy;
        }
        /// <summary>
        /// Shallow copy with one binding added (or overridden) for
        /// <paramref name="key"/>.
        /// </summary>
        public ScopeParameterDictionary Clone(ParameterExpression key, IObjectModel valueOfkey)
        {
            ScopeParameterDictionary copy = this.Clone(this.Count + 1);
            copy[key] = valueOfkey;
            return copy;
        }
    }
}
```
|
Scissurella quadrata is a species of minute sea snail, a marine gastropod mollusk in the family Scissurellidae.
Description
Distribution
This marine species occurs off the Society Islands and Australia.
References
Geiger D.L. & Jansen P. 2004. Revision of the Australian species of Anatomidae (Mollusca: Gastropoda: Vetigastropoda). Zootaxa 415 : 1–35
Geiger D.L. (2012) Monograph of the little slit shells. Volume 1. Introduction, Scissurellidae. pp. 1–728. Volume 2. Anatomidae, Larocheidae, Depressizonidae, Sutilizonidae, Temnocinclidae. pp. 729–1291. Santa Barbara Museum of Natural History Monographs Number 7
Scissurellidae
Gastropods described in 2004
|
Eugène Mayor (7 June 1877, Neuchâtel – 14 September 1976, Neuchâtel) was a Swiss physician and mycologist.
He studied medicine in Geneva, and from 1906 worked as a physician in his hometown of Neuchâtel. In 1910, with parasitologist Otto Fuhrmann, he embarked on a scientific expedition to Colombia, about which, the book "Voyage d'exploration scientifique en Colombie" was written. From 1913 to 1942 he served as a physician at the cantonal hospice of Perreux-sur-Boudry (Neuchâtel).
During his career he received honorary degrees from the universities of Bern and Neuchâtel. From 1912 to 1914 he was president of the Société Neuchâteloise des Sciences Naturelles. He was the author of numerous articles on mycology (Notes mycologiques) in the publication, "Bulletin de la Société Neuchâteloise".
Selected works
Contribution a l’étude des champignons du Canton de Neuchâtel, 1911 – Contribution to the study of fungi of the canton of Neuchâtel.
Recherches expérimentales sur quelques urédinées hétéroiques, 1911 – Experimental research on some heteroecious rusts.
Contribution à l’étude de la flore cryptogamique du Canton du Valais, 1911 (with Denis Cruchet, Paul Cruchet) – Contribution to the study of cryptogamic flora of the canton of Valais.
Contribution à l’etude des urédinées de Colombie, 1913 – Contribution to the study of rusts of Colombia.
Les maladies de nos cultures maraichères, 1915.
Contribution à l’étude des micromycètes de Languedoc et de Provence, 1949 (with Georges Viennot-Bourgin) – Contribution to the study of microfungi of Languedoc and Provence.
Contribution à l’étude des micromycètes de Corse, 1950 (with Georges Viennot-Bourgin) – Contribution to the study of microfungi of Corsica.
Contribution à la connaissance de micromycètes de la Côte d’Ivoire, 1951 (with Georges Viennot-Bourgin) – Contribution to the knowledge of microfungi of the Ivory Coast.
References
1877 births
1976 deaths
People from Neuchâtel
Swiss mycologists
|
```objective-c
//
//
//
// path_to_url
//
// Unless required by applicable law or agreed to in writing, software
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
#import "Common/GREYErrorConstants.h"

// Keys for entries in an error-details dictionary describing a stepper
// interaction failure.
NSString *const kErrorDetailStepperKey = @"Stepper";
NSString *const kErrorDetailUserValueKey = @"UserValue";
NSString *const kErrorDetailStepMaxValueKey = @"Stepper Max Value";
NSString *const kErrorDetailStepMinValueKey = @"Stepper Min Value";
// Keys describing the element involved in a failed interaction and the
// constraints that were (not) satisfied.
NSString *const kErrorDetailElementDescriptionKey = @"Element Description";
NSString *const kErrorDetailConstraintRequirementKey = @"Failed Constraint(s)";
NSString *const kErrorDetailConstraintDetailsKey = @"All Constraint(s)";
// NSError domain for element-interaction errors.
NSString *const kGREYInteractionErrorDomain = @"com.google.earlgrey.ElementInteractionErrorDomain";
// Notification names posted before/after an action or assertion is performed.
NSString *const kGREYWillPerformActionNotification = @"GREYWillPerformActionNotification";
NSString *const kGREYDidPerformActionNotification = @"GREYDidPerformActionNotification";
NSString *const kGREYWillPerformAssertionNotification = @"GREYWillPerformAssertionNotification";
NSString *const kGREYDidPerformAssertionNotification = @"GREYDidPerformAssertionNotification";
// Keys into the userInfo dictionary of the notifications above.
NSString *const kGREYActionUserInfoKey = @"kGREYActionUserInfoKey";
NSString *const kGREYActionElementUserInfoKey = @"kGREYActionElementUserInfoKey";
NSString *const kGREYActionErrorUserInfoKey = @"kGREYActionErrorUserInfoKey";
NSString *const kGREYAssertionUserInfoKey = @"kGREYAssertionUserInfoKey";
NSString *const kGREYAssertionElementUserInfoKey = @"kGREYAssertionElementUserInfoKey";
NSString *const kGREYAssertionErrorUserInfoKey = @"kGREYAssertionErrorUserInfoKey";
// Additional error-details keys.
NSString *const kErrorDetailElementMatcherKey = @"Element Matcher";
// NSError domain for pinch-gesture errors.
NSString *const kGREYPinchErrorDomain = @"com.google.earlgrey.PinchErrorDomain";
NSString *const kErrorDetailElementKey = @"Element";
NSString *const kErrorDetailWindowKey = @"Window";
```
|
```java
/*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.roncoo.pay.trade.entity;
import com.roncoo.pay.common.core.entity.BaseEntity;
import com.roncoo.pay.common.core.enums.PublicEnum;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
/**
* <b>:</b>
* @author Peter
* <a href="path_to_url">(www.roncoo.com)</a>
*/
/**
 * Trade payment-order entity bean.
 * <p>
 * Plain data holder: all state is exposed through getters/setters. Every
 * String setter normalizes its input by trimming surrounding whitespace
 * (null stays null). Fix: {@code setTrxNo} previously stored the raw value,
 * the one inconsistent setter in the class; it now trims like the others.
 */
public class RpTradePaymentOrder extends BaseEntity implements Serializable {
    private static final long serialVersionUID = 1L;
    /** Product name shown on the order. */
    private String productName;
    /** Order number assigned by the merchant. */
    private String merchantOrderNo;
    /** Order amount. */
    private BigDecimal orderAmount;
    /** Source/channel the order came from. */
    private String orderFrom;
    /** Merchant name. */
    private String merchantName;
    /** Merchant number. */
    private String merchantNo;
    /** Time the order was placed. */
    private Date orderTime;
    /** Date the order was placed. */
    private Date orderDate;
    /** IP address the order was placed from. */
    private String orderIp;
    /** Referer URL of the ordering page. */
    private String orderRefererUrl;
    /** URL the payer is redirected back to after payment. */
    private String returnUrl;
    /** URL notified asynchronously with the payment result. */
    private String notifyUrl;
    /** Reason the order was cancelled, if any. */
    private String cancelReason;
    /** Validity period of the order (unit defined by callers — TODO confirm). */
    private Integer orderPeriod;
    /** Time at which the order expires. */
    private Date expireTime;
    /** Pay-way code. */
    private String payWayCode;
    /** Pay-way name. */
    private String payWayName;
    /** Free-form remark. */
    private String remark;
    /** Transaction type. */
    private String trxType;
    /** Transaction number. */
    private String trxNo;
    /** Pay-type code. */
    private String payTypeCode;
    /** Pay-type name. */
    private String payTypeName;
    /** Fund-into type. */
    private String fundIntoType;
    /** Whether the order has been refunded; defaults to {@code PublicEnum.NO}. */
    private String isRefund = PublicEnum.NO.name();
    /** Number of refunds performed. */
    private Short refundTimes;
    /** Total amount successfully refunded. */
    private BigDecimal successRefundAmount;
    /** Spare field 1. */
    private String field1;
    /** Spare field 2. */
    private String field2;
    /** Spare field 3. */
    private String field3;
    /** Spare field 4. */
    private String field4;
    /** Spare field 5. */
    private String field5;
    public String getProductName() {
        return productName;
    }
    public void setProductName(String productName) {
        this.productName = productName == null ? null : productName.trim();
    }
    public String getMerchantOrderNo() {
        return merchantOrderNo;
    }
    public void setMerchantOrderNo(String merchantOrderNo) {
        this.merchantOrderNo = merchantOrderNo == null ? null : merchantOrderNo.trim();
    }
    public BigDecimal getOrderAmount() {
        return orderAmount;
    }
    public void setOrderAmount(BigDecimal orderAmount) {
        this.orderAmount = orderAmount;
    }
    public String getOrderFrom() {
        return orderFrom;
    }
    public void setOrderFrom(String orderFrom) {
        this.orderFrom = orderFrom == null ? null : orderFrom.trim();
    }
    public String getMerchantName() {
        return merchantName;
    }
    public void setMerchantName(String merchantName) {
        this.merchantName = merchantName == null ? null : merchantName.trim();
    }
    public String getMerchantNo() {
        return merchantNo;
    }
    public void setMerchantNo(String merchantNo) {
        this.merchantNo = merchantNo == null ? null : merchantNo.trim();
    }
    public Date getOrderTime() {
        return orderTime;
    }
    public void setOrderTime(Date orderTime) {
        this.orderTime = orderTime;
    }
    public Date getOrderDate() {
        return orderDate;
    }
    public void setOrderDate(Date orderDate) {
        this.orderDate = orderDate;
    }
    public String getOrderIp() {
        return orderIp;
    }
    public void setOrderIp(String orderIp) {
        this.orderIp = orderIp == null ? null : orderIp.trim();
    }
    public String getOrderRefererUrl() {
        return orderRefererUrl;
    }
    public void setOrderRefererUrl(String orderRefererUrl) {
        this.orderRefererUrl = orderRefererUrl == null ? null : orderRefererUrl.trim();
    }
    public String getReturnUrl() {
        return returnUrl;
    }
    public void setReturnUrl(String returnUrl) {
        this.returnUrl = returnUrl == null ? null : returnUrl.trim();
    }
    public String getNotifyUrl() {
        return notifyUrl;
    }
    public void setNotifyUrl(String notifyUrl) {
        this.notifyUrl = notifyUrl == null ? null : notifyUrl.trim();
    }
    public String getCancelReason() {
        return cancelReason;
    }
    public void setCancelReason(String cancelReason) {
        this.cancelReason = cancelReason == null ? null : cancelReason.trim();
    }
    public Integer getOrderPeriod() {
        return orderPeriod;
    }
    public void setOrderPeriod(Integer orderPeriod) {
        this.orderPeriod = orderPeriod;
    }
    public Date getExpireTime() {
        return expireTime;
    }
    public void setExpireTime(Date expireTime) {
        this.expireTime = expireTime;
    }
    public String getPayWayCode() {
        return payWayCode;
    }
    public void setPayWayCode(String payWayCode) {
        this.payWayCode = payWayCode == null ? null : payWayCode.trim();
    }
    public String getPayWayName() {
        return payWayName;
    }
    public void setPayWayName(String payWayName) {
        this.payWayName = payWayName == null ? null : payWayName.trim();
    }
    public String getRemark() {
        return remark;
    }
    public void setRemark(String remark) {
        this.remark = remark == null ? null : remark.trim();
    }
    public String getTrxType() {
        return trxType;
    }
    public void setTrxType(String trxType) {
        this.trxType = trxType == null ? null : trxType.trim();
    }
    public String getPayTypeCode() {
        return payTypeCode;
    }
    public void setPayTypeCode(String payTypeCode) {
        this.payTypeCode = payTypeCode == null ? null : payTypeCode.trim();
    }
    public String getPayTypeName() {
        return payTypeName;
    }
    public void setPayTypeName(String payTypeName) {
        this.payTypeName = payTypeName == null ? null : payTypeName.trim();
    }
    public String getFundIntoType() {
        return fundIntoType;
    }
    public void setFundIntoType(String fundIntoType) {
        this.fundIntoType = fundIntoType == null ? null : fundIntoType.trim();
    }
    public String getIsRefund() {
        return isRefund;
    }
    public void setIsRefund(String isRefund) {
        this.isRefund = isRefund == null ? null : isRefund.trim();
    }
    public Short getRefundTimes() {
        return refundTimes;
    }
    public void setRefundTimes(Short refundTimes) {
        this.refundTimes = refundTimes;
    }
    public BigDecimal getSuccessRefundAmount() {
        return successRefundAmount;
    }
    public void setSuccessRefundAmount(BigDecimal successRefundAmount) {
        this.successRefundAmount = successRefundAmount;
    }
    public String getTrxNo() {
        return trxNo;
    }
    public void setTrxNo(String trxNo) {
        // Trim like every other String setter in this class (this was the one
        // setter that stored the raw, untrimmed value).
        this.trxNo = trxNo == null ? null : trxNo.trim();
    }
    public String getField1() {
        return field1;
    }
    public void setField1(String field1) {
        this.field1 = field1 == null ? null : field1.trim();
    }
    public String getField2() {
        return field2;
    }
    public void setField2(String field2) {
        this.field2 = field2 == null ? null : field2.trim();
    }
    public String getField3() {
        return field3;
    }
    public void setField3(String field3) {
        this.field3 = field3 == null ? null : field3.trim();
    }
    public String getField4() {
        return field4;
    }
    public void setField4(String field4) {
        this.field4 = field4 == null ? null : field4.trim();
    }
    public String getField5() {
        return field5;
    }
    public void setField5(String field5) {
        this.field5 = field5 == null ? null : field5.trim();
    }
    /** Debug representation listing every field, including inherited state. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" [");
        sb.append("Hash = ").append(hashCode());
        sb.append(", id=").append(super.getId());
        sb.append(", version=").append(super.getVersion());
        sb.append(", createTime=").append(super.getCreateTime());
        sb.append(", editor=").append(super.getEditor());
        sb.append(", creater=").append(super.getCreater());
        sb.append(", editTime=").append(super.getEditTime());
        sb.append(", status=").append(super.getStatus());
        sb.append(", productName=").append(productName);
        sb.append(", merchantOrderNo=").append(merchantOrderNo);
        sb.append(", orderAmount=").append(orderAmount);
        sb.append(", orderFrom=").append(orderFrom);
        sb.append(", merchantName=").append(merchantName);
        sb.append(", merchantNo=").append(merchantNo);
        sb.append(", orderTime=").append(orderTime);
        sb.append(", orderDate=").append(orderDate);
        sb.append(", orderIp=").append(orderIp);
        sb.append(", orderRefererUrl=").append(orderRefererUrl);
        sb.append(", returnUrl=").append(returnUrl);
        sb.append(", notifyUrl=").append(notifyUrl);
        sb.append(", cancelReason=").append(cancelReason);
        sb.append(", orderPeriod=").append(orderPeriod);
        sb.append(", expireTime=").append(expireTime);
        sb.append(", payWayCode=").append(payWayCode);
        sb.append(", payWayName=").append(payWayName);
        sb.append(", remark=").append(remark);
        sb.append(", trxType=").append(trxType);
        sb.append(", payTypeCode=").append(payTypeCode);
        sb.append(", payTypeName=").append(payTypeName);
        sb.append(", fundIntoType=").append(fundIntoType);
        sb.append(", isRefund=").append(isRefund);
        sb.append(", refundTimes=").append(refundTimes);
        sb.append(", successRefundAmount=").append(successRefundAmount);
        sb.append(", trxNo=").append(trxNo);
        sb.append(", field1=").append(field1);
        sb.append(", field2=").append(field2);
        sb.append(", field3=").append(field3);
        sb.append(", field4=").append(field4);
        sb.append(", field5=").append(field5);
        sb.append(", serialVersionUID=").append(serialVersionUID);
        sb.append("]");
        return sb.toString();
    }
}
```
|
```php
<?php
declare(strict_types=1);

// Test fixtures: each entry is [expected result, input value].
// Presumably these drive the spreadsheet ERROR.TYPE() function — TODO
// confirm against the test that loads this file: non-error inputs (null,
// numbers, strings, booleans) yield '#N/A', and each Excel error string
// maps to its numeric error-type code.
return [
    [
        '#N/A',
        null,
    ],
    [
        '#N/A',
        -1,
    ],
    [
        '#N/A',
        1.25,
    ],
    [
        '#N/A',
        '',
    ],
    [
        '#N/A',
        '2.5',
    ],
    [
        '#N/A',
        true,
    ],
    [
        1,
        '#NULL!',
    ],
    [
        2,
        '#DIV/0!',
    ],
    [
        3,
        '#VALUE!',
    ],
    [
        4,
        '#REF!',
    ],
    [
        5,
        '#NAME?',
    ],
    [
        6,
        '#NUM!',
    ],
    [
        7,
        '#N/A',
    ],
    [
        9,
        '#SPILL!',
    ],
    [
        14,
        '#CALC!',
    ],
];
```
|
Art Bowman was a professional basketball player. He played in the National Basketball League for the Hammond Ciesar All-Americans in 1940–41 and averaged 1.1 points per game.
References
Guards (basketball)
Hammond Ciesar All-Americans players
|
The Gog Group is a stratigraphic unit in the Western Canada Sedimentary Basin. It is present in the western main ranges of the Canadian Rockies in Alberta and British Columbia, and in the Cariboo Mountains and in the central Purcell Mountains in southwestern British Columbia. It was named by C.F. Deiss in 1940 for a type locality near Mount Assiniboine.
Lithology and environment of deposition
The Gog Group consists primarily of thick deposits of cross-bedded quartzose sandstone and quartzite, with minor quartzitic conglomerate and sub-arkosic sandstone. It also includes mudstone, siltstone, limestone and dolomite formations. The Gog sediments are thought to have been deposited in shallow marine environments on the subsiding margin of the North American craton (Laurentia).
Stratigraphy
Subdivisions
The Gog Group is subdivided into the following formations:
Jasper area (north)
Kicking Horse Pass area (south)
Paleontology
Trace fossils such as Skolithos, Cruziana, Diplocraterion, Chondrites, Planolites, Rusophycus and others are abundant in the Gog Group sediments, and Early Cambrian trilobites of the genus Olenellus are found in the Peyto Formation limestones at the top of the Group. Small archaeocyathid bioherms have been reported from the base of the Mahato Formation, and archaeocyathids, salterellids, primitive brachiopods and echinoderms have been reported from the Mural Formation.
See also
List of fossiliferous stratigraphic units in Alberta
Big Rock (glacial erratic)
Foothills Erratics Train
References
Western Canadian Sedimentary Basin
Cambrian Alberta
Geologic groups of Alberta
Cambrian British Columbia
Geologic groups of British Columbia
Sandstone formations of Canada
Quartzite formations
|
Modest Fok was an R&B band that was created in Atlanta, Georgia, in 1980 by Debra Killings, James Killings Jr, and William "Vybe Chyle" Burke. The band, previously known as Princess & Starbreeze, was signed to Eastwest Records in the 1990s. Their debut album Love or the Single Life was released in 1992 and scored two minor R&B hits on the Billboard Hot R&B/Hip-Hop Singles & Tracks chart.
The band made their debut on the Billboard charts on July 11, 1992, when "Promise Me" entered the R&B chart at number 99.
Discography
Albums
Singles
Band members
Current members
Debra Killings – lead vocals, bass guitar, lead guitar, Keyboards (1980–present)
James Killings Jr – musical director, backing vocals, lead vocals, bass guitar, lead guitar, Keyboards (1980–present)
Wesley B Allen – keyboards, lead vocals, backing vocals (1982–present)
Kenneth Wright – keyboards, backing vocals (1985–present)
Donald Lee Simpson – lead vocals (1984–present)
Carol E Killings - Artist Management
Carol "Dee Dee" Killings - Hair Stylist
Former members
William "Vybe Chyle" Burke – drums, programming, production manager, talent agent (1980–1990)
Alvin J Speights - audio engineer (1984–1990)
Monyea Z Crawford - lighting director, crew chief (1984–1990)
Geno Jordan - Road Manager
Larry Wimby - Lead vocals, backing vocals, trombone, choreographer, stylist
Thomas "Butch" Harris – lead vocals (1980–1984)
Marty Heyward – sax, keyboards, musical director (1980–1983)
Jerome Dukes - trombone, backing vocals
Rory Core - trumpet, trombone, backing vocals
Vernon Maddox - trumpet, trombone, backing vocals
Derrick Cleveland - trumpet, backing vocals
Max Spalding - keyboards, guitar, lead vocals, backing vocals
External links
[] Album and singles info, chart info and music video links.
References
American contemporary R&B musical groups
|
```c++
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/events/gesture_event_details.h"
namespace ui {
// Default constructor: no gesture type and no touch points.
GestureEventDetails::GestureEventDetails()
    : type_(ET_UNKNOWN)
    , touch_points_(0)
{
}
// Constructs details for a payload-less gesture event; the type must lie in
// the gesture-event range.
GestureEventDetails::GestureEventDetails(ui::EventType type)
    : type_(type)
    , touch_points_(1)
{
    DCHECK_GE(type, ET_GESTURE_TYPE_START);
    DCHECK_LE(type, ET_GESTURE_TYPE_END);
}
// Constructs details for a gesture that carries a two-float payload. Which
// union member receives (delta_x, delta_y) depends on the gesture type;
// any other type is a programming error.
GestureEventDetails::GestureEventDetails(ui::EventType type,
                                         float delta_x,
                                         float delta_y)
    : type_(type)
    , touch_points_(1)
{
    DCHECK_GE(type, ET_GESTURE_TYPE_START);
    DCHECK_LE(type, ET_GESTURE_TYPE_END);
    switch (type_) {
    case ui::ET_GESTURE_SCROLL_BEGIN:
        // Initial scroll offset hint for the upcoming scroll sequence.
        data_.scroll_begin.x_hint = delta_x;
        data_.scroll_begin.y_hint = delta_y;
        break;
    case ui::ET_GESTURE_SCROLL_UPDATE:
        // Incremental scroll delta.
        data_.scroll_update.x = delta_x;
        data_.scroll_update.y = delta_y;
        break;
    case ui::ET_SCROLL_FLING_START:
        // Fling velocity components.
        data_.fling_velocity.x = delta_x;
        data_.fling_velocity.y = delta_y;
        break;
    case ui::ET_GESTURE_TWO_FINGER_TAP:
        // Size of the rectangle enclosing the first finger.
        data_.first_finger_enclosing_rectangle.width = delta_x;
        data_.first_finger_enclosing_rectangle.height = delta_y;
        break;
    case ui::ET_GESTURE_SWIPE:
        // Only the sign of each delta matters: it selects the swipe direction.
        data_.swipe.left = delta_x < 0;
        data_.swipe.right = delta_x > 0;
        data_.swipe.up = delta_y < 0;
        data_.swipe.down = delta_y > 0;
        break;
    default:
        NOTREACHED() << "Invalid event type for constructor: " << type;
    }
}
// Conversion constructor: copies the payload of |other| while retagging it
// with |type|. For payload-carrying types the source type must match, with
// one sanctioned exception (PINCH_BEGIN -> SCROLL_BEGIN).
GestureEventDetails::GestureEventDetails(ui::EventType type,
                                         const GestureEventDetails& other)
    : type_(type)
    , data_(other.data_)
    , touch_points_(other.touch_points_)
    , bounding_box_(other.bounding_box_)
{
    DCHECK_GE(type, ET_GESTURE_TYPE_START);
    DCHECK_LE(type, ET_GESTURE_TYPE_END);
    switch (type) {
    case ui::ET_GESTURE_SCROLL_BEGIN:
        // Synthetic creation of SCROLL_BEGIN from PINCH_BEGIN is explicitly
        // allowed as an exception.
        if (other.type() == ui::ET_GESTURE_PINCH_BEGIN)
            break;
        // Deliberate fall-through: any other source type must match exactly.
    case ui::ET_GESTURE_SCROLL_UPDATE:
    case ui::ET_SCROLL_FLING_START:
    case ui::ET_GESTURE_SWIPE:
    case ui::ET_GESTURE_PINCH_UPDATE:
        DCHECK_EQ(type, other.type()) << " - Invalid gesture conversion from "
                                      << other.type() << " to " << type;
        break;
    default:
        break;
    }
}
// Zero-fills the whole union so every variant starts from a known state.
// NOTE(review): safe only while Details stays trivially copyable — confirm.
GestureEventDetails::Details::Details()
{
    memset(this, 0, sizeof(Details));
}
} // namespace ui
```
|
```julia
#!/usr/bin/env julia
#
# @license Apache-2.0
#
#
#
# path_to_url
#
# Unless required by applicable law or agreed to in writing, software
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import JSON
"""
    gen( re, im, name )

Compute the reciprocal `1/z` of each complex number `z = re + im*i` and write
the input components together with the results to `name` as JSON.

# Arguments

* `re`: real components
* `im`: imaginary components
* `name::AbstractString`: output filename

# Examples

``` julia
julia> re = rand( 1000 );
julia> im = rand( 1000 );
julia> gen( re, im, \"data.json\" );
```
"""
function gen( re, im, name )
    # Compute 1/z for every (re, im) pair via broadcasting:
    z = 1.0 ./ Complex{Float64}.( re, im );

    # Assemble the fixture payload:
    data = Dict([
        ("re", re),
        ("im", im),
        ("qre", real.( z )),
        ("qim", imag.( z )),
    ]);

    # Resolve the output path relative to this script's directory:
    filepath = joinpath( dir, name );

    # Serialize as JSON followed by a trailing newline:
    open( filepath, "w" ) do outfile
        write( outfile, JSON.json(data) );
        write( outfile, "\n" );
    end
end
# Resolve the directory containing this script; `gen` joins output
# filenames onto this path via the global `dir`.
file = @__FILE__;
dir = dirname( file );

# Generator for "ordinary" components uniformly drawn from [-10, 10):
small() = ( rand( 500 ) .* 20.0 ) .- 10.0;

# Fixture cases: output filename plus zero-argument generators for the
# real and imaginary component vectors (500 samples each). Each generator
# pair is invoked in order (real first), matching the original call order.
cases = [
    ( "large_positive_real_components.json", () -> rand( 500 ) .* 1.0e300, small ),
    ( "large_negative_real_components.json", () -> -rand( 500 ) .* 1.0e300, small ),
    ( "large_positive_imaginary_components.json", small, () -> rand( 500 ) .* 1.0e300 ),
    ( "large_negative_imaginary_components.json", small, () -> -rand( 500 ) .* 1.0e300 ),
    ( "tiny_positive_real_components.json", () -> rand( 500 ) .* 1.0e-324, small ),
    ( "tiny_negative_real_components.json", () -> -rand( 500 ) .* 1.0e-324, small ),
    ( "tiny_positive_imaginary_components.json", small, () -> rand( 500 ) .* 1.0e-324 ),
    ( "tiny_negative_imaginary_components.json", small, () -> -rand( 500 ) .* 1.0e-324 ),
    ( "data.json", () -> ( rand( 500 ) .* 100.0 ) .- 50.0, () -> ( rand( 500 ) .* 100.0 ) .- 50.0 ),
];

# Generate every fixture file:
for ( name, fre, fim ) in cases
    gen( fre(), fim(), name );
end
```
|
In mathematics, and more specifically in differential geometry, a Hermitian manifold is the complex analogue of a Riemannian manifold. More precisely, a Hermitian manifold is a complex manifold with a smoothly varying Hermitian inner product on each (holomorphic) tangent space. One can also define a Hermitian manifold as a real manifold with a Riemannian metric that preserves a complex structure.
A complex structure is essentially an almost complex structure with an integrability condition, and this condition yields a unitary structure (U(n) structure) on the manifold. By dropping this condition, we get an almost Hermitian manifold.
On any almost Hermitian manifold, we can introduce a fundamental 2-form (or cosymplectic structure) that depends only on the chosen metric and the almost complex structure. This form is always non-degenerate. With the extra integrability condition that it is closed (i.e., it is a symplectic form), we get an almost Kähler structure. If both the almost complex structure and the fundamental form are integrable, then we have a Kähler structure.
Formal definition
A Hermitian metric on a complex vector bundle E over a smooth manifold M is a smoothly varying positive-definite Hermitian form on each fiber. Such a metric can be viewed as a smooth global section h of the vector bundle such that for every point p in M,
$h_p(\zeta, \bar{\eta}) = \overline{h_p(\eta, \bar{\zeta})}$ for all $\zeta$, $\eta$ in the fiber $E_p$, and
$h_p(\zeta, \bar{\zeta}) > 0$ for all nonzero $\zeta$ in $E_p$.
A Hermitian manifold is a complex manifold with a Hermitian metric on its holomorphic tangent bundle. Likewise, an almost Hermitian manifold is an almost complex manifold with a Hermitian metric on its holomorphic tangent bundle.
On a Hermitian manifold the metric can be written in local holomorphic coordinates (zα) as
where are the components of a positive-definite Hermitian matrix.
Riemannian metric and associated form
A Hermitian metric h on an (almost) complex manifold M defines a Riemannian metric g on the underlying smooth manifold. The metric g is defined to be the real part of h:
The form g is a symmetric bilinear form on TMC, the complexified tangent bundle. Since g is equal to its conjugate it is the complexification of a real form on TM. The symmetry and positive-definiteness of g on TM follow from the corresponding properties of h. In local holomorphic coordinates the metric g can be written
One can also associate to h a complex differential form ω of degree (1,1). The form ω is defined as minus the imaginary part of h:
Again since ω is equal to its conjugate it is the complexification of a real form on TM. The form ω is called variously the associated (1,1) form, the fundamental form, or the Hermitian form. In local holomorphic coordinates ω can be written
It is clear from the coordinate representations that any one of the three forms , , and uniquely determine the other two. The Riemannian metric and associated (1,1) form are related by the almost complex structure as follows
for all complex tangent vectors and . The Hermitian metric can be recovered from and via the identity
All three forms h, g, and ω preserve the almost complex structure . That is,
for all complex tangent vectors and .
A Hermitian structure on an (almost) complex manifold can therefore be specified by either
a Hermitian metric as above,
a Riemannian metric that preserves the almost complex structure , or
a nondegenerate 2-form which preserves and is positive-definite in the sense that for all nonzero real tangent vectors .
Note that many authors call itself the Hermitian metric.
Properties
Every (almost) complex manifold admits a Hermitian metric. This follows directly from the analogous statement for Riemannian metric. Given an arbitrary Riemannian metric g on an almost complex manifold M one can construct a new metric g′ compatible with the almost complex structure J in an obvious manner:
Choosing a Hermitian metric on an almost complex manifold M is equivalent to a choice of U(n)-structure on M; that is, a reduction of the structure group of the frame bundle of M from GL(n, C) to the unitary group U(n). A unitary frame on an almost Hermitian manifold is complex linear frame which is orthonormal with respect to the Hermitian metric. The unitary frame bundle of M is the principal U(n)-bundle of all unitary frames.
Every almost Hermitian manifold M has a canonical volume form which is just the Riemannian volume form determined by g. This form is given in terms of the associated (1,1)-form by
where is the wedge product of with itself times. The volume form is therefore a real (n,n)-form on M. In local holomorphic coordinates the volume form is given by
One can also consider a hermitian metric on a holomorphic vector bundle.
Kähler manifolds
The most important class of Hermitian manifolds are Kähler manifolds. These are Hermitian manifolds for which the Hermitian form is closed:
In this case the form ω is called a Kähler form. A Kähler form is a symplectic form, and so Kähler manifolds are naturally symplectic manifolds.
An almost Hermitian manifold whose associated (1,1)-form is closed is naturally called an almost Kähler manifold. Any symplectic manifold admits a compatible almost complex structure making it into an almost Kähler manifold.
Integrability
A Kähler manifold is an almost Hermitian manifold satisfying an integrability condition. This can be stated in several equivalent ways.
Let be an almost Hermitian manifold of real dimension and let be the Levi-Civita connection of . The following are equivalent conditions for to be Kähler:
$\omega$ is closed and $J$ is integrable,
$\nabla J = 0$,
$\nabla \omega = 0$,
the holonomy group of $\nabla$ is contained in the unitary group $U(n)$ associated to $J$,
The equivalence of these conditions corresponds to the "2 out of 3" property of the unitary group.
In particular, if is a Hermitian manifold, the condition dω = 0 is equivalent to the apparently much stronger conditions . The richness of Kähler theory is due in part to these properties.
References
Complex manifolds
Differential geometry
Riemannian geometry
Riemannian manifolds
Structures on manifolds
|
Marie-Louise Coleiro Preca, (born 7 December 1958) is a Maltese politician who served as President of Malta from 2014 to 2019. She has been president of Eurochild since 2019.
Previously, as a member of the Labour Party, Coleiro Preca was a Member of Parliament (MP) in the House of Representatives of Malta from 1998 to 2014. She served as the Minister for the Family and Social Solidarity from 2013 to 2014 under Prime Minister Joseph Muscat.
Early life and career
Born in Qormi, Coleiro Preca studied at the University of Malta where she graduated with a BA in Legal and Humanistic Studies (International Studies) and a notary public diploma.
Within the Labour Party, Coleiro Preca served within its executive. She was a member of the National Executive, the Assistant General Secretary, and as the General-Secretary. She was the only woman to have served in such a senior post of a Maltese political party. In addition to these posts, Coleiro Preca was also a member of the National Bureau of Socialist Youths (now the Labour Youth Forum), President of the Women's Section of the Party (1996-2001), founding member of the Ġużè Ellul Mercer Foundation and publisher of the Party's weekly newspaper Il-Ħelsien (now defunct).
She served as MP in the Maltese Parliament from 1998 to 2014. In the 2008 general election she was the first elected MP. As an Opposition MP Coleiro Preca served as Shadow Minister for Social Policy and as member of the Parliamentary Permanent Committee for Social Affairs beginning in 1998.
After Alfred Sant resigned as Leader of the Labour Party in 2008, Coleiro Preca unsuccessfully contested the Leadership election.
She served on the Maltese delegation to the Parliamentary Assembly of the Council of Europe from 2008 to 2013.
Following the end of her tenure as President of Malta in April 2019, Coleiro Preca, under the newly set up Office of the President Emeritus Coleiro Preca, will continue to serve as Chair for the Malta Foundation for the Wellbeing of Society, The Emanuele Cancer Research Foundation Malta, and of the Malta Trust Foundation. Moreover, Coleiro Preca was appointed as President of Eurochild, during Eurochild's General Assembly in Brussels.
Marie-Louise Coleiro Preca is Goodwill Ambassador for the United Nations Industrial Development Organisation (UNIDO), Champion for UNIDO's Third Industrial Development Decade for Africa (IDDAIII) and Special Ambassador for the United Nations World Tourism Organisation (UNWTO).
Moreover, Marie-Louise Coleiro Preca is a member of the Advisory Board of Women Political Leaders Global Forum, and leads its #Girl2Leader Campaign. She is also Chairperson of the Senior Advisory Board of the Blockchain Charity Foundation (BCF).
Presidency
On 1 March 2014, Coleiro Preca accepted the nomination for president. Succeeding George Abela, she was sworn in as President on 4 April 2014. Coleiro Preca is the youngest person to assume the office of President, sworn in at the age of 55, and is the second woman to hold the post after Agatha Barbara.
The President's Foundation for the Wellbeing of Society
The President's Foundation for the Wellbeing of Society was established by Coleiro Preca on 25 June 2014. The Foundation is a non-governmental organization that focuses on community-building.
The Foundation operates through a consultation process and conducts scientific research through five Research Entities:
National Institute for Childhood
National Centre for Family Research
National Observatory for Living with Dignity
National Centre for Freedom from Addictions
National Hub for Ethnobotanical Research
The Malta Trust Foundation
Coleiro Preca founded the Malta Trust Foundation on 14 May 2015. The aim of this Foundation is to encourage vulnerable young people, experiencing difficulties in life, such as unemployment, being at risk of poverty, or social exclusion, to improve their lives through education and training.
The President's Secret Garden
The President's Secret Garden, one of the gardens in San Anton Palace, was opened to the public by Coleiro Preca in 2015. Coleiro Preca opened the garden in response to calls for more open play space for children.
Arraiolos Group
In 2015, Coleiro Preca was the first Maltese President to join the Arraiolos Group of non-executive Presidents of the European Union. The Presidents meet annually to discuss current state of affairs and the future development of the European Union. Coleiro Preca hosted the Arraiolos 2017 in Malta.
Empower
On the Day of the Girl 2017, Coleiro Preca launched a platform, called Empower, "to encourage more women to participate in positions of influence and leadership, while also creating closer synergies among nationally-active groups for women". Empower is an umbrella organization of seventeen Maltese women's organizations.
The Emanuele Cancer Research Foundation Malta
The Emanuele Cancer Research Foundation Malta (ECRFM) was founded by The President's Trust, the Fondazione Terzo Pilastro, Italia e Mediterraneo, and the University of Malta. Coleiro Preca stated that "this new and most important venture will provide the much-needed research, development, and education about cancer, for the benefit of individuals, families, communities, and societies, across the Maltese Islands and the Mediterranean Region".
The foundation is located at the Biomedical Sciences Building at the University of Malta, supported by the Fondazione Terzo Pilastro, Italia e Mediterraneo.
Honours and awards
Honours
National honours
: Former Grand Master Grand Cross with Collar of the Order of Merit
Foreign honours
: Grand Cross with Collar of the Order of Merit (20 January 2016)
: Grand Cross of the Order of the Balkan Mountains (2 November 2016)
: Member with Collar of the Order of Cyril and Methodius (5 February 2018)
: Grand Cross Special Class of the Order of Merit of the Federal Republic of Germany, Special Class (29 April 2015)
: Grand Cross with Collar of the Order of Merit of the Italian Republic (7 September 2017)
: Knight Grand Cross with Collar of the Order of Merit (21 April 2015)
: Grand Cross with Collar of the Order of Infante Henry (15 May 2018)
Russian Imperial Family: Dame of the Imperial Order of Saint Anastasia, 1st Class (14 June 2017)
: Grand Cross of the Order of the Republic (5 February 2019)
: Grand Cross with Collar of the Order of Prince Yaroslav the Wise (15 May 2017)
: Honorary Knight Grand Cross with Collar of the Order of St Michael and St George (26 November 2015)
Awards
UN Women and Global Partnership Forum Agent of Change Award
Crans Montana Prix de la Fondation 2014
Student Wellbeing and Prevention of Violence Award from Flinders University, South Australia
WPL Award 2017 by Women Political Leaders Global Forum
Ukraine International Person of the Year 2017 – For Political and Social Activity
The ISESCO Gold Medal, for President Coleiro Preca's distinguished and tireless contribution, to the promotion of peace, in Malta, throughout the Mediterranean, and globally.
International Virdimura Award 2018
Premio Margutta – Sezione Empowerment
Honorary degrees
Honorary Professor of Politics and International Studies, University of Warwick, 2015
Honorary Doctor of Laws, University of Leicester, 2019
References
External links
Official Website
1958 births
Living people
Labour Party (Malta) politicians
Members of the House of Representatives of Malta
Presidents of Malta
Female heads of state
Women presidents in Europe
United Nations officials
Maltese officials of the United Nations
Ambassadors of supra-national bodies
World Tourism Organization people
United Nations Industrial Development Organization people
Women's International League for Peace and Freedom people
Companions of Honour of the National Order of Merit (Malta)
Grand Crosses Special Class of the Order of Merit of the Federal Republic of Germany
Knights Grand Cross with Collar of the Order of Merit of the Italian Republic
Grand Collars of the Order of Prince Henry
Recipients of the Order of Prince Yaroslav the Wise, 1st class
Honorary Knights Grand Cross of the Order of St Michael and St George
University of Malta alumni
20th-century Maltese women politicians
20th-century Maltese politicians
21st-century Maltese women politicians
21st-century Maltese politicians
Maltese Roman Catholics
People from Qormi
Women members of the House of Representatives of Malta
|
City Reflections is an outdoor 2009 bronze sculpture by Patti Warashina, located in downtown Portland, Oregon.
Description and history
City Reflections was designed by American artist Patti Warashina, who was inspired by life on a public walkway in an urban environment. It was installed at the intersection of Southwest 6th Avenue and Southwest Main Street on the Portland Transit Mall in 2009. It consists of two bronze sculptures depicting a standing figure and dog, respectively. According to TriMet, which funded the work, the humanoid figure is a "stylized version of a strong female in both a classical and minimal form", while the dog serves as a "counterbalance" and is the "friendly canine companion that is so much a part of our popular culture". The woman measures x x and the dog measures x x .
Both sculptures feature black and copper geometric shapes. The black shapes allude to aspects of the human body as well as the "shapes and shadows" of nearby buildings. The copper shapes represent "computer-generated forms, which speak to the times we live in". Dark shadows are depicted on the sidewalk, serving as "natural transitions" to pedestrians and signaling the time of day. The woman's shadow measures x and the dog's shadow measures x .
City Reflections is part of the City of Portland and Multnomah County Public Art Collection courtesy of the Regional Arts & Culture Council (RACC), which administers the work. In 2013, RACC staff members deemed the sculpture too sensitive to corrosion due to its glossy finish to participate in the Downtown Marketing Initiative's "UglySweaterPDX" campaign, which outfits sculptures with "yard-bombed" articles of clothing, to promote shopping in downtown Portland.
See also
2009 in art
Cultural depictions of dogs
References
External links
A Guide to Portland Public Art (PDF), Regional Arts & Culture Council
Public Art in Portland, Oregon (October 13, 2014), PC Housing Blog
2009 establishments in Oregon
2009 sculptures
Bronze sculptures in Oregon
Sculptures of dogs in the United States
Outdoor sculptures in Portland, Oregon
Sculptures of women in Oregon
Sculptures on the MAX Green Line
Southwest Portland, Oregon
Statues in Portland, Oregon
|
```python
import math
import pytest
from vyper.compiler import compile_code
from vyper.exceptions import InvalidType, OverflowException
# Every integer width supported by vyper's uintN family: 8, 16, ..., 256.
VALID_BITS = list(range(8, 257, 8))


@pytest.mark.parametrize("bits", VALID_BITS)
def test_mkstr(get_contract, bits):
    """uint2str round-trips representative values for every uintN width."""
    # Maximum number of decimal digits a `bits`-wide unsigned int can have.
    n_digits = math.ceil(bits * math.log(2) / math.log(10))
    code = f"""
@external
def foo(inp: uint{bits}) -> String[{n_digits}]:
    return uint2str(inp)
"""
    contract = get_contract(code)
    samples = (1, 2, 2**bits - 1, 0)
    for value in samples:
        result = contract.foo(value)
        assert result == str(value), (value, result)
# test for buffer overflow
@pytest.mark.parametrize("bits", VALID_BITS)
def test_mkstr_buffer(get_contract, bits):
    """Writing uint2str output into storage must not clobber adjacent slots."""
    # Storage string is sized to exactly fit the widest possible value.
    n_digits = math.ceil(bits * math.log(2) / math.log(10))
    code = f"""
some_string: String[{n_digits}]
@internal
def _foo(x: uint{bits}):
    self.some_string = uint2str(x)
@external
def foo(x: uint{bits}) -> uint256:
    y: uint256 = 0
    self._foo(x)
    return y
"""
    contract = get_contract(code)
    # `y` must still be zero after the widest value is stringified.
    assert contract.foo(2**bits - 1) == 0, bits
def test_bignum_throws():
    """A uint2str argument wider than uint256 is rejected at compile time."""
    source = """
@external
def test():
    a: String[78] = uint2str(2**256)
    pass
"""
    with pytest.raises(OverflowException):
        compile_code(source)
def test_int_fails():
    """A negative uint2str argument is rejected at compile time."""
    source = """
@external
def test():
    a: String[78] = uint2str(-1)
    pass
"""
    with pytest.raises(InvalidType):
        compile_code(source)
```
|
Michael Yates may refer to:
Michael Yates (television designer) (1919–2001), English television, opera, and stage designer
Michael Yates (economist) (born 1946), American economist and labor educator
J. Michael Yates (born 1938), Canadian poet, dramatist and fiction writer
Mike Yates, fictional character in Doctor Who
See also
Michael Yeats (1921–2007), Irish politician
|
```php
<?php declare(strict_types=1);
/*
* This file is part of the Monolog package.
*
* (c) Jordi Boggiano <j.boggiano@seld.be>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Monolog\Formatter;
use Monolog\Utils;
/**
* Class FluentdFormatter
*
* Serializes a log message to Fluentd unix socket protocol
*
* Fluentd config:
*
* <source>
* type unix
* path /var/run/td-agent/td-agent.sock
* </source>
*
* Monolog setup:
*
* $logger = new Monolog\Logger('fluent.tag');
* $fluentHandler = new Monolog\Handler\SocketHandler('unix:///var/run/td-agent/td-agent.sock');
* $fluentHandler->setFormatter(new Monolog\Formatter\FluentdFormatter());
* $logger->pushHandler($fluentHandler);
*
* @author Andrius Putna <fordnox@gmail.com>
*/
class FluentdFormatter implements FormatterInterface
{
    /**
     * @var bool $levelTag should message level be a part of the fluentd tag
     */
    protected $levelTag = false;

    /**
     * @param bool $levelTag whether the record level is appended to the fluentd tag
     *
     * @throws \RuntimeException when PHP's json extension is unavailable
     */
    public function __construct(bool $levelTag = false)
    {
        if (!function_exists('json_encode')) {
            throw new \RuntimeException('PHP\'s json extension is required to use Monolog\'s FluentdUnixFormatter');
        }

        $this->levelTag = $levelTag;
    }

    /**
     * @return bool true when the record level is encoded into the tag
     */
    public function isUsingLevelsInTag(): bool
    {
        return $this->levelTag;
    }

    /**
     * Serializes one record as a Fluentd-style JSON array:
     * [tag, unix timestamp, message payload].
     */
    public function format(array $record): string
    {
        $tag = $this->levelTag
            ? $record['channel'] . '.' . strtolower($record['level_name'])
            : $record['channel'];

        $message = [
            'message' => $record['message'],
            'context' => $record['context'],
            'extra' => $record['extra'],
        ];

        if (!$this->levelTag) {
            // Level is not part of the tag, so ship it inside the payload.
            $message['level'] = $record['level'];
            $message['level_name'] = $record['level_name'];
        }

        return Utils::jsonEncode([$tag, $record['datetime']->getTimestamp(), $message]);
    }

    /**
     * Concatenates the formatted representation of every record.
     */
    public function formatBatch(array $records): string
    {
        $formatted = array_map([$this, 'format'], $records);

        return implode('', $formatted);
    }
}
```
|
```typescript
import { trackEvent } from "modules/analytics";
import { RecordingOptions, SessionRecordingMode, SessionSaveMode, Visibility } from "../types";
import { SESSIONS } from "./constants";
import { trackRQLastActivity } from "utils/AnalyticsUtils";
// A draft (unsaved) session recording was opened for viewing.
export const trackDraftSessionViewed = (recording_mode: SessionRecordingMode) => {
  const params = { recording_mode: recording_mode };
  trackEvent(SESSIONS.DRAFT_SESSION_RECORDING_VIEWED, params);
};

export const trackDraftSessionDiscarded = () => {
  return trackEvent(SESSIONS.DRAFT_SESSION_DISCARDED);
};

export const trackDraftSessionNamed = () => {
  return trackEvent(SESSIONS.DRAFT_SESSION_RECORDING_NAMED);
};

export const trackSessionRecordingFailed = (reason: string) => {
  return trackEvent(SESSIONS.SESSION_RECORDING_FAILED, { reason: reason });
};
// A draft session recording was saved; also bumps the "last activity" marker.
export const trackDraftSessionSaved = (payload: {
  session_length: number;
  options: RecordingOptions;
  type: SessionSaveMode;
  source: string;
  recording_mode: SessionRecordingMode;
}) => {
  trackEvent(SESSIONS.DRAFT_SESSION_RECORDING_SAVED, {
    type: payload.type,
    session_length: payload.session_length,
    options: payload.options,
    source: payload.source,
    recording_mode: payload.recording_mode,
  });
  trackRQLastActivity(SESSIONS.DRAFT_SESSION_RECORDING_SAVED);
};

export const trackDraftSessionSaveFailed = (reason: string) => {
  return trackEvent(SESSIONS.DRAFT_SESSION_RECORDING_SAVE_FAILED, { reason: reason });
};
export const trackSavedSessionViewed = (source: string, session_id: string) =>
trackEvent(SESSIONS.SAVED_SESSION_RECORDING_VIEWED, { source, session_id });
export const trackSessionRecordingShareClicked = () => {
trackEvent(SESSIONS.SESSION_RECORDING_SHARE_CLICKED);
trackRQLastActivity(SESSIONS.SESSION_RECORDING_SHARE_CLICKED);
};
export const trackSessionRecordingShareLinkCopied = (source = "app") =>
trackEvent(SESSIONS.SESSION_RECORDING_SHARE_LINK_COPIED, { source });
export const trackSessionRecordingVisibilityUpdated = (visibility: Visibility) => {
trackEvent(SESSIONS.SESSION_RECORDING_VISIBILITY_UPDATED, {
visibility,
});
trackRQLastActivity(SESSIONS.SESSION_RECORDING_VISIBILITY_UPDATED);
};
export const trackSessionRecordingDeleted = () => {
trackEvent(SESSIONS.SESSION_RECORDING_DELETED);
trackRQLastActivity(SESSIONS.SESSION_RECORDING_DELETED);
};
export const trackSessionRecordingDescriptionUpdated = () => trackEvent(SESSIONS.SESSION_RECORDING_DESCRIPTION_ADDED);
export const trackSessionRecordingNameUpdated = () => {
trackEvent(SESSIONS.SESSION_RECORDING_NAME_UPDATED);
};
export const trackBadSessionRecordingViewed = () => trackEvent(SESSIONS.BAD_SESSION_RECORDING_VIEWED);
export const trackSessionRecordingBottomSheetTabClicked = (tab: string) =>
trackEvent(SESSIONS.SESSION_RECORDING_BOTTOM_SHEET_TAB_CLICKED, { tab });
```
|
```python
"""Multi-threaded word2vec mini-batched skip-gram model.
Trains the model described in:
(Mikolov, et. al.) Efficient Estimation of Word Representations in Vector Space
ICLR 2013.
path_to_url
This model does traditional minibatching.
The key ops used are:
* placeholder for feeding in tensors for each example.
* embedding_lookup for fetching rows from the embedding matrix.
* sigmoid_cross_entropy_with_logits to calculate the loss.
* GradientDescentOptimizer for optimizing the loss.
* skipgram custom op that does input processing.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import threading
import time
from six.moves import xrange # pylint: disable=redefined-builtin
import numpy as np
import tensorflow as tf
word2vec = tf.load_op_library(
os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'word2vec_ops.so'))
flags = tf.app.flags
flags.DEFINE_string("save_path", None, "Directory to write the model and "
"training summaries.")
flags.DEFINE_string("train_data", None, "Training text file. "
"E.g., unzipped file path_to_url")
flags.DEFINE_string("eval_data", None,
"File consisting of analogies of four tokens."
"embedding 2 - embedding 1 + embedding 3 should be close "
"to embedding 4."
"See README.md for how to get 'questions-words.txt'.")
flags.DEFINE_integer("embedding_size", 200, "The embedding dimension size.")
flags.DEFINE_integer(
"epochs_to_train", 15,
"Number of epochs to train. Each epoch processes the training data once "
"completely.")
flags.DEFINE_float("learning_rate", 0.2, "Initial learning rate.")
flags.DEFINE_integer("num_neg_samples", 100,
"Negative samples per training example.")
flags.DEFINE_integer("batch_size", 16,
"Number of training examples processed per step "
"(size of a minibatch).")
flags.DEFINE_integer("concurrent_steps", 12,
"The number of concurrent training steps.")
flags.DEFINE_integer("window_size", 5,
"The number of words to predict to the left and right "
"of the target word.")
flags.DEFINE_integer("min_count", 5,
"The minimum number of word occurrences for it to be "
"included in the vocabulary.")
flags.DEFINE_float(
"subsample", 1e-3,
"Subsample threshold for word occurrence. Words that appear "
"with higher frequency will be randomly down-sampled. Set "
"to 0 to disable.")
flags.DEFINE_boolean(
"interactive", False,
"If true, enters an IPython interactive session to play with the trained "
"model. E.g., try model.analogy(b'france', b'paris', b'russia') and "
"model.nearby([b'proton', b'elephant', b'maxwell'])")
flags.DEFINE_integer("statistics_interval", 5,
"Print statistics every n seconds.")
flags.DEFINE_integer("summary_interval", 5,
"Save training summary to file every n seconds (rounded "
"up to statistics interval).")
flags.DEFINE_integer("checkpoint_interval", 600,
"Checkpoint the model (i.e. save the parameters) every n "
"seconds (rounded up to statistics interval).")
FLAGS = flags.FLAGS
class Options(object):
    """Options used by our word2vec model.

    Snapshots every hyperparameter from the module-level FLAGS at
    construction time and exposes them as plain attributes, so the rest of
    the model never touches FLAGS directly.
    """

    def __init__(self):
        # Model options.

        # Embedding dimension.
        self.emb_dim = FLAGS.embedding_size

        # Training options.

        # The training text file.
        self.train_data = FLAGS.train_data

        # Number of negative samples per example.
        self.num_samples = FLAGS.num_neg_samples

        # The initial learning rate.
        self.learning_rate = FLAGS.learning_rate

        # Number of epochs to train. After these many epochs, the learning
        # rate decays linearly to zero and the training stops.
        self.epochs_to_train = FLAGS.epochs_to_train

        # Concurrent training steps.
        self.concurrent_steps = FLAGS.concurrent_steps

        # Number of examples for one training step.
        self.batch_size = FLAGS.batch_size

        # The number of words to predict to the left and right of the target word.
        self.window_size = FLAGS.window_size

        # The minimum number of word occurrences for it to be included in the
        # vocabulary.
        self.min_count = FLAGS.min_count

        # Subsampling threshold for word occurrence.
        self.subsample = FLAGS.subsample

        # How often to print statistics.
        self.statistics_interval = FLAGS.statistics_interval

        # How often to write to the summary file (rounds up to the nearest
        # statistics_interval).
        self.summary_interval = FLAGS.summary_interval

        # How often to write checkpoints (rounds up to the nearest statistics
        # interval).
        self.checkpoint_interval = FLAGS.checkpoint_interval

        # Where to write out summaries.
        self.save_path = FLAGS.save_path
        # NOTE(review): FLAGS.save_path defaults to None and
        # os.path.exists(None) raises TypeError — confirm callers always pass
        # --save_path before constructing Options.
        if not os.path.exists(self.save_path):
            os.makedirs(self.save_path)

        # Eval options.

        # The text file for eval.
        self.eval_data = FLAGS.eval_data
class Word2Vec(object):
"""Word2Vec model (Skipgram)."""
    def __init__(self, options, session):
        """Build the training and eval graphs and save the vocabulary.

        Args:
          options: an Options instance holding the model hyperparameters.
          session: the TensorFlow session used to run the graphs.
        """
        self._options = options
        self._session = session
        self._word2id = {}  # word (bytes) -> vocabulary id
        self._id2word = []  # vocabulary id -> word (bytes)
        self.build_graph()
        self.build_eval_graph()
        self.save_vocab()
    def read_analogies(self):
        """Reads through the analogy question file.

        Note: despite the original wording, this does not return anything.
        It populates self._analogy_questions with a [n, 4] int32 numpy array
        of the analogy questions' word ids. Questions containing any word
        missing from the vocabulary (or not exactly four tokens long) are
        skipped and counted.
        """
        questions = []
        questions_skipped = 0
        with open(self._options.eval_data, "rb") as analogy_f:
            for line in analogy_f:
                if line.startswith(b":"):  # Skip comments.
                    continue
                # Each analogy line holds four space-separated tokens: a b c d.
                words = line.strip().lower().split(b" ")
                ids = [self._word2id.get(w.strip()) for w in words]
                if None in ids or len(ids) != 4:
                    # Out-of-vocabulary word or malformed line: drop the
                    # whole question.
                    questions_skipped += 1
                else:
                    questions.append(np.array(ids))
        print("Eval analogy file: ", self._options.eval_data)
        print("Questions: ", len(questions))
        print("Skipped: ", questions_skipped)
        self._analogy_questions = np.array(questions, dtype=np.int32)
    def forward(self, examples, labels):
        """Build the graph for the forward pass.

        Args:
          examples: int tensor of center-word ids, one per batch element.
          labels: int tensor of true context-word ids, one per batch element.

        Returns:
          A (true_logits, sampled_logits) pair: logits for the true
          (example, label) pairs, shape [batch_size], and logits of each
          example against the shared negative samples, shape
          [batch_size, num_samples].
        """
        opts = self._options

        # Declare all variables we need.
        # Embedding: [vocab_size, emb_dim]
        init_width = 0.5 / opts.emb_dim
        emb = tf.Variable(
            tf.random_uniform([opts.vocab_size, opts.emb_dim], -init_width,
                              init_width),
            name="emb")
        self._emb = emb

        # Softmax weight: [vocab_size, emb_dim]. Transposed.
        sm_w_t = tf.Variable(
            tf.zeros([opts.vocab_size, opts.emb_dim]), name="sm_w_t")

        # Softmax bias: [vocab_size].
        sm_b = tf.Variable(tf.zeros([opts.vocab_size]), name="sm_b")

        # Global step: scalar, i.e., shape [].
        self.global_step = tf.Variable(0, name="global_step")

        # Nodes to compute the nce loss w/ candidate sampling.
        labels_matrix = tf.reshape(
            tf.cast(
                labels, dtype=tf.int64), [opts.batch_size, 1])

        # Negative sampling: draw num_samples distinct noise word ids from the
        # unigram distribution raised to the 0.75 power.
        sampled_ids, _, _ = (tf.nn.fixed_unigram_candidate_sampler(
            true_classes=labels_matrix,
            num_true=1,
            num_sampled=opts.num_samples,
            unique=True,
            range_max=opts.vocab_size,
            distortion=0.75,
            unigrams=opts.vocab_counts.tolist()))

        # Embeddings for examples: [batch_size, emb_dim]
        example_emb = tf.nn.embedding_lookup(emb, examples)

        # Weights for labels: [batch_size, emb_dim]
        true_w = tf.nn.embedding_lookup(sm_w_t, labels)
        # Biases for labels: [batch_size, 1]
        true_b = tf.nn.embedding_lookup(sm_b, labels)

        # Weights for sampled ids: [num_sampled, emb_dim]
        sampled_w = tf.nn.embedding_lookup(sm_w_t, sampled_ids)
        # Biases for sampled ids: [num_sampled, 1]
        sampled_b = tf.nn.embedding_lookup(sm_b, sampled_ids)

        # True logits: [batch_size, 1]
        true_logits = tf.reduce_sum(tf.multiply(example_emb, true_w),
                                    1) + true_b

        # Sampled logits: [batch_size, num_sampled]
        # We replicate sampled noise labels for all examples in the batch
        # using the matmul.
        sampled_b_vec = tf.reshape(sampled_b, [opts.num_samples])
        sampled_logits = tf.matmul(
            example_emb, sampled_w, transpose_b=True) + sampled_b_vec
        return true_logits, sampled_logits
def nce_loss(self, true_logits, sampled_logits):
    """Build the graph for the NCE loss.

    Positives (true pairs) target label 1, sampled noise words target
    label 0; the summed cross-entropies are averaged over the batch.
    """
    opts = self._options
    # cross-entropy(logits, labels)
    pos_xent = tf.nn.sigmoid_cross_entropy_with_logits(
        labels=tf.ones_like(true_logits), logits=true_logits)
    neg_xent = tf.nn.sigmoid_cross_entropy_with_logits(
        labels=tf.zeros_like(sampled_logits), logits=sampled_logits)
    # NCE loss: true + noise contributions, averaged over the batch.
    total_xent = tf.reduce_sum(pos_xent) + tf.reduce_sum(neg_xent)
    return total_xent / opts.batch_size
def optimize(self, loss):
    """Build the graph to optimize the loss function.

    Uses plain SGD with a linearly decaying learning rate, floored at
    0.01% of the base rate; the train op is stored on self._train.
    """
    opts = self._options
    # Linear learning rate decay based on how many words have been
    # consumed relative to the total training budget.
    words_to_train = float(opts.words_per_epoch * opts.epochs_to_train)
    progress = tf.cast(self._words, tf.float32) / words_to_train
    lr = opts.learning_rate * tf.maximum(0.0001, 1.0 - progress)
    self._lr = lr
    sgd = tf.train.GradientDescentOptimizer(lr)
    self._train = sgd.minimize(
        loss,
        global_step=self.global_step,
        gate_gradients=sgd.GATE_NONE)
def build_eval_graph(self):
    """Build the eval graph.

    Creates placeholder-fed ops for analogy prediction (given a, b, c,
    find d such that a:b ~ c:d) and nearest-neighbor lookup by cosine
    similarity, and stores them on self for _predict()/eval()/nearby().
    """
    # Eval graph
    # Each analogy task is to predict the 4th word (d) given three
    # words: a, b, c. E.g., a=italy, b=rome, c=france, we should
    # predict d=paris.
    # The eval feeds three vectors of word ids for a, b, c, each of
    # which is of size N, where N is the number of analogies we want to
    # evaluate in one batch.
    analogy_a = tf.placeholder(dtype=tf.int32)  # [N]
    analogy_b = tf.placeholder(dtype=tf.int32)  # [N]
    analogy_c = tf.placeholder(dtype=tf.int32)  # [N]
    # Normalized word embeddings of shape [vocab_size, emb_dim].
    nemb = tf.nn.l2_normalize(self._emb, 1)
    # Each row of a_emb, b_emb, c_emb is a word's embedding vector.
    # They all have the shape [N, emb_dim]
    a_emb = tf.gather(nemb, analogy_a)  # a's embs
    b_emb = tf.gather(nemb, analogy_b)  # b's embs
    c_emb = tf.gather(nemb, analogy_c)  # c's embs
    # We expect that d's embedding vectors on the unit hyper-sphere is
    # near: c_emb + (b_emb - a_emb), which has the shape [N, emb_dim].
    target = c_emb + (b_emb - a_emb)
    # Compute cosine distance between each pair of target and vocab.
    # dist has shape [N, vocab_size]; rows of nemb are unit-length, so
    # the matmul gives cosine similarity up to the norm of each target.
    dist = tf.matmul(target, nemb, transpose_b=True)
    # For each question (row in dist), find the top 4 words (the top
    # hits may include the query words themselves; callers filter them).
    _, pred_idx = tf.nn.top_k(dist, 4)
    # Nodes for computing neighbors for a given word according to
    # their cosine distance.
    nearby_word = tf.placeholder(dtype=tf.int32)  # word id
    nearby_emb = tf.gather(nemb, nearby_word)
    nearby_dist = tf.matmul(nearby_emb, nemb, transpose_b=True)
    # Return up to the whole vocab, capped at 1000 neighbors.
    nearby_val, nearby_idx = tf.nn.top_k(
        nearby_dist, min(1000, self._options.vocab_size))
    # Nodes in the construct graph which are used by training and
    # evaluation to run/feed/fetch.
    self._analogy_a = analogy_a
    self._analogy_b = analogy_b
    self._analogy_c = analogy_c
    self._analogy_pred_idx = pred_idx
    self._nearby_word = nearby_word
    self._nearby_val = nearby_val
    self._nearby_idx = nearby_idx
def build_graph(self):
    """Build the graph for the full model.

    Runs the skipgram input producer to materialize the vocabulary
    (requires self._session to already exist), wires the forward pass,
    loss and optimizer, initializes all variables, and creates the
    checkpoint Saver.
    """
    opts = self._options
    # The training data. A text file. The skipgram op also yields the
    # (examples, labels) word-id stream consumed by forward().
    (words, counts, words_per_epoch, self._epoch, self._words, examples,
     labels) = word2vec.skipgram_word2vec(
         filename=opts.train_data,
         batch_size=opts.batch_size,
         window_size=opts.window_size,
         min_count=opts.min_count,
         subsample=opts.subsample)
    # Materialize vocab tensors eagerly: their sizes are needed before
    # the rest of the graph can be constructed.
    (opts.vocab_words, opts.vocab_counts,
     opts.words_per_epoch) = self._session.run(
         [words, counts, words_per_epoch])
    opts.vocab_size = len(opts.vocab_words)
    print("Data file: ", opts.train_data)
    print("Vocab size: ", opts.vocab_size - 1, " + UNK")
    print("Words per epoch: ", opts.words_per_epoch)
    self._examples = examples
    self._labels = labels
    self._id2word = opts.vocab_words
    # Build the reverse (word -> id) lookup used by analogy()/nearby().
    for i, w in enumerate(self._id2word):
        self._word2id[w] = i
    true_logits, sampled_logits = self.forward(examples, labels)
    loss = self.nce_loss(true_logits, sampled_logits)
    try:
        # tf.summary is the newer API; fall back for older TF releases.
        tf.summary.scalar("NCE loss", loss)
    except AttributeError:
        tf.scalar_summary("NCE loss", loss)
    self._loss = loss
    self.optimize(loss)
    # Properly initialize all variables.
    tf.global_variables_initializer().run()
    self.saver = tf.train.Saver()
def save_vocab(self):
    """Save the vocabulary to a file so the model can be reloaded.

    Writes one "<word> <count>" line per vocabulary entry to
    <save_path>/vocab.txt.
    """
    opts = self._options
    with open(os.path.join(opts.save_path, "vocab.txt"), "w") as f:
        for i in xrange(opts.vocab_size):
            # Bug fix: the word was previously re-encoded to bytes
            # before "%s" formatting, which writes the repr (b'word')
            # under Python 3. Keep it as text and let the text-mode
            # file handle the encoding.
            vocab_word = tf.compat.as_text(opts.vocab_words[i])
            f.write("%s %d\n" % (vocab_word, opts.vocab_counts[i]))
def _train_thread_body(self):
initial_epoch, = self._session.run([self._epoch])
while True:
_, epoch = self._session.run([self._train, self._epoch])
if epoch != initial_epoch:
break
def train(self):
    """Train the model for one epoch.

    Spawns opts.concurrent_steps worker threads that each run training
    steps until the epoch counter advances, while this thread
    periodically prints progress and writes summaries and checkpoints.

    Returns:
        The epoch counter value observed after the epoch completed.
    """
    opts = self._options
    initial_epoch, initial_words = self._session.run(
        [self._epoch, self._words])
    try:
        # tf.summary is the newer API; fall back for older TF releases.
        summary_op = tf.summary.merge_all()
    except AttributeError:
        summary_op = tf.merge_all_summary()
    summary_writer = tf.summary.FileWriter(opts.save_path,
                                           self._session.graph)
    # Worker threads drive the actual training; this thread only monitors.
    workers = []
    for _ in xrange(opts.concurrent_steps):
        t = threading.Thread(target=self._train_thread_body)
        t.start()
        workers.append(t)
    last_words, last_time, last_summary_time = initial_words, time.time(), 0
    last_checkpoint_time = 0
    while True:
        time.sleep(
            opts.statistics_interval)  # Reports our progress once a while.
        (epoch, step, loss, words, lr) = self._session.run([
            self._epoch, self.global_step, self._loss, self._words,
            self._lr
        ])
        now = time.time()
        # words/sec is measured over the interval since the last report.
        last_words, last_time, rate = words, now, (words - last_words) / (
            now - last_time)
        print(
            "Epoch %4d Step %8d: lr = %5.3f loss = %6.2f words/sec = %8.0f\r"
            % (epoch, step, lr, loss, rate),
            end="")
        sys.stdout.flush()
        if now - last_summary_time > opts.summary_interval:
            summary_str = self._session.run(summary_op)
            summary_writer.add_summary(summary_str, step)
            last_summary_time = now
        if now - last_checkpoint_time > opts.checkpoint_interval:
            # step is a numpy scalar here; cast so the checkpoint
            # filename suffix is a plain int.
            self.saver.save(
                self._session,
                os.path.join(opts.save_path, "model.ckpt"),
                global_step=step.astype(int))
            last_checkpoint_time = now
        if epoch != initial_epoch:
            break
    # The workers exit on their own once the epoch advances.
    for t in workers:
        t.join()
    return epoch
def _predict(self, analogy):
"""Predict the top 4 answers for analogy questions."""
idx, = self._session.run([self._analogy_pred_idx], {
self._analogy_a: analogy[:, 0],
self._analogy_b: analogy[:, 1],
self._analogy_c: analogy[:, 2]
})
return idx
def eval(self):
    """Evaluate analogy questions and report precision@1 accuracy.

    Requires read_analogies() to have populated self._analogy_questions.
    """
    # How many questions we get right at precision@1.
    correct = 0
    try:
        total = self._analogy_questions.shape[0]
    except AttributeError:
        # read_analogies() must be called before eval(). (The original
        # bound the caught exception to an unused variable.)
        raise AttributeError("Need to read analogy questions.")
    if total == 0:
        # Guard the accuracy division below against an empty question set.
        print("Eval 0/0: no analogy questions to evaluate.")
        return
    start = 0
    while start < total:
        # Score in batches of 2500 questions.
        limit = start + 2500
        sub = self._analogy_questions[start:limit, :]
        idx = self._predict(sub)
        start = limit
        for question in xrange(sub.shape[0]):
            for j in xrange(4):
                if idx[question, j] == sub[question, 3]:
                    # Bingo! We predicted correctly. E.g., [italy, rome, france, paris].
                    correct += 1
                    break
                elif idx[question, j] in sub[question, :3]:
                    # We need to skip words already in the question.
                    continue
                else:
                    # The correct label is not the precision@1
                    break
    print()
    print("Eval %4d/%d accuracy = %4.1f%%" % (correct, total,
                                              correct * 100.0 / total))
def analogy(self, w0, w1, w2):
    """Predict word w3 as in w0:w1 vs w2:w3.

    Prints the best candidate, or "unknown" when every predicted word
    is one of the query words.
    """
    wid = np.array([[self._word2id.get(w, 0) for w in [w0, w1, w2]]])
    idx = self._predict(wid)
    for c in [self._id2word[i] for i in idx[0, :]]:
        if c not in [w0, w1, w2]:
            print(c)
            break
    else:
        # Bug fix: "unknown" used to print unconditionally, even after a
        # successful prediction. The for/else clause only fires when the
        # loop completes without break, i.e. no candidate was found.
        print("unknown")
def nearby(self, words, num=20):
    """Prints out nearby words given a list of words.

    For each query word, shows the `num` closest vocabulary words by
    cosine distance together with the distance value.
    """
    # Unknown words map to id 0 (UNK).
    ids = np.array([self._word2id.get(query, 0) for query in words])
    vals, idx = self._session.run([self._nearby_val, self._nearby_idx],
                                  {self._nearby_word: ids})
    for i in xrange(len(words)):
        print("\n%s\n=====================================" % (words[i]))
        neighbors = zip(idx[i, :num], vals[i, :num])
        for (neighbor, distance) in neighbors:
            print("%-20s %6.4f" % (self._id2word[neighbor], distance))
def _start_shell(local_ns=None):
    """Launch an interactive IPython shell (useful for debugging).

    NOTE: module globals are merged in *after* local_ns, so globals win
    on name collisions (preserved from the original behavior).
    """
    import IPython
    shell_ns = {}
    if local_ns:
        shell_ns.update(local_ns)
    shell_ns.update(globals())
    IPython.start_ipython(argv=[], user_ns=shell_ns)
def main(_):
    """Train a word2vec model.

    Requires --train_data, --eval_data and --save_path flags; trains for
    opts.epochs_to_train epochs and writes a final checkpoint.
    """
    # All three flags are required; bail out early with a usage hint.
    if not FLAGS.train_data or not FLAGS.eval_data or not FLAGS.save_path:
        print("--train_data --eval_data and --save_path must be specified.")
        sys.exit(1)
    opts = Options()
    with tf.Graph().as_default(), tf.Session() as session:
        # Pin graph construction to the CPU device.
        with tf.device("/cpu:0"):
            model = Word2Vec(opts, session)
            # model.read_analogies() # Read analogy questions
        for _ in xrange(opts.epochs_to_train):
            model.train()  # Process one epoch
            # model.eval()  # Eval analogies.
        # Perform a final save.
        model.saver.save(
            session,
            os.path.join(opts.save_path, "model.ckpt"),
            global_step=model.global_step)
        if FLAGS.interactive:
            # E.g.,
            # [0]: model.analogy(b'france', b'paris', b'russia')
            # [1]: model.nearby([b'proton', b'elephant', b'maxwell'])
            _start_shell(locals())
# Script entry point: tf.app.run parses flags and invokes main() above.
if __name__ == "__main__":
    tf.app.run()
```
|
The 2023 Cherokee Nation deputy chief election was held on June 3, 2023, concurrently with the 2023 Cherokee Nation tribal council elections and 2023 Cherokee Nation principal chief election, to elect the Deputy Chief of the Cherokee Nation. Incumbent deputy chief Bryan Warner ran for re-election to a second term in office with incumbent principal chief Chuck Hoskin Jr. as his running mate. Warner won re-election with over 61% of the vote.
Candidates
Meredith Frailey, former tribal councilor
Bill Pearson, U.S. Navy veteran and chair of the Rogers County Republican Party
David Walkingstick, former tribal councilor for district 3 (2011-2019)
Running mate: Cara Cowan Watts, former tribal councilor (2003-2015)
Bryan Warner, incumbent deputy chief (2019–present) and former Cherokee Nation tribal councilor for district 6
Running mate: Chuck Hoskin Jr., incumbent principal chief (2019–present)
Results
The election was held on June 3, 2023.
Legal issues
The Cherokee Nation Election Commission rejected the candidacy of David Comingdeer for deputy chief for his failure to pay attorney's fees related to a case he brought during the 2021 Cherokee Nation elections.
Notes
References
Cherokee Nation deputy chief
Cherokee Nation elections
Cherokee Nation Deputy Chief
|
```go
/*
*/
package queryutil
import (
"fmt"
"github.com/hyperledger/fabric-protos-go/ledger/queryresult"
commonledger "github.com/hyperledger/fabric/common/ledger"
"github.com/hyperledger/fabric/core/ledger/kvledger/txmgmt/statedb"
)
// itrCombiner merges results from multiple underlying statedb iterators
// over a single namespace. When the same key appears in more than one
// iterator, the one earlier in 'holders' wins (it carries the latest value;
// see Next).
type itrCombiner struct {
	namespace string
	holders   []*itrHolder
}
// newItrCombiner wraps the supplied base iterators, buffering the first
// result from each. Iterators that are already exhausted are dropped.
// If fetching a first result fails, every iterator buffered so far is
// closed before the error is returned.
func newItrCombiner(namespace string, baseIterators []statedb.ResultsIterator) (*itrCombiner, error) {
	var holders []*itrHolder
	for _, baseItr := range baseIterators {
		kv, err := baseItr.Next()
		if err != nil {
			for _, h := range holders {
				h.itr.Close()
			}
			return nil, err
		}
		if kv == nil {
			continue
		}
		holders = append(holders, &itrHolder{baseItr, kv})
	}
	return &itrCombiner{namespace, holders}, nil
}
// Next returns the next eligible item from the underlying iterators.
// This function evaluates the underlying iterators, and picks the one which is
// gives the lexicographically smallest key. Then, it saves that value, and advances the chosen iterator.
// If the chosen iterator is out of elements, then that iterator is closed, and removed from the list of iterators.
func (combiner *itrCombiner) Next() (commonledger.QueryResult, error) {
	logger.Debugf("Iterators position at beginning: %s", combiner.holders)
	// No holders left => every underlying iterator is exhausted.
	if len(combiner.holders) == 0 {
		return nil, nil
	}
	// Scan the buffered heads of all iterators for the smallest key.
	// On a tie, the lower-indexed iterator wins: it holds the latest
	// value for the key, so duplicates in higher-indexed iterators are
	// stale and get skipped below.
	smallestHolderIndex := 0
	for i := 1; i < len(combiner.holders); i++ {
		smallestKey, holderKey := combiner.keyAt(smallestHolderIndex), combiner.keyAt(i)
		switch {
		case holderKey == smallestKey: // we found the same key in the lower order iterator (stale value of the key);
			// we already have the latest value for this key (in smallestHolder). Ignore this value and move the iterator
			// to next item (to a greater key) so that for next round of key selection, we do not consider this key again
			removed, err := combiner.moveItrAndRemoveIfExhausted(i)
			if err != nil {
				return nil, err
			}
			if removed { // if the current iterator is exhausted and hence removed, decrement the index
				// because indexes of the remaining iterators are decremented by one
				i--
			}
		case holderKey < smallestKey:
			smallestHolderIndex = i
		default:
			// the current key under evaluation is greater than the smallestKey - do nothing
		}
	}
	kv := combiner.kvAt(smallestHolderIndex)
	// Advance the winning iterator past the item we are about to return.
	if _, err := combiner.moveItrAndRemoveIfExhausted(smallestHolderIndex); err != nil {
		return nil, err
	}
	// A delete marker masks the key entirely; recurse to the next live key.
	if kv.IsDelete() {
		return combiner.Next()
	}
	logger.Debugf("Key [%s] selected from iterator at index [%d]", kv.Key, smallestHolderIndex)
	logger.Debugf("Iterators position at end: %s", combiner.holders)
	return &queryresult.KV{Namespace: combiner.namespace, Key: kv.Key, Value: kv.Value}, nil
}
// moveItrAndRemoveIfExhausted advances the iterator at index i to its next
// item. If that exhausts the iterator, it is closed and spliced out of the
// holders slice (shifting the indexes of all later holders down by one).
func (combiner *itrCombiner) moveItrAndRemoveIfExhausted(i int) (removed bool, err error) {
	h := combiner.holders[i]
	done, err := h.moveToNext()
	if err != nil {
		return false, err
	}
	if done {
		h.itr.Close()
		combiner.holders = append(combiner.holders[:i], combiner.holders[i+1:]...)
	}
	return done, nil
}
// kvAt returns the buffered key/value currently held by iterator i.
func (combiner *itrCombiner) kvAt(i int) *statedb.VersionedKV {
	holder := combiner.holders[i]
	return holder.kv
}
// keyAt returns the key of the buffered item held by iterator i.
func (combiner *itrCombiner) keyAt(i int) string {
	kv := combiner.kvAt(i)
	return kv.Key
}
// Close releases every remaining underlying iterator.
func (combiner *itrCombiner) Close() {
	for _, h := range combiner.holders {
		h.itr.Close()
	}
}
// itrHolder encloses an iterator and keeps the next item available from the iterator in the buffer
type itrHolder struct {
	itr statedb.ResultsIterator // the underlying iterator
	kv  *statedb.VersionedKV    // look-ahead buffer: next unreturned item
}
// moveToNext refills the look-ahead buffer from the underlying iterator and
// reports whether the iterator is exhausted. On exhaustion the previous
// buffered item is left in place (callers remove the holder anyway).
func (holder *itrHolder) moveToNext() (exhausted bool, err error) {
	next, err := holder.itr.Next()
	if err != nil {
		return false, err
	}
	if next == nil {
		return true, nil
	}
	holder.kv = next
	return false, nil
}
// String renders the buffered next-key; used when debug-logging the
// positions of all iterators in the combiner.
func (holder *itrHolder) String() string {
	key := holder.kv.Key
	return fmt.Sprintf("{%s}", key)
}
```
|
```objective-c
/* GIO - GLib Input, Output and Streaming Library
*
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
*
* You should have received a copy of the GNU Lesser General
*
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.co.uk>
*/
#ifndef __G_HTTP_PROXY_H__
#define __G_HTTP_PROXY_H__

#include <gio/giotypes.h>

G_BEGIN_DECLS

/* Standard GObject type boilerplate for GHttpProxy. The leading underscore
 * on _g_http_proxy_get_type() marks the type as private to GIO (this header
 * is not installed for applications). */
#define G_TYPE_HTTP_PROXY         (_g_http_proxy_get_type ())
#define G_HTTP_PROXY(o)           (G_TYPE_CHECK_INSTANCE_CAST ((o), G_TYPE_HTTP_PROXY, GHttpProxy))
#define G_HTTP_PROXY_CLASS(k)     (G_TYPE_CHECK_CLASS_CAST((k), G_TYPE_HTTP_PROXY, GHttpProxyClass))
#define G_IS_HTTP_PROXY(o)        (G_TYPE_CHECK_INSTANCE_TYPE ((o), G_TYPE_HTTP_PROXY))
#define G_IS_HTTP_PROXY_CLASS(k)  (G_TYPE_CHECK_CLASS_TYPE ((k), G_TYPE_HTTP_PROXY))
#define G_HTTP_PROXY_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), G_TYPE_HTTP_PROXY, GHttpProxyClass))

/* Opaque instance/class structs; the definitions live in the .c file. */
typedef struct _GHttpProxy      GHttpProxy;
typedef struct _GHttpProxyClass GHttpProxyClass;

GType _g_http_proxy_get_type (void);

/* Same boilerplate for the GHttpsProxy variant. */
#define G_TYPE_HTTPS_PROXY         (_g_https_proxy_get_type ())
#define G_HTTPS_PROXY(o)           (G_TYPE_CHECK_INSTANCE_CAST ((o), G_TYPE_HTTPS_PROXY, GHttpsProxy))
#define G_HTTPS_PROXY_CLASS(k)     (G_TYPE_CHECK_CLASS_CAST((k), G_TYPE_HTTPS_PROXY, GHttpsProxyClass))
#define G_IS_HTTPS_PROXY(o)        (G_TYPE_CHECK_INSTANCE_TYPE ((o), G_TYPE_HTTPS_PROXY))
#define G_IS_HTTPS_PROXY_CLASS(k)  (G_TYPE_CHECK_CLASS_TYPE ((k), G_TYPE_HTTPS_PROXY))
#define G_HTTPS_PROXY_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), G_TYPE_HTTPS_PROXY, GHttpsProxyClass))

typedef struct _GHttpsProxy      GHttpsProxy;
typedef struct _GHttpsProxyClass GHttpsProxyClass;

GType _g_https_proxy_get_type (void);

G_END_DECLS

#endif /* __G_HTTP_PROXY_H__ */
```
|
Dharmapuri is one of the 38 districts in the state of Tamil Nadu, India. It is the first district created in Tamil Nadu after the independence of India, formed by splitting it from the then Salem district on 2 October 1965. Dharmapuri District is one of the major producers of mango in the state, and fine-quality granite is found in the district. It is also one of the main sericulture belts in the state. Around 30 percent of the district's area is under forest cover. The Kaveri enters Tamil Nadu through this district. Dharmapuri district had the lowest literacy rate of 74.23% in Tamil Nadu during the 2011 census.
Etymology
Dharmapuri was called Thagadur during the Sangam era. The name Thagadur is derived from two Tamil words, Thagadu meaning iron ore, and ur meaning place. The name Thagadur was changed to Dharmapuri after the Sangam period, possibly during the period of the Vijayanagara Empire or the period of the Kingdom of Mysore. The name Thagadur is still used by some of the people of Dharmapuri.
History
The region is believed to have been controlled by the Pallava dynasty in the 8th century. The Rashtrakutas took over in the 9th century. They were defeated by the Cholas in the 11th century, and subsequently the district came under the Chola sphere of governance.
During 18th century, present-day Dharmapuri district was part of the Kingdom of Mysore and was called Baramahal. As part of the Treaty of Seringapatam (signed on 18 March 1792) after the Third Anglo-Mysore War, Tipu Sultan agreed to give part of his territories including present-day Dharmapuri district to the British East India Company which was then merged into the Madras Presidency an administrative subdivision of British India.
The present-day district was part of the Salem District under British rule, until the founding of Dharmapuri district on 2 October 1965. The Dharmapuri district was split into Dharmapuri and Krishnagiri districts in 2004. Many historical rock sculptures are found in this district. Modhur, a village near Dharmapuri has remains that date back to the Neolithic age. A government museum in Dharmapuri town displays some of these significant sculptures.
Geography
The district is located between latitudes N 11 47’ and 12 33’ and longitudes E 77 02’ and 78 40’. It occupies 3.46% of Tamil Nadu's area. It is bounded on the north by Krishnagiri District, on the east by Tiruvannamalai District and Kallakurichi district, on the south by Salem District, and on the west by Karnataka's Chamarajanagar District. The whole district is surrounded by hills and forests, and the terrain consists mostly of rolling plains.
Forests
The whole district is predominantly covered with forests. Spider Valley located near Hogenakkal is home to many wild animals. The district falls in the migratory path of elephants. Conflicts between man and elephant are most common in these parts. Many tribal communities depend on these forests. Vathalmalai, a mountain hamlet on top of Shervarayan hill chain has suitable conditions to cultivate coffee and jackfruit. Wild boars and spotted deer are commonly seen in Morappur and Harur forest region. Gaurs sometimes stroll near villages in the Bommidi region. Thoppur ghat has one of the area's scenic highways surrounded by mountains and forests.
Rivers and dams
Waterbodies in Dharmapuri - Harur taluk, Chinnaru river Ramakkal Lake
Governance
Dharmapuri is the district headquarters. The district has been divided into two revenue divisions, namely, Dharmapuri and Harur consisting seven taluks.
Administrative divisions
Revenue divisions:
Dharmapuri and Harur.
Revenue taluks:
Dharmapuri, Harur, Karimangalam, Nallampalli, Palacode, Pappireddipatti, Pennagaram.
Town Panchayats:
Harur, Marandahalli, Bommidi, Palacode, Pennagaram, Karimangalam, Kambainallur, Papparapatti, Kadathur, Pappireddipatti.
Panchayat Unions:
Dharmapuri, Harur, Nallampalli, Palacode, Pennagaram, Karimangalam, Morappur, Pappireddipatti, Kadathur, Eriyur.
Demographics
According to 2011 census, Dharmapuri district had a population of 1,506,843 with a sex-ratio of 946 females for every 1,000 males, much above the national average of 929. A total of 167,940 were under the age of six, constituting 87,777 males and 80,163 females. The average literacy of the district was 91.2% compared to the national average of 72.99%. The district had a total of 375,873 households. Scheduled Castes and Scheduled Tribes made up 16.29% and 4.18% of the population respectively. There were a total of 751,170 workers, comprising 191,080 cultivators, 217,062 main agricultural labourers, 11,308 in house hold industries, 233,546 other workers, 98,174 marginal workers, 10,248 marginal cultivators, 50,283 marginal agricultural labourers, 4,033 marginal workers in household industries and 33,610 other marginal workers. 17.32% of the population lived in urban areas. The population of the district is roughly equal to the nation of Gabon or the US state of Hawaii.
At the time of the 2011 census, 87.90% of the population spoke Tamil, 6.51% Telugu, 2.95% Kannada and 2.26% Urdu as their first language.
Politics
|}
Economy
Agriculture
Dharmapuri and Krishnagiri districts account for more than 60% to 70% total mango production in Tamil Nadu. It is a major producer of Ragi and saamai crops in the state. Exotic crops like dates are also being cultivated by some farmers in the areas around Ariyakulam.
Kaveri river, Ponnaiyar river, Thoppaiyaru river, Chinnar river, Nagavathy river, Vanniyar river and Sanathkumara river are the rivers that drain the district.
See also
Tribal Health Initiative
List of districts of Tamil Nadu
References
External links
Dharmapuri District's Government website
Dharmapuri District's Support website
Dharmapuri- Idhu Namba Ooru Machi
1965 establishments in Madras State
Districts of Tamil Nadu
|
Oblivion Hymns is the sixth studio album by American ambient/post-rock band Hammock. It was released on November 26, 2013 by the band's own label, Hammock Music.
The album cover for Oblivion Hymns was created as a commissioned piece by artist Amy Pleasant.
Reception
Oblivion Hymns was met with positive critical reception and hit No. 17 on the Billboard Top Heatseekers Album Chart in 2013. Mike Diver at Clash magazine loved the album, stating that Oblivion Hymns is "…some of the most blissful music Clash has ever had the luxury of bathing in. [Hammock] has gone on to become one of the foremost purveyors of affecting ambient post-rock on the scene."
John Diliberto, the host of Echoes, listed Oblivion Hymns as No. 8 in the "25 Essential Echoes CDs for 2013" as an album that represented the best, most innovative aspects of the ambient music soundscape in the past year, stating that "Hammock goes deeper into their ambient chamber music with children’s choirs emerging out of swirling deeply processed guitars." Echoes also selected Oblivion Hymns as the CD of the Month for January 2014. Fred Pessaro, writing for Vice magazine's Noisey site, stated that the music of Oblivion Hymns is "expansive and dramatic ambient post-rock… the kind that would fit perfectly as a soundtrack to a film."
In his review for KEXP, Don Yates notes how Hammock "reworks their ambient post-rock sound on their latest album, moving in a more neo classical musical direction while adding a string quartet, horns, accordion, glockenspiel, a children’s choir and more to their layers of atmospheric shoegazer guitars for an often-transportive set of glacial instrumentals." KEXP also charted Oblivion Hymns at No. 7 in the KEXP Variety Music Chart for December 2013 and named the first track, "My Mind Was a Fog... My Heart Became a Bomb" as Song of the Day on January 8, 2014.
Raul Stanciu, writing for Sputnikmusic, felt that Oblivion Hymns is a "natural progression to [Hammock's] transcending discography. This neoclassical-meets-post-rock direction opens new doors…", while Elizabeth Klisiewicz, writing for The Big Takeover, described how songs on the record are "stuffed full of emotion, as [one] can imagine… standing on the edge of an infinite abyss, marvelling… Hammock’s music draws out such powerful emotions that one can be blinded with joy even while tears blur your vision. Perhaps it’s like witnessing a miracle, albeit a sonic one."
A feature for the October 2013 issue of Magnet stated that Oblivion Hymns "rewrites Hammock's script, bringing strings to the fore in a manner that would make composer Max Richter or Hammock's peers in A Winged Victory for the Sullen proud."
Vevo's Alt/Indie Spotlight highlighted Hammock's music video for "I Could Hear The Water at the Edge of All Things" via Vevo's Twitter feed, calling the video both "tragic" and "incredibly beautiful."
Track listing
References
Hammock (band) albums
Hammock Music albums
2013 albums
|
Epameinondas Deligiorgis (, ; 10 January 1829 – 14 May 1879) was a Greek lawyer, newspaper reporter and politician who served as the 20th Prime Minister of Greece.
He was born in Tripoli, Arcadia, the son of Dimitrios Deligeorgis, a politician from Missolonghi who participated in the Greek War of Independence. Deligiorgis studied law at the University of Athens and entered politics in 1854. He was not a proponent of the Megali Idea (Great Idea) and thought that a better solution to the Eastern Question would be to improve the condition of the Greeks living in Ottoman-controlled Macedonia, Epirus, Thrace and Asia Minor by liberalising the Ottoman Empire. Deligiorgis was the person who, on 10 October 1862, declared the end of the reign of King Otto and the convening of a national assembly. He died in Athens, aged 50.
References
Sources
Georg Veloudis: "Delijeorjis, Epaminondas", in Biographisches Lexikon zur Geschichte Südosteuropas. Vol. 1. Munich 1974, pp. 385–387.
1829 births
1879 deaths
19th-century prime ministers of Greece
19th-century Greek lawyers
National and Kapodistrian University of Athens alumni
Foreign ministers of Greece
Prime Ministers of Greece
Speakers of the Hellenic Parliament
Greek MPs 1862–1864
People from Tripoli, Greece
|
```xml
/**
*/
import * as assert from 'assert';
import { WindowsTerminal } from './windowsTerminal';
import { UnixTerminal } from './unixTerminal';
import { Terminal } from './terminal';
import { Socket } from 'net';
const terminalConstructor = (process.platform === 'win32') ? WindowsTerminal : UnixTerminal;
const SHELL = (process.platform === 'win32') ? 'cmd.exe' : '/bin/bash';
let terminalCtor: WindowsTerminal | UnixTerminal;
if (process.platform === 'win32') {
terminalCtor = require('./windowsTerminal');
} else {
terminalCtor = require('./unixTerminal');
}
/**
 * Minimal concrete Terminal used only to exercise the protected
 * `_checkType` helper from the tests below. Every abstract member throws,
 * since the tests never invoke them.
 */
class TestTerminal extends Terminal {
  // Public wrapper exposing the protected _checkType for assertions.
  public checkType<T>(name: string, value: T, type: string, allowArray: boolean = false): void {
    this._checkType(name, value, type, allowArray);
  }
  protected _write(data: string): void {
    throw new Error('Method not implemented.');
  }
  public resize(cols: number, rows: number): void {
    throw new Error('Method not implemented.');
  }
  public clear(): void {
    throw new Error('Method not implemented.');
  }
  public destroy(): void {
    throw new Error('Method not implemented.');
  }
  public kill(signal?: string): void {
    throw new Error('Method not implemented.');
  }
  public get process(): string {
    throw new Error('Method not implemented.');
  }
  public get master(): Socket {
    throw new Error('Method not implemented.');
  }
  public get slave(): Socket {
    throw new Error('Method not implemented.');
  }
}
describe('Terminal', () => {
  describe('constructor', () => {
    // The platform-specific constructor must reject non-string names.
    it('should do basic type checks', () => {
      assert.throws(
        () => new (<any>terminalCtor)('a', 'b', { 'name': {} }),
        'name must be a string (not a object)'
      );
    });
  });
  describe('checkType', () => {
    it('should throw for the wrong type', () => {
      const t = new TestTerminal();
      assert.doesNotThrow(() => t.checkType('foo', 'test', 'string'));
      assert.doesNotThrow(() => t.checkType('foo', 1, 'number'));
      assert.doesNotThrow(() => t.checkType('foo', {}, 'object'));
      assert.throws(() => t.checkType('foo', 'test', 'number'));
      assert.throws(() => t.checkType('foo', 1, 'object'));
      assert.throws(() => t.checkType('foo', {}, 'string'));
    });
    // With allowArray=true the element type is checked instead.
    it('should throw for wrong types within arrays', () => {
      const t = new TestTerminal();
      assert.doesNotThrow(() => t.checkType('foo', ['test'], 'string', true));
      assert.doesNotThrow(() => t.checkType('foo', [1], 'number', true));
      assert.doesNotThrow(() => t.checkType('foo', [{}], 'object', true));
      assert.throws(() => t.checkType('foo', ['test'], 'number', true));
      assert.throws(() => t.checkType('foo', [1], 'object', true));
      assert.throws(() => t.checkType('foo', [{}], 'string', true));
    });
  });
  describe('automatic flow control', () => {
    it('should respect ctor flow control options', () => {
      const pty = new terminalConstructor(SHELL, [], {handleFlowControl: true, flowControlPause: 'abc', flowControlResume: '123'});
      assert.equal(pty.handleFlowControl, true);
      assert.equal((pty as any)._flowControlPause, 'abc');
      assert.equal((pty as any)._flowControlResume, '123');
    });
    // TODO: I don't think this test ever worked due to pollUntil being used incorrectly
    // it('should do flow control automatically', async function(): Promise<void> {
    //   // Flow control doesn't work on Windows
    //   if (process.platform === 'win32') {
    //     return;
    //   }
    //   this.timeout(10000);
    //   const pty = new terminalConstructor(SHELL, [], {handleFlowControl: true, flowControlPause: 'PAUSE', flowControlResume: 'RESUME'});
    //   let read: string = '';
    //   pty.on('data', data => read += data);
    //   pty.on('pause', () => read += 'paused');
    //   pty.on('resume', () => read += 'resumed');
    //   pty.write('1');
    //   pty.write('PAUSE');
    //   pty.write('2');
    //   pty.write('RESUME');
    //   pty.write('3');
    //   await pollUntil(() => {
    //     return stripEscapeSequences(read).endsWith('1pausedresumed23');
    //   }, 100, 10);
    // });
  });
});
/**
 * Strips erase-in-line (`ESC[0K`) escape sequences from terminal output.
 * Bug fix: the regex previously lacked the `g` flag, so only the first
 * occurrence was removed despite the plural name.
 */
function stripEscapeSequences(data: string): string {
  return data.replace(/\u001b\[0K/g, '');
}
```
|
```php
<?php
namespace Illuminate\Contracts\Container;
use Exception;
use Psr\Container\ContainerExceptionInterface;
/**
 * Thrown by the container when resolution encounters a circular
 * dependency (e.g. A depends on B, which depends back on A).
 *
 * Implements the PSR-11 ContainerExceptionInterface so callers can catch
 * all container errors generically.
 */
class CircularDependencyException extends Exception implements ContainerExceptionInterface
{
    //
}
```
|
```xml
import * as React from 'react';
import { PersonaCoin } from '@fluentui/react-experiments';
import { Stack, Text } from '@fluentui/react';
import { PersonaTestImages } from '@fluentui/react-experiments/lib/common/TestImages';
// Spacing tokens (childrenGap values, in px) shared by the Stacks below.
const tokens = {
  sectionStack: {
    childrenGap: 32,
  },
  headingStack: {
    childrenGap: 16,
  },
  personaCoinStack: {
    childrenGap: 12,
  },
};
/**
 * Example component demonstrating PersonaCoin at every supported size
 * (10–100px, initials-only and with an image/presence badge) and with
 * custom coin/initials colors.
 */
export class PersonaCoinSizeAndColorExample extends React.Component<{}, {}> {
  public render(): JSX.Element {
    return (
      <Stack tokens={tokens.sectionStack}>
        <Stack tokens={tokens.headingStack} padding={8}>
          <Stack tokens={tokens.personaCoinStack}>
            <Text>Sizes</Text>
            {/* Initials-only coins across all sizes. */}
            <Stack horizontal disableShrink tokens={tokens.personaCoinStack}>
              <PersonaCoin text="Kevin Jameson" size={10} />
              <PersonaCoin text="Kevin Jameson" size={24} />
              <PersonaCoin text="Kevin Jameson" size={28} />
              <PersonaCoin text="Kevin Jameson" size={40} />
              <PersonaCoin text="Kevin Jameson" size={48} />
              <PersonaCoin text="Kevin Jameson" size={56} />
              <PersonaCoin text="Kevin Jameson" size={72} />
              <PersonaCoin text="Kevin Jameson" size={100} />
            </Stack>
            {/* Image-backed coins; every other coin adds a presence badge. */}
            <Stack horizontal disableShrink tokens={tokens.personaCoinStack}>
              <PersonaCoin text="Kevin Jameson" size={10} imageUrl={PersonaTestImages.personMale} />
              <PersonaCoin text="Kevin Jameson" size={24} imageUrl={PersonaTestImages.personMale} presence={1} />
              <PersonaCoin text="Kevin Jameson" size={28} imageUrl={PersonaTestImages.personMale} />
              <PersonaCoin text="Kevin Jameson" size={40} imageUrl={PersonaTestImages.personMale} presence={2} />
              <PersonaCoin text="Kevin Jameson" size={48} imageUrl={PersonaTestImages.personMale} />
              <PersonaCoin text="Kevin Jameson" size={56} imageUrl={PersonaTestImages.personMale} presence={3} />
              <PersonaCoin text="Kevin Jameson" size={72} imageUrl={PersonaTestImages.personMale} />
              <PersonaCoin text="Kevin Jameson" size={100} imageUrl={PersonaTestImages.personMale} presence={4} />
            </Stack>
          </Stack>
          <Stack tokens={tokens.personaCoinStack}>
            <Text>Custom colors</Text>
            <Stack horizontal disableShrink tokens={tokens.personaCoinStack}>
              <PersonaCoin text="Kevin Jameson" coinColor="red" initialsColor="black" />
              <PersonaCoin text="Kevin Jameson" coinColor="beige" initialsColor="black" />
              <PersonaCoin text="Kevin Jameson" coinColor="blue" />
              <PersonaCoin text="Kevin Jameson" coinColor="orange" />
            </Stack>
          </Stack>
        </Stack>
      </Stack>
    );
  }
}
```
|
The bantamweight class was one of the events contested at the 1st AIBA African Olympic Boxing Qualifying Tournament.
List of boxers
Medalists
Results
Preliminary round
Quarterfinal Round
Semifinal Round
3rd place Round
Final Round
Qualification to Olympic games
References
AIBA
AIBA African 2008 Olympic Qualifying Tournament
|
```xml
// This file is required by karma.conf.js and loads recursively all the .spec and framework files
// NOTE: zone.js/testing must be imported before any Angular testing utilities.
import 'zone.js/testing';
import { getTestBed } from '@angular/core/testing';
import {
  BrowserDynamicTestingModule,
  platformBrowserDynamicTesting,
} from '@angular/platform-browser-dynamic/testing';
// First, initialize the Angular testing environment.
getTestBed().initTestEnvironment(BrowserDynamicTestingModule, platformBrowserDynamicTesting());
```
|
```go
/*
path_to_url
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// +genclient
// +genclient:nonNamespaced
// +k8s:prerelease-lifecycle-gen:introduced=1.26
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// ClusterTrustBundle is a cluster-scoped container for X.509 trust anchors
// (root certificates).
//
// ClusterTrustBundle objects are considered to be readable by any authenticated
// user in the cluster, because they can be mounted by pods using the
// `clusterTrustBundle` projection. All service accounts have read access to
// ClusterTrustBundles by default. Users who only have namespace-level access
// to a cluster can read ClusterTrustBundles by impersonating a serviceaccount
// that they have access to.
//
// It can be optionally associated with a particular signer, in which case it
// contains one valid set of trust anchors for that signer. Signers may have
// multiple associated ClusterTrustBundles; each is an independent set of trust
// anchors for that signer. Admission control is used to enforce that only users
// with permissions on the signer can create or modify the corresponding bundle.
type ClusterTrustBundle struct {
	metav1.TypeMeta `json:",inline"`
	// metadata contains the object metadata.
	// +optional
	metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// spec contains the signer (if any) and trust anchors.
	Spec ClusterTrustBundleSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
}
// ClusterTrustBundleSpec contains the signer and trust anchors.
type ClusterTrustBundleSpec struct {
	// signerName indicates the associated signer, if any.
	//
	// In order to create or update a ClusterTrustBundle that sets signerName,
	// you must have the following cluster-scoped permission:
	// group=certificates.k8s.io resource=signers resourceName=<the signer name>
	// verb=attest.
	//
	// If signerName is not empty, then the ClusterTrustBundle object must be
	// named with the signer name as a prefix (translating slashes to colons).
	// For example, for the signer name `example.com/foo`, valid
	// ClusterTrustBundle object names include `example.com:foo:abc` and
	// `example.com:foo:v1`.
	//
	// If signerName is empty, then the ClusterTrustBundle object's name must
	// not have such a prefix.
	//
	// List/watch requests for ClusterTrustBundles can filter on this field
	// using a `spec.signerName=NAME` field selector.
	//
	// +optional
	SignerName string `json:"signerName,omitempty" protobuf:"bytes,1,opt,name=signerName"`
	// trustBundle contains the individual X.509 trust anchors for this
	// bundle, as a PEM bundle of PEM-wrapped, DER-formatted X.509 certificates.
	//
	// The data must consist only of PEM certificate blocks that parse as valid
	// X.509 certificates. Each certificate must include a basic constraints
	// extension with the CA bit set. The API server will reject objects that
	// contain duplicate certificates, or that use PEM block headers.
	//
	// Users of ClusterTrustBundles, including Kubelet, are free to reorder and
	// deduplicate certificate blocks in this file according to their own logic,
	// as well as to drop PEM block headers and inter-block data.
	TrustBundle string `json:"trustBundle" protobuf:"bytes,2,opt,name=trustBundle"`
}
// +k8s:prerelease-lifecycle-gen:introduced=1.26
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// ClusterTrustBundleList is a collection of ClusterTrustBundle objects.
type ClusterTrustBundleList struct {
	metav1.TypeMeta `json:",inline"`
	// metadata contains the list metadata.
	//
	// +optional
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
	// items is a collection of ClusterTrustBundle objects.
	Items []ClusterTrustBundle `json:"items" protobuf:"bytes,2,rep,name=items"`
}
```
|
Ben Cunnington may refer to:
Ben Cunnington (archaeologist) (1861–1950), British archaeologist
Ben Cunnington (footballer) (born 1991), Australian rules footballer
See also
Cunnington (surname)
|
The men's light middleweight event was part of the boxing programme at the 1972 Summer Olympics. The weight class allowed boxers of up to 71 kilograms to compete. The competition was held from 28 August to 10 September 1972. 34 boxers from 34 nations competed.
Medalists
Results
The following boxers took part in the event:
First round
Rolando Garbey (CUB) def. Ricky Barnor (GHA), 5:0
Svetomir Belic (YUG) def. Oumar Fall (SEN), 4:1
Second round
Anthony Richardson (HOL) def. Svetomir Belic (YUG), 3:2
Loucif Hanmani (ALG) def. José Antonio Colon (PUR), 5:0
Alan Minter (GBR) def. Reginald Ford (GUY), KO-2
Valeri Tregubov (URS) def. Reggie Jones (USA), 3:2
Reggie Jones was controversially eliminated in the second round of the light middleweight division (–71 kg) by Valeri Tregubov of the Soviet Union, in a fight that Jones was widely considered to have won.
Evengelos Oikonomakos (GRE) def. Nicolas Aquilino (PHI), 5:0
Dieter Kottysch (FRG) def. Bonifacio Avila (COL), TKO-2
Mohamed Majeri (TUN) def. Issoufou Habou (NIG), 5:0
Alan Jenkinson (AUS) def. Michel Belliard (FRA), 4:1
Mikko Saarinen (FIN) def. David Attan (KEN), TKO-2
Peter Tiepold (GDR) def. Ion Györfi (ROU), 4:1
Christopher Elliott (IRL) def. Farouk Kesrouan (LEB), 5:0
Emeterio Villanueva (MEX) def. Alfredo Lemus (VEN), 4:1
Wiesław Rudkowski (POL) def. Antonio Castellini (ITA), 5:0
Nayden Stanchev (BUL) def. John Opio (UGA), 3:2
Rolando Garbey (CUB) def. Franz Csandl (AUT), 5:0
Jae Keun-Lim (KOR) def. Namchal Tsendaiush (MGL), 3:2
Third round
Loucif Hanmani (ALG) def. Anthony Richardson (HOL), TKO-2
Alan Minter (GBR) def. Valeri Tregubov (URS), 5:0
Dieter Kottysch (FRG) def. Evengelos Oikonomakos (GRE), 5:0
Mohamed Majeri (TUN) def. Alan Jenkinson (AUS), 5:0
Peter Tiepold (GDR) def. Mikko Saarinen (FIN), 5:0
Emeterio Villanueva (MEX) def. Christopher Elliott (IRL), TKO-3
Wiesław Rudkowski (POL) def. Nayden Stanchev (BUL), 5:0
Rolando Garbey (CUB) def. Jae Keun-Lim (KOR), TKO-2
Quarterfinals
Alan Minter (GBR) def. Loucif Hanmani (ALG), 4:1
Dieter Kottysch (FRG) def. Mohamed Majeri (TUN), 5:0
Peter Tiepold (GDR) def. Emeterio Villanueva (MEX), 5:0
Wiesław Rudkowski (POL) def. Rolando Garbey (CUB), 4:1
Semifinals
Dieter Kottysch (FRG) def. Alan Minter (GBR), 3:2
Wiesław Rudkowski (POL) def. Peter Tiepold (GDR), 4:1
Final
Dieter Kottysch (FRG) def. Wiesław Rudkowski (POL), 3:2
References
Light Middleweight
|
```java
/*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
/**
*
*/
package org.dromara.maxkey.util;
import java.io.Serializable;
import org.dromara.maxkey.crypto.HexUtils;
/**
 * ObjectTransformer<br>
 * serialize &amp; deserialize<br>
 * object serialize to ByteArray, and ByteArray deserialize to object<br>
 * object serialize to HEX String, and HEX String deserialize to object<br>
 * @version 2.0
 * @since 1.6
 * @author Crystal.Sea
 */
public class ObjectTransformer {
    /**
     * Serialize a Serializable object to its HEX string form.
     * @param s the object to serialize
     * @return the HEX-encoded serialized bytes
     */
    public static final String serialize(Serializable s){
        return HexUtils.hex2String(SerializationUtils.serialize(s));
    }
    /**
     * Deserialize a HEX string (as produced by {@link #serialize(Serializable)})
     * back into an object.
     * @param hex the HEX string to decode
     * @param <T> the expected type of the decoded object
     * @return the deserialized object
     */
    public static final <T> T deserialize(String hex) {
        return SerializationUtils.deserialize(HexUtils.hex2Bytes(hex));
    }
}
```
|
```objective-c
/* Exception handling and frame unwind runtime interface routines.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
the Free Software Foundation; either version 2, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA. */
/* As a special exception, if you include this header file into source
files compiled by GCC, this header file does not by itself cause
the resulting executable to be covered by the GNU General Public
reasons why the executable file might be covered by the GNU General
/* This is derived from the C++ ABI for IA-64. Where we diverge
for cross-architecture compatibility are noted with "@@@". */
#ifndef _UNWIND_H
#define _UNWIND_H
#ifndef HIDE_EXPORTS
#pragma GCC visibility push(default)
#endif
#ifdef __cplusplus
extern "C" {
#endif
/* Level 1: Base ABI */
/* @@@ The IA-64 ABI uses uint64 throughout. Most places this is
inefficient for 32-bit and smaller machines. */
typedef unsigned _Unwind_Word __attribute__((__mode__(__word__)));
typedef signed _Unwind_Sword __attribute__((__mode__(__word__)));
#if defined(__ia64__) && defined(__hpux__)
typedef unsigned _Unwind_Ptr __attribute__((__mode__(__word__)));
#else
typedef unsigned _Unwind_Ptr __attribute__((__mode__(__pointer__)));
#endif
typedef unsigned _Unwind_Internal_Ptr __attribute__((__mode__(__pointer__)));
/* @@@ The IA-64 ABI uses a 64-bit word to identify the producer and
consumer of an exception. We'll go along with this for now even on
32-bit machines. We'll need to provide some other option for
16-bit machines and for machines with > 8 bits per byte. */
typedef unsigned _Unwind_Exception_Class __attribute__((__mode__(__DI__)));
/* The unwind interface uses reason codes in several contexts to
identify the reasons for failures or other actions. */
typedef enum
{
_URC_NO_REASON = 0,
_URC_FOREIGN_EXCEPTION_CAUGHT = 1,
_URC_FATAL_PHASE2_ERROR = 2,
_URC_FATAL_PHASE1_ERROR = 3,
_URC_NORMAL_STOP = 4,
_URC_END_OF_STACK = 5,
_URC_HANDLER_FOUND = 6,
_URC_INSTALL_CONTEXT = 7,
_URC_CONTINUE_UNWIND = 8
} _Unwind_Reason_Code;
/* The unwind interface uses a pointer to an exception header object
as its representation of an exception being thrown. In general, the
full representation of an exception object is language- and
implementation-specific, but it will be prefixed by a header
understood by the unwind interface. */
struct _Unwind_Exception;
typedef void (*_Unwind_Exception_Cleanup_Fn) (_Unwind_Reason_Code,
struct _Unwind_Exception *);
struct _Unwind_Exception
{
_Unwind_Exception_Class exception_class;
_Unwind_Exception_Cleanup_Fn exception_cleanup;
_Unwind_Word private_1;
_Unwind_Word private_2;
/* @@@ The IA-64 ABI says that this structure must be double-word aligned.
Taking that literally does not make much sense generically. Instead we
provide the maximum alignment required by any type for the machine. */
} __attribute__((__aligned__));
/* The ACTIONS argument to the personality routine is a bitwise OR of one
or more of the following constants. */
typedef int _Unwind_Action;
#define _UA_SEARCH_PHASE 1
#define _UA_CLEANUP_PHASE 2
#define _UA_HANDLER_FRAME 4
#define _UA_FORCE_UNWIND 8
#define _UA_END_OF_STACK 16
/* This is an opaque type used to refer to a system-specific data
structure used by the system unwinder. This context is created and
destroyed by the system, and passed to the personality routine
during unwinding. */
struct _Unwind_Context;
/* Raise an exception, passing along the given exception object. */
extern _Unwind_Reason_Code _Unwind_RaiseException (struct _Unwind_Exception *);
/* Raise an exception for forced unwinding. */
typedef _Unwind_Reason_Code (*_Unwind_Stop_Fn)
(int, _Unwind_Action, _Unwind_Exception_Class,
struct _Unwind_Exception *, struct _Unwind_Context *, void *);
extern _Unwind_Reason_Code _Unwind_ForcedUnwind (struct _Unwind_Exception *,
_Unwind_Stop_Fn,
void *);
/* Helper to invoke the exception_cleanup routine. */
extern void _Unwind_DeleteException (struct _Unwind_Exception *);
/* Resume propagation of an existing exception. This is used after
e.g. executing cleanup code, and not to implement rethrowing. */
extern void _Unwind_Resume (struct _Unwind_Exception *);
/* @@@ Resume propagation of an FORCE_UNWIND exception, or to rethrow
a normal exception that was handled. */
extern _Unwind_Reason_Code _Unwind_Resume_or_Rethrow (struct _Unwind_Exception *);
/* @@@ Use unwind data to perform a stack backtrace. The trace callback
is called for every stack frame in the call chain, but no cleanup
actions are performed. */
typedef _Unwind_Reason_Code (*_Unwind_Trace_Fn)
(struct _Unwind_Context *, void *);
extern _Unwind_Reason_Code _Unwind_Backtrace (_Unwind_Trace_Fn, void *);
/* These functions are used for communicating information about the unwind
context (i.e. the unwind descriptors and the user register state) between
the unwind library and the personality routine and landing pad. Only
selected registers maybe manipulated. */
extern _Unwind_Word _Unwind_GetGR (struct _Unwind_Context *, int);
extern void _Unwind_SetGR (struct _Unwind_Context *, int, _Unwind_Word);
extern _Unwind_Ptr _Unwind_GetIP (struct _Unwind_Context *);
extern _Unwind_Ptr _Unwind_GetIPInfo (struct _Unwind_Context *, int *);
extern void _Unwind_SetIP (struct _Unwind_Context *, _Unwind_Ptr);
/* @@@ Retrieve the CFA of the given context. */
extern _Unwind_Word _Unwind_GetCFA (struct _Unwind_Context *);
extern void *_Unwind_GetLanguageSpecificData (struct _Unwind_Context *);
extern _Unwind_Ptr _Unwind_GetRegionStart (struct _Unwind_Context *);
/* The personality routine is the function in the C++ (or other language)
runtime library which serves as an interface between the system unwind
library and language-specific exception handling semantics. It is
specific to the code fragment described by an unwind info block, and
it is always referenced via the pointer in the unwind info block, and
hence it has no ABI-specified name.
Note that this implies that two different C++ implementations can
use different names, and have different contents in the language
specific data area. Moreover, that the language specific data
area contains no version info because name of the function invoked
provides more effective versioning by detecting at link time the
lack of code to handle the different data format. */
typedef _Unwind_Reason_Code (*_Unwind_Personality_Fn)
(int, _Unwind_Action, _Unwind_Exception_Class,
struct _Unwind_Exception *, struct _Unwind_Context *);
/* @@@ The following alternate entry points are for setjmp/longjmp
based unwinding. */
struct SjLj_Function_Context;
extern void _Unwind_SjLj_Register (struct SjLj_Function_Context *);
extern void _Unwind_SjLj_Unregister (struct SjLj_Function_Context *);
extern _Unwind_Reason_Code _Unwind_SjLj_RaiseException
(struct _Unwind_Exception *);
extern _Unwind_Reason_Code _Unwind_SjLj_ForcedUnwind
(struct _Unwind_Exception *, _Unwind_Stop_Fn, void *);
extern void _Unwind_SjLj_Resume (struct _Unwind_Exception *);
extern _Unwind_Reason_Code _Unwind_SjLj_Resume_or_Rethrow (struct _Unwind_Exception *);
/* @@@ The following provide access to the base addresses for text
and data-relative addressing in the LDSA. In order to stay link
compatible with the standard ABI for IA-64, we inline these. */
#ifdef __ia64__
#include <stdlib.h>
static inline _Unwind_Ptr
_Unwind_GetDataRelBase (struct _Unwind_Context *_C)
{
/* The GP is stored in R1. */
return _Unwind_GetGR (_C, 1);
}
static inline _Unwind_Ptr
_Unwind_GetTextRelBase (struct _Unwind_Context *_C __attribute__ ((__unused__)))
{
abort ();
return 0;
}
/* @@@ Retrieve the Backing Store Pointer of the given context. */
extern _Unwind_Word _Unwind_GetBSP (struct _Unwind_Context *);
#else
extern _Unwind_Ptr _Unwind_GetDataRelBase (struct _Unwind_Context *);
extern _Unwind_Ptr _Unwind_GetTextRelBase (struct _Unwind_Context *);
#endif
/* @@@ Given an address, return the entry point of the function that
contains it. */
extern void * _Unwind_FindEnclosingFunction (void *pc);
#ifdef __cplusplus
}
#endif
#ifndef HIDE_EXPORTS
#pragma GCC visibility pop
#endif
#endif /* unwind.h */
```
|
```objective-c
/*
* VC-1 and WMV3 decoder - DSP functions
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
*
* You should have received a copy of the GNU Lesser General Public
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* VC-1 and WMV3 decoder
*
*/
#ifndef AVCODEC_VC1DSP_H
#define AVCODEC_VC1DSP_H
#include "hpeldsp.h"
#include "h264chroma.h"
typedef void (*vc1op_pixels_func)(uint8_t *block/*align width (8 or 16)*/, const uint8_t *pixels/*align 1*/, ptrdiff_t line_size, int h);
typedef struct VC1DSPContext {
    /* vc1 functions */
    /* inverse transforms; the *_dc variants handle blocks where only the DC
     * coefficient is present */
    void (*vc1_inv_trans_8x8)(int16_t *b);
    void (*vc1_inv_trans_8x4)(uint8_t *dest, int line_size, int16_t *block);
    void (*vc1_inv_trans_4x8)(uint8_t *dest, int line_size, int16_t *block);
    void (*vc1_inv_trans_4x4)(uint8_t *dest, int line_size, int16_t *block);
    void (*vc1_inv_trans_8x8_dc)(uint8_t *dest, int line_size, int16_t *block);
    void (*vc1_inv_trans_8x4_dc)(uint8_t *dest, int line_size, int16_t *block);
    void (*vc1_inv_trans_4x8_dc)(uint8_t *dest, int line_size, int16_t *block);
    void (*vc1_inv_trans_4x4_dc)(uint8_t *dest, int line_size, int16_t *block);
    /* overlap smoothing filters (vertical/horizontal edges) */
    void (*vc1_v_overlap)(uint8_t *src, int stride);
    void (*vc1_h_overlap)(uint8_t *src, int stride);
    void (*vc1_v_s_overlap)(int16_t *top,  int16_t *bottom);
    void (*vc1_h_s_overlap)(int16_t *left, int16_t *right);
    /* in-loop deblocking filters for 4/8/16-pixel edges
     * (pq: quantizer parameter — presumably PQUANT, confirm in vc1.c) */
    void (*vc1_v_loop_filter4)(uint8_t *src, int stride, int pq);
    void (*vc1_h_loop_filter4)(uint8_t *src, int stride, int pq);
    void (*vc1_v_loop_filter8)(uint8_t *src, int stride, int pq);
    void (*vc1_h_loop_filter8)(uint8_t *src, int stride, int pq);
    void (*vc1_v_loop_filter16)(uint8_t *src, int stride, int pq);
    void (*vc1_h_loop_filter16)(uint8_t *src, int stride, int pq);
    /* put 8x8 block with bicubic interpolation and quarterpel precision
     * last argument is actually round value instead of height
     */
    vc1op_pixels_func put_vc1_mspel_pixels_tab[2][16];
    vc1op_pixels_func avg_vc1_mspel_pixels_tab[2][16];
    /* This is really one func used in VC-1 decoding */
    h264_chroma_mc_func put_no_rnd_vc1_chroma_pixels_tab[3];
    h264_chroma_mc_func avg_no_rnd_vc1_chroma_pixels_tab[3];
    /* Windows Media Image functions */
    void (*sprite_h)(uint8_t *dst, const uint8_t *src, int offset, int advance, int count);
    void (*sprite_v_single)(uint8_t *dst, const uint8_t *src1a, const uint8_t *src1b, int offset, int width);
    void (*sprite_v_double_noscale)(uint8_t *dst, const uint8_t *src1a, const uint8_t *src2a, int alpha, int width);
    void (*sprite_v_double_onescale)(uint8_t *dst, const uint8_t *src1a, const uint8_t *src1b, int offset1,
                                     const uint8_t *src2a, int alpha, int width);
    void (*sprite_v_double_twoscale)(uint8_t *dst, const uint8_t *src1a, const uint8_t *src1b, int offset1,
                                     const uint8_t *src2a, const uint8_t *src2b, int offset2,
                                     int alpha, int width);
    /**
     * Search buf from the start for up to size bytes. Return the index
     * of a zero byte, or >= size if not found. Ideally, use lookahead
     * to filter out any zero bytes that are known to not be followed by
     * one or more further zero bytes and a one byte.
     */
    int (*startcode_find_candidate)(const uint8_t *buf, int size);
} VC1DSPContext;
void ff_vc1dsp_init(VC1DSPContext* c);
void ff_vc1dsp_init_aarch64(VC1DSPContext* dsp);
void ff_vc1dsp_init_arm(VC1DSPContext* dsp);
void ff_vc1dsp_init_ppc(VC1DSPContext *c);
void ff_vc1dsp_init_x86(VC1DSPContext* dsp);
#endif /* AVCODEC_VC1DSP_H */
```
|
```go
/*
path_to_url
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package watch
import (
"sync"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/tools/cache"
)
// newEventProcessor builds an eventProcessor that delivers buffered events
// into out. Call run() to start delivery and stop() to shut it down.
func newEventProcessor(out chan<- watch.Event) *eventProcessor {
	p := &eventProcessor{}
	p.out = out
	p.cond = sync.NewCond(&sync.Mutex{})
	p.done = make(chan struct{})
	return p
}
// eventProcessor buffers events and writes them to an out chan when a reader
// is waiting. Because of the requirement to buffer events, it synchronizes
// input with a condition, and synchronizes output with a channel. It needs to
// be able to yield while both waiting on an input condition and while blocked
// on writing to the output channel.
type eventProcessor struct {
	out chan<- watch.Event // destination channel consumed by the proxy watcher
	cond *sync.Cond // guards buff; signaled by push() and stop()
	buff []watch.Event // pending events awaiting delivery
	done chan struct{} // closed by stop() to terminate run()
}
// run repeatedly drains the buffer and forwards each batch to the output
// channel, returning once the processor has been stopped.
func (e *eventProcessor) run() {
	for {
		e.writeBatch(e.takeBatch())
		if e.stopped() {
			return
		}
	}
}
// takeBatch blocks until at least one event is buffered or the processor is
// stopped, then returns the accumulated events (possibly empty after stop)
// and resets the buffer.
func (e *eventProcessor) takeBatch() []watch.Event {
	e.cond.L.Lock()
	defer e.cond.L.Unlock()
	for len(e.buff) == 0 && !e.stopped() {
		e.cond.Wait()
	}
	batch := e.buff
	e.buff = nil
	return batch
}
// writeBatch forwards events to the output channel one at a time, bailing
// out early if the processor is stopped while a send is blocked.
func (e *eventProcessor) writeBatch(events []watch.Event) {
	for i := range events {
		select {
		case <-e.done:
			return
		case e.out <- events[i]:
		}
	}
}
// push appends an event to the buffer and wakes a goroutine parked in
// takeBatch. The signal is issued while the lock is still held, matching
// the original defer ordering.
func (e *eventProcessor) push(event watch.Event) {
	e.cond.L.Lock()
	e.buff = append(e.buff, event)
	e.cond.Signal()
	e.cond.L.Unlock()
}
// stopped reports, without blocking, whether stop() has been called.
func (e *eventProcessor) stopped() bool {
	select {
	case <-e.done:
	default:
		return false
	}
	return true
}
// stop terminates the processor: closing done unblocks writeBatch/run, and
// the Signal wakes a goroutine parked in takeBatch so it can observe stop.
func (e *eventProcessor) stop() {
	close(e.done)
	e.cond.Signal()
}
// NewIndexerInformerWatcher will create an IndexerInformer and wrap it into watch.Interface
// so you can use it anywhere where you'd have used a regular Watcher returned from Watch method.
// It also returns a channel you can use to wait for the informers to fully shutdown.
func NewIndexerInformerWatcher(lw cache.ListerWatcher, objType runtime.Object) (cache.Indexer, cache.Controller, watch.Interface, <-chan struct{}) {
	ch := make(chan watch.Event)
	w := watch.NewProxyWatcher(ch)
	// The processor decouples informer callbacks (push) from the consumer
	// reading ch, so a slow reader never blocks the informer's handlers.
	e := newEventProcessor(ch)
	indexer, informer := cache.NewIndexerInformer(lw, objType, 0, cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			e.push(watch.Event{
				Type: watch.Added,
				Object: obj.(runtime.Object),
			})
		},
		UpdateFunc: func(old, new interface{}) {
			e.push(watch.Event{
				Type: watch.Modified,
				Object: new.(runtime.Object),
			})
		},
		DeleteFunc: func(obj interface{}) {
			staleObj, stale := obj.(cache.DeletedFinalStateUnknown)
			if stale {
				// We have no means of passing the additional information down using
				// watch API based on watch.Event but the caller can filter such
				// objects by checking if metadata.deletionTimestamp is set
				obj = staleObj.Obj
			}
			e.push(watch.Event{
				Type: watch.Deleted,
				Object: obj.(runtime.Object),
			})
		},
	}, cache.Indexers{})
	go e.run()
	// doneCh closes only after the informer has fully stopped; shutdown is
	// driven by the proxy watcher's stop channel, and e.stop() runs before
	// doneCh closes so the processor goroutine exits first.
	doneCh := make(chan struct{})
	go func() {
		defer close(doneCh)
		defer e.stop()
		informer.Run(w.StopChan())
	}()
	return indexer, informer, w, doneCh
}
```
|
The Armenian Genocide: A Complete History is a 2006 book by Raymond Kévorkian that aims to give a comprehensive account of the Armenian genocide. The book was originally published in French as Le Génocide des Arméniens. It was published in English in 2011 by I.B. Tauris.
References
2006 non-fiction books
I.B. Tauris books
History books about the Armenian genocide
|
```smalltalk
using System;
using System.Runtime.InteropServices;
using Foundation;
using ObjCRuntime;
using CoreMedia;
using Speech;
using SoundAnalysis;
#if __MACCATALYST__
using ARFaceAnchor = Foundation.NSObject;
#else
using ARKit;
#endif
#if !NET
using NativeHandle = System.IntPtr;
#endif
namespace SensorKit {
// helpers for code generation
interface NSUnitDuration : NSUnit { }
interface NSUnitIlluminance : NSUnit { }
interface NSUnitLength : NSUnit { }
interface NSUnitElectricPotentialDifference : NSUnit { }
interface NSUnitFrequency : NSUnit { }
interface NSUnitAcceleration : NSUnit { }
interface NSUnitTemperature : NSUnit { }
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
enum SRAmbientLightSensorPlacement : long {
Unknown,
FrontTop,
FrontBottom,
FrontRight,
FrontLeft,
FrontTopRight,
FrontTopLeft,
FrontBottomRight,
FrontBottomLeft,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
// Binding for SensorKit's native SRAuthorizationStatus enum: whether the
// user's sensor-data authorization decision has been made, granted, or denied
// (values presumably mirror the native enum — confirm against the Apple SDK).
public enum SRAuthorizationStatus : long {
	NotDetermined = 0,
	Authorized,
	Denied,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
enum SRCrownOrientation : long {
Left,
Right,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
enum SRDeletionReason : long {
UserInitiated,
LowDiskSpace,
AgeLimit,
NoInterestedClients,
SystemInitiated,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
[ErrorDomain ("SRErrorDomain")]
enum SRErrorCode : long {
InvalidEntitlement,
NoAuthorization,
DataInaccessible,
FetchRequestInvalid,
PromptDeclined,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
enum SRLocationCategory : long {
Unknown,
Home,
Work,
School,
Gym,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
enum SRNotificationEvent : long {
Unknown,
Received,
DefaultAction,
SupplementaryAction,
Clear,
NotificationCenterClearAll,
Removed,
Hide,
LongLook,
Silence,
AppLaunch,
Expired,
BannerPulldown,
TapCoalesce,
Deduped,
DeviceActivated,
DeviceUnlocked,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Native]
enum SRWristLocation : long {
Left,
Right,
}
[NoWatch, NoTV, NoMac, iOS (15, 0), MacCatalyst (15, 0)]
[Native]
public enum SRKeyboardMetricsSentimentCategory : long {
Absolutist,
Down,
Death,
Anxiety,
Anger,
Health,
Positive,
Sad,
LowEnergy,
Confused,
}
[NoWatch, NoTV, NoMac, iOS (15, 0), MacCatalyst (15, 0)]
[Native]
public enum SRTextInputSessionType : long {
Keyboard = 1,
ThirdPartyKeyboard,
Pencil,
Dictation,
}
[NoWatch, NoTV, NoMac, iOS (16, 4), MacCatalyst (16, 4)]
[Native]
public enum SRMediaEventType : long {
OnScreen = 1,
OffScreen,
}
[Flags, NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[Native]
public enum SRElectrocardiogramDataFlags : ulong {
None = 0x0,
SignalInvalid = 1uL << 0,
CrownTouched = 1uL << 1,
}
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[Native]
public enum SRElectrocardiogramLead : long {
RightArmMinusLeftArm = 1,
LeftArmMinusRightArm,
}
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[Native]
public enum SRElectrocardiogramSessionState : long {
Begin = 1,
Active,
End,
}
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[Native]
public enum SRElectrocardiogramSessionGuidance : long {
Guided = 1,
Unguided,
}
[Flags, NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[Native]
public enum SRFaceMetricsContext : ulong {
DeviceUnlock = 1uL << 0,
MessagingAppUsage = 1uL << 1,
}
[Flags, NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[Native]
public enum SRSpeechMetricsSessionFlags : ulong {
Default = 0x0,
BypassVoiceProcessing = (1uL << 0),
}
[Flags, NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[Native]
public enum SRWristTemperatureCondition : ulong {
None = 0x0,
OffWrist = 1uL << 0,
OnCharger = 1uL << 1,
InMotion = 1uL << 2,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
enum SRDeviceUsageCategory {
[Field ("SRDeviceUsageCategoryGames")]
Games,
[Field ("SRDeviceUsageCategoryBusiness")]
Business,
[Field ("SRDeviceUsageCategoryWeather")]
Weather,
[Field ("SRDeviceUsageCategoryUtilities")]
Utilities,
[Field ("SRDeviceUsageCategoryTravel")]
Travel,
[Field ("SRDeviceUsageCategorySports")]
Sports,
[Field ("SRDeviceUsageCategorySocialNetworking")]
SocialNetworking,
[Field ("SRDeviceUsageCategoryReference")]
Reference,
[Field ("SRDeviceUsageCategoryProductivity")]
Productivity,
[Field ("SRDeviceUsageCategoryPhotoAndVideo")]
PhotoAndVideo,
[Field ("SRDeviceUsageCategoryNews")]
News,
[Field ("SRDeviceUsageCategoryNavigation")]
Navigation,
[Field ("SRDeviceUsageCategoryMusic")]
Music,
[Field ("SRDeviceUsageCategoryLifestyle")]
Lifestyle,
[Field ("SRDeviceUsageCategoryHealthAndFitness")]
HealthAndFitness,
[Field ("SRDeviceUsageCategoryFinance")]
Finance,
[Field ("SRDeviceUsageCategoryEntertainment")]
Entertainment,
[Field ("SRDeviceUsageCategoryEducation")]
Education,
[Field ("SRDeviceUsageCategoryBooks")]
Books,
[Field ("SRDeviceUsageCategoryMedical")]
Medical,
[Field ("SRDeviceUsageCategoryNewsstand")]
Newsstand,
[Field ("SRDeviceUsageCategoryCatalogs")]
Catalogs,
[Field ("SRDeviceUsageCategoryKids")]
Kids,
[Field ("SRDeviceUsageCategoryMiscellaneous")]
Miscellaneous,
[Field ("SRDeviceUsageCategoryFoodAndDrink")]
FoodAndDrink,
[Field ("SRDeviceUsageCategoryDeveloperTools")]
DeveloperTools,
[Field ("SRDeviceUsageCategoryGraphicsAndDesign")]
GraphicsAndDesign,
[Field ("SRDeviceUsageCategoryShopping")]
Shopping,
[Field ("SRDeviceUsageCategoryStickers")]
Stickers,
}
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRAmbientLightSample {
[Export ("placement")]
SRAmbientLightSensorPlacement Placement { get; }
[Export ("chromaticity")]
SRAmbientLightChromaticity Chromaticity { get; }
[Export ("lux", ArgumentSemantic.Copy)]
NSMeasurement<NSUnitIlluminance> Lux { get; }
}
// SensorKit binding: per-application usage data inside a device usage report (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRApplicationUsage {
// May be null when the app's bundle identifier is not reported.
[NullAllowed, Export ("bundleIdentifier")]
string BundleIdentifier { get; }
[Export ("usageTime")]
double /* NSTimeInterval */ UsageTime { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("reportApplicationIdentifier")]
string ReportApplicationIdentifier { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("textInputSessions", ArgumentSemantic.Copy)]
SRTextInputSession [] TextInputSessions { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("supplementalCategories", ArgumentSemantic.Copy)]
SRSupplementalCategory [] SupplementalCategories { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("relativeStartTime")]
double RelativeStartTime { get; }
}
// SensorKit binding: record describing a span of deleted sensor data (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRDeletionRecord : NSSecureCoding {
// Start/end are SRAbsoluteTime values, surfaced as raw doubles.
[Export ("startTime")]
double /* SRAbsoluteTime */ StartTime { get; }
[Export ("endTime")]
double /* SRAbsoluteTime */ EndTime { get; }
[Export ("reason")]
SRDeletionReason Reason { get; }
}
// SensorKit binding: identifies a device whose sensor data can be fetched (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRDevice : NSSecureCoding, NSCopying {
// The device the code is currently running on.
[Static]
[Export ("currentDevice")]
SRDevice CurrentDevice { get; }
[Export ("name")]
string Name { get; }
[Export ("model")]
string Model { get; }
[Export ("systemName")]
string SystemName { get; }
[Export ("systemVersion")]
string SystemVersion { get; }
[iOS (17, 0), MacCatalyst (17, 0)]
[Export ("productType")]
string ProductType { get; }
}
// SensorKit binding: aggregate device usage over a reporting interval (iOS 14+).
// The *ByCategory dictionaries are keyed by category-name NSStrings.
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRDeviceUsageReport {
[Export ("duration")]
double /* NSTimeInterval */ Duration { get; }
[Export ("applicationUsageByCategory", ArgumentSemantic.Copy)]
NSDictionary<NSString, NSArray<SRApplicationUsage>> ApplicationUsageByCategory { get; }
[Export ("notificationUsageByCategory", ArgumentSemantic.Copy)]
NSDictionary<NSString, NSArray<SRNotificationUsage>> NotificationUsageByCategory { get; }
[Export ("webUsageByCategory", ArgumentSemantic.Copy)]
NSDictionary<NSString, NSArray<SRWebUsage>> WebUsageByCategory { get; }
[Export ("totalScreenWakes")]
nint TotalScreenWakes { get; }
[Export ("totalUnlocks")]
nint TotalUnlocks { get; }
[Export ("totalUnlockDuration")]
double /* NSTimeInterval */ TotalUnlockDuration { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("version")]
string Version { get; }
}
// SensorKit binding: mutable time window + device filter for fetching samples (iOS 14+).
// Note: this type allows the default constructor (no DisableDefaultCtor).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
interface SRFetchRequest {
// From/To are SRAbsoluteTime values, surfaced as raw doubles.
[Export ("from")]
double /* SRAbsoluteTime */ From { get; set; }
[Export ("to")]
double /* SRAbsoluteTime */ To { get; set; }
[Export ("device", ArgumentSemantic.Strong)]
SRDevice Device { get; set; }
}
// SensorKit binding: a single fetched sample plus its timestamp (iOS 14+).
// Generic over the sample type; constrained to NSObject subclasses.
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRFetchResult<SampleType> : NSCopying where SampleType : NSObject {
[Export ("sample", ArgumentSemantic.Copy)]
SampleType Sample { get; }
[Export ("timestamp")]
double /* SRAbsoluteTime */ Timestamp { get; }
}
// SensorKit binding: distribution sample values for a keyboard metric (iOS 14+).
// Generic over the measurement unit (e.g. NSUnitLength, NSUnitDuration).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRKeyboardProbabilityMetric<UnitType> where UnitType : NSUnit {
[Export ("distributionSampleValues", ArgumentSemantic.Copy)]
NSMeasurement<UnitType> [] DistributionSampleValues { get; }
}
// SensorKit binding: per-session keyboard typing metrics (iOS 14+).
// Grouped below to mirror the native header categories (scalar, probability,
// positional, sentiment); every member maps 1:1 to the Export selector shown.
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRKeyboardMetrics {
[Export ("duration")]
double /* NSTimeInterval */ Duration { get; }
[Export ("keyboardIdentifier")]
string KeyboardIdentifier { get; }
[Export ("version")]
string Version { get; }
// Physical keyboard dimensions as length measurements.
[Export ("width")]
NSMeasurement<NSUnitLength> Width { get; }
[Export ("height")]
NSMeasurement<NSUnitLength> Height { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("inputModes", ArgumentSemantic.Copy)]
string [] InputModes { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("sessionIdentifiers", ArgumentSemantic.Copy)]
string [] SessionIdentifiers { get; }
// SRKeyboardMetrics_ScalarMetrics
[Export ("totalWords")]
nint TotalWords { get; }
[Export ("totalAlteredWords")]
nint TotalAlteredWords { get; }
[Export ("totalTaps")]
nint TotalTaps { get; }
[Export ("totalDrags")]
nint TotalDrags { get; }
[Export ("totalDeletes")]
nint TotalDeletes { get; }
[Export ("totalEmojis")]
nint TotalEmojis { get; }
[Export ("totalPaths")]
nint TotalPaths { get; }
[Export ("totalPathTime")]
double TotalPathTime { get; }
[Export ("totalPathLength")]
NSMeasurement<NSUnitLength> TotalPathLength { get; }
[Export ("totalAutoCorrections")]
nint TotalAutoCorrections { get; }
[Export ("totalSpaceCorrections")]
nint TotalSpaceCorrections { get; }
[Export ("totalRetroCorrections")]
nint TotalRetroCorrections { get; }
[Export ("totalTranspositionCorrections")]
nint TotalTranspositionCorrections { get; }
[Export ("totalInsertKeyCorrections")]
nint TotalInsertKeyCorrections { get; }
[Export ("totalSkipTouchCorrections")]
nint TotalSkipTouchCorrections { get; }
[Export ("totalNearKeyCorrections")]
nint TotalNearKeyCorrections { get; }
[Export ("totalSubstitutionCorrections")]
nint TotalSubstitutionCorrections { get; }
[Export ("totalHitTestCorrections")]
nint TotalHitTestCorrections { get; }
[Export ("totalTypingDuration")]
double TotalTypingDuration { get; }
// SRKeyboardMetrics_ProbabilityMetrics
// Each property below wraps a probability distribution; length metrics use
// NSUnitLength and timing metrics use NSUnitDuration.
[Export ("upErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> UpErrorDistance { get; }
[Export ("downErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> DownErrorDistance { get; }
[Export ("spaceUpErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> SpaceUpErrorDistance { get; }
[Export ("spaceDownErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> SpaceDownErrorDistance { get; }
[Export ("deleteUpErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> DeleteUpErrorDistance { get; }
[Export ("deleteDownErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> DeleteDownErrorDistance { get; }
[Export ("shortWordCharKeyUpErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> ShortWordCharKeyUpErrorDistance { get; }
[Export ("shortWordCharKeyDownErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> ShortWordCharKeyDownErrorDistance { get; }
[Export ("touchDownUp", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> TouchDownUp { get; }
[Export ("spaceTouchDownUp", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceTouchDownUp { get; }
[Export ("deleteTouchDownUp", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteTouchDownUp { get; }
[Export ("shortWordCharKeyTouchDownUp", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> ShortWordCharKeyTouchDownUp { get; }
[Export ("touchDownDown", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> TouchDownDown { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("touchUpDown", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> TouchUpDown { get; }
[Export ("charKeyToPrediction", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> CharKeyToPrediction { get; }
[Export ("shortWordCharKeyToCharKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> ShortWordCharKeyToCharKey { get; }
[Export ("charKeyToAnyTapKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> CharKeyToAnyTapKey { get; }
[Export ("anyTapToCharKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> AnyTapToCharKey { get; }
[Export ("spaceToCharKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToCharKey { get; }
[Export ("charKeyToSpaceKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> CharKeyToSpaceKey { get; }
[Export ("spaceToDeleteKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToDeleteKey { get; }
[Export ("deleteToSpaceKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteToSpaceKey { get; }
[Export ("spaceToSpaceKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToSpaceKey { get; }
[Export ("spaceToShiftKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToShiftKey { get; }
[Export ("spaceToPlaneChangeKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToPlaneChangeKey { get; }
[Export ("spaceToPredictionKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToPredictionKey { get; }
[Export ("deleteToCharKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteToCharKey { get; }
[Export ("charKeyToDelete", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> CharKeyToDelete { get; }
[Export ("deleteToDelete", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteToDelete { get; }
[Export ("deleteToShiftKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteToShiftKey { get; }
[Export ("deleteToPlaneChangeKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteToPlaneChangeKey { get; }
[Export ("anyTapToPlaneChangeKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> AnyTapToPlaneChangeKey { get; }
[Export ("planeChangeToAnyTap", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> PlaneChangeToAnyTap { get; }
[Export ("charKeyToPlaneChangeKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> CharKeyToPlaneChangeKey { get; }
[Export ("planeChangeKeyToCharKey", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> PlaneChangeKeyToCharKey { get; }
// Plain NSNumber array (not a probability metric wrapper).
[Export ("pathErrorDistanceRatio", ArgumentSemantic.Strong)]
NSNumber [] PathErrorDistanceRatio { get; }
[Export ("deleteToPath", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> DeleteToPath { get; }
[Export ("pathToDelete", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> PathToDelete { get; }
[Export ("spaceToPath", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> SpaceToPath { get; }
[Export ("pathToSpace", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> PathToSpace { get; }
[Export ("pathToPath", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> PathToPath { get; }
// SRKeyboardMetrics_PositionalMetrics
// These return arrays of per-position metrics rather than single values.
[Export ("longWordUpErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> [] LongWordUpErrorDistance { get; }
[Export ("longWordDownErrorDistance", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitLength> [] LongWordDownErrorDistance { get; }
[Export ("longWordTouchDownUp", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> [] LongWordTouchDownUp { get; }
[Export ("longWordTouchDownDown", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> [] LongWordTouchDownDown { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("longWordTouchUpDown", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> [] LongWordTouchUpDown { get; }
[Export ("deleteToDeletes", ArgumentSemantic.Strong)]
SRKeyboardProbabilityMetric<NSUnitDuration> [] DeleteToDeletes { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("pathTypingSpeed")]
double PathTypingSpeed { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("totalPathPauses")]
nint TotalPathPauses { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("totalPauses")]
nint TotalPauses { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("totalTypingEpisodes")]
nint TotalTypingEpisodes { get; }
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("typingSpeed")]
double TypingSpeed { get; }
// SRKeyboardMetrics_SentimentCounts
// Selector-taking methods (not properties): counts per sentiment category.
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("wordCountForSentimentCategory:")]
nint WordCount (SRKeyboardMetricsSentimentCategory category);
[iOS (15, 0), MacCatalyst (15, 0)]
[Export ("emojiCountForSentimentCategory:")]
nint EmojiCount (SRKeyboardMetricsSentimentCategory category);
}
// SensorKit binding: aggregate Messages usage over a reporting interval (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRMessagesUsageReport {
[Export ("duration")]
double /* NSTimeInterval */ Duration { get; }
[Export ("totalOutgoingMessages")]
nint TotalOutgoingMessages { get; }
[Export ("totalIncomingMessages")]
nint TotalIncomingMessages { get; }
[Export ("totalUniqueContacts")]
nint TotalUniqueContacts { get; }
}
// SensorKit binding: a notification event attributed to an app bundle (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRNotificationUsage {
// May be null when the originating bundle is not reported.
[NullAllowed, Export ("bundleIdentifier")]
string BundleIdentifier { get; }
[Export ("event")]
SRNotificationEvent Event { get; }
}
// SensorKit binding: aggregate phone-call usage over a reporting interval (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRPhoneUsageReport {
[Export ("duration")]
double /* NSTimeInterval */ Duration { get; }
[Export ("totalOutgoingCalls")]
nint TotalOutgoingCalls { get; }
[Export ("totalIncomingCalls")]
nint TotalIncomingCalls { get; }
[Export ("totalUniqueContacts")]
nint TotalUniqueContacts { get; }
[Export ("totalPhoneCallDuration")]
double /* NSTimeInterval */ TotalPhoneCallDuration { get; }
}
// Interface marker generated for the SRSensorReaderDelegate protocol; used as
// the strongly-typed Delegate property type on SRSensorReader.
interface ISRSensorReaderDelegate { }
// SensorKit binding: delegate protocol reporting fetch, recording, and
// authorization callbacks from an SRSensorReader (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
#if NET
[Protocol, Model]
#else
// Legacy builds keep the auto-generated model name for binary compatibility.
[Protocol, Model (AutoGeneratedName = true)]
#endif
[BaseType (typeof (NSObject))]
interface SRSensorReaderDelegate {
// Return value controls whether fetching continues (per the native selector's bool result).
[Export ("sensorReader:fetchingRequest:didFetchResult:")]
bool DidFetchResult (SRSensorReader reader, SRFetchRequest fetchRequest, SRFetchResult<NSObject> result);
[Export ("sensorReader:didCompleteFetch:")]
void DidCompleteFetch (SRSensorReader reader, SRFetchRequest fetchRequest);
[Export ("sensorReader:fetchingRequest:failedWithError:")]
void FetchingRequestFailed (SRSensorReader reader, SRFetchRequest fetchRequest, NSError error);
[Export ("sensorReader:didChangeAuthorizationStatus:")]
void DidChangeAuthorizationStatus (SRSensorReader reader, SRAuthorizationStatus authorizationStatus);
[Export ("sensorReaderWillStartRecording:")]
void WillStartRecording (SRSensorReader reader);
[Export ("sensorReader:startRecordingFailedWithError:")]
void StartRecordingFailed (SRSensorReader reader, NSError error);
[Export ("sensorReaderDidStopRecording:")]
void DidStopRecording (SRSensorReader reader);
[Export ("sensorReader:stopRecordingFailedWithError:")]
void StopRecordingFailed (SRSensorReader reader, NSError error);
[Export ("sensorReader:didFetchDevices:")]
void DidFetchDevices (SRSensorReader reader, SRDevice [] devices);
[Export ("sensorReader:fetchDevicesDidFailWithError:")]
void FetchDevicesFailed (SRSensorReader reader, NSError error);
}
// SensorKit binding: enum of available sensors; each member is backed by the
// native NSString constant named in its Field attribute.
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
enum SRSensor {
// Sentinel with no native constant backing it (Field is null on purpose).
[Field (null)]
Invalid = -1,
[Field ("SRSensorAmbientLightSensor")]
AmbientLightSensor,
[Field ("SRSensorAccelerometer")]
Accelerometer,
[Field ("SRSensorRotationRate")]
RotationRate,
[Field ("SRSensorVisits")]
Visits,
[Field ("SRSensorPedometerData")]
PedometerData,
[Field ("SRSensorDeviceUsageReport")]
DeviceUsageReport,
[Field ("SRSensorMessagesUsageReport")]
MessagesUsageReport,
[Field ("SRSensorPhoneUsageReport")]
PhoneUsageReport,
[Field ("SRSensorOnWristState")]
OnWristState,
[Field ("SRSensorKeyboardMetrics")]
KeyboardMetrics,
[iOS (15, 0), MacCatalyst (15, 0)]
[Field ("SRSensorSiriSpeechMetrics")]
SiriSpeechMetrics,
[iOS (15, 0), MacCatalyst (15, 0)]
[Field ("SRSensorTelephonySpeechMetrics")]
TelephonySpeechMetrics,
[iOS (15, 4), MacCatalyst (15, 4)]
[Field ("SRSensorAmbientPressure")]
AmbientPressure,
[iOS (16, 4), MacCatalyst (16, 4)]
[Field ("SRSensorMediaEvents")]
MediaEvents,
[iOS (17, 0), MacCatalyst (17, 0)]
[Field ("SRSensorFaceMetrics")]
FaceMetrics,
[iOS (17, 0), MacCatalyst (17, 0)]
[Field ("SRSensorHeartRate")]
HeartRate,
[iOS (17, 0), MacCatalyst (17, 0)]
[Field ("SRSensorOdometer")]
Odometer,
[iOS (17, 0), MacCatalyst (17, 0)]
[Field ("SRSensorWristTemperature")]
WristTemperature,
[iOS (17, 4), MacCatalyst (17, 4)]
[Field ("SRSensorElectrocardiogram")]
Electrocardiogram,
[iOS (17, 4), MacCatalyst (17, 4)]
[Field ("SRSensorPhotoplethysmogram")]
Photoplethysmogram,
}
// SensorKit binding: the reader used to record, authorize, and fetch sensor
// data; results are delivered through SRSensorReaderDelegate (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRSensorReader {
// Native constructor takes the raw sensor NSString constant.
[Export ("initWithSensor:")]
NativeHandle Constructor (NSString sensor);
// Convenience overload wrapping the SRSensor enum value's constant.
[Wrap ("this (sensor.GetConstant ()!)")]
NativeHandle Constructor (SRSensor sensor);
[Export ("startRecording")]
void StartRecording ();
[Export ("stopRecording")]
void StopRecording ();
[Export ("fetchDevices")]
void FetchDevices ();
[Export ("fetch:")]
void Fetch (SRFetchRequest request);
[Export ("authorizationStatus")]
SRAuthorizationStatus AuthorizationStatus { get; }
// Raw NSString sensor identifier; Sensor below converts it to the enum.
[Export ("sensor")]
NSString WeakSensor { get; }
SRSensor Sensor {
[Wrap ("SRSensorExtensions.GetValue (WeakSensor)")]
get;
}
// Strongly-typed wrapper over the weakly-typed Objective-C delegate.
[Wrap ("WeakDelegate")]
[NullAllowed]
ISRSensorReaderDelegate Delegate { get; set; }
// Weak to avoid a retain cycle between reader and delegate.
[NullAllowed, Export ("delegate", ArgumentSemantic.Weak)]
NSObject WeakDelegate { get; set; }
[Async]
[Static]
[Export ("requestAuthorizationForSensors:completion:")]
void RequestAuthorization (NSSet<NSString> sensors, Action<NSError> completion);
}
// SensorKit binding: a visit to a location category, with arrival/departure
// intervals and distance from home (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRVisit {
[Export ("distanceFromHome")]
double /* CLLocationDistance */DistanceFromHome { get; }
[Export ("arrivalDateInterval", ArgumentSemantic.Strong)]
NSDateInterval ArrivalDateInterval { get; }
[Export ("departureDateInterval", ArgumentSemantic.Strong)]
NSDateInterval DepartureDateInterval { get; }
[Export ("locationCategory")]
SRLocationCategory LocationCategory { get; }
[Export ("identifier", ArgumentSemantic.Strong)]
NSUuid Identifier { get; }
}
// SensorKit binding: total web usage time within a device usage report (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRWebUsage {
[Export ("totalUsageTime")]
double /* NSTimeInterval */ TotalUsageTime { get; }
}
// SensorKit binding: on-wrist state sample, including wrist/crown placement
// and (iOS 16.4+) the timestamps of the on/off transitions (iOS 14+).
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRWristDetection {
[Export ("onWrist")]
bool OnWrist { get; }
[Export ("wristLocation")]
SRWristLocation WristLocation { get; }
[Export ("crownOrientation")]
SRCrownOrientation CrownOrientation { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[NullAllowed]
[Export ("onWristDate", ArgumentSemantic.Strong)]
NSDate OnWristDate { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[NullAllowed]
[Export ("offWristDate", ArgumentSemantic.Strong)]
NSDate OffWristDate { get; }
}
// Objective-C category on NSString; kept internal because the functionality is
// surfaced through the SRSensor enum instead of this raw string API.
[NoWatch, NoTV, NoMac]
[iOS (14, 0)]
[MacCatalyst (14, 0)]
[Category]
[BaseType (typeof (NSString))]
[Internal] // exposed thru SRSensor
interface NSString_SRDeletionRecord {
// Maps a sensor identifier string to its deletion-records sensor, or null.
[return: NullAllowed]
[Export ("sr_sensorForDeletionRecordsFromSensor")]
NSString _GetSensorForDeletionRecordsFromSensor ();
}
// SensorKit binding: a text-input session within app usage data (iOS 15+).
[NoWatch, NoTV, NoMac, iOS (15, 0), MacCatalyst (15, 0)]
[BaseType (typeof (NSObject))]
interface SRTextInputSession /* privately conforms to NSCoding and NSSecureCoding */
{
[Export ("duration")]
double Duration { get; }
[Export ("sessionType")]
SRTextInputSessionType SessionType { get; }
[iOS (16, 4), MacCatalyst (16, 4)]
[Export ("sessionIdentifier")]
string SessionIdentifier { get; }
}
// SensorKit binding: a media playback event for a given media identifier (iOS 16.4+).
[NoWatch, NoTV, NoMac, iOS (16, 4), MacCatalyst (16, 4)]
[BaseType (typeof (NSObject))]
interface SRMediaEvent : NSCopying, NSSecureCoding {
[Export ("mediaIdentifier", ArgumentSemantic.Strong)]
string MediaIdentifier { get; }
[Export ("eventType", ArgumentSemantic.Assign)]
SRMediaEventType EventType { get; }
}
// SensorKit binding: supplemental app category; the native NSString identifier
// is surfaced as the SRDeviceUsageCategory enum via BindAs (iOS 16.4+).
[NoWatch, NoTV, NoMac, iOS (16, 4), MacCatalyst (16, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRSupplementalCategory : NSCopying, NSSecureCoding {
[BindAs (typeof (SRDeviceUsageCategory))]
[Export ("identifier")]
NSString Identifier { get; }
}
// SensorKit binding: loudness over a CMTimeRange within speech metrics (iOS 17+).
[NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRAudioLevel : NSCopying, NSSecureCoding {
[Export ("timeRange", ArgumentSemantic.Assign)]
CMTimeRange TimeRange { get; }
[Export ("loudness")]
double Loudness { get; }
}
// SensorKit binding: a named facial expression with its scalar value (iOS 17+).
[NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRFaceMetricsExpression : NSCopying, NSSecureCoding {
[Export ("identifier")]
string Identifier { get; }
[Export ("value")]
double Value { get; }
}
// SensorKit binding: speech expression scores (mood/valence/activation/dominance)
// for a time range, with a confidence value (iOS 17+).
[NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRSpeechExpression : NSCopying, NSSecureCoding {
[Export ("version")]
string Version { get; }
[Export ("timeRange", ArgumentSemantic.Assign)]
CMTimeRange TimeRange { get; }
[Export ("confidence")]
double Confidence { get; }
[Export ("mood")]
double Mood { get; }
[Export ("valence")]
double Valence { get; }
[Export ("activation")]
double Activation { get; }
[Export ("dominance")]
double Dominance { get; }
}
// SensorKit binding: per-session speech metrics combining audio level, speech
// recognition, sound classification, and expression results (iOS 17+).
[NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRSpeechMetrics : NSCopying, NSSecureCoding {
[Export ("sessionIdentifier")]
string SessionIdentifier { get; }
[Export ("sessionFlags", ArgumentSemantic.Assign)]
SRSpeechMetricsSessionFlags SessionFlags { get; }
[Export ("timestamp", ArgumentSemantic.Strong)]
NSDate Timestamp { get; }
[iOS (17, 2), MacCatalyst (17, 2)]
[Export ("timeSinceAudioStart")]
double TimeSinceAudioStart { get; }
// The four results below are each optional; any subset may be present.
[NullAllowed, Export ("audioLevel", ArgumentSemantic.Strong)]
SRAudioLevel AudioLevel { get; }
[NullAllowed, Export ("speechRecognition", ArgumentSemantic.Strong)]
SFSpeechRecognitionResult SpeechRecognition { get; }
[NullAllowed, Export ("soundClassification", ArgumentSemantic.Strong)]
SNClassificationResult SoundClassification { get; }
[NullAllowed, Export ("speechExpression", ArgumentSemantic.Strong)]
SRSpeechExpression SpeechExpression { get; }
}
// SensorKit binding: a single wrist temperature measurement with its error
// estimate and measurement condition (iOS 17+).
[NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRWristTemperature : NSCopying, NSSecureCoding {
[Export ("timestamp", ArgumentSemantic.Strong)]
NSDate Timestamp { get; }
[Export ("value", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitTemperature> Value { get; }
[Export ("condition")]
SRWristTemperatureCondition Condition { get; }
[Export ("errorEstimate", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitTemperature> ErrorEstimate { get; }
}
// SensorKit binding: a session of wrist temperature samples; note the samples
// are exposed as an NSEnumerator rather than an array (iOS 17+).
[NoWatch, NoTV, NoMac, iOS (17, 0), MacCatalyst (17, 0)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRWristTemperatureSession : NSCopying, NSSecureCoding {
[Export ("startDate", ArgumentSemantic.Strong)]
NSDate StartDate { get; }
[Export ("duration")]
double Duration { get; }
[Export ("version")]
string Version { get; }
[Export ("temperatures", ArgumentSemantic.Copy)]
NSEnumerator<SRWristTemperature> Temperatures { get; }
}
// SensorKit binding: face metrics built on an ARFaceAnchor; unlike most types
// in this file it is not available on Mac Catalyst (iOS 17+ only).
[NoWatch, NoTV, NoMac, iOS (17, 0), NoMacCatalyst]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRFaceMetrics : NSCopying, NSSecureCoding {
[Export ("version")]
string Version { get; }
[Export ("sessionIdentifier")]
string SessionIdentifier { get; }
[Export ("context", ArgumentSemantic.Assign)]
SRFaceMetricsContext Context { get; }
[Export ("faceAnchor", ArgumentSemantic.Copy)]
ARFaceAnchor FaceAnchor { get; }
[Export ("wholeFaceExpressions", ArgumentSemantic.Copy)]
SRFaceMetricsExpression [] WholeFaceExpressions { get; }
[Export ("partialFaceExpressions", ArgumentSemantic.Copy)]
SRFaceMetricsExpression [] PartialFaceExpressions { get; }
}
// SensorKit binding: one ECG data point — electric potential plus flags (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRElectrocardiogramData : NSCopying, NSSecureCoding {
[Export ("flags", ArgumentSemantic.Assign)]
SRElectrocardiogramDataFlags Flags { get; }
[Export ("value", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitElectricPotentialDifference> Value { get; }
}
// SensorKit binding: a timed batch of ECG data points for one lead, tied to
// its recording session and sampling frequency (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRElectrocardiogramSample : NSCopying, NSSecureCoding {
[Export ("date", ArgumentSemantic.Strong)]
NSDate Date { get; }
[Export ("frequency", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitFrequency> Frequency { get; }
[Export ("session", ArgumentSemantic.Strong)]
SRElectrocardiogramSession Session { get; }
[Export ("lead", ArgumentSemantic.Assign)]
SRElectrocardiogramLead Lead { get; }
[Export ("data", ArgumentSemantic.Copy)]
SRElectrocardiogramData [] Data { get; }
}
// SensorKit binding: state and guidance for an ECG recording session (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRElectrocardiogramSession : NSCopying, NSSecureCoding {
[Export ("state", ArgumentSemantic.Assign)]
SRElectrocardiogramSessionState State { get; }
[Export ("sessionGuidance", ArgumentSemantic.Assign)]
SRElectrocardiogramSessionGuidance SessionGuidance { get; }
[Export ("identifier")]
string Identifier { get; }
}
// SensorKit binding: conditions affecting a PPG optical sample; each member is
// backed by the native NSString constant in its Field attribute (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
public enum SRPhotoplethysmogramOpticalSampleCondition {
[Field ("SRPhotoplethysmogramOpticalSampleConditionSignalSaturation")]
SignalSaturation,
[Field ("SRPhotoplethysmogramOpticalSampleConditionUnreliableNoise")]
UnreliableNoise,
}
// SensorKit binding: a single PPG optical reading. Several noise/reflectance
// values are optional NSNumbers surfaced as nullable doubles via BindAs (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRPhotoplethysmogramOpticalSample : NSCopying, NSSecureCoding {
[Export ("emitter")]
nint Emitter { get; }
[Export ("activePhotodiodeIndexes", ArgumentSemantic.Strong)]
NSIndexSet ActivePhotodiodeIndexes { get; }
[Export ("signalIdentifier")]
nint SignalIdentifier { get; }
[Export ("nominalWavelength", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitLength> NominalWavelength { get; }
[Export ("effectiveWavelength", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitLength> EffectiveWavelength { get; }
[Export ("samplingFrequency", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitFrequency> SamplingFrequency { get; }
[Export ("nanosecondsSinceStart")]
long NanosecondsSinceStart { get; }
[NullAllowed]
[BindAs (typeof (double?))]
[Export ("normalizedReflectance", ArgumentSemantic.Strong)]
NSNumber NormalizedReflectance { get; }
[NullAllowed]
[BindAs (typeof (double?))]
[Export ("whiteNoise", ArgumentSemantic.Strong)]
NSNumber WhiteNoise { get; }
[NullAllowed]
[BindAs (typeof (double?))]
[Export ("pinkNoise", ArgumentSemantic.Strong)]
NSNumber PinkNoise { get; }
[NullAllowed]
[BindAs (typeof (double?))]
[Export ("backgroundNoise", ArgumentSemantic.Strong)]
NSNumber BackgroundNoise { get; }
[NullAllowed]
[BindAs (typeof (double?))]
[Export ("backgroundNoiseOffset", ArgumentSemantic.Strong)]
NSNumber BackgroundNoiseOffset { get; }
// Raw native condition constants; see SRPhotoplethysmogramOpticalSampleCondition.
[Export ("conditions", ArgumentSemantic.Copy)]
NSString [] Conditions { get; }
}
// SensorKit binding: accelerometer reading accompanying a PPG sample (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRPhotoplethysmogramAccelerometerSample : NSCopying, NSSecureCoding {
[Export ("nanosecondsSinceStart")]
long NanosecondsSinceStart { get; }
[Export ("samplingFrequency", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitFrequency> SamplingFrequency { get; }
// Per-axis acceleration measurements.
[Export ("x", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitAcceleration> X { get; }
[Export ("y", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitAcceleration> Y { get; }
[Export ("z", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitAcceleration> Z { get; }
}
// SensorKit binding: usage contexts for a PPG sample; each member is backed by
// the native NSString constant in its Field attribute (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
public enum SRPhotoplethysmogramSampleUsage {
[Field ("SRPhotoplethysmogramSampleUsageForegroundHeartRate")]
ForegroundHeartRate,
[Field ("SRPhotoplethysmogramSampleUsageDeepBreathing")]
DeepBreathing,
[Field ("SRPhotoplethysmogramSampleUsageForegroundBloodOxygen")]
ForegroundBloodOxygen,
[Field ("SRPhotoplethysmogramSampleUsageBackgroundSystem")]
BackgroundSystem,
}
// SensorKit binding: a PPG sample bundling optical and accelerometer readings;
// Usage holds raw native constants (see SRPhotoplethysmogramSampleUsage) (iOS 17.4+).
[NoWatch, NoTV, NoMac, iOS (17, 4), MacCatalyst (17, 4)]
[BaseType (typeof (NSObject))]
[DisableDefaultCtor]
interface SRPhotoplethysmogramSample : NSCopying, NSSecureCoding {
[Export ("startDate", ArgumentSemantic.Strong)]
NSDate StartDate { get; }
[Export ("nanosecondsSinceStart")]
long NanosecondsSinceStart { get; }
[Export ("usage", ArgumentSemantic.Copy)]
NSString [] Usage { get; }
[Export ("opticalSamples", ArgumentSemantic.Copy)]
SRPhotoplethysmogramOpticalSample [] OpticalSamples { get; }
[Export ("accelerometerSamples", ArgumentSemantic.Copy)]
SRPhotoplethysmogramAccelerometerSample [] AccelerometerSamples { get; }
// Optional skin temperature reading taken alongside the PPG sample.
[NullAllowed, Export ("temperature", ArgumentSemantic.Strong)]
NSMeasurement<NSUnitTemperature> Temperature { get; }
}
}
```
|
Jack Cosgrove (born October 30, 1956) is an American football coach. He is the head football coach at Colby College. Cosgrove served as the head football coach at the University of Maine from 1993 to 2015. He is an alumnus of Maine and played college football as a quarterback on the Maine Black Bears football team. Prior to receiving the head coaching position at his alma mater, Cosgrove served as an assistant at Maine and Boston College and was head coach at Stoughton High School.
Head coaching record
References
External links
Colby profile
Maine profile
1956 births
Living people
American football quarterbacks
Boston College Eagles football coaches
Colby Mules football coaches
Maine Black Bears football coaches
Maine Black Bears football players
High school football coaches in Massachusetts
People from Sharon, Massachusetts
Coaches of American football from Massachusetts
Players of American football from Norfolk County, Massachusetts
|
Nasa Q'ara (Aymara nasa nose, q'ara bare, bald, also spelled Nazacara) is a mountain in the Cordillera Real in the Andes of Bolivia. It is situated in the La Paz Department, Los Andes Province, Pucarani Municipality, southwest of the lake Allqa Quta. Nasa Q'ara lies southwest of the main peak of the Kunturiri massif and Wawanaki and southeast of Ch'iyar K'ark'a.
References
Mountains of La Paz Department (Bolivia)
|
The Hackney Scout Song Book contains a collection of songs which were popular in the early days of the Scout Movement in the United Kingdom. Although originally intended for the use of Scouts in the Hackney district of East London, it quickly became the standard work of its type in the UK and around the world. First printed in December 1921, the last edition was published in 1972.
The book has its origin in a series of indoor "campfire" gatherings for Hackney Scouts organised by Stanly Ince, a local Scout Commissioner, who had been paralyzed by polio following his service in World War I. Guests to these meetings included Robert Baden-Powell, the founder of Scouting. Lacking "a common fund of song", Ince founded a "Song Book Committee" and in December 1921, the first edition of the new song book was printed. It was a soft-covered pocket-sized book in the traditional Songster format and included a mix of folk and popular songs, together with some hymns and items composed specifically for Scouts. The "National Anthem of the Ancient Britons" and "Michael Finnagen" appeared in print for the first time in its pages. The book was "dedicated to the undying memory of those Hackney Scouts who died in the service of their country 1914 – 1918".
Originally intended only for Hackney Scouts, the first edition sold out within a few months. It remained in print for more than 50 years, sold over 130,000 copies and was translated into 27 languages.
References
Further reading
Scouting
1921 children's books
Song books
|
The Zeeburgereiland is a triangular island on the east side of Amsterdam, in the Dutch province of North Holland. It lies beyond the Oranje Locks and, on the east, is bordered by the IJ. Formerly an industrial area, the island is being redeveloped as part of the IJburg new neighbourhood. It is crossed by the A10 motorway and, since 2005, the IJ Tram line of the Amsterdam Tram system.
References
Stadsdeel Amsterdam-Oost
Militair Zeeburg
Annie M.G. Schmidt Huis
External links
Neighbourhoods of Amsterdam
Artificial islands of Amsterdam
Amsterdam-Oost
|
Ingvald Garbo (3 October 1891 – 22 November 1941) was a Norwegian resistance member.
He was born in Bergen, to a mother from Bergen and a father from Førde. He married Sara Haugland (1897–1977) in 1924, and had two children. His son Gunnar Garbo, born 1924, was a notable politician.
He was a teacher by occupation. He also contributed to newspapers such as Bergens Arbeiderblad. During the occupation of Norway by Nazi Germany he listened to illegal radio broadcasts from the BBC, and distributed the news among German soldiers. He was discovered by the German occupying authorities, arrested on 8 October 1941 and executed on 22 November 1941. He was the first Norwegian with a connection to the illegal press to be executed, and according to Berit Nøkleby's count the seventeenth overall.
References
1891 births
1941 deaths
Schoolteachers from Bergen
Norwegian resistance members
World War II resistance press activists
Norwegian civilians killed in World War II
Deaths by firearm in Norway
Norwegian people executed by Nazi Germany
People executed by Nazi courts
People executed by Nazi Germany by firing squad
|
Midway is an unincorporated community in Bedford County, Tennessee, United States. Midway is located on Tennessee State Route 276 and Thompson Creek east-southeast of Shelbyville.
References
Unincorporated communities in Bedford County, Tennessee
Unincorporated communities in Tennessee
|
```python
#
#
# path_to_url
#
# Unless required by applicable law or agreed to in writing, software
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# ==============================================================================
"""The Tuner interface for hyper-parameters tuning."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.contrib.framework.python.framework import experimental
class Tuner(object):
  """Tuner class is the interface for Experiment hyper-parameters tuning.

  Example:
    ```
    def _create_my_experiment(run_config, hparams):
      hidden_units = [hparams.unit_per_layer] * hparams.num_hidden_layers
      return tf.contrib.learn.Experiment(
          estimator=DNNClassifier(config=run_config, hidden_units=hidden_units),
          train_input_fn=my_train_input,
          eval_input_fn=my_eval_input)

    tuner = create_tuner(study_configuration, objective_key)
    learn_runner.tune(experiment_fn=_create_my_experiment, tuner=tuner)
    ```
  """

  # Python 2 style ABC declaration; kept for compatibility with the rest of
  # the contrib codebase (which still supports Python 2).
  __metaclass__ = abc.ABCMeta

  @experimental
  @abc.abstractmethod
  def next_trial(self):
    """Switch to the next trial.

    Ask the tuning service for a new trial for hyper-parameters tuning.

    Returns:
      A boolean indicating if a trial was assigned to the tuner.

    Raises:
      RuntimeError: If the tuner is not initialized correctly.
    """
    raise NotImplementedError("Calling an abstract method.")

  @experimental
  @abc.abstractmethod
  def run_experiment(self, experiment_fn):
    """Creates an Experiment by calling `experiment_fn` and executes it.

    It creates a `RunConfig`, which captures the current execution environment
    configuration and retrieves the hyper-parameters for current trial from the
    tuning service. Both are passed to the `experiment_fn` and used to create
    the Experiment for current trial execution. When finished, the measure will
    be reported to the tuning service.

    If the `RunConfig` does not include a task type, then an exception is
    raised. The task type should be one of the types supported by the tuner. If
    tuner does not support the task type directly, it could delegate the task to
    Experiment, which is usually a function of Experiment. An exception would be
    raised, if neither tuner nor Experiment could support the task type.

    Args:
      experiment_fn: A function that creates an `Experiment`. It should accept
        an argument `run_config` which should be used to create the `Estimator`
        (passed as `config` to its constructor), and an argument `hparams`,
        which should be used for hyper-parameters tuning. It must return an
        `Experiment`.
    """
    raise NotImplementedError("Calling an abstract method.")
```
|
```dart
import 'dart:async';
import 'dart:mirrors';
import 'package:angel_framework/angel_framework.dart';
import 'plural.dart' as pluralize;
import 'no_service.dart';
/// Returns a hook that decorates service results with a has-many-through
/// relation.
///
/// For each result object, rows are looked up in the [pivotPath] service whose
/// pivot-key field matches the object's local key; then, for every pivot row,
/// related rows are looked up in the [servicePath] service and assigned onto
/// the pivot. The decorated pivot rows are finally assigned onto the original
/// object under [as] (or the pluralized [servicePath] when [as] is empty).
///
/// The `get*Key` / `assignForeignObjects` callbacks override the default
/// Map / `Extensible` / mirror-based property access.
HookedServiceEventListener hasManyThrough(String servicePath, String pivotPath,
    {String as,
    String localKey,
    String pivotKey,
    String foreignKey,
    getLocalKey(obj),
    getPivotKey(obj),
    getForeignKey(obj),
    assignForeignObjects(foreign, obj)}) {
  // Name under which the related objects are attached to each result.
  var foreignName =
      as?.isNotEmpty == true ? as : pluralize.plural(servicePath.toString());

  return (HookedServiceEvent e) async {
    var pivotService = e.getService(pivotPath);
    var foreignService = e.getService(servicePath);

    if (pivotService == null)
      throw noService(pivotPath);
    else if (foreignService == null) throw noService(servicePath);

    // Assigns [foreign] onto [obj]: via the user callback if given, otherwise
    // by Map index, `Extensible` properties, or reflective field access.
    _assignForeignObjects(foreign, obj) {
      if (assignForeignObjects != null)
        return assignForeignObjects(foreign, obj);
      else if (obj is Map)
        obj[foreignName] = foreign;
      else if (obj is Extensible)
        obj.properties[foreignName] = foreign;
      else
        reflect(obj).setField(new Symbol(foreignName), foreign);
    }

    // Reads the local key (default `id`) from a result object.
    _getLocalKey(obj) {
      if (getLocalKey != null)
        return getLocalKey(obj);
      else if (obj is Map)
        return obj[localKey ?? 'id'];
      else if (obj is Extensible)
        return obj.properties[localKey ?? 'id'];
      else if (localKey == null || localKey == 'id')
        return obj.id;
      else
        return reflect(obj).getField(new Symbol(localKey ?? 'id')).reflectee;
    }

    // Reads the pivot key (default `id`) from a pivot row.
    _getPivotKey(obj) {
      if (getPivotKey != null)
        return getPivotKey(obj);
      else if (obj is Map)
        return obj[pivotKey ?? 'id'];
      else if (obj is Extensible)
        return obj.properties[pivotKey ?? 'id'];
      else if (pivotKey == null || pivotKey == 'id')
        return obj.id;
      else
        return reflect(obj).getField(new Symbol(pivotKey ?? 'id')).reflectee;
    }

    _normalize(obj) async {
      // First, resolve pivot rows that reference this object.
      // NOTE(review): the query field defaults to 'userId' here while
      // _getPivotKey defaults to 'id' — confirm these defaults are intended.
      var id = await _getLocalKey(obj);
      var indexed = await pivotService.index({
        'query': {pivotKey ?? 'userId': id}
      });

      if (indexed == null || indexed is! List || indexed.isNotEmpty != true) {
        await _assignForeignObjects([], obj);
      } else {
        // Now, resolve related rows from the foreign service for each pivot.
        var mapped = await Future.wait(indexed.map((pivot) async {
          // Fix: read the key from the current pivot row, not the outer
          // object — otherwise every pivot resolved the same foreign set.
          var id = await _getPivotKey(pivot);
          var indexed = await foreignService.index({
            'query': {foreignKey ?? 'postId': id}
          });

          if (indexed == null ||
              indexed is! List ||
              indexed.isNotEmpty != true) {
            await _assignForeignObjects([], pivot);
          } else {
            await _assignForeignObjects(indexed, pivot);
          }

          return pivot;
        }));

        await _assignForeignObjects(mapped, obj);
      }
    }

    if (e.result is Iterable) {
      await Future.wait(e.result.map(_normalize));
    } else
      await _normalize(e.result);
  };
}
```
|
```objective-c
//
// YPNavigationController.m
// Wuxianda
//
// Created by MichaelPPP on 16/1/21.
//
#import "YPNavigationController.h"
@interface YPNavigationController ()

@end

// Navigation controller that forwards rotation and status-bar decisions to
// its top view controller, applies a shared UINavigationBar appearance, and
// customizes pushed view controllers (back button, tab-bar hiding).
@implementation YPNavigationController

#pragma mark -

// Forward the per-orientation rotation query to the top view controller.
// NOTE(review): shouldAutorotateToInterfaceOrientation: is a deprecated
// UIKit API — confirm it is still needed for the supported iOS versions.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    return [self.topViewController shouldAutorotateToInterfaceOrientation:interfaceOrientation];
}

// Let the top view controller decide whether rotation is allowed.
- (BOOL)shouldAutorotate
{
    return self.topViewController.shouldAutorotate;
}

// Let the top view controller declare its supported orientations.
- (UIInterfaceOrientationMask)supportedInterfaceOrientations
{
    return self.topViewController.supportedInterfaceOrientations;
}

// Let the top view controller choose the status bar style.
- (UIStatusBarStyle)preferredStatusBarStyle {
    return self.topViewController.preferredStatusBarStyle;
}

// Configure the shared UINavigationBar appearance for bars contained in this
// navigation controller class: tint color, title color and title font.
+ (void)initialize
{
    UINavigationBar *bar = [UINavigationBar appearanceWhenContainedIn:[self class], nil];
    [bar setTintColor:YPMainColor];

    NSMutableDictionary *titleAttrs = [NSMutableDictionary dictionary];
    titleAttrs[NSForegroundColorAttributeName] = YPBlackColor;
    titleAttrs[NSFontAttributeName] = [UIFont systemFontOfSize:17];
    [bar setTitleTextAttributes:titleAttrs];
}

- (void)viewDidLoad {
    [super viewDidLoad];
}

/**
 * Intercepts pushes to customize non-root view controllers before they are
 * shown (custom back button for the web view controller, tab-bar hiding).
 */
- (void)pushViewController:(UIViewController *)viewController animated:(BOOL)animated
{
    if (self.childViewControllers.count > 0) { // Not the root controller: this is a real push.
        if ([viewController isKindOfClass:NSClassFromString(@"YPBilibiliWebViewController")]) {
            // Build a custom back button that pops this controller when tapped.
            UIButton *button = [UIButton buttonWithType:UIButtonTypeCustom];
            button.titleLabel.font = [UIFont boldSystemFontOfSize:17];
            button.contentEdgeInsets = UIEdgeInsetsMake(0, -10, 0, 0);
            // NOTE(review): the back-button title is an empty string — confirm
            // this is intentional (icon-only / title set elsewhere).
            [button setTitle:@"" forState:UIControlStateNormal];
            [button sizeToFit];
            [button setTitleColor:YPMainColor forState:UIControlStateNormal];
            [button setTitleColor:YPMainColor forState:UIControlStateHighlighted];
            // weakify/strongify avoids a retain cycle through the RAC signal block.
            @weakify(self);
            [[button rac_signalForControlEvents:UIControlEventTouchUpInside] subscribeNext:^(id x) {
                @strongify(self);
                [self popViewControllerAnimated:YES];
            }];
            viewController.navigationItem.leftBarButtonItem = [[UIBarButtonItem alloc] initWithCustomView:button];
        }

        // Hide the tab bar for all pushed (non-root) view controllers.
        viewController.hidesBottomBarWhenPushed = YES;
    }

    // Let super perform the actual push; the custom leftBarButtonItem is
    // already attached to the incoming view controller above.
    [super pushViewController:viewController animated:animated];
}

@end
```
|
```xml
import type { ComponentPropsWithoutRef } from 'react';
import { useState } from 'react';
import { generateUID } from '@proton/components';
import { VPN_APP_NAME } from '@proton/shared/lib/constants';
import clsx from '@proton/utils/clsx';
import type { LogoProps } from './Logo';
type Props = ComponentPropsWithoutRef<'svg'> & Pick<LogoProps, 'variant' | 'size' | 'hasTitle'>;
// Renders the Proton VPN logo as an inline SVG in one of three variants:
// glyph only, wordmark only, or glyph + wordmark (default).
const VpnLogo = ({ variant = 'with-wordmark', size, className, hasTitle = true, ...rest }: Props) => {
    // This logo can appear several times in the view; ids have to be different each time
    const [uid] = useState(generateUID('logo'));
    // Intrinsic width depends on the variant; height is always 36.
    let logoWidth: number;
    switch (variant) {
        case 'glyph-only':
            logoWidth = 36;
            break;
        case 'wordmark-only':
            logoWidth = 233;
            break;
        default:
            logoWidth = 138;
            break;
    }
    return (
        <svg
            xmlns="path_to_url"
            xmlnsXlink="path_to_url"
            viewBox={`0 0 ${logoWidth} 36`}
            width={logoWidth}
            height="36"
            fill="none"
            role="img"
            className={clsx('logo', size && variant === 'glyph-only' && `icon-size-${size}`, variant, className)}
            aria-labelledby={`${uid}-title`}
            {...rest}
        >
            {hasTitle && <title id={`${uid}-title`}>{VPN_APP_NAME}</title>}
            {variant === 'glyph-only' && (
                <>
                    <path
                        fill={`url(#${uid}-a)`}
                        fillRule="evenodd"
                        d="M15.247 29.149c1.064 1.913 3.797 2.017 5.005.19l11.265-17.035c1.195-1.806.052-4.228-2.111-4.475L7.263 5.31c-2.36-.269-4.041 2.22-2.893 4.285l.09.16 9.88 6.77-.12 10.77 1.027 1.854Z"
                        clipRule="evenodd"
                    />
                    <path
                        fill={`url(#${uid}-b)`}
                        d="m15.881 27.364 1-1.49 7.594-11.472c.664-1.003.03-2.349-1.17-2.487L4.456 9.752l9.764 17.552a.979.979 0 0 0 1.66.06Z"
                    />
                    <defs>
                        <linearGradient
                            id={`${uid}-a`}
                            x1="29.32"
                            x2="11.303"
                            y1="29.148"
                            y2="-1.922"
                            gradientUnits="userSpaceOnUse"
                        >
                            <stop offset=".066" stopColor="#8EFFEE" />
                            <stop offset=".45" stopColor="#C9C7FF" />
                            <stop offset="1" stopColor="#7341FF" />
                        </linearGradient>
                        <linearGradient
                            id={`${uid}-b`}
                            x1="30.967"
                            x2="5.738"
                            y1="-22.452"
                            y2="31.512"
                            gradientUnits="userSpaceOnUse"
                        >
                            <stop offset=".48" stopColor="#6D4AFF" />
                            <stop offset=".994" stopColor="#00F0C3" />
                        </linearGradient>
                    </defs>
                </>
            )}
            {variant === 'with-wordmark' && (
                <>
                    <path
                        fill={`url(#${uid}-a)`}
                        fillRule="evenodd"
                        d="M11.247 29.149c1.064 1.913 3.797 2.017 5.005.19l11.265-17.035c1.195-1.806.052-4.228-2.111-4.475L3.263 5.31C.903 5.041-.778 7.53.37 9.595l.09.16 9.88 6.77-.12 10.77 1.027 1.854Z"
                        clipRule="evenodd"
                    />
                    <path
                        fill={`url(#${uid}-b)`}
                        d="m11.881 27.364 1-1.49 7.594-11.472c.664-1.003.03-2.349-1.17-2.486L.456 9.752l9.764 17.552a.979.979 0 0 0 1.66.06Z"
                    />
                    <path
                        fill="var(--logo-text-product-color)"
                        d="M119.842 10.897h-6.572v14.25h2.604V21.6a1.303 1.303 0 0 1 1.301-1.303h2.667a4.682 4.682 0 0 0 4.684-4.689 4.688 4.688 0 0 0-2.887-4.352 4.65 4.65 0 0 0-1.797-.36Zm2.051 4.674a2.218 2.218 0 0 1-1.374 2.053 2.192 2.192 0 0 1-.85.168h-3.807v-4.44h3.807a2.222 2.222 0 0 1 2.219 2.233l.005-.014Zm-17.766 9.524-5.245-14.239h2.962l3.451 10.19c.136.366.235.742.292 1.127h.029c.044-.388.142-.767.293-1.126l3.462-10.19h2.982l-5.27 14.238h-2.956Zm21.373 0V10.853h2.283l6.716 8.832c.243.304.452.632.628.98h.041a11.14 11.14 0 0 1-.064-1.292v-8.52h2.69v14.239h-2.282l-6.737-8.832a6.517 6.517 0 0 1-.625-.98h-.023c.038.43.052.86.043 1.292v8.52h-2.67v.003Z"
                    />
                    <path
                        fill="var(--logo-text-proton-color)"
                        d="M38 21.26v3.664h2.56V21.42a1.282 1.282 0 0 1 1.279-1.286h2.624a4.592 4.592 0 0 0 3.261-1.361 4.652 4.652 0 0 0 1.351-3.28c0-1.228-.486-2.41-1.35-3.281a4.603 4.603 0 0 0-3.265-1.358H38v4.58h2.56v-2.159h3.73c.58 0 1.134.232 1.544.644a2.2 2.2 0 0 1 0 3.104c-.41.412-.964.644-1.544.644h-2.71a3.551 3.551 0 0 0-2.528 1.055 3.65 3.65 0 0 0-.776 1.166A3.54 3.54 0 0 0 38 21.259Zm11.47 3.664v-5.583c0-2.279 1.322-4.091 3.97-4.091a5.09 5.09 0 0 1 1.262.14v2.296c-.301-.02-.56-.02-.682-.02-1.402 0-2.005.646-2.005 1.955v5.303H49.47Zm5.994-4.734c0-2.802 2.104-4.937 5.033-4.937 2.929 0 5.033 2.135 5.033 4.937 0 2.802-2.104 4.957-5.033 4.957-2.929 0-5.033-2.158-5.033-4.957Zm7.558 0c0-1.592-1.064-2.722-2.525-2.722-1.465 0-2.525 1.127-2.525 2.722 0 1.612 1.063 2.722 2.525 2.722 1.464 0 2.525-1.113 2.525-2.722Zm10.646 0c0-2.802 2.104-4.937 5.032-4.937 2.926 0 5.03 2.135 5.03 4.937 0 2.802-2.104 4.957-5.03 4.957-2.928 0-5.032-2.158-5.032-4.957Zm7.554 0c0-1.592-1.063-2.722-2.524-2.722-1.462 0-2.525 1.127-2.525 2.722 0 1.612 1.063 2.722 2.525 2.722 1.461 0 2.525-1.113 2.525-2.722Zm3.831 4.734v-5.38c0-2.499 1.583-4.294 4.41-4.294 2.806 0 4.39 1.792 4.39 4.294v5.38h-2.525v-5.18c0-1.39-.623-2.259-1.865-2.259-1.243 0-1.865.867-1.865 2.259v5.18h-2.545Zm-12.147-7.436h-2.747v3.528c0 1.23.44 1.793 1.703 1.793.12 0 .42 0 .802-.02v2.075c-.52.14-.981.223-1.484.223-2.124 0-3.569-1.29-3.569-3.728v-3.87h-1.706v-2.036h.427a1.3 1.3 0 0 0 .489-.097 1.285 1.285 0 0 0 .694-.698 1.28 1.28 0 0 0 .096-.492v-1.918h2.545v3.205h2.747v2.035h.003Z"
                    />
                    <defs>
                        <linearGradient
                            id={`${uid}-a`}
                            x1="25.32"
                            x2="7.303"
                            y1="29.148"
                            y2="-1.922"
                            gradientUnits="userSpaceOnUse"
                        >
                            <stop offset=".066" stopColor="#8EFFEE" />
                            <stop offset=".45" stopColor="#C9C7FF" />
                            <stop offset="1" stopColor="#7341FF" />
                        </linearGradient>
                        <linearGradient
                            id={`${uid}-b`}
                            x1="26.967"
                            x2="1.738"
                            y1="-22.452"
                            y2="31.512"
                            gradientUnits="userSpaceOnUse"
                        >
                            <stop offset=".48" stopColor="#6D4AFF" />
                            <stop offset=".994" stopColor="#00F0C3" />
                        </linearGradient>
                    </defs>
                </>
            )}
            {variant === 'wordmark-only' && (
                <>
                    <path
                        d="M203.615 34.395V3.19281H208.335L224.096 24.9565C224.676 25.7506 225.074 26.4074 225.287 26.9266L225.379 26.8808C225.287 26.1477 225.242 25.2314 225.242 24.1317V3.19281H230.19V34.395H225.425L209.709 12.6314C209.159 11.8677 208.747 11.2263 208.472 10.707L208.426 10.7528C208.487 11.2415 208.518 12.1426 208.518 13.4561V34.395H203.615Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M176.412 34.395V3.19281H188.921C191.762 3.19281 194.129 4.07862 196.023 5.85028C197.947 7.6219 198.909 9.88228 198.909 12.6314C198.909 15.3805 197.947 17.6561 196.023 19.4583C194.129 21.2299 191.762 22.1157 188.921 22.1157H181.315V34.395H176.412ZM181.315 17.5339H188.004C189.929 17.5339 191.395 17.0757 192.403 16.1594C193.411 15.243 193.915 14.067 193.915 12.6314C193.915 11.1957 193.411 10.035 192.403 9.14919C191.426 8.23282 189.99 7.77464 188.096 7.77464H181.315V17.5339Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M155.192 34.395L143.371 3.19281H148.869L157.163 26.6976C157.498 27.5223 157.682 28.2096 157.712 28.7594H157.804C157.865 28.2706 158.048 27.5833 158.354 26.6976L166.693 3.19281H172.145L160.37 34.395H155.192Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M109.822 21.8867C109.822 19.0765 110.677 16.8008 112.388 15.0598C114.099 13.3187 116.435 12.4481 119.398 12.4481C122.361 12.4481 124.698 13.3187 126.408 15.0598C128.119 16.8008 128.974 19.0765 128.974 21.8867V34.3951H124.163V22.2532C124.163 20.5427 123.751 19.2139 122.926 18.267C122.132 17.2896 120.956 16.8008 119.398 16.8008C117.84 16.8008 116.649 17.2896 115.824 18.267C115.03 19.2139 114.633 20.5427 114.633 22.2532V34.3951H109.822V21.8867Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M94.1717 34.8532C90.9645 34.8532 88.2916 33.784 86.1536 31.6459C84.0155 29.5078 82.9463 26.8503 82.9463 23.6736C82.9463 20.4968 84.0155 17.8394 86.1536 15.7012C88.2916 13.5325 90.9645 12.4481 94.1717 12.4481C97.4095 12.4481 100.098 13.5325 102.236 15.7012C104.374 17.8394 105.443 20.4968 105.443 23.6736C105.443 26.8503 104.374 29.5078 102.236 31.6459C100.098 33.784 97.4095 34.8532 94.1717 34.8532ZM87.7572 23.6736C87.7572 25.659 88.3528 27.3085 89.5441 28.6219C90.7658 29.9048 92.3086 30.5463 94.1717 30.5463C96.0349 30.5463 97.5777 29.9048 98.7994 28.6219C100.021 27.3085 100.632 25.659 100.632 23.6736C100.632 21.6576 100.021 20.0081 98.7994 18.7252C97.5777 17.4118 96.0349 16.755 94.1717 16.755C92.3086 16.755 90.7658 17.4118 89.5441 18.7252C88.3528 20.0081 87.7572 21.6576 87.7572 23.6736Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M76.573 34.7158C74.3125 34.7158 72.4644 34.0439 71.029 32.6997C69.6237 31.3252 68.9213 29.4314 68.9213 27.0183V16.8925H65.0268V12.9063H68.8755V5.71283H73.7322V12.9063H80.0093V16.8925H73.7322V26.4226C73.7322 27.8583 74.053 28.8815 74.6944 29.4925C75.3663 30.0728 76.4355 30.363 77.9017 30.363C78.4208 30.363 78.986 30.3477 79.597 30.3172V34.3034C78.5278 34.5783 77.5198 34.7158 76.573 34.7158Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M51.713 34.8532C48.5057 34.8532 45.8329 33.784 43.6948 31.6459C41.5568 29.5078 40.4876 26.8503 40.4876 23.6736C40.4876 20.4968 41.5568 17.8394 43.6948 15.7012C45.8329 13.5325 48.5057 12.4481 51.713 12.4481C54.9507 12.4481 57.6389 13.5325 59.777 15.7012C61.9151 17.8394 62.9843 20.4968 62.9843 23.6736C62.9843 26.8503 61.9151 29.5078 59.777 31.6459C57.6389 33.784 54.9507 34.8532 51.713 34.8532ZM45.2985 23.6736C45.2985 25.659 45.8941 27.3085 47.0854 28.6219C48.3071 29.9048 49.8499 30.5463 51.713 30.5463C53.5762 30.5463 55.1189 29.9048 56.3407 28.6219C57.5624 27.3085 58.1734 25.659 58.1734 23.6736C58.1734 21.6576 57.5624 20.0081 56.3407 18.7252C55.1189 17.4118 53.5762 16.755 51.713 16.755C49.8499 16.755 48.3071 17.4118 47.0854 18.7252C45.8941 20.0081 45.2985 21.6576 45.2985 23.6736Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M26.5155 21.1994C26.5155 18.5419 27.2638 16.419 28.7606 14.8307C30.2878 13.2423 32.3344 12.4481 34.9004 12.4481C35.6639 12.4481 36.5191 12.555 37.4662 12.7688V17.1674C37.1913 17.1369 36.6415 17.1216 35.8167 17.1216C34.3198 17.1216 33.1897 17.5034 32.426 18.267C31.6929 19.0307 31.3264 20.283 31.3264 22.0241V34.3951H26.5155V21.1994Z"
                        fill="var(--logo-text-proton-color)"
                    />
                    <path
                        d="M0 34.395V3.19281H12.5084C15.3491 3.19281 17.7164 4.07862 19.6102 5.85028C21.5345 7.6219 22.4967 9.88228 22.4967 12.6314C22.4967 15.3805 21.5345 17.6561 19.6102 19.4583C17.7164 21.2299 15.3491 22.1157 12.5084 22.1157H4.90255V34.395H0ZM4.90255 17.5339H11.592C13.5164 17.5339 14.9825 17.0757 15.9905 16.1594C16.9985 15.243 17.5025 14.067 17.5025 12.6314C17.5025 11.1957 16.9985 10.035 15.9905 9.14919C15.0131 8.23282 13.5775 7.77464 11.6836 7.77464H4.90255V17.5339Z"
                        fill="var(--logo-text-proton-color)"
                    />
                </>
            )}
        </svg>
    );
};

export default VpnLogo;
```
|
Goldman Sachs Capital Partners is the private equity arm of Goldman Sachs, focused on leveraged buyout and growth capital investments globally. The group, which is based in New York City, was founded in 1986.
History
Goldman Sachs has historically invested capital in a variety of businesses alongside its investment banking clients. In the early and mid-1980s, Goldman was a slow entrant into the financing of leveraged buyouts and junk bonds and preferred to focus on its traditional mergers and acquisitions advisory business. Beginning in 1983, however, Goldman began making longer-term equity investments in private equity transactions that came through its investment banking and other clients.
Goldman Sachs Capital Partners was founded in 1986, at the same time that similar groups were founded at other investment banks including Lehman Brothers Merchant Banking, Morgan Stanley Capital Partners and DLJ Merchant Banking Partners. Goldman established investment partnerships that allowed its clients to participate alongside the firm in private equity transactions.
On April 23, 2007, Goldman closed GS Capital Partners VI with $20 billion in committed capital, $11 billion from institutional and high-net-worth investors and $9 billion from Goldman Sachs and its employees. In late 2019, Goldman's Chief Executive, David M. Solomon, announced that the firm would combine GS Capital Partners into one division with Goldman's other direct-investing units, such as the Special Situations Group and Growth Equity unit, called the Merchant Banking Division (MBD), which added up to $140 billion under management.
Investment funds
Since 1992, GSCP has raised third party capital as well as investing on behalf of Goldman, its clients and its employees through institutional private equity funds. GSCP's third party investors include pension funds, insurance companies, endowments, fund of funds, high-net-worth individuals, sovereign wealth funds and other institutional investors.
As of the end of 2008, GSCP had completed fundraising for seven investment funds with total committed capital of approximately US$39.9 billion:
Source: Preqin
Investments
GS Capital Partners emerged in the late 1990s as one of the largest private equity investors globally competing and partnering with the largest independent firms, Kohlberg Kravis Roberts, Blackstone Group, Bain Capital, Carlyle Group and TPG Capital. Since the raising of its Goldman Sachs Capital Partners 2000 Fund, GS Capital Partners has completed some of the most notable leveraged buyouts:
In addition to its successful buyout transactions, Goldman was involved in the high-profile failed buyout of Harman International Industries, an upscale audio equipment maker. On April 26, 2007, Harman announced it had entered an agreement to be acquired by GS Capital Partners and Kohlberg Kravis Roberts. As the financing markets became more adverse in the summer of 2007, the buyout was on tenuous ground. In September 2007, Goldman and KKR backed out of the $8 billion buyout of Harman. By the end of the day, Harman's shares had plummeted by more than 24% on the news.
References
External links
Goldman Sachs
Investment banking private equity groups
Mezzanine capital investment firms
Private equity firms of the United States
Financial services companies based in New York City
American companies established in 1986
Financial services companies established in 1986
1986 establishments in New York (state)
|
Esenyurt is a neighbourhood in the municipality and district of Korkuteli, Antalya Province, Turkey. Its population is 512 (2022).
References
Neighbourhoods in Korkuteli District
|
Frakes may refer to:
Frakes, Kentucky, unincorporated community in Bell County
Mount Frakes, prominent mountain marking the highest elevation in the Crary Mountains in Marie Byrd Land
Frakes Aviation, American aircraft manufacturer
People with the surname
Frakes family:
Frank Frakes (1860–1933), American pioneer rancher, grandfather of George and cousin of William and Laurence
George E. Frakes (born 1932), American historian, father of Robert, grandson of Frank
Robert Frakes (born 1962), American historian, son of George
Lawrence A. Frakes (born 1930), American and Australian geologist and paleoclimatologist, cousin of Frank and William
William Franklin Frakes (1858–1942), American rancher, naturalist, adventurer, and author, cousin of Frank and Laurence
Bill Frakes, American photographer
Jerold Frakes, American literature historian
Jonathan Frakes (born 1952), American actor and director
Laura J. Frakes, American educator
Randall Frakes, American film and science fiction writer
|
Azuragrion granti, the Socotra bluet or Grant's bluet, is a species of narrow-winged damselfly in the family Coenagrionidae. It is endemic to Socotra in the Indian Ocean. It is a member of the narrow-winged damselfly family Coenagrionidae with its closest relatives originating in Africa, for example Azuragrion nigridorsum.
Description
Azuragrion granti is, typically for narrow-winged damselflies, a largely blue damselfly with black markings on top of the head, black stripes along the thorax and on the upper part of the abdomen which is paler underneath. It has narrow, transparent wings it holds vertically over its body when it is at rest.
Distribution
Endemic to the island of Socotra, part of Yemen, where it occurs mostly in the eastern granitic, mountainous half of the island over an area of 550 km² in the Hagheir Mountains. Azuragrion granti apparently does not occur in the karstic western half of Socotra as there are few open freshwater bodies to be found there.
Habitat
The natural habitats of Azuragrion granti are rivers, intermittent freshwater lakes, freshwater marshes, and ponds. It is threatened by habitat loss although said to be locally abundant. This species breeds in mountain running water. Nothing is known about the biology of this species.
Conservation status
There has been a decline in the quality of the habitat available to Azuragrion granti due to water extraction and pollution, and this has also been observed in the lowlands where the rapidly developing tourism industry concentrates. As a result, Azuragrion granti is listed as a near-threatened species. Due to its endemicity and the small range of the species, any future predicted impacts by infrastructure development on Socotra will almost certainly have negative effects.
References
Coenagrionidae
Endemic fauna of Socotra
Insects described in 1903
Taxa named by Robert McLachlan (entomologist)
Taxonomy articles created by Polbot
|
```objective-c
/* Maps OSQP's generic blas_* / vml_* / spblas_* names onto the matching
 * Intel MKL routines, selecting single- or double-precision variants
 * depending on whether OSQP_USE_FLOAT is defined. */
#ifndef BLAS_HELPERS_H_
#define BLAS_HELPERS_H_

#include "osqp_configure.h"

#include <mkl.h>
#include <mkl_cblas.h>
#include <mkl_spblas.h>
#include <mkl_vml.h>

/* Let the user override the MKL memory alignment if they really want,
   but default to 64-bytes alignment if nothing is specified. */
#ifndef OSQP_MKL_MEMORY_ALIGNMENT
# define OSQP_MKL_MEMORY_ALIGNMENT 64
#endif

/* Define the memory management functions for the MKL algebra */
#define blas_malloc(alloc_size) mkl_malloc(alloc_size, OSQP_MKL_MEMORY_ALIGNMENT)
#define blas_calloc(num, size)  mkl_calloc(num, size, OSQP_MKL_MEMORY_ALIGNMENT)
#define blas_free               mkl_free
#define blas_realloc            mkl_realloc

/* Define the blas functions based on the data type we are using */
#ifdef OSQP_USE_FLOAT
  /* Single-precision (float) variants. */
  // MKL Level 1 BLAS functions
  #define blas_copy  cblas_scopy
  #define blas_dot   cblas_sdot
  #define blas_scale cblas_sscal
  #define blas_swap  cblas_sswap
  #define blas_axpy  cblas_saxpy
  #define blas_2norm cblas_snrm2
  #define blas_asum  cblas_sasum
  #define blas_iamax cblas_isamax

  // MKL Vector Math functions
  #define vml_add    vsAdd
  #define vml_sub    vsSub
  #define vml_mul    vsMul
  #define vml_max    vsFmax
  #define vml_maxinc vsFmaxI
  #define vml_min    vsFmin
  #define vml_mininc vsFminI
  #define vml_inv    vsInv
  #define vml_sqrt   vsSqrt

  // MKL Sparse BLAS functions
  #define spblas_create_csc mkl_sparse_s_create_csc
  #define spblas_set_value  mkl_sparse_s_set_value
  #define spblas_export_csc mkl_sparse_s_export_csc
  #define spblas_mv         mkl_sparse_s_mv
#else
  /* Double-precision (default) variants. */
  // MKL Level 1 BLAS functions
  #define blas_copy  cblas_dcopy
  #define blas_dot   cblas_ddot
  #define blas_scale cblas_dscal
  #define blas_swap  cblas_dswap
  #define blas_axpy  cblas_daxpy
  #define blas_2norm cblas_dnrm2
  #define blas_asum  cblas_dasum
  #define blas_iamax cblas_idamax

  // MKL Vector Math functions
  #define vml_add    vdAdd
  #define vml_sub    vdSub
  #define vml_mul    vdMul
  #define vml_max    vdFmax
  #define vml_maxinc vdFmaxI
  #define vml_min    vdFmin
  #define vml_mininc vdFminI
  #define vml_inv    vdInv
  #define vml_sqrt   vdSqrt

  // MKL Sparse BLAS functions
  #define spblas_create_csc mkl_sparse_d_create_csc
  #define spblas_set_value  mkl_sparse_d_set_value
  #define spblas_export_csc mkl_sparse_d_export_csc
  #define spblas_mv         mkl_sparse_d_mv
#endif /* OSQP_USE_FLOAT */

#endif
```
|
```scss
// Callout panel with a colored left border. Color modifiers: .success,
// .alert, .warning (default is the secondary color).
.callout {
  @apply rounded p-4 border-l-4 border-secondary bg-secondary/5 space-y-2;

  &.success {
    @apply border-success bg-success/5;
  }

  &.alert {
    @apply border-alert bg-alert/5;
  }

  &.warning {
    @apply border-warning bg-warning/5;
  }

  // Spacing below the callout for the next sibling element.
  & + * {
    @apply mt-4;
  }

  a {
    @apply text-secondary;
  }

  // Accordion variant: the collapsed panel is clamped with a fade-out
  // gradient; the gradient's end color matches the callout background.
  &[data-component="accordion"] {
    [id*="panel"][aria-hidden="true"] {
      @apply block max-h-14 overflow-hidden relative before:content-[''] before:absolute before:inset-0 before:h-full before:w-full before:bg-gradient-to-b before:from-transparent before:to-white after:content-[''] after:absolute after:inset-0 after:h-full after:w-full after:bg-gradient-to-b after:from-transparent after:to-secondary/5;
    }

    &.success [id*="panel"][aria-hidden="true"] {
      @apply after:to-success/5;
    }

    &.alert [id*="panel"][aria-hidden="true"] {
      @apply after:to-alert/5;
    }

    &.warning [id*="panel"][aria-hidden="true"] {
      @apply after:to-warning/5;
    }

    // Toggle the expand/collapse icon + label pairs based on aria-expanded.
    [aria-expanded="false"] > svg:last-of-type,
    [aria-expanded="false"] > span:last-of-type,
    [aria-expanded="true"] > span:first-of-type,
    [aria-expanded="true"] > svg:first-of-type {
      @apply hidden;
    }

    [aria-expanded="true"] > svg:last-of-type,
    [aria-expanded="true"] > span:last-of-type,
    [aria-expanded="false"] > span:first-of-type,
    [aria-expanded="false"] > svg:first-of-type {
      @apply block;
    }
  }
}
```
|
```ruby
# frozen_string_literal: true
module Decidim
  module Proposals
    module Admin
      # A command with all the business logic when an admin batch updates proposals category.
      class UpdateProposalCategory < Decidim::Command
        include TranslatableAttributes

        # Public: Initializes the command.
        #
        # category_id - the category id to update
        # proposal_ids - the proposals ids to update.
        def initialize(category_id, proposal_ids)
          @category = Decidim::Category.find_by id: category_id
          @proposal_ids = proposal_ids
          @response = { category_name: "", successful: [], errored: [] }
        end

        # Executes the command. Broadcasts these events:
        #
        # - :update_proposals_category - when everything is ok, returns @response.
        # - :invalid_category - if the category is blank.
        # - :invalid_proposal_ids - if the proposal_ids is blank.
        #
        # Returns @response hash:
        #
        # - :category_name - the translated_name of the category assigned
        # - :successful - Array of names of the updated proposals
        # - :errored - Array of names of the proposals not updated because they already had the category assigned
        def call
          return broadcast(:invalid_category) if @category.blank?
          return broadcast(:invalid_proposal_ids) if @proposal_ids.blank?

          @response[:category_name] = @category.translated_name
          Proposal.where(id: @proposal_ids).find_each { |proposal| process_proposal(proposal) }
          broadcast(:update_proposals_category, @response)
        end

        private

        # Routes one proposal to either the errored bucket (category already
        # assigned) or the successful bucket (category updated + notification).
        def process_proposal(proposal)
          if proposal.category == @category
            @response[:errored] << translated_attribute(proposal.title)
          else
            assign_category_and_notify(proposal)
            @response[:successful] << translated_attribute(proposal.title)
          end
        end

        # Persists the new category and notifies authors atomically.
        def assign_category_and_notify(proposal)
          transaction do
            proposal.update!(category: @category)
            notify_author(proposal) if proposal.coauthorships.any?
          end
        end

        # Publishes the category-change event to the proposal's notifiable identities.
        def notify_author(proposal)
          Decidim::EventsManager.publish(
            event: "decidim.events.proposals.proposal_update_category",
            event_class: Decidim::Proposals::Admin::UpdateProposalCategoryEvent,
            resource: proposal,
            affected_users: proposal.notifiable_identities
          )
        end
      end
    end
  end
end
```
|
"Never Kill a Boy on the First Date" is the fifth episode of the first season of the television series Buffy the Vampire Slayer. The episode aired on The WB on March 31, 1997. The episode was written by story editors Rob Des Hotel and Dean Batali, and directed by David Semel. The narrative follows Buffy Summers (Sarah Michelle Gellar), as she struggles to find a date and stop the rise of the Anointed One.
Plot
Owen asks Buffy out on a date at The Bronze. Giles has found out about a prophecy from the symbol on a ring they found in the cemetery. He is convinced that the Anointed One will rise that night, and so despite Buffy's protests, they spend hours sitting on graves waiting for a vampire to rise. None does and though Giles is certain that his calculations are correct, he calls their stake-out quits. Buffy rushes to The Bronze, only to see Owen dancing with Cordelia.
At the same time, in a bus on the way to Sunnydale, a man stands up and begins to lecture the other passengers on God's judgment, quoting prophecies. Suddenly, a vampire walks in front of the bus, causing it to crash. Other vampires swarm the wreck, attacking the passengers, including the religious fanatic.
The next morning, Owen asks Buffy out on another date and even gives her a pocket-watch so that she does not miss it this time. When evening comes, Giles shows up at Buffy's house, waving a newspaper that shows five people died when the bus crashed, among them the suspected murderer Andrew Borba, the man who was quoting prophecies. Buffy insists on going to the Bronze so Giles decides to check the Sunnydale funeral home himself. Unfortunately, there are vampires present to get the Anointed One, and they trap Giles in a room. Xander and Willow have followed him, though, and run back to The Bronze to get Buffy.
There, first Cordelia, then Angel tries to come in between Buffy and Owen. Finally, Xander and Willow manage to get her to come to the funeral home by pretending to be a couple that wants to do something daring on a double date. When Buffy figures out what has happened, she tries to ditch Owen, but unfortunately, he tags along. Even worse, he is present in the funeral home when Borba rises as a vampire. In the fight, Owen is knocked unconscious. Buffy kills Borba by sliding him into the lit furnace.
The next morning, Owen is excited about the thrill of the action, but Buffy turns him down, realizing that there is no way that she can have a relationship with him without putting him in danger. Giles tries to comfort her by telling her what a burden it was for him as a ten-year-old to find out his destiny was to be a watcher when he wanted a more fun career. Both agree that at least the Master will be unhappy, too, because the Anointed One was destroyed.
But in his underground lair, the Master is overjoyed as he welcomes the real Anointed One—not Borba after all, but a young boy who was on the bus with him.
Reception
"Never Kill a Boy on the First Date" first aired on The WB on March 31, 1997. It earned a Nielsen rating of 2.8 on its original airing. It was the 104th most watched show out of all 115 primetime shows of its time; fifth out of the eleven shows from The WB.
Noel Murray of The A.V. Club liked that the episode explored a new side of Buffy, but felt that the four previous episodes had not set up Buffy as a "person with normal teenage tastes and desires". He still found "a lot to like" in the episode. A BBC review stated that some "very amusing scenes compensate for the absence of an involving plot". The review noted that the plot took a while to get started and the direction of the funeral home sequences made it fall short of its potential. DVD Talk's Philip Duncan identified "Never Kill a Boy on the First Date" the "weakest" episode of those relating to the Master plotline in the season.
References
External links
Buffy the Vampire Slayer (season 1) episodes
1997 American television episodes
it:Episodi di Buffy l'ammazzavampiri (prima stagione)#Il primo appuntamento
|
```xml
import { screen } from '@testing-library/react';
import { addDays } from 'date-fns';
import { CHECKLIST_DISPLAY_TYPE } from '@proton/shared/lib/interfaces';
import type { ContextState } from '../../../containers/onboardingChecklist/provider/GetStartedChecklistProvider';
import { useGetStartedChecklist } from '../../../containers/onboardingChecklist/provider/GetStartedChecklistProvider';
import { render } from '../../../helpers/test/helper';
import MailboxContainerPlaceholder from '../MailboxContainerPlaceholder';
// Mock the checklist provider module: the hook is swapped out per-test via
// `mockedReturn`, and the provider component simply renders its children.
// NOTE: the original file called jest.mock() twice for this same module
// (once with a factory, once bare). Both calls are hoisted, so the bare
// call could replace the explicit factory with an automock, breaking the
// pass-through `default` component. The redundant call is removed.
jest.mock('../../../containers/onboardingChecklist/provider/GetStartedChecklistProvider', () => ({
    __esModule: true,
    useGetStartedChecklist: jest.fn(),
    default: ({ children }: { children: any }) => <>{children}</>,
}));

const mockedReturn = useGetStartedChecklist as jest.MockedFunction<typeof useGetStartedChecklist>;
describe('MailboxContainerPlaceholder', () => {
    // Stub the checklist hook with a given display state; everything else is
    // identical across the three cases.
    const mockChecklist = (displayState: CHECKLIST_DISPLAY_TYPE) =>
        mockedReturn.mockReturnValue({
            displayState,
            items: new Set(),
            expiresAt: addDays(new Date(), 10),
            canDisplayChecklist: true,
        } as ContextState);

    // Shared render helper — the component props never vary between cases.
    const renderPlaceholder = () =>
        render(
            <MailboxContainerPlaceholder
                showPlaceholder={true}
                welcomeFlag={false}
                labelID="labelID"
                checkedIDs={[]}
                handleCheckAll={jest.fn()}
            />
        );

    it('Should display checklist when no mails are present', async () => {
        mockChecklist(CHECKLIST_DISPLAY_TYPE.FULL);
        await renderPlaceholder();
        screen.getByTestId('onboarding-checklist');
    });

    it('Should display section pane when checklist is reduced', async () => {
        mockChecklist(CHECKLIST_DISPLAY_TYPE.REDUCED);
        await renderPlaceholder();
        screen.getByTestId('section-pane--wrapper');
    });

    it('Should display section pane when checklist is hidden', async () => {
        mockChecklist(CHECKLIST_DISPLAY_TYPE.HIDDEN);
        await renderPlaceholder();
        screen.getByTestId('section-pane--wrapper');
    });
});
```
|
```javascript
// Extra validation errors messages for Parsley
// Load this after Parsley
// Serbian (latin-script) validation messages.
// NOTE(review): the original strings had every non-ASCII character stripped
// (encoding damage), e.g. "vie rei" for "više reči"; diacritics restored below.
Parsley.addMessages('sr', {
  dateiso: "Unesite validan datum u formatu YYYY-MM-DD.",
  minwords: "Potrebno je da unesete %s ili više reči.",
  maxwords: "Moguće je uneti maksimalno %s reči.",
  words: "Potrebno je da unesete između %s i %s reči.",
  gt: "Ova vrednost mora da bude veća.",
  gte: "Ova vrednost mora da bude veća ili jednaka.",
  lt: "Ova vrednost mora da bude manja.",
  lte: "Ova vrednost mora da bude manja ili jednaka.",
  notequalto: "Sadržaj ovog polja mora biti različit."
});
```
|
Jhantipahari High School is a Higher secondary school of Bankura district, India. It was established in 1945. This school is affiliated under two Boards. It is affiliated to West Bengal Board of Secondary Education for Madhyamik(10th) and to West Bengal Council of Higher Secondary Education for Higher Secondary.
It offers grades from class V to class XII. Grades V to X admit boys only, while grades XI and XII are coeducational.
Location
It is situated next to Jhantipahari Bus Stand and around one and a half kilometres from Jhantipahari Railway Station.
References
High schools and secondary schools in West Bengal
Schools in Bankura district
Educational institutions established in 1945
1945 establishments in India
|
```smalltalk
using Microsoft.EntityFrameworkCore;
namespace Volo.Abp.EntityFrameworkCore.TestApp.FourthContext;
/// <summary>
/// EF Core DbContext abstraction for the "fourth" test context in the test app.
/// Exposes the dummy entity set so tests can resolve the context by interface.
/// </summary>
public interface IFourthDbContext : IEfCoreDbContext
{
    /// <summary>Entity set of <see cref="FourthDbContextDummyEntity"/> used by the tests.</summary>
    DbSet<FourthDbContextDummyEntity> FourthDummyEntities { get; set; }
}
```
|
```c++
//
//
// See accompanying file LICENSE_1_0.txt or copy at
// path_to_url
#include <boost/mp11/function.hpp>
#include <boost/mp11/integral.hpp>
#include <boost/core/lightweight_test_trait.hpp>
#include <type_traits>
int main()
{
    using boost::mp11::mp_and;
    using boost::mp11::mp_true;
    using boost::mp11::mp_false;
    using boost::mp11::mp_int;
    using boost::mp11::mp_size_t;

    // mp_and<> of no arguments is mp_true (identity of logical AND).
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<>, mp_true>));

    // Single argument: truthiness is the argument's ::value converted to bool,
    // so any non-zero integral constant (including negative) is "true".
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_false>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<-7>>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<0>>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<7>>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<0>>, mp_false>));

    // Two arguments. The `void` cases verify short-circuiting: once a false
    // argument is seen, later arguments are not instantiated, so an invalid
    // type such as `void` (no ::value) must not cause an error.
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_true>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_false>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_false, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<-4>, mp_int<5>>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<-4>, mp_int<0>>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<0>, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<7>, mp_size_t<8>>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<7>, mp_size_t<0>>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<0>, void>, mp_false>));

    // Three arguments, including short-circuit after the first false.
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_true, mp_true>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_true, mp_false>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_false, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_false, void, void>, mp_false>));

    // Four and five arguments, mixed integral constant types.
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_true, mp_true, mp_true>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_true, mp_true, mp_false>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_true, mp_false, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_true, mp_false, void, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_false, void, void, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<1>, mp_int<2>, mp_int<-11>, mp_int<14>>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_int<1>, mp_int<0>, void, void>, mp_false>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<1>, mp_size_t<2>, mp_size_t<114>, mp_size_t<8>, mp_size_t<94>>, mp_true>));
    BOOST_TEST_TRAIT_TRUE((std::is_same<mp_and<mp_size_t<1>, mp_size_t<2>, mp_size_t<0>, void, void>, mp_false>));

    return boost::report_errors();
}
```
|
Massimo "Max" Pigoli (born 23 February 1958 in Menaggio) is an Italian auto racing driver. He presently competes in the Italian Superstars Series.
Career
Most of his career has been spent in touring car racing. He was independent champion in the Italian Superturismo Championship in 1997. Between 2003 and 2005 he competed in the Italian GT championship with a Porsche GT-3 RS. In 2006 he first competed in the Superstars Series, winning the drivers title in his debut year in a Jaguar S-Type. His best year since came in 2009, finishing third in the championship and runner-up in the International Superstars Series. For 2010 he drives for the Romeo Ferraris team with a Mercedes C63 AMG.
External links
Official website
Profile at Superstars Series official site
1958 births
Living people
Italian racing drivers
People from Menaggio
Superstars Series drivers
European Touring Car Championship drivers
Sportspeople from the Province of Como
|
```c
/* $OpenBSD: fpgetmask.c,v 1.1 2020/06/25 02:03:55 drahn Exp $ */
/* $NetBSD: fpgetmask.c,v 1.1 1999/07/07 01:55:07 danw Exp $ */
/*
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Dan Winship.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <sys/types.h>
#include <ieeefp.h>
/*
 * fpgetmask() -- return the set of floating-point exceptions currently
 * enabled (unmasked) in the PowerPC FPSCR.
 *
 * After shifting right by 3, the low five bits are the five
 * exception-enable bits, which the return masks out with 0x1f.
 *
 * Fix: declare the empty parameter list as (void).  A bare "()" is an
 * old-style (unspecified-parameters) declaration in C and is deprecated.
 */
fp_except
fpgetmask(void)
{
	u_int64_t fpscr;

	/* "mffs" copies the FPSCR into a floating-point register. */
	__asm__ volatile("mffs %0" : "=f"(fpscr));
	return ((fpscr >> 3) & 0x1f);
}
```
|
```python
"""
Script for fast image reconstruction from gradients.
Based on Ramesh Raskar's Matlab script, available here:
path_to_url~raskar/photo/code.pdf
Adapted slightly for doing "mixed" Poisson Image Editing [Perez et al.]
Paper: path_to_url~misha/Fall07/Papers/Perez03.pdf
"""
from __future__ import division
import numpy as np
import scipy.fftpack
import scipy.ndimage
import cv2
import matplotlib.pyplot as plt
#sns.set(style="darkgrid")
def DST(x):
    """Type-I discrete sine transform along axis 0.

    SciPy's DST-I is twice Matlab's; halving the output reproduces the
    Matlab scaling the rest of the script was written against.
    """
    return scipy.fftpack.dst(x, type=1, axis=0) / 2.0
def IDST(X):
    """Inverse of DST(): type-I inverse sine transform along axis 0.

    Divides by (n + 1), where n is the transform length, to undo the
    combined SciPy/Matlab scaling so that IDST(DST(x)) == x.
    """
    rows = X.shape[0]
    return np.real(scipy.fftpack.idst(X, type=1, axis=0)) / (rows + 1.0)
def get_grads(im):
    """Forward-difference x and y gradients of a 2D image.

    Returns (Dx, Dy) as float32 arrays of the same shape as *im*.  The last
    row and last column of both outputs are left at zero, matching the
    loop bounds of the original implementation.
    """
    H, W = im.shape
    Dx = np.zeros((H, W), 'float32')
    Dy = np.zeros((H, W), 'float32')
    # Vectorized equivalent of Dx[j,k] = im[j,k+1]-im[j,k] for j<H-1, k<W-1.
    Dx[:-1, :-1] = im[:-1, 1:] - im[:-1, :-1]
    Dy[:-1, :-1] = im[1:, :-1] - im[:-1, :-1]
    return Dx, Dy
def get_laplacian(Dx, Dy):
    """Discrete Laplacian from a gradient field.

    Takes backward differences of the forward-difference gradients produced
    by get_grads() and returns their sum (the divergence of the gradient).
    First row of the y-part and first column of the x-part stay zero, as in
    the original loop bounds.
    """
    H, W = Dx.shape
    Dxx = np.zeros((H, W))
    Dyy = np.zeros((H, W))
    # Vectorized equivalents of:
    #   Dxx[j,k+1] = Dx[j,k+1] - Dx[j,k]   for j < H-1, k < W-1
    #   Dyy[j+1,k] = Dy[j+1,k] - Dy[j,k]   for j < H-1, k < W-1
    Dxx[:-1, 1:] = Dx[:-1, 1:] - Dx[:-1, :-1]
    Dyy[1:, :-1] = Dy[1:, :-1] - Dy[:-1, :-1]
    return Dxx + Dyy
def poisson_solve(gx, gy, bnd):
    """Reconstruct an image from its gradient field with Dirichlet boundaries.

    Solves the Poisson equation  laplacian(img) = div(gx, gy)  on the
    interior of the image, with the border pixels of *bnd* held fixed,
    using a 2D discrete sine transform (the DST diagonalizes the Laplacian
    for zero-boundary problems).

    gx, gy : gradient field (HxW arrays), e.g. from get_grads().
    bnd    : HxW array; only its 1-pixel border is used as the boundary
             condition (the interior is overwritten below).

    Returns an HxW float32 image.
    """
    # convert to float32:
    gx = gx.astype('float32')
    gy = gy.astype('float32')
    bnd = bnd.astype('float32')

    H, W = bnd.shape
    # Divergence of the target gradient field (right-hand side of Poisson eq.):
    L = get_laplacian(gx, gy)

    # set the interior of the boundary-image to 0 (keep only the border):
    bnd[1:-1, 1:-1] = 0
    # Laplacian contribution of the fixed boundary pixels; subtracting it
    # from L moves the boundary terms to the right-hand side so the interior
    # can be solved as a zero-boundary problem:
    L_bp = np.zeros_like(L)
    L_bp[1:-1, 1:-1] = -4*bnd[1:-1, 1:-1] \
        + bnd[1:-1, 2:] + bnd[1:-1, 0:-2] \
        + bnd[2:, 1:-1] + bnd[0:-2, 1:-1]  # 5-point stencil of the border
    L = L - L_bp
    L = L[1:-1, 1:-1]

    # compute the 2D DST (separable: first along columns, then along rows):
    L_dst = DST(DST(L).T).T

    # Divide by the DST-domain eigenvalues of the discrete Laplacian:
    [xx, yy] = np.meshgrid(np.arange(1, W-1), np.arange(1, H-1))
    D = (2*np.cos(np.pi*xx/(W-1)) - 2) + (2*np.cos(np.pi*yy/(H-1)) - 2)
    L_dst = L_dst/D

    img_interior = IDST(IDST(L_dst).T).T  # inverse DST for rows and columns

    # Reassemble: fixed border from bnd, solved interior:
    img = bnd.copy()
    img[1:-1, 1:-1] = img_interior
    return img
def blit_images(im_top, im_back, scale_grad=1.0, mode='max'):
    """Combine two images in the gradient domain (Poisson image editing).

    im_top, im_back : HxWxC arrays of identical shape.
    scale_grad      : multiplier applied to the source (im_top) gradients.
    mode            : 'max'   -- keep, per pixel, the stronger of the two
                                 gradients ("mixed" Poisson editing);
                      'src'   -- destination gradients, overridden by source
                                 gradients wherever the source is non-zero;
                      'blend' -- sum of both gradient fields (used only by
                                 the recursive fallback below).

    Returns a uint8 array the same shape as the inputs.

    Fix vs. original: `xrange` (Python 2 only) replaced with `range`,
    which behaves identically here on both Python 2 and 3.
    """
    assert np.all(im_top.shape == im_back.shape)

    im_top = im_top.copy().astype('float32')
    im_back = im_back.copy().astype('float32')
    im_res = np.zeros_like(im_top)

    # Solve one Poisson problem per color channel:
    for ch in range(im_top.shape[2]):
        ims = im_top[:, :, ch]
        imd = im_back[:, :, ch]

        [gxs, gys] = get_grads(ims)
        [gxd, gyd] = get_grads(imd)

        gxs *= scale_grad
        gys *= scale_grad

        gxs_idx = gxs != 0
        gys_idx = gys != 0

        # mix the source and target gradients:
        if mode == 'max':
            gx = gxs.copy()
            gxm = (np.abs(gxd)) > np.abs(gxs)
            gx[gxm] = gxd[gxm]

            gy = gys.copy()
            gym = np.abs(gyd) > np.abs(gys)
            gy[gym] = gyd[gym]

            # Fraction of non-zero source gradients that survived the mixing:
            f_gx = np.sum((gx[gxs_idx] == gxs[gxs_idx]).flat) / (np.sum(gxs_idx.flat) + 1e-6)
            f_gy = np.sum((gy[gys_idx] == gys[gys_idx]).flat) / (np.sum(gys_idx.flat) + 1e-6)

            # If the source is mostly overpowered, retry with boosted source
            # gradients; if a boost was already applied, fall back to a blend.
            if min(f_gx, f_gy) <= 0.35:
                m = 'max'
                if scale_grad > 1:
                    m = 'blend'
                return blit_images(im_top, im_back, scale_grad=1.5, mode=m)
        elif mode == 'src':
            gx, gy = gxd.copy(), gyd.copy()
            gx[gxs_idx] = gxs[gxs_idx]
            gy[gys_idx] = gys[gys_idx]
        elif mode == 'blend':  # from recursive call:
            # just do an alpha blend
            gx = gxs + gxd
            gy = gys + gyd

        im_res[:, :, ch] = np.clip(poisson_solve(gx, gy, imd), 0, 255)

    return im_res.astype('uint8')
def contiguous_regions(mask):
    """Return (start, stop) index pairs for every maximal run of truthy values.

    For each returned pair, mask[start:stop] is entirely truthy, and
    together the pairs cover all such runs in order.
    """
    runs = []
    start = None
    last = -1
    for last, flag in enumerate(mask):
        if flag and start is None:
            start = last          # run begins here
        elif not flag and start is not None:
            runs.append((start, last))  # run ended just before this index
            start = None
    if start is not None:
        # mask ended while still inside a run; close it past the final index.
        runs.append((start, last + 1))
    return runs
if __name__ == '__main__':
    # Example usage: composite 'i2.jpg' onto 'gg.jpg', then compare a plain
    # alpha blend against the Poisson (gradient-domain) composite.
    #
    # Fixes vs. original (all Python-3 / modern-library compatibility):
    #   * xrange -> range
    #   * scipy.misc.imsave (removed from SciPy) -> plt.imsave
    #   * cv2.cv.CV_BGR2Lab (OpenCV 1.x shim) -> cv2.COLOR_BGR2Lab
    #   * plt.hold() (removed from matplotlib; repeated plot() calls already
    #     draw on the same axes) -> dropped
    import seaborn as sns

    im_src = cv2.imread('i2.jpg').astype('float32')
    im_dst = cv2.imread('gg.jpg').astype('float32')

    # Mean source color (kept for parity with the original script; unused).
    mu = np.mean(np.reshape(im_src, [im_src.shape[0] * im_src.shape[1], 3]), axis=0)

    sz = (700, 700)
    im_src = cv2.resize(im_src, sz)
    im_dst = cv2.resize(im_dst, sz)

    # Mask of bright blue-channel destination pixels; those take the source.
    im0 = im_dst[:, :, 0] > 100
    im_dst[im0, :] = im_src[im0, :]
    im_dst[~im0, :] = 50
    im_dst = cv2.GaussianBlur(im_dst, (5, 5), 5)

    # Naive alpha blend, for comparison with the Poisson result.
    im_alpha = 0.8 * im_dst + 0.2 * im_src

    im_res = blit_images(im_src, im_dst)

    # plt.imsave expects RGB, hence the BGR->RGB channel flip.
    plt.imsave('orig.png', im_src[:, :, ::-1].astype('uint8'))
    plt.imsave('alpha.png', im_alpha[:, :, ::-1].astype('uint8'))
    plt.imsave('poisson.png', im_res[:, :, ::-1].astype('uint8'))

    # Lightness (L) channels in Lab space, for 1D profile comparisons.
    im_actual_L = cv2.cvtColor(im_src.astype('uint8'), cv2.COLOR_BGR2Lab)[:, :, 0]
    im_alpha_L = cv2.cvtColor(im_alpha.astype('uint8'), cv2.COLOR_BGR2Lab)[:, :, 0]
    im_poisson_L = cv2.cvtColor(im_res.astype('uint8'), cv2.COLOR_BGR2Lab)[:, :, 0]

    for i in range(500, im_alpha_L.shape[1], 5):
        l_actual = im_actual_L[i, :]
        l_alpha = im_alpha_L[i, :]
        l_poisson = im_poisson_L[i, :]

        with sns.axes_style("darkgrid"):
            plt.subplot(2, 1, 2)
            plt.plot(l_alpha, label='alpha')
            plt.plot(l_poisson, label='poisson')
            plt.plot(l_actual, label='actual')
            plt.legend()

            # Highlight "text regions" (pixels outside the bright mask):
            is_txt = ~im0[i, :]
            t_loc = contiguous_regions(is_txt)
            ax = plt.gca()
            for b0, b1 in t_loc:
                ax.axvspan(b0, b1, facecolor='red', alpha=0.1)

        with sns.axes_style("white"):
            plt.subplot(2, 1, 1)
            plt.imshow(im_alpha[:, :, ::-1].astype('uint8'))
            plt.plot([0, im_alpha_L.shape[0] - 1], [i, i], 'r')
            plt.axis('image')
        plt.show()

    plt.subplot(1, 3, 1)
    plt.imshow(im_src[:, :, ::-1].astype('uint8'))
    plt.subplot(1, 3, 2)
    plt.imshow(im_alpha[:, :, ::-1].astype('uint8'))
    plt.subplot(1, 3, 3)
    plt.imshow(im_res[:, :, ::-1])  # cv2 reads in BGR
    plt.show()
```
|
```smalltalk
// See the LICENCE file in the repository root for full licence text.
#nullable disable
using System;
using System.Threading;
using System.Threading.Tasks;
using osu.Framework.Platform;
using osu.Framework.Testing;
namespace osu.Framework.Tests.IO
{
/// <summary>
/// A headless host for testing purposes. Contains an arbitrary game that is running after construction.
/// </summary>
public partial class BackgroundGameHeadlessGameHost : TestRunHeadlessGameHost
{
    /// <summary>
    /// Starts <see cref="TestGame"/> on a dedicated long-running task and blocks
    /// until the game has processed at least one update frame (or 10 seconds pass).
    /// </summary>
    /// <exception cref="TimeoutException">Thrown if no frame is processed within 10 seconds.</exception>
    public BackgroundGameHeadlessGameHost(string gameName = null, HostOptions options = null, bool realtime = true)
        : base(gameName, options, realtime: realtime)
    {
        var testGame = new TestGame();

        // Run() blocks for the lifetime of the game, so it must live on its own
        // (long-running, i.e. non-thread-pool) task.
        Task.Factory.StartNew(() => Run(testGame), TaskCreationOptions.LongRunning);

        // Wait for proof that the game loop is actually ticking before returning.
        if (!testGame.HasProcessed.Wait(10000))
            throw new TimeoutException("Game took too long to process a frame");
    }

    /// <summary>
    /// Minimal game whose only job is to signal once its update loop has run.
    /// </summary>
    private partial class TestGame : Game
    {
        // Set on the first processed frame; the host constructor waits on this.
        internal readonly ManualResetEventSlim HasProcessed = new ManualResetEventSlim(false);

        protected override void Update()
        {
            base.Update();
            HasProcessed.Set();
        }

        protected override void Dispose(bool isDisposing)
        {
            HasProcessed.Dispose();
            base.Dispose(isDisposing);
        }
    }

    protected override void Dispose(bool isDisposing)
    {
        // Ensure the background game loop is shut down before base disposal.
        if (ExecutionState != ExecutionState.Stopped)
            Exit();

        base.Dispose(isDisposing);
    }
}
}
```
|
```objective-c
/*
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing,
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* specific language governing permissions and limitations
*/
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import <UIKit/UIKit.h>
// Layer used to draw an inner box shadow; subclasses CAGradientLayer so the
// shadow can be rendered as a gradient inset from the layer's edge.
// NOTE(review): property semantics below are inferred from the names
// (they appear to mirror CSS box-shadow) — confirm against the implementation.
@interface WXInnerLayer : CAGradientLayer

// Blur radius of the shadow, in points.
@property CGFloat boxShadowRadius;
// Shadow color (object-typed, hence strong ownership).
@property (nonatomic,strong) UIColor *boxShadowColor;
// Horizontal/vertical offset of the shadow.
@property CGSize boxShadowOffset;
// Shadow opacity; presumably in [0, 1] — confirm.
@property CGFloat boxShadowOpacity;

@end
```
|
Everspin Technologies is a public semiconductor company headquartered in Chandler, Arizona, United States. It develops and manufactures discrete magnetoresistive RAM or magnetoresistive random-access memory (MRAM) products, including Toggle MRAM and Spin-Transfer Torque MRAM (STT-MRAM) product families. It also licenses its technology for use in embedded MRAM (eMRAM) applications, magnetic sensor applications as well as performs backend foundry services for eMRAM.
MRAM has the performance characteristics close to static random-access memory (SRAM) while also having the persistence of non-volatile memory, meaning that it will not lose its charge or data if power is removed from the system. This characteristic makes MRAM suitable for a large number of applications where persistence, performance, endurance and reliability are critical.
History
The path to MRAM began in 1984 when the GMR effect was discovered by Albert Fert and Peter Grünberg. Twelve years later, in 1996, spin-transfer torque is proposed, enabling a magnetic tunnel junction or spin valve to be modified with a spin-polarized current. At this point, Motorola began their MRAM research, which led to their first MTJ in 1998. A year later, in 1999, Motorola developed a 256Kb MRAM Test Chip that enabled work to begin on productizing MRAM technology, which was followed by a patent for Toggle being granted to Motorola in 2002. The industry's first MRAM (4Mb) product became commercially available in 2006.
Much of the early MRAM work was done by Motorola, which spun off its semiconductor business in 2004 to create Freescale Semiconductor; in 2008 Freescale spun out the MRAM business as Everspin Technologies.
In 2008, Everspin announced BGA packages for their MRAM product family that would support densities from 256Kb to 4Mb. The following year, in 2009, Everspin released their first generation SPI MRAM product family and began shipping the first embedded MRAM samples in conjunction with GlobalFoundries. By 2010, Everspin had begun ramping production and sold its first million MRAMs. That same year qualification had completed on the industry's first embedded MRAM and 16Mb densities had been released.
With production ramping, Everspin shipped its four millionth stand-alone MRAM and its two millionth embedded MRAM by 2011. The 64Mb ST-MRAM, which was produced on a 90 nm process occurred in 2012.
In 2014 Everspin partnered with GlobalFoundries for production of in-plane and perpendicular MTJ ST-MRAM on 300mm wafers, utilizing 40 nm and 28 nm node processes.
By 2016, Everspin had announced it was shipping samples of the industry's first 256Mb ST-MRAM to customers, GlobalFoundries announced 22 nm embedded MRAM in conjunction with Everspin, and Everspin went public in an IPO later in the year on October 7.
In 2017, Everspin expanded support for MRAM to FPGAs by bringing DDR3 and DDR4 compatibility to its ST-MRAM products, making it compatible with Xilinx's UltraScale FPGA memory controller. On September 1, 2017, Kevin Conley was named Everspin CEO and President. Conley was the former CTO of SanDisk and brings enterprise storage expertise to the company.
In 2018, Everspin ramped production volumes of its 256Mb STT-MRAM and in December shipped its first customer samples of the 1Gb STT-MRAM.
In 2019, Everspin began pre-production of its 1Gb STT-MRAM in June and announced the expansion of the design-in ecosystem to enable system designers to implement the 1Gb ST-DDR4 product in their designs.
Technology
MRAM uses the magnetism of electron spin to provide fast and enduring non-volatile memory. MRAM stores information in magnetic material that is integrated with silicon circuitry to deliver the speed of RAM with the non-volatility of Flash.
Headquartered in Chandler, Arizona, Everspin owns and operates a manufacturing line for its magnetic back-end-of-line wafer processing, using standard CMOS wafers from foundries. Everspin's current MRAM products are based on 180-nm, 130-nm, 40-nm, and 28-nm process technology nodes and industry standard packages.
Products
Toggle MRAM
Toggle MRAM memory utilizes the magnetism of electron spin, enabling the storage of data without volatility or wear-out. Toggle MRAM utilizes a single transistor and a single MTJ cell in order to provide a durable, high-density memory. Because of the non-volatility of Toggle MRAM, data that is held in this memory is accessible for 20 years, at temperature (from -40c to 150c). The MTJ is composed of a fixed magnetic layer, a thin dielectric tunnel barrier, and a free magnetic layer. When a bias is applied to the Spin Toggle's MTJ, electrons that are spin polarized by the magnetic layers "tunnel" across the dielectric barrier. The MTJ device has a low resistance when the magnetic moment of the free layer is parallel to the fixed layer and a high resistance when the free layer moment is oriented anti-parallel to the fixed layer moment.
Production densities include 128Kb to 16Mb; available in Parallel and SPI interfaces; DFN, SOIC, BGA, and TSOP2 packages
Spin-transfer torque MRAM
Spin-transfer torque is a type of MRAM memory (STT-MRAM) built with a perpendicular MTJ that uses the spin-transfer torque property (the manipulation of the spin of electrons with a polarizing current) to manipulate the magnetic state of the free layer to program, or write, the bits in the memory array. Everspin's Perpendicular MTJ stack designs with high perpendicular magnetic anisotropy bring long data retention, small cell size, high density, high endurance, and low power. STT-MRAM has lower switching energy compared to Toggle MRAM, and can reach higher densities. STT-MRAM products from Everspin are compatible with JEDEC standard interfaces for DDR3 and DDR4 (with some modifications needed for MRAM technology). In this mode, the DDR3 product can act like a persistent (non-volatile) DRAM and require no refresh, while the DDR4 product has self-refresh mode under idle state conditions. The DDR4 compatible STT-MRAM devices, with a 1Gb density, began early sampling to customers in early August 2017. In June 2019, the 1Gb STT-MRAM entered pilot production.
nvNITRO Storage Accelerators
Everspin developed nvNITRO products to address storage requirements that are typically being served by NVMe products. There are two different form factors, HHHL (PCIe Gen3 x8), and U.2. These devices can store up to 1GB in data today, with greater capacities planned as MRAM densities scale up over time. nvNITRO products can handle both NVMe 1.1 and block storage requirements. Because these products are built on MRAM, they do not require the battery backup of typical magnetic storage products in order to protect data in flight. Everspin officially launched the first version of the nvNITRO in August 2017, based on 256Mb ST-MRAM (1GB and 2GB capacities). Future versions will be based on the upcoming 1Gb ST-MRAM densities which recently began sampling to customers. SMART Modular Technologies has signed up as an nvNITRO technology partner and will sell nvNITRO storage accelerators under their brand name.
Embedded MRAM
Everspin has partnered with GlobalFoundries to integrate MRAM into standard CMOS technology, enabling it to be integrated, non-destructively, into CMOS logic designs. The embedded MRAM can replace embedded flash, DRAM or SRAM in any CMOS design, delivering similar capacities of memory with non-volatility. Embedded MRAM can be integrated into 65 nm, 40 nm, 28 nm and now in GlobalFoundries 22FDX process which is 22 nm and utilizes fully depleted silicon-on-insulator (FD-SOI).
References
External links
Everspin official website
Semiconductor companies of the United States
American companies established in 2008
Companies listed on the Nasdaq
2016 initial public offerings
Companies based in Chandler, Arizona
|
Adam Samuel Oller (born October 17, 1994) is an American professional baseball pitcher in the Seattle Mariners organization. He has previously played in Major League Baseball (MLB) for the Oakland Athletics. He was drafted by the Pittsburgh Pirates in the 20th round of the 2016 MLB draft.
Amateur career
A native of Conroe, Texas, Oller attended Concordia Lutheran High School in Tomball, Texas, where he was a teammate of current major leaguer Glenn Otto. As a senior, Oller helped the Crusaders reach the State Championship Game and was named all-state at both pitcher and catcher. Oller was also a standout football player and was named to the all-state team twice during his high school career.
Oller played college baseball at Northwestern State University, where in addition to pitching, he played catcher. In 2015, he played collegiate summer baseball with the Falmouth Commodores of the Cape Cod Baseball League, becoming the first NSU player invited to play in the league since 2010.
At NSU, Oller earned second-team All-America honors from Collegiate Baseball Magazine and third-team honors from the American Baseball Coaches Association following the 2016 season. Oller concluded his Demons career ranked fourth in career innings pitched (310 1-3), career ERA (2.06) and career starts (43), eighth in career wins (20) and ninth in career winning percentage (.741). He holds single-season top-10 marks in innings pitched (109.2, 2nd and 108.1, 3rd), ERA (1.23, 5th) and winning percentage (.889, T-8th).
Professional career
Pittsburgh Pirates
Oller was drafted by the Pittsburgh Pirates in the 20th round of the 2016 Major League Baseball Draft. He signed with the Pirates for a $70,000 signing bonus. He made his professional debut for the rookie-level Bristol Pirates, posting a 4.45 ERA in 13 appearances. Oller spent the 2017 season with the Low–A West Virginia Black Bears, pitching to a 1.59 ERA with 50 strikeouts in 45.1 innings pitched. In 2018, Oller split the year between the Low-A West Virginia Power and the High-A Bradenton Marauders, struggling to a 6.29 ERA with 68 strikeouts in 73.0 innings of work across 29 contests between the two teams. He was released by the Pirates organization on November 6, 2018.
Windy City ThunderBolts
On January 4, 2019, Oller signed with the Windy City ThunderBolts of the independent Frontier League. He made 4 appearances (all starts) for Windy City, posting a 2-1 record and stellar 0.67 ERA with 45 strikeouts in 27.0 innings of work.
San Francisco Giants
On May 27, 2019, Oller signed a minor league contract with the San Francisco Giants organization. He spent the remainder of the year with the Single-A Augusta GreenJackets, logging a 5-6 record and 4.02 ERA with 93 strikeouts in 17 starts for the team.
New York Mets
On December 12, 2019, the New York Mets selected Oller in the minor league phase of the Rule 5 Draft. Oller did not play in a game in 2020 due to the cancellation of the minor league season because of the COVID-19 pandemic. He split the 2021 season between the Double-A Binghamton Rumble Ponies and the Triple-A Syracuse Mets, pitching to a 9-4 record and 3.45 ERA with 138 strikeouts in 120 innings pitched across 23 starts between the two affiliates. The Mets added him to their 40-man roster on November 19, 2021 to protect him from the Rule 5 draft.
Oakland Athletics
On March 12, 2022, the Mets traded Oller and J. T. Ginn to the Oakland Athletics in exchange for Chris Bassitt. On April 2, Oakland announced that Oller had made the Opening Day roster. Oller made his MLB debut on April 12 as the starting pitcher against the Tampa Bay Rays. Coincidentally, the starting pitcher for the Rays was Tommy Romero, who was also making his MLB debut in the game. On July 25, Oller earned his first career win after pitching 5.0 innings against the Houston Astros, allowing 4 runs (3 earned) on 4 hits and 2 walks with 4 strikeouts. He finished his rookie campaign with a 2–8 record and 6.30 ERA with 46 strikeouts in innings pitched.
Oller began the 2023 season up and down between Oakland and Las Vegas. In 12 games (9 starts) for the Aviators, he posted a 7.11 ERA with 59 strikeouts in innings pitched; in 9 contests for Oakland, Oller struggled to a 10.07 ERA with 13 strikeouts in innings of work.
Seattle Mariners
On July 12, 2023, Oller was claimed off waivers by the Seattle Mariners. In 12 starts for the Triple–A Tacoma Rainiers, he registered a 6–4 record and 5.51 ERA with 62 strikeouts in innings pitched. Following the season on October 31, the Mariners removed Oller from their 40–man roster and sent him outright to Triple–A Tacoma.
See also
Rule 5 draft results
References
External links
1994 births
Living people
American expatriate baseball players in Australia
Augusta GreenJackets players
Baseball players from Montgomery County, Texas
Binghamton Rumble Ponies players
Bradenton Marauders players
Bristol Pirates players
Falmouth Commodores players
Las Vegas Aviators players
Major League Baseball pitchers
Mat-Su Miners players
Northwestern State Demons baseball players
Oakland Athletics players
People from Conroe, Texas
Sydney Blue Sox players
Syracuse Mets players
Tacoma Rainiers players
West Virginia Black Bears players
West Virginia Power players
Windy City ThunderBolts players
|
```shell
Intro to `iptables`
Debugging `ssh` client issues
SSH tunneling made easy
Find services running on your host
Sending emails from terminal / scripts
```
|
Sir Vincent Henry Penalver Caillard (23 October 1856 – 18 March 1930) was a British Army officer, diplomat, financier, company director and municipal politician.
Education and career
The fifth child and eldest son of Camille Felix Désiré Caillard, a county court judge, he was educated at Eton College and the Royal Military Academy, Woolwich. He was commissioned in the Royal Engineers in 1876. In 1882 he spent short periods in the Intelligence Department and among headquarters staff during the invasion of Egypt.
He served as President of the Ottoman Public Debt Council from 1883 to 1898, receiving a knighthood in 1896. He was then a director of the National Bank of Egypt until 1908. From 1898 he was a director of the armaments manufacturer Vickers, becoming financial director in 1906 and overseeing much of the company's overseas operations. Vickers entered a long decline, and he was forced to resign in 1927. A director of several other undertakings, including railway companies, he co-founded the Federation of British Industries in 1916 and was its president in 1919.
He was an energetic supporter of Joseph Chamberlain's campaign to protect British companies by imposing tariffs on imports, and in 1904 was the chairman of Chamberlain's Tariff Commission. He was a county alderman on the London County Council for the Municipal Reform Party. In 1920 he was commissioned as a Deputy Lieutenant of Wiltshire.
A close associate of Sir Basil Zaharoff, Caillard played a key role in making Zaharoff's services available to H. H. Asquith and David Lloyd George as an agent of influence in the Levant.
Personal life
Caillard married his stepsister Eliza Frances Hanham in 1881; she was a sister of Sir John Alexander Hanham, 9th Baronet. They had a son and a daughter. In 1927, the year after Eliza's death, he married Zoë Gertrude (1868–1935), widow of the banker, businessman and mountaineer John Oakley Maund.
On the death of his father in 1898, Caillard inherited Wingfield House, near Bradford-on-Avon in Wiltshire. Around that time he made further purchases which resulted in his owning much of the land in Wingfield parish.
Caillard died in Paris on 18 March 1930, aged 73. His funeral was held at Wingfield church on 26 March, and a memorial service took place on the same day at St Margaret's, Westminster.
References
External links
1856 births
1930 deaths
Deputy Lieutenants of Wiltshire
British financiers
British industrialists
Municipal Reform Party politicians
Knights Bachelor
Members of London County Council
People educated at Eton College
Graduates of the Royal Military Academy, Woolwich
Royal Engineers officers
|
Sakaemachi Station is a railway station in Higashi-ku, Nagoya, Aichi Prefecture, Japan, operated by Meitetsu.
Lines
Sakaemachi Station is a terminus of the Meitetsu Seto Line, and is located 20.6 kilometers from the opposing terminus of the line.
Station layout
The station has one dead-headed underground island platform. The station has automated ticket machines, Manaca automated turnstiles and is staffed.
Adjacent stations
|-
!colspan=5|Nagoya Railroad
Station history
Sakaemachi Station was opened on August 20, 1978. On December 16, 2006, the Tranpass system of magnetic fare cards with automatic turnstiles was implemented.
Passenger statistics
In fiscal year 2017, the station was used by an average of 20,623 passengers daily.
See also
List of Railway Stations in Japan
References
External links
Official web page
Railway stations in Japan opened in 1978
Railway stations in Aichi Prefecture
Stations of Nagoya Railroad
Railway stations in Nagoya
Sakae, Nagoya
|
Manuel Velez Pangilinan, (born July 14, 1946), also known as Manny Pangilinan or sometimes by his initials MVP, is a Filipino businessman and sports patron. He is the managing director and CEO of First Pacific Company Limited, a Hong Kong-based investment management and holding company with operations in the Asia-Pacific region. Pangilinan is also First Pacific's chairman for the group's investments in the Philippines, such as Metro Pacific Investments Corporation, PLDT, and Philex Mining Corporation. He is chairman Emeritus of the Samahang Basketbol ng Pilipinas (SBP) and was its first president, having served two consecutive terms from 2007 to 2018.
Education
Pangilinan completed his primary and secondary education at San Beda. He graduated cum laude from the Ateneo de Manila University with a Bachelor of Arts degree in Economics. Pangilinan won a competition sponsored by Procter & Gamble for a scholarship to the Wharton School of the University of Pennsylvania and graduated in 1968 with a Master of Business Administration degree.
Career
Pangilinan's first job was as the executive assistant to the president of the Philippine Investment Management Consultants, Inc. (PHINMA) where he served for six years.
In 1976, Pangilinan relocated to Hong Kong as executive director at Bancom International, an investment bank, where he gained experience in international finance. He then went on to the American Express in Hong Kong as an investment banker.
In Hong Kong, Pangilinan co-founded First Pacific in 1981 with Sudono Salim and Salim's son, Anthoni Salim. In 1987, Pangilinan established Metro Pacific as First Pacific's investment arm in the Philippines. As chairman of Metro Pacific, Pangilinan contributed to nation building through major investments in power distribution and energy innovations (Meralco), urban water concession (Maynilad Water Services), tollways (Metro Pacific Tollways Corporation), upgrading hospitals and health services (Metro Pacific Investments Corporation). He is also Chairman of TV5 and Philex Mining Corporation.
In 1998, First Pacific acquired PLDT, the largest telecommunications firm in the Philippines. The company, which had once been troubled by debt and technological issues, improved under First Pacific's management to become the nation's leading provider of digital communications.
Pangilinan is also a major patron of Philippine sports. He owns three PBA teams – TNT KaTropa, the Meralco Bolts and the NLEX Road Warriors. He founded the Samahang Basketbol ng Pilipinas (SBP), the national sport association for basketball in the Philippines. He served as the first president of SBP for two consecutive terms (2007 to 2016). He is currently the chairman emeritus of SBP. He was elected to the Central Board of FIBA in 2014, and served until August 2023. Moreover, his MVP Sports Foundation has supported other Philippine athletes, some of whom won gold medals in the 2018 Asian Games.
Involvement in sports
Pangilinan is a sports patron having been raised from a family that regularly enjoyed basketball. His mother was a fan of the San Beda Red Lions basketball team and was best friends with Caloy Loyzaga. His father played the sport along with baseball and tennis. Pangilinan himself plays badminton. Through the MVP Foundation, Pangilinan funds initiatives on badminton, boxing, golf and taekwondo.
Honors
National Honors
: Order of Lakandula, Commander – (May 24, 2006)
: Order of Lakandula, Grand Cross – (June 5, 2010)
See also
Kapampangan Development Foundation
Smart Communications
PLDT
Sun Cellular
Meralco
References
External links
Biography at Asian Affairs
Biography at Metro Pacific
Manny Pangilinan Speech to 2006 Ateneo Graduates
1946 births
Living people
Ateneo de Manila University alumni
Basketball people in the Philippines
Businesspeople in telecommunications
21st-century Filipino businesspeople
Kapampangan people
Businesspeople from Manila
PLDT people
Wharton School of the University of Pennsylvania alumni
Filipino sports executives and administrators
TV5 Network executives
Filipino company founders
Filipino chief executives
Filipino chairpersons of corporations
Grand Crosses of the Order of Lakandula
|
```shell
Revision control of configuration files with git
CPU benchmark with `dd`
Test disk speed with `dd`
Cancel a system shutdown
System uptime
```
|
```python
from datetime import datetime
from enum import StrEnum
from typing import Dict, List, Optional, TypedDict
from localstack.aws.api import RequestContext, ServiceException, ServiceRequest, handler
# Scalar type aliases mirroring the primitive "shape" names of the AWS
# DynamoDB service model. The request/response TypedDicts below reference
# these by their model names so the generated code reads like the upstream
# API definition. (File appears auto-generated from the service spec.)
ArchivalReason = str
AttributeName = str
AutoScalingPolicyName = str
AutoScalingRoleArn = str
Backfilling = bool
BackupArn = str
BackupName = str
BackupsInputLimit = int
BooleanAttributeValue = bool
BooleanObject = bool
ClientRequestToken = str
ClientToken = str
CloudWatchLogGroupArn = str
Code = str
ConditionExpression = str
ConfirmRemoveSelfResourceAccess = bool
ConsistentRead = bool
ConsumedCapacityUnits = float
ContributorInsightsRule = str
CsvDelimiter = str
CsvHeader = str
DeletionProtectionEnabled = bool
DoubleObject = float
ErrorMessage = str
ExceptionDescription = str
ExceptionName = str
ExportArn = str
ExportManifest = str
ExportNextToken = str
ExpressionAttributeNameVariable = str
ExpressionAttributeValueVariable = str
FailureCode = str
FailureMessage = str
GlobalTableArnString = str
ImportArn = str
ImportNextToken = str
IndexName = str
Integer = int
IntegerObject = int
ItemCollectionSizeEstimateBound = float
KMSMasterKeyArn = str
KMSMasterKeyId = str
KeyExpression = str
KeySchemaAttributeName = str
ListContributorInsightsLimit = int
ListExportsMaxLimit = int
ListImportsMaxLimit = int
ListTablesInputLimit = int
NextTokenString = str
NonKeyAttributeName = str
NullAttributeValue = bool
# DynamoDB transmits numbers as strings to preserve precision.
NumberAttributeValue = str
PartiQLNextToken = str
PartiQLStatement = str
PolicyRevisionId = str
PositiveIntegerObject = int
ProjectionExpression = str
RegionName = str
ReplicaStatusDescription = str
ReplicaStatusPercentProgress = str
ResourceArnString = str
ResourcePolicy = str
RestoreInProgress = bool
S3Bucket = str
S3BucketOwner = str
S3Prefix = str
S3SseKmsKeyId = str
SSEEnabled = bool
ScanSegment = int
ScanTotalSegments = int
StreamArn = str
StreamEnabled = bool
String = str
StringAttributeValue = str
TableArn = str
TableId = str
TableName = str
TagKeyString = str
TagValueString = str
TimeToLiveAttributeName = str
TimeToLiveEnabled = bool
UpdateExpression = str
# Closed string-valued enumerations from the DynamoDB service model.
# Member values must match the wire-format strings exactly, so the explicit
# NAME = "NAME" form is intentional (StrEnum auto() would lowercase them).
class ApproximateCreationDateTimePrecision(StrEnum):
    MILLISECOND = "MILLISECOND"
    MICROSECOND = "MICROSECOND"
class AttributeAction(StrEnum):
    ADD = "ADD"
    PUT = "PUT"
    DELETE = "DELETE"
class BackupStatus(StrEnum):
    CREATING = "CREATING"
    DELETED = "DELETED"
    AVAILABLE = "AVAILABLE"
class BackupType(StrEnum):
    USER = "USER"
    SYSTEM = "SYSTEM"
    AWS_BACKUP = "AWS_BACKUP"
# Filter variant of BackupType: adds ALL for list operations.
class BackupTypeFilter(StrEnum):
    USER = "USER"
    SYSTEM = "SYSTEM"
    AWS_BACKUP = "AWS_BACKUP"
    ALL = "ALL"
class BatchStatementErrorCodeEnum(StrEnum):
    ConditionalCheckFailed = "ConditionalCheckFailed"
    ItemCollectionSizeLimitExceeded = "ItemCollectionSizeLimitExceeded"
    RequestLimitExceeded = "RequestLimitExceeded"
    ValidationError = "ValidationError"
    ProvisionedThroughputExceeded = "ProvisionedThroughputExceeded"
    TransactionConflict = "TransactionConflict"
    ThrottlingError = "ThrottlingError"
    InternalServerError = "InternalServerError"
    ResourceNotFound = "ResourceNotFound"
    AccessDenied = "AccessDenied"
    DuplicateItem = "DuplicateItem"
class BillingMode(StrEnum):
    PROVISIONED = "PROVISIONED"
    PAY_PER_REQUEST = "PAY_PER_REQUEST"
class ComparisonOperator(StrEnum):
    EQ = "EQ"
    NE = "NE"
    IN = "IN"
    LE = "LE"
    LT = "LT"
    GE = "GE"
    GT = "GT"
    BETWEEN = "BETWEEN"
    NOT_NULL = "NOT_NULL"
    NULL = "NULL"
    CONTAINS = "CONTAINS"
    NOT_CONTAINS = "NOT_CONTAINS"
    BEGINS_WITH = "BEGINS_WITH"
class ConditionalOperator(StrEnum):
    AND = "AND"
    OR = "OR"
class ContinuousBackupsStatus(StrEnum):
    ENABLED = "ENABLED"
    DISABLED = "DISABLED"
class ContributorInsightsAction(StrEnum):
    ENABLE = "ENABLE"
    DISABLE = "DISABLE"
class ContributorInsightsStatus(StrEnum):
    ENABLING = "ENABLING"
    ENABLED = "ENABLED"
    DISABLING = "DISABLING"
    DISABLED = "DISABLED"
    FAILED = "FAILED"
class DestinationStatus(StrEnum):
    ENABLING = "ENABLING"
    ACTIVE = "ACTIVE"
    DISABLING = "DISABLING"
    DISABLED = "DISABLED"
    ENABLE_FAILED = "ENABLE_FAILED"
    UPDATING = "UPDATING"
class ExportFormat(StrEnum):
    DYNAMODB_JSON = "DYNAMODB_JSON"
    ION = "ION"
class ExportStatus(StrEnum):
    IN_PROGRESS = "IN_PROGRESS"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
class ExportType(StrEnum):
    FULL_EXPORT = "FULL_EXPORT"
    INCREMENTAL_EXPORT = "INCREMENTAL_EXPORT"
class ExportViewType(StrEnum):
    NEW_IMAGE = "NEW_IMAGE"
    NEW_AND_OLD_IMAGES = "NEW_AND_OLD_IMAGES"
class GlobalTableStatus(StrEnum):
    CREATING = "CREATING"
    ACTIVE = "ACTIVE"
    DELETING = "DELETING"
    UPDATING = "UPDATING"
class ImportStatus(StrEnum):
    IN_PROGRESS = "IN_PROGRESS"
    COMPLETED = "COMPLETED"
    CANCELLING = "CANCELLING"
    CANCELLED = "CANCELLED"
    FAILED = "FAILED"
class IndexStatus(StrEnum):
    CREATING = "CREATING"
    UPDATING = "UPDATING"
    DELETING = "DELETING"
    ACTIVE = "ACTIVE"
class InputCompressionType(StrEnum):
    GZIP = "GZIP"
    ZSTD = "ZSTD"
    NONE = "NONE"
class InputFormat(StrEnum):
    DYNAMODB_JSON = "DYNAMODB_JSON"
    ION = "ION"
    CSV = "CSV"
# HASH = partition key, RANGE = sort key.
class KeyType(StrEnum):
    HASH = "HASH"
    RANGE = "RANGE"
class PointInTimeRecoveryStatus(StrEnum):
    ENABLED = "ENABLED"
    DISABLED = "DISABLED"
class ProjectionType(StrEnum):
    ALL = "ALL"
    KEYS_ONLY = "KEYS_ONLY"
    INCLUDE = "INCLUDE"
class ReplicaStatus(StrEnum):
    CREATING = "CREATING"
    CREATION_FAILED = "CREATION_FAILED"
    UPDATING = "UPDATING"
    DELETING = "DELETING"
    ACTIVE = "ACTIVE"
    REGION_DISABLED = "REGION_DISABLED"
    INACCESSIBLE_ENCRYPTION_CREDENTIALS = "INACCESSIBLE_ENCRYPTION_CREDENTIALS"
class ReturnConsumedCapacity(StrEnum):
    INDEXES = "INDEXES"
    TOTAL = "TOTAL"
    NONE = "NONE"
class ReturnItemCollectionMetrics(StrEnum):
    SIZE = "SIZE"
    NONE = "NONE"
class ReturnValue(StrEnum):
    NONE = "NONE"
    ALL_OLD = "ALL_OLD"
    UPDATED_OLD = "UPDATED_OLD"
    ALL_NEW = "ALL_NEW"
    UPDATED_NEW = "UPDATED_NEW"
class ReturnValuesOnConditionCheckFailure(StrEnum):
    ALL_OLD = "ALL_OLD"
    NONE = "NONE"
class S3SseAlgorithm(StrEnum):
    AES256 = "AES256"
    KMS = "KMS"
class SSEStatus(StrEnum):
    ENABLING = "ENABLING"
    ENABLED = "ENABLED"
    DISABLING = "DISABLING"
    DISABLED = "DISABLED"
    UPDATING = "UPDATING"
class SSEType(StrEnum):
    AES256 = "AES256"
    KMS = "KMS"
# S = string, N = number, B = binary.
class ScalarAttributeType(StrEnum):
    S = "S"
    N = "N"
    B = "B"
class Select(StrEnum):
    ALL_ATTRIBUTES = "ALL_ATTRIBUTES"
    ALL_PROJECTED_ATTRIBUTES = "ALL_PROJECTED_ATTRIBUTES"
    SPECIFIC_ATTRIBUTES = "SPECIFIC_ATTRIBUTES"
    COUNT = "COUNT"
class StreamViewType(StrEnum):
    NEW_IMAGE = "NEW_IMAGE"
    OLD_IMAGE = "OLD_IMAGE"
    NEW_AND_OLD_IMAGES = "NEW_AND_OLD_IMAGES"
    KEYS_ONLY = "KEYS_ONLY"
class TableClass(StrEnum):
    STANDARD = "STANDARD"
    STANDARD_INFREQUENT_ACCESS = "STANDARD_INFREQUENT_ACCESS"
class TableStatus(StrEnum):
    CREATING = "CREATING"
    UPDATING = "UPDATING"
    DELETING = "DELETING"
    ACTIVE = "ACTIVE"
    INACCESSIBLE_ENCRYPTION_CREDENTIALS = "INACCESSIBLE_ENCRYPTION_CREDENTIALS"
    ARCHIVING = "ARCHIVING"
    ARCHIVED = "ARCHIVED"
class TimeToLiveStatus(StrEnum):
    ENABLING = "ENABLING"
    DISABLING = "DISABLING"
    ENABLED = "ENABLED"
    DISABLED = "DISABLED"
# Service exception classes. Each carries the wire-level error `code`,
# whether the fault is attributed to the caller (`sender_fault`), and the
# HTTP status used by the service framework when serializing the error.
class BackupInUseException(ServiceException):
    code: str = "BackupInUseException"
    sender_fault: bool = False
    status_code: int = 400
class BackupNotFoundException(ServiceException):
    code: str = "BackupNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
# The DynamoDB attribute value union: exactly one of the keys is expected
# to be set, selecting the wire type (S=string, N=number-as-string,
# B=binary, SS/NS/BS=sets, M=map, L=list, NULL, BOOL).
class AttributeValue(TypedDict, total=False):
    S: Optional["StringAttributeValue"]
    N: Optional["NumberAttributeValue"]
    B: Optional["BinaryAttributeValue"]
    SS: Optional["StringSetAttributeValue"]
    NS: Optional["NumberSetAttributeValue"]
    BS: Optional["BinarySetAttributeValue"]
    M: Optional["MapAttributeValue"]
    L: Optional["ListAttributeValue"]
    NULL: Optional["NullAttributeValue"]
    BOOL: Optional["BooleanAttributeValue"]
# Recursive helpers for AttributeValue; declared after the class so the
# forward references (quoted above) resolve.
ListAttributeValue = List[AttributeValue]
MapAttributeValue = Dict[AttributeName, AttributeValue]
BinaryAttributeValue = bytes
BinarySetAttributeValue = List[BinaryAttributeValue]
NumberSetAttributeValue = List[NumberAttributeValue]
StringSetAttributeValue = List[StringAttributeValue]
AttributeMap = Dict[AttributeName, AttributeValue]
class ConditionalCheckFailedException(ServiceException):
    code: str = "ConditionalCheckFailedException"
    sender_fault: bool = False
    status_code: int = 400
    # Optionally carries the item that failed the condition check
    # (when ReturnValuesOnConditionCheckFailure=ALL_OLD was requested).
    Item: Optional[AttributeMap]
class ContinuousBackupsUnavailableException(ServiceException):
    code: str = "ContinuousBackupsUnavailableException"
    sender_fault: bool = False
    status_code: int = 400
class DuplicateItemException(ServiceException):
    code: str = "DuplicateItemException"
    sender_fault: bool = False
    status_code: int = 400
class ExportConflictException(ServiceException):
    code: str = "ExportConflictException"
    sender_fault: bool = False
    status_code: int = 400
class ExportNotFoundException(ServiceException):
    code: str = "ExportNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class GlobalTableAlreadyExistsException(ServiceException):
    code: str = "GlobalTableAlreadyExistsException"
    sender_fault: bool = False
    status_code: int = 400
class GlobalTableNotFoundException(ServiceException):
    code: str = "GlobalTableNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class IdempotentParameterMismatchException(ServiceException):
    code: str = "IdempotentParameterMismatchException"
    sender_fault: bool = False
    status_code: int = 400
class ImportConflictException(ServiceException):
    code: str = "ImportConflictException"
    sender_fault: bool = False
    status_code: int = 400
class ImportNotFoundException(ServiceException):
    code: str = "ImportNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class IndexNotFoundException(ServiceException):
    code: str = "IndexNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class InternalServerError(ServiceException):
    code: str = "InternalServerError"
    sender_fault: bool = False
    status_code: int = 400
class InvalidExportTimeException(ServiceException):
    code: str = "InvalidExportTimeException"
    sender_fault: bool = False
    status_code: int = 400
class InvalidRestoreTimeException(ServiceException):
    code: str = "InvalidRestoreTimeException"
    sender_fault: bool = False
    status_code: int = 400
class ItemCollectionSizeLimitExceededException(ServiceException):
    code: str = "ItemCollectionSizeLimitExceededException"
    sender_fault: bool = False
    status_code: int = 400
class LimitExceededException(ServiceException):
    code: str = "LimitExceededException"
    sender_fault: bool = False
    status_code: int = 400
class PointInTimeRecoveryUnavailableException(ServiceException):
    code: str = "PointInTimeRecoveryUnavailableException"
    sender_fault: bool = False
    status_code: int = 400
class PolicyNotFoundException(ServiceException):
    code: str = "PolicyNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class ProvisionedThroughputExceededException(ServiceException):
    code: str = "ProvisionedThroughputExceededException"
    sender_fault: bool = False
    status_code: int = 400
class ReplicaAlreadyExistsException(ServiceException):
    code: str = "ReplicaAlreadyExistsException"
    sender_fault: bool = False
    status_code: int = 400
class ReplicaNotFoundException(ServiceException):
    code: str = "ReplicaNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class RequestLimitExceeded(ServiceException):
    code: str = "RequestLimitExceeded"
    sender_fault: bool = False
    status_code: int = 400
class ResourceInUseException(ServiceException):
    code: str = "ResourceInUseException"
    sender_fault: bool = False
    status_code: int = 400
class ResourceNotFoundException(ServiceException):
    code: str = "ResourceNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
class TableAlreadyExistsException(ServiceException):
    code: str = "TableAlreadyExistsException"
    sender_fault: bool = False
    status_code: int = 400
class TableInUseException(ServiceException):
    code: str = "TableInUseException"
    sender_fault: bool = False
    status_code: int = 400
class TableNotFoundException(ServiceException):
    code: str = "TableNotFoundException"
    sender_fault: bool = False
    status_code: int = 400
# Per-operation cancellation detail for transactional writes.
class CancellationReason(TypedDict, total=False):
    Item: Optional[AttributeMap]
    Code: Optional[Code]
    Message: Optional[ErrorMessage]
CancellationReasonList = List[CancellationReason]
class TransactionCanceledException(ServiceException):
    code: str = "TransactionCanceledException"
    sender_fault: bool = False
    status_code: int = 400
    # One entry per operation in the cancelled transaction.
    CancellationReasons: Optional[CancellationReasonList]
class TransactionConflictException(ServiceException):
    code: str = "TransactionConflictException"
    sender_fault: bool = False
    status_code: int = 400
class TransactionInProgressException(ServiceException):
    code: str = "TransactionInProgressException"
    sender_fault: bool = False
    status_code: int = 400
# Timestamp shape alias used throughout the model.
Date = datetime
class ArchivalSummary(TypedDict, total=False):
    ArchivalDateTime: Optional[Date]
    ArchivalReason: Optional[ArchivalReason]
    ArchivalBackupArn: Optional[BackupArn]
class AttributeDefinition(TypedDict, total=False):
    AttributeName: KeySchemaAttributeName
    AttributeType: ScalarAttributeType
AttributeDefinitions = List[AttributeDefinition]
AttributeNameList = List[AttributeName]
class AttributeValueUpdate(TypedDict, total=False):
    Value: Optional[AttributeValue]
    Action: Optional[AttributeAction]
AttributeUpdates = Dict[AttributeName, AttributeValueUpdate]
AttributeValueList = List[AttributeValue]
# Auto-scaling policy shapes (descriptions report current state; updates
# carry the desired state).
class AutoScalingTargetTrackingScalingPolicyConfigurationDescription(TypedDict, total=False):
    DisableScaleIn: Optional[BooleanObject]
    ScaleInCooldown: Optional[IntegerObject]
    ScaleOutCooldown: Optional[IntegerObject]
    TargetValue: DoubleObject
class AutoScalingPolicyDescription(TypedDict, total=False):
    PolicyName: Optional[AutoScalingPolicyName]
    TargetTrackingScalingPolicyConfiguration: Optional[
        AutoScalingTargetTrackingScalingPolicyConfigurationDescription
    ]
AutoScalingPolicyDescriptionList = List[AutoScalingPolicyDescription]
class AutoScalingTargetTrackingScalingPolicyConfigurationUpdate(TypedDict, total=False):
    DisableScaleIn: Optional[BooleanObject]
    ScaleInCooldown: Optional[IntegerObject]
    ScaleOutCooldown: Optional[IntegerObject]
    TargetValue: DoubleObject
class AutoScalingPolicyUpdate(TypedDict, total=False):
    PolicyName: Optional[AutoScalingPolicyName]
    # Required member in the service model, hence no Optional wrapper.
    TargetTrackingScalingPolicyConfiguration: (
        AutoScalingTargetTrackingScalingPolicyConfigurationUpdate
    )
PositiveLongObject = int
class AutoScalingSettingsDescription(TypedDict, total=False):
    MinimumUnits: Optional[PositiveLongObject]
    MaximumUnits: Optional[PositiveLongObject]
    AutoScalingDisabled: Optional[BooleanObject]
    AutoScalingRoleArn: Optional[String]
    ScalingPolicies: Optional[AutoScalingPolicyDescriptionList]
class AutoScalingSettingsUpdate(TypedDict, total=False):
    MinimumUnits: Optional[PositiveLongObject]
    MaximumUnits: Optional[PositiveLongObject]
    AutoScalingDisabled: Optional[BooleanObject]
    AutoScalingRoleArn: Optional[AutoScalingRoleArn]
    ScalingPolicyUpdate: Optional[AutoScalingPolicyUpdate]
BackupCreationDateTime = datetime
# Table-feature snapshots captured in a backup description.
class SSEDescription(TypedDict, total=False):
    Status: Optional[SSEStatus]
    SSEType: Optional[SSEType]
    KMSMasterKeyArn: Optional[KMSMasterKeyArn]
    InaccessibleEncryptionDateTime: Optional[Date]
class TimeToLiveDescription(TypedDict, total=False):
    TimeToLiveStatus: Optional[TimeToLiveStatus]
    AttributeName: Optional[TimeToLiveAttributeName]
class StreamSpecification(TypedDict, total=False):
    StreamEnabled: StreamEnabled
    StreamViewType: Optional[StreamViewType]
LongObject = int
class OnDemandThroughput(TypedDict, total=False):
    MaxReadRequestUnits: Optional[LongObject]
    MaxWriteRequestUnits: Optional[LongObject]
class ProvisionedThroughput(TypedDict, total=False):
    ReadCapacityUnits: PositiveLongObject
    WriteCapacityUnits: PositiveLongObject
NonKeyAttributeNameList = List[NonKeyAttributeName]
class Projection(TypedDict, total=False):
    ProjectionType: Optional[ProjectionType]
    NonKeyAttributes: Optional[NonKeyAttributeNameList]
class KeySchemaElement(TypedDict, total=False):
    AttributeName: KeySchemaAttributeName
    KeyType: KeyType
KeySchema = List[KeySchemaElement]
class GlobalSecondaryIndexInfo(TypedDict, total=False):
    IndexName: Optional[IndexName]
    KeySchema: Optional[KeySchema]
    Projection: Optional[Projection]
    ProvisionedThroughput: Optional[ProvisionedThroughput]
    OnDemandThroughput: Optional[OnDemandThroughput]
GlobalSecondaryIndexes = List[GlobalSecondaryIndexInfo]
class LocalSecondaryIndexInfo(TypedDict, total=False):
    IndexName: Optional[IndexName]
    KeySchema: Optional[KeySchema]
    Projection: Optional[Projection]
LocalSecondaryIndexes = List[LocalSecondaryIndexInfo]
class SourceTableFeatureDetails(TypedDict, total=False):
    LocalSecondaryIndexes: Optional[LocalSecondaryIndexes]
    GlobalSecondaryIndexes: Optional[GlobalSecondaryIndexes]
    StreamDescription: Optional[StreamSpecification]
    TimeToLiveDescription: Optional[TimeToLiveDescription]
    SSEDescription: Optional[SSEDescription]
ItemCount = int
TableCreationDateTime = datetime
class SourceTableDetails(TypedDict, total=False):
    TableName: TableName
    TableId: TableId
    TableArn: Optional[TableArn]
    TableSizeBytes: Optional[LongObject]
    KeySchema: KeySchema
    TableCreationDateTime: TableCreationDateTime
    ProvisionedThroughput: ProvisionedThroughput
    OnDemandThroughput: Optional[OnDemandThroughput]
    ItemCount: Optional[ItemCount]
    BillingMode: Optional[BillingMode]
BackupSizeBytes = int
class BackupDetails(TypedDict, total=False):
    BackupArn: BackupArn
    BackupName: BackupName
    BackupSizeBytes: Optional[BackupSizeBytes]
    BackupStatus: BackupStatus
    BackupType: BackupType
    BackupCreationDateTime: BackupCreationDateTime
    BackupExpiryDateTime: Optional[Date]
class BackupDescription(TypedDict, total=False):
    BackupDetails: Optional[BackupDetails]
    SourceTableDetails: Optional[SourceTableDetails]
    SourceTableFeatureDetails: Optional[SourceTableFeatureDetails]
class BackupSummary(TypedDict, total=False):
    TableName: Optional[TableName]
    TableId: Optional[TableId]
    TableArn: Optional[TableArn]
    BackupArn: Optional[BackupArn]
    BackupName: Optional[BackupName]
    BackupCreationDateTime: Optional[BackupCreationDateTime]
    BackupExpiryDateTime: Optional[Date]
    BackupStatus: Optional[BackupStatus]
    BackupType: Optional[BackupType]
    BackupSizeBytes: Optional[BackupSizeBytes]
BackupSummaries = List[BackupSummary]
# PartiQL batch statement shapes (BatchExecuteStatement).
PreparedStatementParameters = List[AttributeValue]
class BatchStatementRequest(TypedDict, total=False):
    Statement: PartiQLStatement
    Parameters: Optional[PreparedStatementParameters]
    ConsistentRead: Optional[ConsistentRead]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]
PartiQLBatchRequest = List[BatchStatementRequest]
class BatchExecuteStatementInput(ServiceRequest):
    Statements: PartiQLBatchRequest
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
ReadCapacityUnits: Optional[ConsumedCapacityUnits]
WriteCapacityUnits: Optional[ConsumedCapacityUnits]
CapacityUnits: Optional[ConsumedCapacityUnits]
SecondaryIndexesCapacityMap = Dict[IndexName, Capacity]
class ConsumedCapacity(TypedDict, total=False):
TableName: Optional[TableArn]
CapacityUnits: Optional[ConsumedCapacityUnits]
ReadCapacityUnits: Optional[ConsumedCapacityUnits]
WriteCapacityUnits: Optional[ConsumedCapacityUnits]
Table: Optional[Capacity]
LocalSecondaryIndexes: Optional[SecondaryIndexesCapacityMap]
GlobalSecondaryIndexes: Optional[SecondaryIndexesCapacityMap]
ConsumedCapacityMultiple = List[ConsumedCapacity]
class BatchStatementError(TypedDict, total=False):
Code: Optional[BatchStatementErrorCodeEnum]
Message: Optional[String]
Item: Optional[AttributeMap]
class BatchStatementResponse(TypedDict, total=False):
Error: Optional[BatchStatementError]
TableName: Optional[TableName]
Item: Optional[AttributeMap]
PartiQLBatchResponse = List[BatchStatementResponse]
class BatchExecuteStatementOutput(TypedDict, total=False):
Responses: Optional[PartiQLBatchResponse]
ConsumedCapacity: Optional[ConsumedCapacityMultiple]
ExpressionAttributeNameMap = Dict[ExpressionAttributeNameVariable, AttributeName]
Key = Dict[AttributeName, AttributeValue]
KeyList = List[Key]
class KeysAndAttributes(TypedDict, total=False):
Keys: KeyList
AttributesToGet: Optional[AttributeNameList]
ConsistentRead: Optional[ConsistentRead]
ProjectionExpression: Optional[ProjectionExpression]
ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
BatchGetRequestMap = Dict[TableArn, KeysAndAttributes]
class BatchGetItemInput(ServiceRequest):
RequestItems: BatchGetRequestMap
ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
ItemList = List[AttributeMap]
BatchGetResponseMap = Dict[TableArn, ItemList]
class BatchGetItemOutput(TypedDict, total=False):
Responses: Optional[BatchGetResponseMap]
UnprocessedKeys: Optional[BatchGetRequestMap]
ConsumedCapacity: Optional[ConsumedCapacityMultiple]
class DeleteRequest(TypedDict, total=False):
Key: Key
PutItemInputAttributeMap = Dict[AttributeName, AttributeValue]
class PutRequest(TypedDict, total=False):
Item: PutItemInputAttributeMap
class WriteRequest(TypedDict, total=False):
PutRequest: Optional[PutRequest]
DeleteRequest: Optional[DeleteRequest]
WriteRequests = List[WriteRequest]
BatchWriteItemRequestMap = Dict[TableArn, WriteRequests]
class BatchWriteItemInput(ServiceRequest):
RequestItems: BatchWriteItemRequestMap
ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
ReturnItemCollectionMetrics: Optional[ReturnItemCollectionMetrics]
ItemCollectionSizeEstimateRange = List[ItemCollectionSizeEstimateBound]
ItemCollectionKeyAttributeMap = Dict[AttributeName, AttributeValue]
class ItemCollectionMetrics(TypedDict, total=False):
ItemCollectionKey: Optional[ItemCollectionKeyAttributeMap]
SizeEstimateRangeGB: Optional[ItemCollectionSizeEstimateRange]
ItemCollectionMetricsMultiple = List[ItemCollectionMetrics]
ItemCollectionMetricsPerTable = Dict[TableArn, ItemCollectionMetricsMultiple]
class BatchWriteItemOutput(TypedDict, total=False):
UnprocessedItems: Optional[BatchWriteItemRequestMap]
ItemCollectionMetrics: Optional[ItemCollectionMetricsPerTable]
ConsumedCapacity: Optional[ConsumedCapacityMultiple]
BilledSizeBytes = int
class BillingModeSummary(TypedDict, total=False):
BillingMode: Optional[BillingMode]
LastUpdateToPayPerRequestDateTime: Optional[Date]
class Condition(TypedDict, total=False):
AttributeValueList: Optional[AttributeValueList]
ComparisonOperator: ComparisonOperator
ExpressionAttributeValueMap = Dict[ExpressionAttributeValueVariable, AttributeValue]
class ConditionCheck(TypedDict, total=False):
Key: Key
TableName: TableArn
ConditionExpression: ConditionExpression
ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]
class PointInTimeRecoveryDescription(TypedDict, total=False):
PointInTimeRecoveryStatus: Optional[PointInTimeRecoveryStatus]
EarliestRestorableDateTime: Optional[Date]
LatestRestorableDateTime: Optional[Date]
class ContinuousBackupsDescription(TypedDict, total=False):
ContinuousBackupsStatus: ContinuousBackupsStatus
PointInTimeRecoveryDescription: Optional[PointInTimeRecoveryDescription]
ContributorInsightsRuleList = List[ContributorInsightsRule]
class ContributorInsightsSummary(TypedDict, total=False):
TableName: Optional[TableName]
IndexName: Optional[IndexName]
ContributorInsightsStatus: Optional[ContributorInsightsStatus]
ContributorInsightsSummaries = List[ContributorInsightsSummary]
class CreateBackupInput(ServiceRequest):
TableName: TableArn
BackupName: BackupName
class CreateBackupOutput(TypedDict, total=False):
BackupDetails: Optional[BackupDetails]
class CreateGlobalSecondaryIndexAction(TypedDict, total=False):
IndexName: IndexName
KeySchema: KeySchema
Projection: Projection
ProvisionedThroughput: Optional[ProvisionedThroughput]
OnDemandThroughput: Optional[OnDemandThroughput]
class Replica(TypedDict, total=False):
RegionName: Optional[RegionName]
ReplicaList = List[Replica]
class CreateGlobalTableInput(ServiceRequest):
GlobalTableName: TableName
ReplicationGroup: ReplicaList
# ---------------------------------------------------------------------------
# Replica / global-table description shapes.
# NOTE(review): these declarations mirror the DynamoDB service model; in a
# `TypedDict(total=False)` every key is optional at the dict level, and the
# generator uses `Optional[...]` vs a bare type to record which members the
# API spec marks required. Keep member names/types in sync with the spec.
# ---------------------------------------------------------------------------


class TableClassSummary(TypedDict, total=False):
    """Table storage class (e.g. STANDARD) and when it was last changed."""

    TableClass: Optional[TableClass]
    LastUpdateDateTime: Optional[Date]


class OnDemandThroughputOverride(TypedDict, total=False):
    """Per-replica override for on-demand read throughput."""

    MaxReadRequestUnits: Optional[LongObject]


class ProvisionedThroughputOverride(TypedDict, total=False):
    """Per-replica override for provisioned read capacity."""

    ReadCapacityUnits: Optional[PositiveLongObject]


class ReplicaGlobalSecondaryIndexDescription(TypedDict, total=False):
    IndexName: Optional[IndexName]
    ProvisionedThroughputOverride: Optional[ProvisionedThroughputOverride]
    OnDemandThroughputOverride: Optional[OnDemandThroughputOverride]


ReplicaGlobalSecondaryIndexDescriptionList = List[ReplicaGlobalSecondaryIndexDescription]


class ReplicaDescription(TypedDict, total=False):
    """Status and configuration of one regional replica of a global table."""

    RegionName: Optional[RegionName]
    ReplicaStatus: Optional[ReplicaStatus]
    ReplicaStatusDescription: Optional[ReplicaStatusDescription]
    ReplicaStatusPercentProgress: Optional[ReplicaStatusPercentProgress]
    KMSMasterKeyId: Optional[KMSMasterKeyId]
    ProvisionedThroughputOverride: Optional[ProvisionedThroughputOverride]
    OnDemandThroughputOverride: Optional[OnDemandThroughputOverride]
    GlobalSecondaryIndexes: Optional[ReplicaGlobalSecondaryIndexDescriptionList]
    ReplicaInaccessibleDateTime: Optional[Date]
    ReplicaTableClassSummary: Optional[TableClassSummary]


ReplicaDescriptionList = List[ReplicaDescription]


class GlobalTableDescription(TypedDict, total=False):
    """Describes a (version 2017.11.29) global table and its replication group."""

    ReplicationGroup: Optional[ReplicaDescriptionList]
    GlobalTableArn: Optional[GlobalTableArnString]
    CreationDateTime: Optional[Date]
    GlobalTableStatus: Optional[GlobalTableStatus]
    GlobalTableName: Optional[TableName]


class CreateGlobalTableOutput(TypedDict, total=False):
    GlobalTableDescription: Optional[GlobalTableDescription]


class CreateReplicaAction(TypedDict, total=False):
    RegionName: RegionName


class ReplicaGlobalSecondaryIndex(TypedDict, total=False):
    IndexName: IndexName
    ProvisionedThroughputOverride: Optional[ProvisionedThroughputOverride]
    OnDemandThroughputOverride: Optional[OnDemandThroughputOverride]


ReplicaGlobalSecondaryIndexList = List[ReplicaGlobalSecondaryIndex]


class CreateReplicationGroupMemberAction(TypedDict, total=False):
    """Adds a replica (with optional capacity/KMS/index overrides) to a 2019.11.21 global table."""

    RegionName: RegionName
    KMSMasterKeyId: Optional[KMSMasterKeyId]
    ProvisionedThroughputOverride: Optional[ProvisionedThroughputOverride]
    OnDemandThroughputOverride: Optional[OnDemandThroughputOverride]
    GlobalSecondaryIndexes: Optional[ReplicaGlobalSecondaryIndexList]
    TableClassOverride: Optional[TableClass]


class Tag(TypedDict, total=False):
    Key: TagKeyString
    Value: TagValueString


TagList = List[Tag]


class SSESpecification(TypedDict, total=False):
    """Server-side encryption settings for a table."""

    Enabled: Optional[SSEEnabled]
    SSEType: Optional[SSEType]
    KMSMasterKeyId: Optional[KMSMasterKeyId]


class GlobalSecondaryIndex(TypedDict, total=False):
    """A global secondary index definition, as supplied on table create/update."""

    IndexName: IndexName
    KeySchema: KeySchema
    Projection: Projection
    ProvisionedThroughput: Optional[ProvisionedThroughput]
    OnDemandThroughput: Optional[OnDemandThroughput]


GlobalSecondaryIndexList = List[GlobalSecondaryIndex]


class LocalSecondaryIndex(TypedDict, total=False):
    """A local secondary index definition (shares the table's partition key)."""

    IndexName: IndexName
    KeySchema: KeySchema
    Projection: Projection


LocalSecondaryIndexList = List[LocalSecondaryIndex]
# ---------------------------------------------------------------------------
# CreateTable request/response and the TableDescription tree they share.
# ---------------------------------------------------------------------------


class CreateTableInput(ServiceRequest):
    """Request shape for CreateTable.

    ``TableName`` is typed as ``TableArn`` because the API accepts either a
    table name or a full ARN for this member.
    """

    AttributeDefinitions: AttributeDefinitions
    TableName: TableArn
    KeySchema: KeySchema
    LocalSecondaryIndexes: Optional[LocalSecondaryIndexList]
    GlobalSecondaryIndexes: Optional[GlobalSecondaryIndexList]
    BillingMode: Optional[BillingMode]
    ProvisionedThroughput: Optional[ProvisionedThroughput]
    StreamSpecification: Optional[StreamSpecification]
    SSESpecification: Optional[SSESpecification]
    Tags: Optional[TagList]
    TableClass: Optional[TableClass]
    DeletionProtectionEnabled: Optional[DeletionProtectionEnabled]
    ResourcePolicy: Optional[ResourcePolicy]
    OnDemandThroughput: Optional[OnDemandThroughput]


class RestoreSummary(TypedDict, total=False):
    """Details of a restore operation a table originated from."""

    SourceBackupArn: Optional[BackupArn]
    SourceTableArn: Optional[TableArn]
    RestoreDateTime: Date
    RestoreInProgress: RestoreInProgress


NonNegativeLongObject = int


class ProvisionedThroughputDescription(TypedDict, total=False):
    """Current provisioned throughput plus recent increase/decrease history."""

    LastIncreaseDateTime: Optional[Date]
    LastDecreaseDateTime: Optional[Date]
    NumberOfDecreasesToday: Optional[PositiveLongObject]
    ReadCapacityUnits: Optional[NonNegativeLongObject]
    WriteCapacityUnits: Optional[NonNegativeLongObject]


class GlobalSecondaryIndexDescription(TypedDict, total=False):
    IndexName: Optional[IndexName]
    KeySchema: Optional[KeySchema]
    Projection: Optional[Projection]
    IndexStatus: Optional[IndexStatus]
    Backfilling: Optional[Backfilling]
    ProvisionedThroughput: Optional[ProvisionedThroughputDescription]
    IndexSizeBytes: Optional[LongObject]
    ItemCount: Optional[LongObject]
    IndexArn: Optional[String]
    OnDemandThroughput: Optional[OnDemandThroughput]


GlobalSecondaryIndexDescriptionList = List[GlobalSecondaryIndexDescription]


class LocalSecondaryIndexDescription(TypedDict, total=False):
    IndexName: Optional[IndexName]
    KeySchema: Optional[KeySchema]
    Projection: Optional[Projection]
    IndexSizeBytes: Optional[LongObject]
    ItemCount: Optional[LongObject]
    IndexArn: Optional[String]


LocalSecondaryIndexDescriptionList = List[LocalSecondaryIndexDescription]


class TableDescription(TypedDict, total=False):
    """Full description of a table, as returned by Create/Delete/DescribeTable."""

    AttributeDefinitions: Optional[AttributeDefinitions]
    TableName: Optional[TableName]
    KeySchema: Optional[KeySchema]
    TableStatus: Optional[TableStatus]
    CreationDateTime: Optional[Date]
    ProvisionedThroughput: Optional[ProvisionedThroughputDescription]
    TableSizeBytes: Optional[LongObject]
    ItemCount: Optional[LongObject]
    TableArn: Optional[String]
    TableId: Optional[TableId]
    BillingModeSummary: Optional[BillingModeSummary]
    LocalSecondaryIndexes: Optional[LocalSecondaryIndexDescriptionList]
    GlobalSecondaryIndexes: Optional[GlobalSecondaryIndexDescriptionList]
    StreamSpecification: Optional[StreamSpecification]
    LatestStreamLabel: Optional[String]
    LatestStreamArn: Optional[StreamArn]
    GlobalTableVersion: Optional[String]
    Replicas: Optional[ReplicaDescriptionList]
    RestoreSummary: Optional[RestoreSummary]
    SSEDescription: Optional[SSEDescription]
    ArchivalSummary: Optional[ArchivalSummary]
    TableClassSummary: Optional[TableClassSummary]
    DeletionProtectionEnabled: Optional[DeletionProtectionEnabled]
    OnDemandThroughput: Optional[OnDemandThroughput]


class CreateTableOutput(TypedDict, total=False):
    TableDescription: Optional[TableDescription]
# ---------------------------------------------------------------------------
# CSV import options, Delete* operations and Describe* (backup / continuous
# backups / contributor insights / endpoints) shapes.
# ---------------------------------------------------------------------------

CsvHeaderList = List[CsvHeader]


class CsvOptions(TypedDict, total=False):
    """CSV parsing options for table import."""

    Delimiter: Optional[CsvDelimiter]
    HeaderList: Optional[CsvHeaderList]


class Delete(TypedDict, total=False):
    """A delete action inside TransactWriteItems."""

    Key: Key
    TableName: TableArn
    ConditionExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class DeleteBackupInput(ServiceRequest):
    BackupArn: BackupArn


class DeleteBackupOutput(TypedDict, total=False):
    BackupDescription: Optional[BackupDescription]


class DeleteGlobalSecondaryIndexAction(TypedDict, total=False):
    IndexName: IndexName


class ExpectedAttributeValue(TypedDict, total=False):
    """Legacy conditional-write expectation (pre-expression API)."""

    Value: Optional[AttributeValue]
    Exists: Optional[BooleanObject]
    ComparisonOperator: Optional[ComparisonOperator]
    AttributeValueList: Optional[AttributeValueList]


ExpectedAttributeMap = Dict[AttributeName, ExpectedAttributeValue]


class DeleteItemInput(ServiceRequest):
    """Request shape for DeleteItem (supports both legacy and expression conditions)."""

    TableName: TableArn
    Key: Key
    Expected: Optional[ExpectedAttributeMap]
    ConditionalOperator: Optional[ConditionalOperator]
    ReturnValues: Optional[ReturnValue]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    ReturnItemCollectionMetrics: Optional[ReturnItemCollectionMetrics]
    ConditionExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class DeleteItemOutput(TypedDict, total=False):
    Attributes: Optional[AttributeMap]
    ConsumedCapacity: Optional[ConsumedCapacity]
    ItemCollectionMetrics: Optional[ItemCollectionMetrics]


class DeleteReplicaAction(TypedDict, total=False):
    RegionName: RegionName


class DeleteReplicationGroupMemberAction(TypedDict, total=False):
    RegionName: RegionName


class DeleteResourcePolicyInput(ServiceRequest):
    ResourceArn: ResourceArnString
    ExpectedRevisionId: Optional[PolicyRevisionId]


class DeleteResourcePolicyOutput(TypedDict, total=False):
    RevisionId: Optional[PolicyRevisionId]


class DeleteTableInput(ServiceRequest):
    TableName: TableArn


class DeleteTableOutput(TypedDict, total=False):
    TableDescription: Optional[TableDescription]


class DescribeBackupInput(ServiceRequest):
    BackupArn: BackupArn


class DescribeBackupOutput(TypedDict, total=False):
    BackupDescription: Optional[BackupDescription]


class DescribeContinuousBackupsInput(ServiceRequest):
    TableName: TableArn


class DescribeContinuousBackupsOutput(TypedDict, total=False):
    ContinuousBackupsDescription: Optional[ContinuousBackupsDescription]


class DescribeContributorInsightsInput(ServiceRequest):
    TableName: TableArn
    IndexName: Optional[IndexName]


class FailureException(TypedDict, total=False):
    ExceptionName: Optional[ExceptionName]
    ExceptionDescription: Optional[ExceptionDescription]


# NOTE(review): this module-level alias shares its name with the
# ``LastUpdateDateTime`` *field* used in description shapes; the alias is what
# the annotation below refers to.
LastUpdateDateTime = datetime


class DescribeContributorInsightsOutput(TypedDict, total=False):
    TableName: Optional[TableName]
    IndexName: Optional[IndexName]
    ContributorInsightsRuleList: Optional[ContributorInsightsRuleList]
    ContributorInsightsStatus: Optional[ContributorInsightsStatus]
    LastUpdateDateTime: Optional[LastUpdateDateTime]
    FailureException: Optional[FailureException]


class DescribeEndpointsRequest(ServiceRequest):
    # DescribeEndpoints takes no parameters.
    pass
# ---------------------------------------------------------------------------
# Endpoint discovery, export description and DescribeGlobalTable shapes.
# ---------------------------------------------------------------------------

Long = int


class Endpoint(TypedDict, total=False):
    Address: String
    CachePeriodInMinutes: Long


Endpoints = List[Endpoint]


class DescribeEndpointsResponse(TypedDict, total=False):
    Endpoints: Endpoints


class DescribeExportInput(ServiceRequest):
    ExportArn: ExportArn


ExportToTime = datetime
ExportFromTime = datetime


class IncrementalExportSpecification(TypedDict, total=False):
    """Time window and view type for an incremental table export."""

    ExportFromTime: Optional[ExportFromTime]
    ExportToTime: Optional[ExportToTime]
    ExportViewType: Optional[ExportViewType]


ExportTime = datetime
ExportEndTime = datetime
ExportStartTime = datetime


class ExportDescription(TypedDict, total=False):
    """Full state of a table export job (status, S3 target, billing, failures)."""

    ExportArn: Optional[ExportArn]
    ExportStatus: Optional[ExportStatus]
    StartTime: Optional[ExportStartTime]
    EndTime: Optional[ExportEndTime]
    ExportManifest: Optional[ExportManifest]
    TableArn: Optional[TableArn]
    TableId: Optional[TableId]
    ExportTime: Optional[ExportTime]
    ClientToken: Optional[ClientToken]
    S3Bucket: Optional[S3Bucket]
    S3BucketOwner: Optional[S3BucketOwner]
    S3Prefix: Optional[S3Prefix]
    S3SseAlgorithm: Optional[S3SseAlgorithm]
    S3SseKmsKeyId: Optional[S3SseKmsKeyId]
    FailureCode: Optional[FailureCode]
    FailureMessage: Optional[FailureMessage]
    ExportFormat: Optional[ExportFormat]
    BilledSizeBytes: Optional[BilledSizeBytes]
    ItemCount: Optional[ItemCount]
    ExportType: Optional[ExportType]
    IncrementalExportSpecification: Optional[IncrementalExportSpecification]


class DescribeExportOutput(TypedDict, total=False):
    ExportDescription: Optional[ExportDescription]


class DescribeGlobalTableInput(ServiceRequest):
    GlobalTableName: TableName


class DescribeGlobalTableOutput(TypedDict, total=False):
    GlobalTableDescription: Optional[GlobalTableDescription]
# ---------------------------------------------------------------------------
# Global-table settings descriptions and the table-import description tree.
# ---------------------------------------------------------------------------


class DescribeGlobalTableSettingsInput(ServiceRequest):
    GlobalTableName: TableName


class ReplicaGlobalSecondaryIndexSettingsDescription(TypedDict, total=False):
    IndexName: IndexName
    IndexStatus: Optional[IndexStatus]
    ProvisionedReadCapacityUnits: Optional[PositiveLongObject]
    ProvisionedReadCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]
    ProvisionedWriteCapacityUnits: Optional[PositiveLongObject]
    ProvisionedWriteCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]


ReplicaGlobalSecondaryIndexSettingsDescriptionList = List[
    ReplicaGlobalSecondaryIndexSettingsDescription
]


class ReplicaSettingsDescription(TypedDict, total=False):
    """Capacity / auto-scaling / table-class settings of one replica region."""

    RegionName: RegionName
    ReplicaStatus: Optional[ReplicaStatus]
    ReplicaBillingModeSummary: Optional[BillingModeSummary]
    ReplicaProvisionedReadCapacityUnits: Optional[NonNegativeLongObject]
    ReplicaProvisionedReadCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]
    ReplicaProvisionedWriteCapacityUnits: Optional[NonNegativeLongObject]
    ReplicaProvisionedWriteCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]
    ReplicaGlobalSecondaryIndexSettings: Optional[
        ReplicaGlobalSecondaryIndexSettingsDescriptionList
    ]
    ReplicaTableClassSummary: Optional[TableClassSummary]


ReplicaSettingsDescriptionList = List[ReplicaSettingsDescription]


class DescribeGlobalTableSettingsOutput(TypedDict, total=False):
    GlobalTableName: Optional[TableName]
    ReplicaSettings: Optional[ReplicaSettingsDescriptionList]


class DescribeImportInput(ServiceRequest):
    ImportArn: ImportArn


ImportedItemCount = int
ProcessedItemCount = int
ImportEndTime = datetime
ImportStartTime = datetime


class TableCreationParameters(TypedDict, total=False):
    """Parameters for the table an import job creates."""

    TableName: TableName
    AttributeDefinitions: AttributeDefinitions
    KeySchema: KeySchema
    BillingMode: Optional[BillingMode]
    ProvisionedThroughput: Optional[ProvisionedThroughput]
    OnDemandThroughput: Optional[OnDemandThroughput]
    SSESpecification: Optional[SSESpecification]
    GlobalSecondaryIndexes: Optional[GlobalSecondaryIndexList]


class InputFormatOptions(TypedDict, total=False):
    Csv: Optional[CsvOptions]


ErrorCount = int


class S3BucketSource(TypedDict, total=False):
    """S3 location an import reads from."""

    S3BucketOwner: Optional[S3BucketOwner]
    S3Bucket: S3Bucket
    S3KeyPrefix: Optional[S3Prefix]


class ImportTableDescription(TypedDict, total=False):
    """Full state of a table import job (source, format, progress, failures)."""

    ImportArn: Optional[ImportArn]
    ImportStatus: Optional[ImportStatus]
    TableArn: Optional[TableArn]
    TableId: Optional[TableId]
    ClientToken: Optional[ClientToken]
    S3BucketSource: Optional[S3BucketSource]
    ErrorCount: Optional[ErrorCount]
    CloudWatchLogGroupArn: Optional[CloudWatchLogGroupArn]
    InputFormat: Optional[InputFormat]
    InputFormatOptions: Optional[InputFormatOptions]
    InputCompressionType: Optional[InputCompressionType]
    TableCreationParameters: Optional[TableCreationParameters]
    StartTime: Optional[ImportStartTime]
    EndTime: Optional[ImportEndTime]
    ProcessedSizeBytes: Optional[LongObject]
    ProcessedItemCount: Optional[ProcessedItemCount]
    ImportedItemCount: Optional[ImportedItemCount]
    FailureCode: Optional[FailureCode]
    FailureMessage: Optional[FailureMessage]


class DescribeImportOutput(TypedDict, total=False):
    ImportTableDescription: ImportTableDescription
# ---------------------------------------------------------------------------
# Kinesis streaming destinations, account limits, DescribeTable, replica
# auto-scaling descriptions and time-to-live shapes.
# ---------------------------------------------------------------------------


class DescribeKinesisStreamingDestinationInput(ServiceRequest):
    TableName: TableArn


class KinesisDataStreamDestination(TypedDict, total=False):
    """One Kinesis stream a table replicates changes to, with its status."""

    StreamArn: Optional[StreamArn]
    DestinationStatus: Optional[DestinationStatus]
    DestinationStatusDescription: Optional[String]
    ApproximateCreationDateTimePrecision: Optional[ApproximateCreationDateTimePrecision]


KinesisDataStreamDestinations = List[KinesisDataStreamDestination]


class DescribeKinesisStreamingDestinationOutput(TypedDict, total=False):
    TableName: Optional[TableName]
    KinesisDataStreamDestinations: Optional[KinesisDataStreamDestinations]


class DescribeLimitsInput(ServiceRequest):
    # DescribeLimits takes no parameters.
    pass


class DescribeLimitsOutput(TypedDict, total=False):
    """Account- and table-level provisioned-capacity quotas."""

    AccountMaxReadCapacityUnits: Optional[PositiveLongObject]
    AccountMaxWriteCapacityUnits: Optional[PositiveLongObject]
    TableMaxReadCapacityUnits: Optional[PositiveLongObject]
    TableMaxWriteCapacityUnits: Optional[PositiveLongObject]


class DescribeTableInput(ServiceRequest):
    TableName: TableArn


class DescribeTableOutput(TypedDict, total=False):
    Table: Optional[TableDescription]


class DescribeTableReplicaAutoScalingInput(ServiceRequest):
    TableName: TableArn


class ReplicaGlobalSecondaryIndexAutoScalingDescription(TypedDict, total=False):
    IndexName: Optional[IndexName]
    IndexStatus: Optional[IndexStatus]
    ProvisionedReadCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]
    ProvisionedWriteCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]


ReplicaGlobalSecondaryIndexAutoScalingDescriptionList = List[
    ReplicaGlobalSecondaryIndexAutoScalingDescription
]


class ReplicaAutoScalingDescription(TypedDict, total=False):
    RegionName: Optional[RegionName]
    GlobalSecondaryIndexes: Optional[ReplicaGlobalSecondaryIndexAutoScalingDescriptionList]
    ReplicaProvisionedReadCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]
    ReplicaProvisionedWriteCapacityAutoScalingSettings: Optional[AutoScalingSettingsDescription]
    ReplicaStatus: Optional[ReplicaStatus]


ReplicaAutoScalingDescriptionList = List[ReplicaAutoScalingDescription]


class TableAutoScalingDescription(TypedDict, total=False):
    TableName: Optional[TableName]
    TableStatus: Optional[TableStatus]
    Replicas: Optional[ReplicaAutoScalingDescriptionList]


class DescribeTableReplicaAutoScalingOutput(TypedDict, total=False):
    TableAutoScalingDescription: Optional[TableAutoScalingDescription]


class DescribeTimeToLiveInput(ServiceRequest):
    TableName: TableArn


class DescribeTimeToLiveOutput(TypedDict, total=False):
    TimeToLiveDescription: Optional[TimeToLiveDescription]


class EnableKinesisStreamingConfiguration(TypedDict, total=False):
    ApproximateCreationDateTimePrecision: Optional[ApproximateCreationDateTimePrecision]
# ---------------------------------------------------------------------------
# PartiQL (ExecuteStatement / ExecuteTransaction), export-to-point-in-time,
# Get/GetItem and resource-policy read shapes.
# ---------------------------------------------------------------------------


class ExecuteStatementInput(ServiceRequest):
    """Request shape for a single PartiQL statement."""

    Statement: PartiQLStatement
    Parameters: Optional[PreparedStatementParameters]
    ConsistentRead: Optional[ConsistentRead]
    NextToken: Optional[PartiQLNextToken]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    Limit: Optional[PositiveIntegerObject]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class ExecuteStatementOutput(TypedDict, total=False):
    Items: Optional[ItemList]
    NextToken: Optional[PartiQLNextToken]
    ConsumedCapacity: Optional[ConsumedCapacity]
    LastEvaluatedKey: Optional[Key]


class ParameterizedStatement(TypedDict, total=False):
    Statement: PartiQLStatement
    Parameters: Optional[PreparedStatementParameters]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


ParameterizedStatements = List[ParameterizedStatement]


class ExecuteTransactionInput(ServiceRequest):
    """Request shape for a transactional batch of PartiQL statements."""

    TransactStatements: ParameterizedStatements
    ClientRequestToken: Optional[ClientRequestToken]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]


class ItemResponse(TypedDict, total=False):
    Item: Optional[AttributeMap]


ItemResponseList = List[ItemResponse]


class ExecuteTransactionOutput(TypedDict, total=False):
    Responses: Optional[ItemResponseList]
    ConsumedCapacity: Optional[ConsumedCapacityMultiple]


class ExportSummary(TypedDict, total=False):
    ExportArn: Optional[ExportArn]
    ExportStatus: Optional[ExportStatus]
    ExportType: Optional[ExportType]


ExportSummaries = List[ExportSummary]


class ExportTableToPointInTimeInput(ServiceRequest):
    """Request shape for exporting a table snapshot (or increment) to S3."""

    TableArn: TableArn
    ExportTime: Optional[ExportTime]
    ClientToken: Optional[ClientToken]
    S3Bucket: S3Bucket
    S3BucketOwner: Optional[S3BucketOwner]
    S3Prefix: Optional[S3Prefix]
    S3SseAlgorithm: Optional[S3SseAlgorithm]
    S3SseKmsKeyId: Optional[S3SseKmsKeyId]
    ExportFormat: Optional[ExportFormat]
    ExportType: Optional[ExportType]
    IncrementalExportSpecification: Optional[IncrementalExportSpecification]


class ExportTableToPointInTimeOutput(TypedDict, total=False):
    ExportDescription: Optional[ExportDescription]


FilterConditionMap = Dict[AttributeName, Condition]


class Get(TypedDict, total=False):
    """A read action inside TransactGetItems."""

    Key: Key
    TableName: TableArn
    ProjectionExpression: Optional[ProjectionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]


class GetItemInput(ServiceRequest):
    """Request shape for GetItem."""

    TableName: TableArn
    Key: Key
    AttributesToGet: Optional[AttributeNameList]
    ConsistentRead: Optional[ConsistentRead]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    ProjectionExpression: Optional[ProjectionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]


class GetItemOutput(TypedDict, total=False):
    Item: Optional[AttributeMap]
    ConsumedCapacity: Optional[ConsumedCapacity]


class GetResourcePolicyInput(ServiceRequest):
    ResourceArn: ResourceArnString


class GetResourcePolicyOutput(TypedDict, total=False):
    Policy: Optional[ResourcePolicy]
    RevisionId: Optional[PolicyRevisionId]
# ---------------------------------------------------------------------------
# GSI / global-table update actions, import requests, List* pagination
# shapes and Kinesis streaming destination enable/disable.
# ---------------------------------------------------------------------------


class GlobalSecondaryIndexAutoScalingUpdate(TypedDict, total=False):
    IndexName: Optional[IndexName]
    ProvisionedWriteCapacityAutoScalingUpdate: Optional[AutoScalingSettingsUpdate]


GlobalSecondaryIndexAutoScalingUpdateList = List[GlobalSecondaryIndexAutoScalingUpdate]


class UpdateGlobalSecondaryIndexAction(TypedDict, total=False):
    IndexName: IndexName
    ProvisionedThroughput: Optional[ProvisionedThroughput]
    OnDemandThroughput: Optional[OnDemandThroughput]


class GlobalSecondaryIndexUpdate(TypedDict, total=False):
    """Exactly one of Update/Create/Delete is expected per entry."""

    Update: Optional[UpdateGlobalSecondaryIndexAction]
    Create: Optional[CreateGlobalSecondaryIndexAction]
    Delete: Optional[DeleteGlobalSecondaryIndexAction]


GlobalSecondaryIndexUpdateList = List[GlobalSecondaryIndexUpdate]


class GlobalTable(TypedDict, total=False):
    GlobalTableName: Optional[TableName]
    ReplicationGroup: Optional[ReplicaList]


class GlobalTableGlobalSecondaryIndexSettingsUpdate(TypedDict, total=False):
    IndexName: IndexName
    ProvisionedWriteCapacityUnits: Optional[PositiveLongObject]
    ProvisionedWriteCapacityAutoScalingSettingsUpdate: Optional[AutoScalingSettingsUpdate]


GlobalTableGlobalSecondaryIndexSettingsUpdateList = List[
    GlobalTableGlobalSecondaryIndexSettingsUpdate
]
GlobalTableList = List[GlobalTable]


class ImportSummary(TypedDict, total=False):
    ImportArn: Optional[ImportArn]
    ImportStatus: Optional[ImportStatus]
    TableArn: Optional[TableArn]
    S3BucketSource: Optional[S3BucketSource]
    CloudWatchLogGroupArn: Optional[CloudWatchLogGroupArn]
    InputFormat: Optional[InputFormat]
    StartTime: Optional[ImportStartTime]
    EndTime: Optional[ImportEndTime]


ImportSummaryList = List[ImportSummary]


class ImportTableInput(ServiceRequest):
    """Request shape for ImportTable (bulk load from S3 into a new table)."""

    ClientToken: Optional[ClientToken]
    S3BucketSource: S3BucketSource
    InputFormat: InputFormat
    InputFormatOptions: Optional[InputFormatOptions]
    InputCompressionType: Optional[InputCompressionType]
    TableCreationParameters: TableCreationParameters


class ImportTableOutput(TypedDict, total=False):
    ImportTableDescription: ImportTableDescription


KeyConditions = Dict[AttributeName, Condition]


class KinesisStreamingDestinationInput(ServiceRequest):
    """Shared request shape for Enable/DisableKinesisStreamingDestination."""

    TableName: TableArn
    StreamArn: StreamArn
    EnableKinesisStreamingConfiguration: Optional[EnableKinesisStreamingConfiguration]


class KinesisStreamingDestinationOutput(TypedDict, total=False):
    TableName: Optional[TableName]
    StreamArn: Optional[StreamArn]
    DestinationStatus: Optional[DestinationStatus]
    EnableKinesisStreamingConfiguration: Optional[EnableKinesisStreamingConfiguration]


TimeRangeUpperBound = datetime
TimeRangeLowerBound = datetime


class ListBackupsInput(ServiceRequest):
    TableName: Optional[TableArn]
    Limit: Optional[BackupsInputLimit]
    TimeRangeLowerBound: Optional[TimeRangeLowerBound]
    TimeRangeUpperBound: Optional[TimeRangeUpperBound]
    ExclusiveStartBackupArn: Optional[BackupArn]
    BackupType: Optional[BackupTypeFilter]


class ListBackupsOutput(TypedDict, total=False):
    BackupSummaries: Optional[BackupSummaries]
    LastEvaluatedBackupArn: Optional[BackupArn]


class ListContributorInsightsInput(ServiceRequest):
    TableName: Optional[TableArn]
    NextToken: Optional[NextTokenString]
    MaxResults: Optional[ListContributorInsightsLimit]


class ListContributorInsightsOutput(TypedDict, total=False):
    ContributorInsightsSummaries: Optional[ContributorInsightsSummaries]
    NextToken: Optional[NextTokenString]


class ListExportsInput(ServiceRequest):
    TableArn: Optional[TableArn]
    MaxResults: Optional[ListExportsMaxLimit]
    NextToken: Optional[ExportNextToken]


class ListExportsOutput(TypedDict, total=False):
    ExportSummaries: Optional[ExportSummaries]
    NextToken: Optional[ExportNextToken]


class ListGlobalTablesInput(ServiceRequest):
    ExclusiveStartGlobalTableName: Optional[TableName]
    Limit: Optional[PositiveIntegerObject]
    RegionName: Optional[RegionName]


class ListGlobalTablesOutput(TypedDict, total=False):
    GlobalTables: Optional[GlobalTableList]
    LastEvaluatedGlobalTableName: Optional[TableName]


class ListImportsInput(ServiceRequest):
    TableArn: Optional[TableArn]
    PageSize: Optional[ListImportsMaxLimit]
    NextToken: Optional[ImportNextToken]


class ListImportsOutput(TypedDict, total=False):
    ImportSummaryList: Optional[ImportSummaryList]
    NextToken: Optional[ImportNextToken]


class ListTablesInput(ServiceRequest):
    ExclusiveStartTableName: Optional[TableName]
    Limit: Optional[ListTablesInputLimit]


TableNameList = List[TableName]


class ListTablesOutput(TypedDict, total=False):
    TableNames: Optional[TableNameList]
    LastEvaluatedTableName: Optional[TableName]


class ListTagsOfResourceInput(ServiceRequest):
    ResourceArn: ResourceArnString
    NextToken: Optional[NextTokenString]


class ListTagsOfResourceOutput(TypedDict, total=False):
    Tags: Optional[TagList]
    NextToken: Optional[NextTokenString]
# ---------------------------------------------------------------------------
# Point-in-time recovery, Put/PutItem, PutResourcePolicy and Query shapes.
# ---------------------------------------------------------------------------


class PointInTimeRecoverySpecification(TypedDict, total=False):
    PointInTimeRecoveryEnabled: BooleanObject


class Put(TypedDict, total=False):
    """A put action inside TransactWriteItems."""

    Item: PutItemInputAttributeMap
    TableName: TableArn
    ConditionExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class PutItemInput(ServiceRequest):
    """Request shape for PutItem (supports both legacy and expression conditions)."""

    TableName: TableArn
    Item: PutItemInputAttributeMap
    Expected: Optional[ExpectedAttributeMap]
    ReturnValues: Optional[ReturnValue]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    ReturnItemCollectionMetrics: Optional[ReturnItemCollectionMetrics]
    ConditionalOperator: Optional[ConditionalOperator]
    ConditionExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class PutItemOutput(TypedDict, total=False):
    Attributes: Optional[AttributeMap]
    ConsumedCapacity: Optional[ConsumedCapacity]
    ItemCollectionMetrics: Optional[ItemCollectionMetrics]


class PutResourcePolicyInput(ServiceRequest):
    ResourceArn: ResourceArnString
    Policy: ResourcePolicy
    ExpectedRevisionId: Optional[PolicyRevisionId]
    ConfirmRemoveSelfResourceAccess: Optional[ConfirmRemoveSelfResourceAccess]


class PutResourcePolicyOutput(TypedDict, total=False):
    RevisionId: Optional[PolicyRevisionId]


class QueryInput(ServiceRequest):
    """Request shape for Query (legacy KeyConditions and expression API)."""

    TableName: TableArn
    IndexName: Optional[IndexName]
    Select: Optional[Select]
    AttributesToGet: Optional[AttributeNameList]
    Limit: Optional[PositiveIntegerObject]
    ConsistentRead: Optional[ConsistentRead]
    KeyConditions: Optional[KeyConditions]
    QueryFilter: Optional[FilterConditionMap]
    ConditionalOperator: Optional[ConditionalOperator]
    ScanIndexForward: Optional[BooleanObject]
    ExclusiveStartKey: Optional[Key]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    ProjectionExpression: Optional[ProjectionExpression]
    FilterExpression: Optional[ConditionExpression]
    KeyConditionExpression: Optional[KeyExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]


class QueryOutput(TypedDict, total=False):
    Items: Optional[ItemList]
    Count: Optional[Integer]
    ScannedCount: Optional[Integer]
    LastEvaluatedKey: Optional[Key]
    ConsumedCapacity: Optional[ConsumedCapacity]
# ---------------------------------------------------------------------------
# Replica / replication-group update actions and table restore shapes.
# ---------------------------------------------------------------------------


class ReplicaGlobalSecondaryIndexAutoScalingUpdate(TypedDict, total=False):
    IndexName: Optional[IndexName]
    ProvisionedReadCapacityAutoScalingUpdate: Optional[AutoScalingSettingsUpdate]


ReplicaGlobalSecondaryIndexAutoScalingUpdateList = List[
    ReplicaGlobalSecondaryIndexAutoScalingUpdate
]


class ReplicaAutoScalingUpdate(TypedDict, total=False):
    RegionName: RegionName
    ReplicaGlobalSecondaryIndexUpdates: Optional[ReplicaGlobalSecondaryIndexAutoScalingUpdateList]
    ReplicaProvisionedReadCapacityAutoScalingUpdate: Optional[AutoScalingSettingsUpdate]


ReplicaAutoScalingUpdateList = List[ReplicaAutoScalingUpdate]


class ReplicaGlobalSecondaryIndexSettingsUpdate(TypedDict, total=False):
    IndexName: IndexName
    ProvisionedReadCapacityUnits: Optional[PositiveLongObject]
    ProvisionedReadCapacityAutoScalingSettingsUpdate: Optional[AutoScalingSettingsUpdate]


ReplicaGlobalSecondaryIndexSettingsUpdateList = List[ReplicaGlobalSecondaryIndexSettingsUpdate]


class ReplicaSettingsUpdate(TypedDict, total=False):
    RegionName: RegionName
    ReplicaProvisionedReadCapacityUnits: Optional[PositiveLongObject]
    ReplicaProvisionedReadCapacityAutoScalingSettingsUpdate: Optional[AutoScalingSettingsUpdate]
    ReplicaGlobalSecondaryIndexSettingsUpdate: Optional[
        ReplicaGlobalSecondaryIndexSettingsUpdateList
    ]
    ReplicaTableClass: Optional[TableClass]


ReplicaSettingsUpdateList = List[ReplicaSettingsUpdate]


class ReplicaUpdate(TypedDict, total=False):
    """Legacy (2017.11.29) replica add/remove action; one member per entry."""

    Create: Optional[CreateReplicaAction]
    Delete: Optional[DeleteReplicaAction]


ReplicaUpdateList = List[ReplicaUpdate]


class UpdateReplicationGroupMemberAction(TypedDict, total=False):
    RegionName: RegionName
    KMSMasterKeyId: Optional[KMSMasterKeyId]
    ProvisionedThroughputOverride: Optional[ProvisionedThroughputOverride]
    OnDemandThroughputOverride: Optional[OnDemandThroughputOverride]
    GlobalSecondaryIndexes: Optional[ReplicaGlobalSecondaryIndexList]
    TableClassOverride: Optional[TableClass]


class ReplicationGroupUpdate(TypedDict, total=False):
    """2019.11.21 replica action; exactly one of Create/Update/Delete is expected."""

    Create: Optional[CreateReplicationGroupMemberAction]
    Update: Optional[UpdateReplicationGroupMemberAction]
    Delete: Optional[DeleteReplicationGroupMemberAction]


ReplicationGroupUpdateList = List[ReplicationGroupUpdate]


class RestoreTableFromBackupInput(ServiceRequest):
    """Request shape for restoring a table from an on-demand backup."""

    TargetTableName: TableName
    BackupArn: BackupArn
    BillingModeOverride: Optional[BillingMode]
    GlobalSecondaryIndexOverride: Optional[GlobalSecondaryIndexList]
    LocalSecondaryIndexOverride: Optional[LocalSecondaryIndexList]
    ProvisionedThroughputOverride: Optional[ProvisionedThroughput]
    OnDemandThroughputOverride: Optional[OnDemandThroughput]
    SSESpecificationOverride: Optional[SSESpecification]


class RestoreTableFromBackupOutput(TypedDict, total=False):
    TableDescription: Optional[TableDescription]


class RestoreTableToPointInTimeInput(ServiceRequest):
    """Request shape for point-in-time restore (source by ARN or name)."""

    SourceTableArn: Optional[TableArn]
    SourceTableName: Optional[TableName]
    TargetTableName: TableName
    UseLatestRestorableTime: Optional[BooleanObject]
    RestoreDateTime: Optional[Date]
    BillingModeOverride: Optional[BillingMode]
    GlobalSecondaryIndexOverride: Optional[GlobalSecondaryIndexList]
    LocalSecondaryIndexOverride: Optional[LocalSecondaryIndexList]
    ProvisionedThroughputOverride: Optional[ProvisionedThroughput]
    OnDemandThroughputOverride: Optional[OnDemandThroughput]
    SSESpecificationOverride: Optional[SSESpecification]


class RestoreTableToPointInTimeOutput(TypedDict, total=False):
    TableDescription: Optional[TableDescription]
# ---------------------------------------------------------------------------
# Scan, tagging, time-to-live specification and transactional read/write
# shapes.
# ---------------------------------------------------------------------------


class ScanInput(ServiceRequest):
    """Request shape for Scan (including parallel-scan segments)."""

    TableName: TableArn
    IndexName: Optional[IndexName]
    AttributesToGet: Optional[AttributeNameList]
    Limit: Optional[PositiveIntegerObject]
    Select: Optional[Select]
    ScanFilter: Optional[FilterConditionMap]
    ConditionalOperator: Optional[ConditionalOperator]
    ExclusiveStartKey: Optional[Key]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    TotalSegments: Optional[ScanTotalSegments]
    Segment: Optional[ScanSegment]
    ProjectionExpression: Optional[ProjectionExpression]
    FilterExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ConsistentRead: Optional[ConsistentRead]


class ScanOutput(TypedDict, total=False):
    Items: Optional[ItemList]
    Count: Optional[Integer]
    ScannedCount: Optional[Integer]
    LastEvaluatedKey: Optional[Key]
    ConsumedCapacity: Optional[ConsumedCapacity]


TagKeyList = List[TagKeyString]


class TagResourceInput(ServiceRequest):
    ResourceArn: ResourceArnString
    Tags: TagList


class TimeToLiveSpecification(TypedDict, total=False):
    """Enables/disables TTL and names the attribute holding the expiry epoch."""

    Enabled: TimeToLiveEnabled
    AttributeName: TimeToLiveAttributeName


class TransactGetItem(TypedDict, total=False):
    Get: Get


TransactGetItemList = List[TransactGetItem]


class TransactGetItemsInput(ServiceRequest):
    TransactItems: TransactGetItemList
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]


class TransactGetItemsOutput(TypedDict, total=False):
    ConsumedCapacity: Optional[ConsumedCapacityMultiple]
    Responses: Optional[ItemResponseList]


class Update(TypedDict, total=False):
    """An update action inside TransactWriteItems."""

    Key: Key
    UpdateExpression: UpdateExpression
    TableName: TableArn
    ConditionExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class TransactWriteItem(TypedDict, total=False):
    """One transactional write; exactly one member is expected per entry."""

    ConditionCheck: Optional[ConditionCheck]
    Put: Optional[Put]
    Delete: Optional[Delete]
    Update: Optional[Update]


TransactWriteItemList = List[TransactWriteItem]


class TransactWriteItemsInput(ServiceRequest):
    TransactItems: TransactWriteItemList
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    ReturnItemCollectionMetrics: Optional[ReturnItemCollectionMetrics]
    ClientRequestToken: Optional[ClientRequestToken]


class TransactWriteItemsOutput(TypedDict, total=False):
    ConsumedCapacity: Optional[ConsumedCapacityMultiple]
    ItemCollectionMetrics: Optional[ItemCollectionMetricsPerTable]


class UntagResourceInput(ServiceRequest):
    ResourceArn: ResourceArnString
    TagKeys: TagKeyList
class UpdateContinuousBackupsInput(ServiceRequest):
TableName: TableArn
PointInTimeRecoverySpecification: PointInTimeRecoverySpecification
class UpdateContinuousBackupsOutput(TypedDict, total=False):
ContinuousBackupsDescription: Optional[ContinuousBackupsDescription]
class UpdateContributorInsightsInput(ServiceRequest):
TableName: TableArn
IndexName: Optional[IndexName]
ContributorInsightsAction: ContributorInsightsAction
class UpdateContributorInsightsOutput(TypedDict, total=False):
TableName: Optional[TableName]
IndexName: Optional[IndexName]
ContributorInsightsStatus: Optional[ContributorInsightsStatus]
class UpdateGlobalTableInput(ServiceRequest):
GlobalTableName: TableName
ReplicaUpdates: ReplicaUpdateList
class UpdateGlobalTableOutput(TypedDict, total=False):
GlobalTableDescription: Optional[GlobalTableDescription]
class UpdateGlobalTableSettingsInput(ServiceRequest):
GlobalTableName: TableName
GlobalTableBillingMode: Optional[BillingMode]
GlobalTableProvisionedWriteCapacityUnits: Optional[PositiveLongObject]
GlobalTableProvisionedWriteCapacityAutoScalingSettingsUpdate: Optional[
AutoScalingSettingsUpdate
]
GlobalTableGlobalSecondaryIndexSettingsUpdate: Optional[
GlobalTableGlobalSecondaryIndexSettingsUpdateList
]
ReplicaSettingsUpdate: Optional[ReplicaSettingsUpdateList]
class UpdateGlobalTableSettingsOutput(TypedDict, total=False):
GlobalTableName: Optional[TableName]
ReplicaSettings: Optional[ReplicaSettingsDescriptionList]
class UpdateItemInput(ServiceRequest):
    """Request shape for UpdateItem.

    Carries both the legacy members (AttributeUpdates, Expected,
    ConditionalOperator) and the expression-based members
    (UpdateExpression, ConditionExpression, ExpressionAttribute*).
    """

    TableName: TableArn
    Key: Key
    AttributeUpdates: Optional[AttributeUpdates]
    Expected: Optional[ExpectedAttributeMap]
    ConditionalOperator: Optional[ConditionalOperator]
    ReturnValues: Optional[ReturnValue]
    ReturnConsumedCapacity: Optional[ReturnConsumedCapacity]
    ReturnItemCollectionMetrics: Optional[ReturnItemCollectionMetrics]
    UpdateExpression: Optional[UpdateExpression]
    ConditionExpression: Optional[ConditionExpression]
    ExpressionAttributeNames: Optional[ExpressionAttributeNameMap]
    ExpressionAttributeValues: Optional[ExpressionAttributeValueMap]
    ReturnValuesOnConditionCheckFailure: Optional[ReturnValuesOnConditionCheckFailure]


class UpdateItemOutput(TypedDict, total=False):
    """Response shape for UpdateItem."""

    Attributes: Optional[AttributeMap]
    ConsumedCapacity: Optional[ConsumedCapacity]
    ItemCollectionMetrics: Optional[ItemCollectionMetrics]


class UpdateKinesisStreamingConfiguration(TypedDict, total=False):
    """Kinesis streaming settings applied by UpdateKinesisStreamingDestination."""

    ApproximateCreationDateTimePrecision: Optional[ApproximateCreationDateTimePrecision]


class UpdateKinesisStreamingDestinationInput(ServiceRequest):
    """Request shape for UpdateKinesisStreamingDestination."""

    TableName: TableArn
    StreamArn: StreamArn
    # Field name intentionally mirrors the class above — it is the wire member name.
    UpdateKinesisStreamingConfiguration: Optional[UpdateKinesisStreamingConfiguration]


class UpdateKinesisStreamingDestinationOutput(TypedDict, total=False):
    """Response shape for UpdateKinesisStreamingDestination."""

    TableName: Optional[TableName]
    StreamArn: Optional[StreamArn]
    DestinationStatus: Optional[DestinationStatus]
    UpdateKinesisStreamingConfiguration: Optional[UpdateKinesisStreamingConfiguration]


class UpdateTableInput(ServiceRequest):
    """Request shape for UpdateTable."""

    AttributeDefinitions: Optional[AttributeDefinitions]
    TableName: TableArn
    BillingMode: Optional[BillingMode]
    ProvisionedThroughput: Optional[ProvisionedThroughput]
    GlobalSecondaryIndexUpdates: Optional[GlobalSecondaryIndexUpdateList]
    StreamSpecification: Optional[StreamSpecification]
    SSESpecification: Optional[SSESpecification]
    ReplicaUpdates: Optional[ReplicationGroupUpdateList]
    TableClass: Optional[TableClass]
    DeletionProtectionEnabled: Optional[DeletionProtectionEnabled]
    OnDemandThroughput: Optional[OnDemandThroughput]


class UpdateTableOutput(TypedDict, total=False):
    """Response shape for UpdateTable."""

    TableDescription: Optional[TableDescription]


class UpdateTableReplicaAutoScalingInput(ServiceRequest):
    """Request shape for UpdateTableReplicaAutoScaling."""

    GlobalSecondaryIndexUpdates: Optional[GlobalSecondaryIndexAutoScalingUpdateList]
    TableName: TableArn
    ProvisionedWriteCapacityAutoScalingUpdate: Optional[AutoScalingSettingsUpdate]
    ReplicaUpdates: Optional[ReplicaAutoScalingUpdateList]


class UpdateTableReplicaAutoScalingOutput(TypedDict, total=False):
    """Response shape for UpdateTableReplicaAutoScaling."""

    TableAutoScalingDescription: Optional[TableAutoScalingDescription]


class UpdateTimeToLiveInput(ServiceRequest):
    """Request shape for UpdateTimeToLive."""

    TableName: TableArn
    TimeToLiveSpecification: TimeToLiveSpecification


class UpdateTimeToLiveOutput(TypedDict, total=False):
    """Response shape for UpdateTimeToLive."""

    TimeToLiveSpecification: Optional[TimeToLiveSpecification]
class DynamodbApi:
    """Skeleton of the DynamoDB service API (wire version 2012-08-10).

    Every operation is declared as a ``@handler``-decorated method whose
    keyword parameters are the snake_case form of the operation's request
    members.  Each stub raises ``NotImplementedError``; a concrete provider
    is expected to subclass and override the operations it supports.
    """

    service = "dynamodb"
    version = "2012-08-10"
    @handler("BatchExecuteStatement")
    def batch_execute_statement(
        self,
        context: RequestContext,
        statements: PartiQLBatchRequest,
        return_consumed_capacity: ReturnConsumedCapacity = None,
        **kwargs,
    ) -> BatchExecuteStatementOutput:
        """Stub for the BatchExecuteStatement (PartiQL batch) operation.

        Always raises ``NotImplementedError``; override in a concrete provider.
        """
        raise NotImplementedError
@handler("BatchGetItem")
def batch_get_item(
self,
context: RequestContext,
request_items: BatchGetRequestMap,
return_consumed_capacity: ReturnConsumedCapacity = None,
**kwargs,
) -> BatchGetItemOutput:
raise NotImplementedError
@handler("BatchWriteItem")
def batch_write_item(
self,
context: RequestContext,
request_items: BatchWriteItemRequestMap,
return_consumed_capacity: ReturnConsumedCapacity = None,
return_item_collection_metrics: ReturnItemCollectionMetrics = None,
**kwargs,
) -> BatchWriteItemOutput:
raise NotImplementedError
@handler("CreateBackup")
def create_backup(
self, context: RequestContext, table_name: TableArn, backup_name: BackupName, **kwargs
) -> CreateBackupOutput:
raise NotImplementedError
@handler("CreateGlobalTable")
def create_global_table(
self,
context: RequestContext,
global_table_name: TableName,
replication_group: ReplicaList,
**kwargs,
) -> CreateGlobalTableOutput:
raise NotImplementedError
@handler("CreateTable")
def create_table(
self,
context: RequestContext,
attribute_definitions: AttributeDefinitions,
table_name: TableArn,
key_schema: KeySchema,
local_secondary_indexes: LocalSecondaryIndexList = None,
global_secondary_indexes: GlobalSecondaryIndexList = None,
billing_mode: BillingMode = None,
provisioned_throughput: ProvisionedThroughput = None,
stream_specification: StreamSpecification = None,
sse_specification: SSESpecification = None,
tags: TagList = None,
table_class: TableClass = None,
deletion_protection_enabled: DeletionProtectionEnabled = None,
resource_policy: ResourcePolicy = None,
on_demand_throughput: OnDemandThroughput = None,
**kwargs,
) -> CreateTableOutput:
raise NotImplementedError
@handler("DeleteBackup")
def delete_backup(
self, context: RequestContext, backup_arn: BackupArn, **kwargs
) -> DeleteBackupOutput:
raise NotImplementedError
@handler("DeleteItem")
def delete_item(
self,
context: RequestContext,
table_name: TableArn,
key: Key,
expected: ExpectedAttributeMap = None,
conditional_operator: ConditionalOperator = None,
return_values: ReturnValue = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
return_item_collection_metrics: ReturnItemCollectionMetrics = None,
condition_expression: ConditionExpression = None,
expression_attribute_names: ExpressionAttributeNameMap = None,
expression_attribute_values: ExpressionAttributeValueMap = None,
return_values_on_condition_check_failure: ReturnValuesOnConditionCheckFailure = None,
**kwargs,
) -> DeleteItemOutput:
raise NotImplementedError
@handler("DeleteResourcePolicy")
def delete_resource_policy(
self,
context: RequestContext,
resource_arn: ResourceArnString,
expected_revision_id: PolicyRevisionId = None,
**kwargs,
) -> DeleteResourcePolicyOutput:
raise NotImplementedError
@handler("DeleteTable")
def delete_table(
self, context: RequestContext, table_name: TableArn, **kwargs
) -> DeleteTableOutput:
raise NotImplementedError
@handler("DescribeBackup")
def describe_backup(
self, context: RequestContext, backup_arn: BackupArn, **kwargs
) -> DescribeBackupOutput:
raise NotImplementedError
@handler("DescribeContinuousBackups")
def describe_continuous_backups(
self, context: RequestContext, table_name: TableArn, **kwargs
) -> DescribeContinuousBackupsOutput:
raise NotImplementedError
@handler("DescribeContributorInsights")
def describe_contributor_insights(
self, context: RequestContext, table_name: TableArn, index_name: IndexName = None, **kwargs
) -> DescribeContributorInsightsOutput:
raise NotImplementedError
@handler("DescribeEndpoints")
def describe_endpoints(self, context: RequestContext, **kwargs) -> DescribeEndpointsResponse:
raise NotImplementedError
@handler("DescribeExport")
def describe_export(
self, context: RequestContext, export_arn: ExportArn, **kwargs
) -> DescribeExportOutput:
raise NotImplementedError
@handler("DescribeGlobalTable")
def describe_global_table(
self, context: RequestContext, global_table_name: TableName, **kwargs
) -> DescribeGlobalTableOutput:
raise NotImplementedError
@handler("DescribeGlobalTableSettings")
def describe_global_table_settings(
self, context: RequestContext, global_table_name: TableName, **kwargs
) -> DescribeGlobalTableSettingsOutput:
raise NotImplementedError
@handler("DescribeImport")
def describe_import(
self, context: RequestContext, import_arn: ImportArn, **kwargs
) -> DescribeImportOutput:
raise NotImplementedError
@handler("DescribeKinesisStreamingDestination")
def describe_kinesis_streaming_destination(
self, context: RequestContext, table_name: TableArn, **kwargs
) -> DescribeKinesisStreamingDestinationOutput:
raise NotImplementedError
@handler("DescribeLimits")
def describe_limits(self, context: RequestContext, **kwargs) -> DescribeLimitsOutput:
raise NotImplementedError
@handler("DescribeTable")
def describe_table(
self, context: RequestContext, table_name: TableArn, **kwargs
) -> DescribeTableOutput:
raise NotImplementedError
@handler("DescribeTableReplicaAutoScaling")
def describe_table_replica_auto_scaling(
self, context: RequestContext, table_name: TableArn, **kwargs
) -> DescribeTableReplicaAutoScalingOutput:
raise NotImplementedError
@handler("DescribeTimeToLive")
def describe_time_to_live(
self, context: RequestContext, table_name: TableArn, **kwargs
) -> DescribeTimeToLiveOutput:
raise NotImplementedError
@handler("DisableKinesisStreamingDestination")
def disable_kinesis_streaming_destination(
self,
context: RequestContext,
table_name: TableArn,
stream_arn: StreamArn,
enable_kinesis_streaming_configuration: EnableKinesisStreamingConfiguration = None,
**kwargs,
) -> KinesisStreamingDestinationOutput:
raise NotImplementedError
@handler("EnableKinesisStreamingDestination")
def enable_kinesis_streaming_destination(
self,
context: RequestContext,
table_name: TableArn,
stream_arn: StreamArn,
enable_kinesis_streaming_configuration: EnableKinesisStreamingConfiguration = None,
**kwargs,
) -> KinesisStreamingDestinationOutput:
raise NotImplementedError
@handler("ExecuteStatement")
def execute_statement(
self,
context: RequestContext,
statement: PartiQLStatement,
parameters: PreparedStatementParameters = None,
consistent_read: ConsistentRead = None,
next_token: PartiQLNextToken = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
limit: PositiveIntegerObject = None,
return_values_on_condition_check_failure: ReturnValuesOnConditionCheckFailure = None,
**kwargs,
) -> ExecuteStatementOutput:
raise NotImplementedError
@handler("ExecuteTransaction")
def execute_transaction(
self,
context: RequestContext,
transact_statements: ParameterizedStatements,
client_request_token: ClientRequestToken = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
**kwargs,
) -> ExecuteTransactionOutput:
raise NotImplementedError
@handler("ExportTableToPointInTime")
def export_table_to_point_in_time(
self,
context: RequestContext,
table_arn: TableArn,
s3_bucket: S3Bucket,
export_time: ExportTime = None,
client_token: ClientToken = None,
s3_bucket_owner: S3BucketOwner = None,
s3_prefix: S3Prefix = None,
s3_sse_algorithm: S3SseAlgorithm = None,
s3_sse_kms_key_id: S3SseKmsKeyId = None,
export_format: ExportFormat = None,
export_type: ExportType = None,
incremental_export_specification: IncrementalExportSpecification = None,
**kwargs,
) -> ExportTableToPointInTimeOutput:
raise NotImplementedError
@handler("GetItem")
def get_item(
self,
context: RequestContext,
table_name: TableArn,
key: Key,
attributes_to_get: AttributeNameList = None,
consistent_read: ConsistentRead = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
projection_expression: ProjectionExpression = None,
expression_attribute_names: ExpressionAttributeNameMap = None,
**kwargs,
) -> GetItemOutput:
raise NotImplementedError
@handler("GetResourcePolicy")
def get_resource_policy(
self, context: RequestContext, resource_arn: ResourceArnString, **kwargs
) -> GetResourcePolicyOutput:
raise NotImplementedError
@handler("ImportTable")
def import_table(
self,
context: RequestContext,
s3_bucket_source: S3BucketSource,
input_format: InputFormat,
table_creation_parameters: TableCreationParameters,
client_token: ClientToken = None,
input_format_options: InputFormatOptions = None,
input_compression_type: InputCompressionType = None,
**kwargs,
) -> ImportTableOutput:
raise NotImplementedError
@handler("ListBackups")
def list_backups(
self,
context: RequestContext,
table_name: TableArn = None,
limit: BackupsInputLimit = None,
time_range_lower_bound: TimeRangeLowerBound = None,
time_range_upper_bound: TimeRangeUpperBound = None,
exclusive_start_backup_arn: BackupArn = None,
backup_type: BackupTypeFilter = None,
**kwargs,
) -> ListBackupsOutput:
raise NotImplementedError
@handler("ListContributorInsights")
def list_contributor_insights(
self,
context: RequestContext,
table_name: TableArn = None,
next_token: NextTokenString = None,
max_results: ListContributorInsightsLimit = None,
**kwargs,
) -> ListContributorInsightsOutput:
raise NotImplementedError
@handler("ListExports")
def list_exports(
self,
context: RequestContext,
table_arn: TableArn = None,
max_results: ListExportsMaxLimit = None,
next_token: ExportNextToken = None,
**kwargs,
) -> ListExportsOutput:
raise NotImplementedError
@handler("ListGlobalTables")
def list_global_tables(
self,
context: RequestContext,
exclusive_start_global_table_name: TableName = None,
limit: PositiveIntegerObject = None,
region_name: RegionName = None,
**kwargs,
) -> ListGlobalTablesOutput:
raise NotImplementedError
@handler("ListImports")
def list_imports(
self,
context: RequestContext,
table_arn: TableArn = None,
page_size: ListImportsMaxLimit = None,
next_token: ImportNextToken = None,
**kwargs,
) -> ListImportsOutput:
raise NotImplementedError
@handler("ListTables")
def list_tables(
self,
context: RequestContext,
exclusive_start_table_name: TableName = None,
limit: ListTablesInputLimit = None,
**kwargs,
) -> ListTablesOutput:
raise NotImplementedError
@handler("ListTagsOfResource")
def list_tags_of_resource(
self,
context: RequestContext,
resource_arn: ResourceArnString,
next_token: NextTokenString = None,
**kwargs,
) -> ListTagsOfResourceOutput:
raise NotImplementedError
@handler("PutItem")
def put_item(
self,
context: RequestContext,
table_name: TableArn,
item: PutItemInputAttributeMap,
expected: ExpectedAttributeMap = None,
return_values: ReturnValue = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
return_item_collection_metrics: ReturnItemCollectionMetrics = None,
conditional_operator: ConditionalOperator = None,
condition_expression: ConditionExpression = None,
expression_attribute_names: ExpressionAttributeNameMap = None,
expression_attribute_values: ExpressionAttributeValueMap = None,
return_values_on_condition_check_failure: ReturnValuesOnConditionCheckFailure = None,
**kwargs,
) -> PutItemOutput:
raise NotImplementedError
@handler("PutResourcePolicy")
def put_resource_policy(
self,
context: RequestContext,
resource_arn: ResourceArnString,
policy: ResourcePolicy,
expected_revision_id: PolicyRevisionId = None,
confirm_remove_self_resource_access: ConfirmRemoveSelfResourceAccess = None,
**kwargs,
) -> PutResourcePolicyOutput:
raise NotImplementedError
@handler("Query")
def query(
self,
context: RequestContext,
table_name: TableArn,
index_name: IndexName = None,
select: Select = None,
attributes_to_get: AttributeNameList = None,
limit: PositiveIntegerObject = None,
consistent_read: ConsistentRead = None,
key_conditions: KeyConditions = None,
query_filter: FilterConditionMap = None,
conditional_operator: ConditionalOperator = None,
scan_index_forward: BooleanObject = None,
exclusive_start_key: Key = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
projection_expression: ProjectionExpression = None,
filter_expression: ConditionExpression = None,
key_condition_expression: KeyExpression = None,
expression_attribute_names: ExpressionAttributeNameMap = None,
expression_attribute_values: ExpressionAttributeValueMap = None,
**kwargs,
) -> QueryOutput:
raise NotImplementedError
@handler("RestoreTableFromBackup")
def restore_table_from_backup(
self,
context: RequestContext,
target_table_name: TableName,
backup_arn: BackupArn,
billing_mode_override: BillingMode = None,
global_secondary_index_override: GlobalSecondaryIndexList = None,
local_secondary_index_override: LocalSecondaryIndexList = None,
provisioned_throughput_override: ProvisionedThroughput = None,
on_demand_throughput_override: OnDemandThroughput = None,
sse_specification_override: SSESpecification = None,
**kwargs,
) -> RestoreTableFromBackupOutput:
raise NotImplementedError
@handler("RestoreTableToPointInTime")
def restore_table_to_point_in_time(
self,
context: RequestContext,
target_table_name: TableName,
source_table_arn: TableArn = None,
source_table_name: TableName = None,
use_latest_restorable_time: BooleanObject = None,
restore_date_time: Date = None,
billing_mode_override: BillingMode = None,
global_secondary_index_override: GlobalSecondaryIndexList = None,
local_secondary_index_override: LocalSecondaryIndexList = None,
provisioned_throughput_override: ProvisionedThroughput = None,
on_demand_throughput_override: OnDemandThroughput = None,
sse_specification_override: SSESpecification = None,
**kwargs,
) -> RestoreTableToPointInTimeOutput:
raise NotImplementedError
@handler("Scan")
def scan(
self,
context: RequestContext,
table_name: TableArn,
index_name: IndexName = None,
attributes_to_get: AttributeNameList = None,
limit: PositiveIntegerObject = None,
select: Select = None,
scan_filter: FilterConditionMap = None,
conditional_operator: ConditionalOperator = None,
exclusive_start_key: Key = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
total_segments: ScanTotalSegments = None,
segment: ScanSegment = None,
projection_expression: ProjectionExpression = None,
filter_expression: ConditionExpression = None,
expression_attribute_names: ExpressionAttributeNameMap = None,
expression_attribute_values: ExpressionAttributeValueMap = None,
consistent_read: ConsistentRead = None,
**kwargs,
) -> ScanOutput:
raise NotImplementedError
@handler("TagResource")
def tag_resource(
self, context: RequestContext, resource_arn: ResourceArnString, tags: TagList, **kwargs
) -> None:
raise NotImplementedError
@handler("TransactGetItems")
def transact_get_items(
self,
context: RequestContext,
transact_items: TransactGetItemList,
return_consumed_capacity: ReturnConsumedCapacity = None,
**kwargs,
) -> TransactGetItemsOutput:
raise NotImplementedError
@handler("TransactWriteItems")
def transact_write_items(
self,
context: RequestContext,
transact_items: TransactWriteItemList,
return_consumed_capacity: ReturnConsumedCapacity = None,
return_item_collection_metrics: ReturnItemCollectionMetrics = None,
client_request_token: ClientRequestToken = None,
**kwargs,
) -> TransactWriteItemsOutput:
raise NotImplementedError
@handler("UntagResource")
def untag_resource(
self,
context: RequestContext,
resource_arn: ResourceArnString,
tag_keys: TagKeyList,
**kwargs,
) -> None:
raise NotImplementedError
@handler("UpdateContinuousBackups")
def update_continuous_backups(
self,
context: RequestContext,
table_name: TableArn,
point_in_time_recovery_specification: PointInTimeRecoverySpecification,
**kwargs,
) -> UpdateContinuousBackupsOutput:
raise NotImplementedError
@handler("UpdateContributorInsights")
def update_contributor_insights(
self,
context: RequestContext,
table_name: TableArn,
contributor_insights_action: ContributorInsightsAction,
index_name: IndexName = None,
**kwargs,
) -> UpdateContributorInsightsOutput:
raise NotImplementedError
@handler("UpdateGlobalTable")
def update_global_table(
self,
context: RequestContext,
global_table_name: TableName,
replica_updates: ReplicaUpdateList,
**kwargs,
) -> UpdateGlobalTableOutput:
raise NotImplementedError
@handler("UpdateGlobalTableSettings")
def update_global_table_settings(
self,
context: RequestContext,
global_table_name: TableName,
global_table_billing_mode: BillingMode = None,
global_table_provisioned_write_capacity_units: PositiveLongObject = None,
your_sha256_hashdate: AutoScalingSettingsUpdate = None,
global_table_global_secondary_index_settings_update: GlobalTableGlobalSecondaryIndexSettingsUpdateList = None,
replica_settings_update: ReplicaSettingsUpdateList = None,
**kwargs,
) -> UpdateGlobalTableSettingsOutput:
raise NotImplementedError
@handler("UpdateItem")
def update_item(
self,
context: RequestContext,
table_name: TableArn,
key: Key,
attribute_updates: AttributeUpdates = None,
expected: ExpectedAttributeMap = None,
conditional_operator: ConditionalOperator = None,
return_values: ReturnValue = None,
return_consumed_capacity: ReturnConsumedCapacity = None,
return_item_collection_metrics: ReturnItemCollectionMetrics = None,
update_expression: UpdateExpression = None,
condition_expression: ConditionExpression = None,
expression_attribute_names: ExpressionAttributeNameMap = None,
expression_attribute_values: ExpressionAttributeValueMap = None,
return_values_on_condition_check_failure: ReturnValuesOnConditionCheckFailure = None,
**kwargs,
) -> UpdateItemOutput:
raise NotImplementedError
@handler("UpdateKinesisStreamingDestination")
def update_kinesis_streaming_destination(
self,
context: RequestContext,
table_name: TableArn,
stream_arn: StreamArn,
update_kinesis_streaming_configuration: UpdateKinesisStreamingConfiguration = None,
**kwargs,
) -> UpdateKinesisStreamingDestinationOutput:
raise NotImplementedError
@handler("UpdateTable")
def update_table(
self,
context: RequestContext,
table_name: TableArn,
attribute_definitions: AttributeDefinitions = None,
billing_mode: BillingMode = None,
provisioned_throughput: ProvisionedThroughput = None,
global_secondary_index_updates: GlobalSecondaryIndexUpdateList = None,
stream_specification: StreamSpecification = None,
sse_specification: SSESpecification = None,
replica_updates: ReplicationGroupUpdateList = None,
table_class: TableClass = None,
deletion_protection_enabled: DeletionProtectionEnabled = None,
on_demand_throughput: OnDemandThroughput = None,
**kwargs,
) -> UpdateTableOutput:
raise NotImplementedError
@handler("UpdateTableReplicaAutoScaling")
def update_table_replica_auto_scaling(
self,
context: RequestContext,
table_name: TableArn,
global_secondary_index_updates: GlobalSecondaryIndexAutoScalingUpdateList = None,
provisioned_write_capacity_auto_scaling_update: AutoScalingSettingsUpdate = None,
replica_updates: ReplicaAutoScalingUpdateList = None,
**kwargs,
) -> UpdateTableReplicaAutoScalingOutput:
raise NotImplementedError
@handler("UpdateTimeToLive")
def update_time_to_live(
self,
context: RequestContext,
table_name: TableArn,
time_to_live_specification: TimeToLiveSpecification,
**kwargs,
) -> UpdateTimeToLiveOutput:
raise NotImplementedError
```
|
```java
/*
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package org.apache.arrow.driver.jdbc.accessor.impl.complex;
import java.util.List;
import java.util.function.IntSupplier;
import org.apache.arrow.driver.jdbc.accessor.ArrowFlightJdbcAccessorFactory;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.FixedSizeListVector;
/** Accessor for the Arrow type {@link FixedSizeListVector}. */
public class ArrowFlightJdbcFixedSizeListVectorAccessor
    extends AbstractArrowFlightJdbcListVectorAccessor {

  private final FieldVector vector;

  private final FixedSizeListVector vector;

  /**
   * Creates an accessor bound to the given fixed-size-list vector.
   *
   * @param vector the vector this accessor reads from
   * @param currentRowSupplier supplies the row index the cursor is currently positioned on
   * @param setCursorWasNull callback used to propagate the was-null state back to the cursor
   */
  public ArrowFlightJdbcFixedSizeListVectorAccessor(
      FixedSizeListVector vector,
      IntSupplier currentRowSupplier,
      ArrowFlightJdbcAccessorFactory.WasNullConsumer setCursorWasNull) {
    super(currentRowSupplier, setCursorWasNull);
    this.vector = vector;
  }

  /**
   * Start offset of the list at {@code index}. Every list has the same fixed size,
   * so the offset is simply {@code listSize * index} (widened to long before the
   * multiply to avoid int overflow on large indices).
   */
  @Override
  protected long getStartOffset(int index) {
    return (long) vector.getListSize() * index;
  }

  /** End offset (exclusive) of the list at {@code index}: {@code listSize * (index + 1)}. */
  @Override
  protected long getEndOffset(int index) {
    return (long) vector.getListSize() * (index + 1);
  }

  /** Inner vector holding the flattened list elements. */
  @Override
  protected FieldVector getDataVector() {
    return vector.getDataVector();
  }

  @Override
  protected boolean isNull(int index) {
    return vector.isNull(index);
  }

  /**
   * Returns the list at the current row as a Java {@link List} (or {@code null}),
   * recording the was-null state on this accessor and on the cursor.
   */
  @Override
  public Object getObject() {
    List<?> object = vector.getObject(getCurrentRow());
    this.wasNull = object == null;
    this.wasNullConsumer.setWasNull(this.wasNull);
    return object;
  }
}
```
|
```xml
/// <reference path="../../localtypings/pxtarget.d.ts" />
import * as React from "react"
import * as data from "./data"
import * as editor from "./toolboxeditor"
import * as sui from "./sui"
import * as core from "./core"
import * as coretsx from "./coretsx";
import Util = pxt.Util;
import { fireClickOnEnter } from "./util"
import { DeleteConfirmationModal } from "../../react-common/components/extensions/DeleteConfirmationModal"
import * as Blockly from "blockly";
import { classList } from "../../react-common/components/util"
// Well-known toolbox category ids.
export const enum CategoryNameID {
    Loops = "loops",
    Logic = "logic",
    Variables = "variables",
    // NOTE(review): capitalized unlike the other ids — presumably must match the
    // "Math" namespace name exactly; confirm before normalizing.
    Maths = "Math",
    Functions = "functions",
    Arrays = "arrays",
    Text = "text",
    Extensions = "addpackage"
}
// this is a supertype of pxtc.SymbolInfo (see partitionBlocks)
export interface BlockDefinition {
    qName?: string;
    name: string;
    pyQName?: string;
    pyName?: string;
    namespace?: string;
    type?: string;
    snippet?: string;
    snippetName?: string;
    pySnippet?: string;
    pySnippetName?: string;
    snippetOnly?: boolean;
    // Metadata mirroring the block's comment attributes.
    attributes: {
        block?: string;
        blockId?: string;
        blockNamespace?: string;
        color?: string;
        weight?: number;
        advanced?: boolean;
        jsDoc?: string;
        blockBuiltin?: boolean;
        deprecated?: boolean;
        blockHidden?: boolean;
        group?: string;
        subcategory?: string;
        topblockWeight?: number;
        help?: string;
        _def?: pxtc.ParsedBlockDef;
    };
    retType?: string;
    blockXml?: string;
    builtinBlock?: boolean;
    builtinField?: [string, string];
    parameters?: pxtc.ParameterDesc[];
}

// A named group of blocks inside a category.
export interface GroupDefinition {
    name: string;
    icon?: string;
    hasHelp?: boolean;
    blocks: BlockDefinition[];
}

// A clickable (non-block) toolbox entry.
export interface ButtonDefinition {
    name: string;
    type: "button";
    attributes: {
        blockId?: string;
        label?: string;
        weight?: number;
    }
    callback?: () => void;
}

export interface BuiltinCategoryDefinition {
    name: string;
    nameid: string;
    blocks: (BlockDefinition | ButtonDefinition)[];
    groups?: string[];
    attributes: pxtc.CommentAttrs;
    removed?: boolean;
    custom?: boolean; // Only add blocks defined in .blocks and don't query nsMap for more
    customClick?: (theEditor: editor.ToolboxEditor) => boolean; // custom handler
    onlyTriggerOnClick?: boolean;
}
export interface ToolboxProps {
    editorname: string; // editor id; compared against MONACO_EDITOR_NAME below
    parent: editor.ToolboxEditor;
}

export interface ToolboxState {
    showAdvanced?: boolean;   // whether the advanced categories are shown
    visible?: boolean;
    loading?: boolean;
    selectedItem?: string;    // id of the currently selected category
    expandedItem?: string;    // nameid of the currently expanded category
    height?: number;
    categories?: ToolboxCategory[];
    showSearchBox?: boolean;
    hasSearch?: boolean;
    focusSearch?: boolean;
    searchBlocks?: pxtc.service.SearchInfo[]; // block ids
    hasError?: boolean;
    shouldAnimate?: boolean;  // run the toolbox intro animation once per session
    tryToDeleteNamespace?: string; // extension namespace awaiting delete confirmation
}

const MONACO_EDITOR_NAME: string = "monaco";
export class Toolbox extends data.Component<ToolboxProps, ToolboxState> {
private rootElement: HTMLElement;
private selectedItem: CategoryItem;
private selectedIndex: number;
private items: ToolboxCategory[];
private selectedTreeRow: ToolboxCategory;
constructor(props: ToolboxProps) {
super(props);
this.state = {
categories: [],
visible: false,
loading: false,
showAdvanced: false,
shouldAnimate: !pxt.shell.getToolboxAnimation()
}
this.setSelection = this.setSelection.bind(this);
this.advancedClicked = this.advancedClicked.bind(this);
this.recoverToolbox = this.recoverToolbox.bind(this);
this.handleRemoveExtension = this.handleRemoveExtension.bind(this);
this.deleteExtension = this.deleteExtension.bind(this);
this.cancelDeleteExtension= this.cancelDeleteExtension.bind(this);
}
getElement() {
return this.rootElement;
}
hide() {
this.setState({ visible: false })
}
showLoading() {
this.setState({ visible: true, loading: true });
}
show() {
this.setState({ visible: true })
}
setSelectedItem(item: CategoryItem) {
this.selectedItem = item;
}
setPreviousItem() {
if (this.selectedIndex > 0) {
const newIndex = --this.selectedIndex;
// Check if the previous item has a subcategory
let previousItem = this.items[newIndex];
this.setSelection(previousItem, newIndex);
} else if (this.state.showSearchBox) {
// Focus the search box if it exists
const searchBox = this.refs.searchbox as ToolboxSearch;
if (searchBox) searchBox.focus();
}
}
setNextItem() {
if (this.items.length - 1 > this.selectedIndex) {
const newIndex = ++this.selectedIndex;
this.setSelection(this.items[newIndex], newIndex);
}
}
setSearch() {
// Focus the search box if it exists
const searchBox = this.refs.searchbox as ToolboxSearch;
if (searchBox) searchBox.focus();
}
clear() {
this.clearSelection();
this.selectedIndex = 0;
this.selectedTreeRow = undefined;
}
clearSelection() {
this.setState({ selectedItem: undefined, focusSearch: false });
}
clearExpandedItem() {
this.setState({ expandedItem: undefined });
}
clearSearch() {
this.setState({ hasSearch: false, searchBlocks: undefined, focusSearch: false });
}
async handleRemoveExtension(ns: string) {
this.setState({
tryToDeleteNamespace: ns
})
}
setSelection(treeRow: ToolboxCategory, index: number, force?: boolean, isClick = false) {
const { editorname, parent } = this.props;
const { nameid, subns, customClick, onlyTriggerOnClick } = treeRow;
pxt.tickEvent(`${editorname}.toolbox.click`, undefined, { interactiveConsent: true });
let id = subns ? nameid + subns : nameid;
if (this.state.selectedItem == id && !force && !onlyTriggerOnClick) {
this.clearSelection();
// Hide flyout
this.closeFlyout();
} else {
let handled = false;
if (customClick) {
if (!onlyTriggerOnClick || isClick) {
handled = customClick(parent);
if (handled) return;
}
else {
this.closeFlyout();
}
}
if (!handled) {
this.setState({ selectedItem: id, expandedItem: nameid, focusSearch: false })
this.selectedIndex = index;
this.selectedTreeRow = treeRow;
if (treeRow.advanced && !this.state.showAdvanced) this.showAdvanced();
if (!customClick) {
// Show flyout
this.showFlyout(treeRow);
}
}
}
}
onCategoryClick = (treeRow: ToolboxCategory, index: number) => {
this.setSelection(treeRow, index, undefined, true);
}
focus(itemToFocus?: string) {
if (!this.rootElement) return;
if (this.selectedItem && this.selectedItem.getTreeRow()) {
// Focus the selected item
const selectedItem = this.selectedItem.props.treeRow;
const selectedItemIndex = this.items.indexOf(selectedItem);
this.setSelection(selectedItem, selectedItemIndex, true);
} else {
// Focus first item in the toolbox
if (itemToFocus) {
for (const item of this.items) {
if (item.nameid === itemToFocus) {
this.setSelection(item, this.items.indexOf(item), true);
return;
}
}
}
this.selectFirstItem();
}
}
selectFirstItem() {
if (this.items[0]) {
this.setSelection(this.items[0], 0, true);
}
}
moveFocusToFlyout() {
const { parent } = this.props;
parent.moveFocusToFlyout();
}
UNSAFE_componentWillReceiveProps(props: ToolboxProps) {
// if leaving monaco, mark toolbox animation as shown. also
// handles full screen sim, where we hide the toolbox via css
// without re-rendering, which will trigger the animation again
if ((this.props.editorname == MONACO_EDITOR_NAME && props.editorname != MONACO_EDITOR_NAME)
|| (props.editorname == MONACO_EDITOR_NAME && props.parent.parent.state.fullscreen)
&& this.state.shouldAnimate) {
pxt.shell.setToolboxAnimation();
this.setState({ shouldAnimate: false });
}
}
    componentDidUpdate(prevProps: ToolboxProps, prevState: ToolboxState) {
        // Any state change that can alter the toolbox's size requires a parent resize.
        if (prevState.visible != this.state.visible
            || prevState.loading != this.state.loading
            || prevState.showAdvanced != this.state.showAdvanced
            || this.state.expandedItem != prevState.expandedItem) {
            this.props.parent.resize();
        }
        if (this.state.hasSearch && this.state.searchBlocks != prevState.searchBlocks) {
            // Refresh search items
            this.refreshSearchItem();
        } else if (prevState.hasSearch && !this.state.hasSearch && this.state.selectedItem == 'search') {
            // Search was cleared while the search category was open; close the flyout
            this.closeFlyout();
        }
    }
componentDidCatch(error: any, info: any) {
// Log what happened
const { editorname } = this.props;
pxt.tickEvent(`${editorname}.toolbox.crashed`, { error: error });
// Update error state
this.setState({ hasError: true });
}
componentWillUnmount() {
if (this.props.editorname == MONACO_EDITOR_NAME) {
pxt.shell.setToolboxAnimation();
}
}
recoverToolbox() {
// Recover from above error state
this.setState({ hasError: false });
}
advancedClicked() {
const { editorname } = this.props;
pxt.tickEvent(`${editorname}.advanced`, undefined, { interactiveConsent: true });
this.showAdvanced();
}
    /**
     * Toggle visibility of the advanced categories. If the current selection is
     * an advanced row (about to be hidden), clear it and close the flyout first.
     */
    showAdvanced() {
        if (this.selectedItem && this.selectedItem.props.treeRow
            && this.selectedItem.props.treeRow.advanced) {
            this.clear();
            this.closeFlyout();
        }
        this.setState({ showAdvanced: !this.state.showAdvanced });
    }
getSearchBlocks(): BlockDefinition[] {
const { searchBlocks } = this.state;
return searchBlocks.map(searchResult => {
return {
name: searchResult.qName,
attributes: {
blockId: searchResult.id
},
builtinBlock: searchResult.builtinBlock,
builtinField: searchResult.field
}
});
}
    /** Re-run the action for the currently selected row: its custom click handler, or reopening the flyout. */
    refreshSelection() {
        const { parent } = this.props;
        if (!this.state.selectedItem || !this.selectedTreeRow) return;
        if (this.selectedTreeRow.customClick) {
            this.selectedTreeRow.customClick(parent);
        } else {
            this.showFlyout(this.selectedTreeRow);
        }
    }
refreshSearchItem() {
const searchTreeRow = ToolboxSearch.getSearchTreeRow();
this.showFlyout(searchTreeRow);
}
private showFlyout(treeRow: ToolboxCategory) {
const { parent } = this.props;
parent.showFlyout(treeRow);
}
    /**
     * Confirm-delete handler for an extension category: dismisses the modal and
     * reloads the header. NOTE(review): the actual removal of the extension is
     * not implemented yet (see TODO below).
     */
    private async deleteExtension(ns: string) {
        this.setState({
            tryToDeleteNamespace: undefined
        })
        // TODO: Not implemented yet.
        // Remove the top level extension, only if there are no blocks in the workspace
        // Associated with that extension.
        await this.props.parent.parent.reloadHeaderAsync()
    }
private cancelDeleteExtension() {
this.setState({
tryToDeleteNamespace: undefined
})
}
closeFlyout() {
const { parent } = this.props;
parent.closeFlyout();
}
hasAdvancedCategories() {
const { categories } = this.state;
return categories.some(category => category.advanced);
}
getNonAdvancedCategories() {
const { categories } = this.state;
return categories.filter(category => !category.advanced);
}
getAdvancedCategories() {
const { categories } = this.state;
return categories.filter(category => category.advanced);
}
private getAllCategoriesList(visibleOnly?: boolean): ToolboxCategory[] {
const { categories, hasSearch, expandedItem } = this.state;
const categoriesList: ToolboxCategory[] = [];
if (hasSearch) categoriesList.push(ToolboxSearch.getSearchTreeRow());
categories.forEach(category => {
categoriesList.push(category);
if (category.subcategories &&
(!visibleOnly || visibleOnly && category.nameid == expandedItem)) {
category.subcategories.forEach(subcategory => {
categoriesList.push(subcategory);
})
}
})
return categoriesList;
}
shouldComponentUpdate(nextProps: ToolboxProps, nextState: ToolboxState) {
if (this.state != nextState) return true;
return false;
}
    /** Ref callback storing the toolbox root element, used for focus management. */
    handleRootElementRef = (c: HTMLDivElement) => {
        this.rootElement = c;
    }
isRtl() {
const { editorname } = this.props;
return editorname == 'monaco' ? false : Util.isUserLanguageRtl();
}
    /**
     * Render the toolbox tree: optional search box, the non-advanced categories
     * (with subcategories of the expanded item), the Advanced toggle row, and —
     * when expanded — the advanced categories. Also renders loading/error UI.
     */
    renderCore() {
        const { editorname, parent } = this.props;
        const { showAdvanced, visible, loading, selectedItem, expandedItem, hasSearch, showSearchBox, hasError, tryToDeleteNamespace } = this.state;
        if (!visible) {
            return (
                <div style={{ display: 'none' }} />
            );
        }
        const theme = pxt.appTarget.appTheme;
        const tutorialOptions = parent.parent.state.tutorialOptions;
        const inTutorial = !!tutorialOptions && !!tutorialOptions.tutorial
        const hasTopBlocks = !!theme.topBlocks && !inTutorial;
        const showToolboxLabel = inTutorial;
        // Loading or crashed: show a placeholder row plus spinner / reload button.
        if (loading || hasError) {
            return (
                <div>
                    <div className="blocklyTreeRoot">
                        <div className="blocklyTreeRow" style={{ opacity: 0 }} />
                    </div>
                    {loading &&
                        <div className="ui active dimmer">
                            <div className="ui loader indeterminate" />
                        </div>
                    }
                    {hasError &&
                        <div className="ui">
                            {lf("Toolbox crashed..")}
                            <sui.Button
                                icon='refresh'
                                onClick={this.recoverToolbox}
                                text={lf("Reload")}
                                className='fluid'
                            />
                        </div>
                    }
                </div>
            );
        }
        const hasAdvanced = this.hasAdvancedCategories();
        let nonAdvancedCategories = this.getNonAdvancedCategories();
        const advancedCategories = hasAdvanced ? this.getAdvancedCategories() : [];
        // Cache the flattened navigation list used by keyboard focus handling.
        this.items = this.getAllCategoriesList();
        const searchTreeRow = ToolboxSearch.getSearchTreeRow();
        const topBlocksTreeRow = {
            nameid: 'topblocks',
            name: lf("{id:category}Basic"),
            color: pxt.toolbox.getNamespaceColor('topblocks'),
            icon: pxt.toolbox.getNamespaceIcon('topblocks')
        };
        const appTheme = pxt.appTarget.appTheme;
        const classes = classList(
            'pxtToolbox',
            appTheme.invertedToolbox && 'invertedToolbox',
            appTheme.coloredToolbox && 'coloredToolbox'
        );
        let index = 0;
        let topRowIndex = 0; // index of top-level rows for animation
        const advancedButtonState = showAdvanced ? "advancedexpanded" : "advancedcollapsed";
        return (
            <div
                ref={this.handleRootElementRef}
                className={classes}
                id={`${editorname}EditorToolbox`}
            >
                <ToolboxStyle categories={this.items} />
                {showToolboxLabel &&
                    <div className="toolbox-title">{lf("Toolbox")}</div>
                }
                {showSearchBox &&
                    <ToolboxSearch
                        ref="searchbox"
                        parent={parent}
                        toolbox={this}
                        editorname={editorname}
                    />
                }
                <div className="blocklyTreeRoot">
                    <div role="tree">
                        {tryToDeleteNamespace &&
                            <DeleteConfirmationModal
                                ns={tryToDeleteNamespace}
                                onCancelClick={this.cancelDeleteExtension}
                                onDeleteClick={this.deleteExtension}
                            />
                        }
                        {hasSearch &&
                            <CategoryItem
                                key={"search"}
                                toolbox={this}
                                index={index++}
                                selected={selectedItem == "search"}
                                treeRow={searchTreeRow}
                                onCategoryClick={this.onCategoryClick}
                            />
                        }
                        {hasTopBlocks &&
                            <CategoryItem
                                key={"topblocks"}
                                toolbox={this}
                                selected={selectedItem == "topblocks"}
                                treeRow={topBlocksTreeRow}
                                onCategoryClick={this.onCategoryClick}
                            />
                        }
                        {nonAdvancedCategories.map(treeRow =>
                            <CategoryItem
                                key={treeRow.nameid}
                                toolbox={this}
                                index={index++}
                                selected={selectedItem == treeRow.nameid}
                                childrenVisible={expandedItem == treeRow.nameid}
                                treeRow={treeRow}
                                onCategoryClick={this.onCategoryClick}
                                topRowIndex={topRowIndex++}
                                shouldAnimate={this.state.shouldAnimate}
                                hasDeleteButton={treeRow.allowDelete}
                                onDeleteClick={this.handleRemoveExtension}
                            >
                                {treeRow.subcategories &&
                                    treeRow.subcategories.map(subTreeRow =>
                                        <CategoryItem
                                            key={subTreeRow.nameid + subTreeRow.subns}
                                            index={index++}
                                            toolbox={this}
                                            selected={selectedItem == (subTreeRow.nameid + subTreeRow.subns)}
                                            treeRow={subTreeRow}
                                            onCategoryClick={this.onCategoryClick}
                                        />
                                    )
                                }
                            </CategoryItem>
                        )}
                        {hasAdvanced &&
                            <>
                                <TreeSeparator key="advancedseparator" />
                                <CategoryItem
                                    toolbox={this}
                                    treeRow={{
                                        nameid: "",
                                        name: pxt.toolbox.advancedTitle(),
                                        color: pxt.toolbox.getNamespaceColor('advanced'),
                                        icon: pxt.toolbox.getNamespaceIcon(advancedButtonState),
                                        advancedButtonState: advancedButtonState
                                    }}
                                    onCategoryClick={this.advancedClicked}
                                    topRowIndex={topRowIndex++}
                                />
                            </>
                        }
                        {showAdvanced &&
                            advancedCategories.map(treeRow =>
                                <CategoryItem
                                    key={treeRow.nameid}
                                    toolbox={this}
                                    index={index++}
                                    selected={selectedItem == treeRow.nameid}
                                    childrenVisible={expandedItem == treeRow.nameid}
                                    treeRow={treeRow}
                                    onCategoryClick={this.onCategoryClick}
                                >
                                    {treeRow.subcategories &&
                                        treeRow.subcategories.map(subTreeRow =>
                                            <CategoryItem
                                                key={subTreeRow.nameid}
                                                toolbox={this}
                                                index={index++}
                                                selected={selectedItem == (subTreeRow.nameid + subTreeRow.subns)}
                                                treeRow={subTreeRow}
                                                onCategoryClick={this.onCategoryClick}
                                            />
                                        )
                                    }
                                </CategoryItem>
                            )
                        }
                    </div>
                </div>
            </div>
        );
    }
}
/** Props for a single toolbox category row, including optional subcategory children. */
export interface CategoryItemProps extends TreeRowProps {
    toolbox: Toolbox;
    childrenVisible?: boolean;
    onCategoryClick?: (treeRow: ToolboxCategory, index: number) => void;
    index?: number;
    topRowIndex?: number;
    hasDeleteButton?: boolean;
    onDeleteClick?: (ns: string) => void;
}
/** Local state for CategoryItem; mirrors the `selected` prop. */
export interface CategoryItemState {
    selected?: boolean;
}
/**
 * A single (sub)category row in the toolbox tree. Handles mouse selection and
 * keyboard navigation (arrow keys, Enter/Space, Escape, accessible-blocks WASD).
 */
export class CategoryItem extends data.Component<CategoryItemProps, CategoryItemState> {
    private treeRowElement: TreeRow;

    constructor(props: CategoryItemProps) {
        super(props);
        this.state = {
            selected: props.selected
        }

        this.handleClick = this.handleClick.bind(this);
        this.handleKeyDown = this.handleKeyDown.bind(this);
    }

    getTreeRow() {
        return this.treeRowElement;
    }

    UNSAFE_componentWillReceiveProps(nextProps: CategoryItemProps) {
        // Mirror the `selected` prop into state when it changes.
        const newState: CategoryItemState = {};
        if (nextProps.selected != undefined) {
            newState.selected = nextProps.selected;
        }
        if (Object.keys(newState).length > 0) this.setState(newState)
    }

    componentDidUpdate(prevProps: CategoryItemProps, prevState: CategoryItemState) {
        const { toolbox } = this.props;
        if (this.state.selected) {
            this.props.toolbox.setSelectedItem(this);
            // Don't steal focus from the search box or an open dialog.
            if (!toolbox.state.focusSearch && !coretsx.dialogIsShowing()) this.focusElement();
        }
    }

    focusElement() {
        this.treeRowElement.focus();
    }

    handleClick(e: React.MouseEvent<any>) {
        const { treeRow, onCategoryClick, index } = this.props;
        if (onCategoryClick) onCategoryClick(treeRow, index);

        e.preventDefault();
        e.stopPropagation();
    }

    handleKeyDown(e: React.KeyboardEvent<HTMLElement>) {
        const { toolbox } = this.props;
        const isRtl = Util.isUserLanguageRtl();

        const mainWorkspace = Blockly.getMainWorkspace() as Blockly.WorkspaceSvg;
        const accessibleBlocksEnabled = mainWorkspace.keyboardAccessibilityMode;
        const accessibleBlocksState = accessibleBlocksEnabled
            && (toolbox.props.parent as any).navigationController?.navigation?.getState(mainWorkspace);
        // In accessible-blocks mode navigation uses WASD instead of the arrow keys.
        const keyMap: { [key: string]: number } = {
            "DOWN": accessibleBlocksEnabled ? 83 : 40, // 'S' || down arrow
            "UP": accessibleBlocksEnabled ? 87 : 38, // 'W' || up arrow
            "LEFT": accessibleBlocksEnabled ? 65 : 37, // 'A' || left arrow
            "RIGHT": accessibleBlocksEnabled ? 68 : 39 // 'D' || right arrow
        }

        const charCode = core.keyCodeFromEvent(e);
        if (!accessibleBlocksEnabled || accessibleBlocksState == "toolbox") {
            if (charCode == keyMap["DOWN"]) {
                this.nextItem();
            } else if (charCode == keyMap["UP"]) {
                this.previousItem();
            } else if ((charCode == keyMap["RIGHT"] && !isRtl)
                || (charCode == keyMap["LEFT"] && isRtl)) {
                // Focus inside flyout
                toolbox.moveFocusToFlyout();
            } else if (charCode == 27) { // ESCAPE
                // Close the flyout
                toolbox.closeFlyout();
            } else if (charCode == core.ENTER_KEY || charCode == core.SPACE_KEY) {
                fireClickOnEnter.call(this, e);
            } else if (charCode == core.TAB_KEY
                || charCode == 37 /* Left arrow key */
                || charCode == 39 /* Right arrow key */
                || charCode == 17 /* Ctrl Key */
                || charCode == 16 /* Shift Key */
                || charCode == 91 /* Cmd Key */) {
                // Escape tab and shift key
            } else if (!accessibleBlocksEnabled) {
                // Any other key starts a toolbox search.
                toolbox.setSearch();
            }
        } else if (accessibleBlocksEnabled && accessibleBlocksState == "flyout"
            && ((charCode == keyMap["LEFT"] && !isRtl)
                || (charCode == keyMap["RIGHT"] && isRtl))) {
            // Move focus back from the flyout to this category row.
            this.focusElement();
            e.stopPropagation();
        }
    }

    previousItem() {
        const { toolbox } = this.props;
        const editorname = toolbox.props.editorname;

        // FIX: removed a stray '"' that was embedded in the telemetry event name.
        pxt.tickEvent(`${editorname}.toolbox.keyboard.prev`, undefined, { interactiveConsent: true });
        toolbox.setPreviousItem();
    }

    nextItem() {
        const { toolbox } = this.props;
        const editorname = toolbox.props.editorname;

        // FIX: removed a stray '"' that was embedded in the telemetry event name.
        pxt.tickEvent(`${editorname}.toolbox.keyboard.next`, undefined, { interactiveConsent: true });
        toolbox.setNextItem();
    }

    handleTreeRowRef = (c: TreeRow) => {
        this.treeRowElement = c;
    }

    renderCore() {
        const { toolbox, childrenVisible, hasDeleteButton } = this.props;
        const { selected } = this.state;

        return (
            <TreeItem>
                <TreeRow
                    ref={this.handleTreeRowRef}
                    isRtl={toolbox.isRtl()}
                    {...this.props}
                    selected={selected}
                    onClick={this.handleClick}
                    onKeyDown={this.handleKeyDown}
                    hasDeleteButton={hasDeleteButton}
                />
                <TreeGroup visible={childrenVisible}>
                    {this.props.children}
                </TreeGroup>
            </TreeItem>
        );
    }
}
/** Model for a toolbox category (or subcategory) and its block contents. */
export interface ToolboxCategory {
    nameid: string;
    subns?: string;
    name?: string;
    color?: string;
    icon?: string;
    groups?: string[];
    groupIcons?: string[];
    groupHelp?: string[];
    labelLineWidth?: string;
    blocks?: BlockDefinition[];
    subcategories?: ToolboxCategory[];
    customClick?: (theEditor: editor.ToolboxEditor) => boolean;
    onlyTriggerOnClick?: boolean;
    advanced?: boolean; /*@internal*/
    allowDelete?: boolean;
    // for advanced button, the current state of the button
    advancedButtonState?: "advancedexpanded" | "advancedcollapsed";
}
/** Props shared by every rendered tree row (category, search, advanced toggle). */
export interface TreeRowProps {
    treeRow: ToolboxCategory;
    onClick?: (e: React.MouseEvent<any>) => void;
    onKeyDown?: (e: React.KeyboardEvent<any>) => void;
    selected?: boolean;
    isRtl?: boolean;
    topRowIndex?: number;
    shouldAnimate?: boolean;
    hasDeleteButton?: boolean;
    onDeleteClick?: (ns: string) => void;
}
/**
 * The visual row for one toolbox category: colored bar, icon (font glyph or
 * image), label, and optional delete button. Styling is driven by CSS custom
 * properties set from the category color.
 */
export class TreeRow extends data.Component<TreeRowProps, {}> {
    private treeRow: HTMLElement;
    // Animation timing (seconds): base delay before the first row, per-row stagger.
    private baseAnimationDelay: number = 1;
    private animationDelay: number = 0.15;
    // Font Awesome brand glyphs that need the brand font family applied.
    private brandIcons = {
        '\uf287': 'usb', '\uf368': 'accessible-icon', '\uf170': 'adn', '\uf1a7': 'pied-piper-pp', '\uf1b6': 'steam', '\uf294': 'bluetooth-b',
        '\uf1d0': 'rebel', '\uf136': 'maxcdn', '\uf1aa': 'joomla', '\uf213': 'sellsy', '\uf20e': 'connectdevelop', '\uf113': 'github-alt'
    };

    constructor(props: TreeRowProps) {
        super(props);
        this.state = {
        }

        this.handleDeleteClick = this.handleDeleteClick.bind(this);
    }

    focus() {
        if (this.treeRow) this.treeRow.focus();
    }

    getProperties() {
        const { treeRow } = this.props;
        return treeRow;
    }

    /** Resolve the category color to an accessible background color. */
    getMetaColor() {
        const { color } = this.props.treeRow;
        return pxt.toolbox.getAccessibleBackground(
            pxt.toolbox.convertColor(color) || pxt.toolbox.getNamespaceColor('default')
        );
    }

    handleTreeRowRef = (c: HTMLDivElement) => {
        this.treeRow = c;
    }

    handleDeleteClick (e: React.MouseEvent) {
        // Don't let the delete click also select the row.
        e.stopPropagation();
        this.props.onDeleteClick(this.props.treeRow.nameid)
    }

    renderCore() {
        const { selected, onClick, onKeyDown, topRowIndex, hasDeleteButton } = this.props;
        const { nameid, advancedButtonState, subns, name, icon } = this.props.treeRow;
        const appTheme = pxt.appTarget.appTheme;
        const metaColor = this.getMetaColor();

        const invertedMultipler = appTheme.blocklyOptions
            && appTheme.blocklyOptions.toolboxOptions
            && appTheme.blocklyOptions.toolboxOptions.invertedMultiplier || 0.3;

        // Category colors are handed to CSS via custom properties.
        let treeRowStyle: React.CSSProperties = {
            paddingLeft: '0px',
            "--block-meta-color": metaColor,
            "--block-faded-color": pxt.toolbox.fadeColor(metaColor || '#ddd', invertedMultipler, false)
        } as React.CSSProperties;
        let treeRowClass = `blocklyTreeRow${selected ? ' blocklyTreeSelected' : '' }`;
        // Stagger the entrance animation by top-level row index.
        if (topRowIndex && this.props.shouldAnimate) {
            treeRowStyle.animationDelay = `${(topRowIndex * this.animationDelay) + this.baseAnimationDelay}s`;
            treeRowClass += ' blocklyTreeAnimate';
        }

        // Icon
        let iconClass = `blocklyTreeIcon${subns ? 'more' : icon ? (nameid || icon).toLowerCase() : 'Default'}`.replace(/\s/g, '');
        let iconContent = subns ? pxt.toolbox.getNamespaceIcon('more') : icon || pxt.toolbox.getNamespaceIcon('default');
        const isImageIcon = iconContent.length > 1; // It's probably an image icon, and not an icon code
        let iconImageStyle: React.CSSProperties = {
            "--image-icon-url": isImageIcon ? `url("${Util.pathJoin(pxt.webConfig.commitCdnUrl, encodeURI(icon))}")!important`: undefined,
            display: "inline-block"
        } as React.CSSProperties;

        if (isImageIcon) {
            iconClass += ' image-icon';
            iconContent = undefined;
        }
        const rowTitle = name ? name : Util.capitalize(subns || nameid);
        const dataNs = advancedButtonState || nameid;
        const extraIconClass = !subns && Object.keys(this.brandIcons).includes(icon) ? 'brandIcon' : ''

        return (
            <div
                role="button"
                ref={this.handleTreeRowRef}
                className={treeRowClass}
                style={treeRowStyle}
                tabIndex={0}
                data-ns={dataNs}
                aria-label={lf("Toggle category {0}", rowTitle)}
                aria-expanded={selected}
                onClick={onClick}
                onContextMenu={onClick}
                onKeyDown={onKeyDown ? onKeyDown : fireClickOnEnter}
            >
                <span className="blocklyTreeIcon" role="presentation"/>
                <span
                    style={iconImageStyle}
                    className={`blocklyTreeIcon ${iconClass} ${extraIconClass}`}
                    role="presentation"
                >
                    {iconContent}
                </span>
                <span className="blocklyTreeLabel">
                    {rowTitle}
                </span>
                {hasDeleteButton &&
                    <i
                        className="blocklyTreeButton icon times circle"
                        onClick={this.handleDeleteClick}
                    />
                }
            </div>
        );
    }
}
/** Horizontal separator rendered between the basic and advanced category groups. */
export class TreeSeparator extends data.Component<{}, {}> {
    renderCore() {
        return (
            <TreeItem>
                <div className="blocklyTreeSeparator">
                    <span style={{ display: 'inline-block' }} role="presentation"></span>
                </div>
            </TreeItem>
        );
    }
}
/** Props for a generic tree item wrapper. */
export interface TreeItemProps {
    selected?: boolean;
    children?: any;
}

/** ARIA treeitem wrapper around a row (and its optional child group). */
export class TreeItem extends data.Component<TreeItemProps, {}> {
    renderCore() {
        const { selected } = this.props;
        return (
            <div role="treeitem" aria-selected={selected}>
                {this.props.children}
            </div>
        );
    }
}
/** Props for a collapsible group of subcategory rows. */
export interface TreeGroupProps {
    visible?: boolean;
    children?: any;
}

/** Container for subcategory rows; hidden via display:none when collapsed. */
export class TreeGroup extends data.Component<TreeGroupProps, {}> {
    renderCore() {
        const { visible } = this.props;
        if (!this.props.children) return <div />;

        return (
            <div role="tree" style={{ backgroundPosition: '0px 0px', 'display': visible ? '' : 'none' }}>
                {this.props.children}
            </div>
        );
    }
}
/** Props for the toolbox search box. */
export interface ToolboxSearchProps {
    parent: editor.ToolboxEditor;
    editorname: string;
    toolbox: Toolbox;
}

/** State for the toolbox search box; the label is announced to screen readers. */
export interface ToolboxSearchState {
    searchAccessibilityLabel?: string;
}
/**
 * The toolbox search input. Debounces keystrokes, runs the parent editor's
 * block search, and publishes results into the Toolbox state (which renders
 * them under the synthetic "search" category).
 */
export class ToolboxSearch extends data.Component<ToolboxSearchProps, ToolboxSearchState> {
    constructor(props: ToolboxSearchProps) {
        super(props);
        this.state = {
        }

        this.searchImmediate = this.searchImmediate.bind(this);
        this.handleKeyDown = this.handleKeyDown.bind(this);
        this.handleChange = this.handleChange.bind(this);
    }

    /** The synthetic category row under which search results are shown. */
    static getSearchTreeRow(): ToolboxCategory {
        return {
            nameid: 'search',
            name: lf("{id:category}Search"),
            color: pxt.toolbox.getNamespaceColor('search'),
            icon: pxt.toolbox.getNamespaceIcon('search')
        }
    }

    // Debounced wrapper so typing doesn't trigger a search per keystroke.
    private search = Util.debounce(() => {
        this.searchImmediate();
    }, 300, false);

    handleChange() {
        this.search();
    }

    handleKeyDown(e: React.KeyboardEvent<any>) {
        const { toolbox } = this.props;
        let charCode = (typeof e.which == "number") ? e.which : e.keyCode
        if (charCode === 40 /* Down Key */) {
            // Select first item in the toolbox
            toolbox.selectFirstItem();
        }
    }

    focus() {
        (this.refs.searchInput as HTMLInputElement).focus();
    }

    /** Run the search now, update the toolbox state and the a11y result label. */
    searchImmediate() {
        const { parent, toolbox, editorname } = this.props;
        const searchTerm = (this.refs.searchInput as HTMLInputElement).value;

        let searchAccessibilityLabel = '';
        let hasSearch = false;

        pxt.tickEvent(`${editorname}.search`, undefined, { interactiveConsent: true });

        // Execute search
        parent.searchAsync(searchTerm)
            .then((blocks) => {
                if (blocks.length == 0) {
                    searchAccessibilityLabel = lf("No search results...");
                } else {
                    searchAccessibilityLabel = lf("{0} result matching '{1}'", blocks.length, searchTerm.toLowerCase());
                }
                hasSearch = searchTerm != '';

                const newState: ToolboxState = {};
                newState.hasSearch = hasSearch;
                newState.searchBlocks = blocks;
                newState.focusSearch = true;
                if (hasSearch) newState.selectedItem = 'search';
                toolbox.setState(newState);

                this.setState({ searchAccessibilityLabel: searchAccessibilityLabel });
            });
    }

    renderCore() {
        const { searchAccessibilityLabel } = this.state;
        return (
            <div id="blocklySearchArea">
                <div id="blocklySearchInput" className="ui fluid icon input" role="search">
                    <input
                        ref="searchInput"
                        type="text"
                        placeholder={lf("Search...")}
                        onFocus={this.searchImmediate}
                        onKeyDown={this.handleKeyDown}
                        onChange={this.handleChange}
                        id="blocklySearchInputField"
                        className="blocklySearchInputField"
                        aria-label={lf("Search")}
                        autoComplete="off"
                        autoCorrect="off"
                        autoCapitalize="off"
                        spellCheck={false}
                    />
                    <i className="search icon" role="presentation" aria-hidden="true" />
                    <div
                        className="accessible-hidden"
                        id="blocklySearchLabel"
                        aria-live="polite"
                    >
                        {searchAccessibilityLabel}
                    </div>
                </div>
            </div>
        );
    }
}
/** Props for the drag-to-delete trash icon overlay. */
interface ToolboxTrashIconProps {
    flyoutOnly?: boolean;
}

/** The trash icon shown while dragging blocks; hidden by default (opacity 0). */
export class ToolboxTrashIcon extends data.Component<ToolboxTrashIconProps, {}> {
    constructor(props: ToolboxTrashIconProps) {
        super(props);
    }

    getStyle() {
        let style: any = { opacity: 0, display: 'none' };
        if (this.props.flyoutOnly) {
            // Center the icon horizontally over the flyout, if one is rendered.
            let flyout = document.querySelector('.blocklyFlyout');
            if (flyout) {
                style["left"] = (flyout.clientWidth / 2);
                style["transform"] = "translateX(-45%)";
            }
        }
        return style;
    }

    renderCore() {
        return (
            <div id="blocklyTrashIcon" style={this.getStyle()}>
                <i className="trash icon" aria-hidden="true"></i>
            </div>
        );
    }
}
/** Props for the inline per-category style element. */
interface ToolboxStyleProps {
    categories: ToolboxCategory[];
}

export class ToolboxStyle extends data.Component<ToolboxStyleProps, {}> {
    renderCore() {
        const { categories } = this.props;
        // Add inline CSS for each category used so that the tutorial engine is able to render blocks
        // and associate them with a specific category
        return (
            <style>
                {categories.filter(c => !!c.color).map(category =>
                    `
                    span.docs.inlineblock.${category.nameid.toLowerCase()} {
                        --inline-namespace-color: ${category.color || pxt.toolbox.getNamespaceColor(category.nameid.toLowerCase()) || "black"};
                    }
                    `
                )}
            </style>
        );
    }
}
```
|
The Enovate ME-S, known fully as the Enovate ME-Sports, is an electric concept vehicle developed by Enovate.
Overview
The Enovate ME-S is a 4-door sports car concept shown at the 2019 Auto Shanghai.
Production plans
It was planned to go into production in 2021. The planned production ME-S was claimed to accelerate from 0 to 62 mph in 3 seconds, charge to 80% in 15 minutes, and offer Level 4 autonomy, at a price of $55,000.
See also
Enovate
References
Sports sedans
Electric sports cars
Luxury vehicles
Cars introduced in 2019
Cars of China
Electric concept cars
|
```c++
/*
Qalculate (library)
This program is free software; you can redistribute it and/or modify
(at your option) any later version.
*/
#include "support.h"
#include "MathStructure.h"
#include "Calculator.h"
#include "BuiltinFunctions.h"
#include "Number.h"
#include "Function.h"
#include "Variable.h"
#include "Unit.h"
#include "Prefix.h"
#include "MathStructure-support.h"
using std::string;
using std::cout;
using std::vector;
using std::ostream;
using std::endl;
// Simplify a power whose base is a root() call: m is expected to be a power
// (m[0] = base, m[1] = exponent). Returns true if m was rewritten.
bool factorize_fix_root_power(MathStructure &m) {
	// Only handle root(x, n) bases with a valid integer root degree.
	if(!m[0].isFunction() || m[0].function()->id() != FUNCTION_ID_ROOT || !VALID_ROOT(m[0])) return false;
	if(m[1].isNumber() && m[1].number().isInteger() && !m[1].number().isMinusOne()) {
		if(m[1] == m[0][1]) {
			// root(x, a)^a=x
			m.setToChild(1, true);
			m.setToChild(1, true);
			return true;
		} else if(m[1].number().isIntegerDivisible(m[0][1].number())) {
			// root(x, a)^(2a)=x^2
			if(m[1].number().divide(m[0][1].number())) {
				m[0].setToChild(1, true);
				return true;
			}
		} else if(m[0][1].number().isIntegerDivisible(m[1].number())) {
			// root(x, 3a)^(a)=root(x, 3)
			if(m[0][1].number().divide(m[1].number())) {
				m.setToChild(1, true);
				m.childUpdated(2);
				return true;
			}
		}
	}
	return false;
}
// Differentiate the polynomial mpoly with respect to x_var, writing the result
// into mdiff. Only the polynomial forms produced by the square-free machinery
// are handled; returns false for any other structure.
bool sqrfree_differentiate(const MathStructure &mpoly, const MathStructure &x_var, MathStructure &mdiff, const EvaluationOptions &eo) {
	if(mpoly == x_var) {
		mdiff.set(1, 1, 0);
		return true;
	}
	switch(mpoly.type()) {
		case STRUCT_ADDITION: {
			// d/dx (a + b + ...) = da/dx + db/dx + ...
			mdiff.clear();
			mdiff.setType(STRUCT_ADDITION);
			for(size_t i = 0; i < mpoly.size(); i++) {
				mdiff.addChild(m_zero);
				if(!sqrfree_differentiate(mpoly[i], x_var, mdiff[i], eo)) return false;
			}
			mdiff.calculatesub(eo, eo, false);
			break;
		}
		// The following cases intentionally fall through: anything that is not
		// x_var itself is treated as a constant, whose derivative is zero.
		case STRUCT_VARIABLE: {}
		case STRUCT_FUNCTION: {}
		case STRUCT_SYMBOLIC: {}
		case STRUCT_UNIT: {}
		case STRUCT_NUMBER: {
			mdiff.clear();
			break;
		}
		case STRUCT_POWER: {
			if(mpoly[0] == x_var) {
				// d/dx x^n = n * x^(n-1)
				mdiff = mpoly[1];
				mdiff.multiply(x_var);
				if(!mpoly[1].number().isTwo()) {
					mdiff[1].raise(mpoly[1]);
					mdiff[1][1].number()--;
				}
				mdiff.evalSort(true);
			} else {
				// Power of something other than x_var: treated as constant.
				mdiff.clear();
			}
			break;
		}
		case STRUCT_MULTIPLICATION: {
			if(mpoly.size() < 1) {
				mdiff.clear();
				break;
			} else if(mpoly.size() < 2) {
				return sqrfree_differentiate(mpoly[0], x_var, mdiff, eo);
			}
			// Product is assumed to contain at most one factor involving x_var
			// (a monomial c * x or c * x^n); derivative of the rest is zero.
			mdiff.clear();
			for(size_t i = 0; i < mpoly.size(); i++) {
				if(mpoly[i] == x_var) {
					// d/dx (c * x) = c
					if(mpoly.size() == 2) {
						if(i == 0) mdiff = mpoly[1];
						else mdiff = mpoly[0];
					} else {
						mdiff.setType(STRUCT_MULTIPLICATION);
						for(size_t i2 = 0; i2 < mpoly.size(); i2++) {
							if(i2 != i) {
								mdiff.addChild(mpoly[i2]);
							}
						}
					}
					break;
				} else if(mpoly[i].isPower() && mpoly[i][0] == x_var) {
					// d/dx (c * x^n) = c * n * x^(n-1)
					mdiff = mpoly;
					if(mpoly[i][1].number().isTwo()) {
						mdiff[i].setToChild(1);
					} else {
						mdiff[i][1].number()--;
					}
					if(mdiff[0].isNumber()) {
						mdiff[0].number() *= mpoly[i][1].number();
					} else {
						mdiff.insertChild(mpoly[i][1].number(), 1);
					}
					mdiff.evalSort();
					break;
				}
			}
			break;
		}
		default: {
			return false;
		}
	}
	return true;
}
// Recursively evaluate numeric exponents of powers whose base contains xvar.
// Returns true if anything in m was changed.
bool fix_root_pow(MathStructure &m, const MathStructure &xvar, const EvaluationOptions &eo) {
	if(m.isPower() && m[0].contains(xvar) && m[1].isNumber()) {
		return m.calculateRaiseExponent(eo);
	}
	bool changed = false;
	for(size_t i = 0; i < m.size(); i++) {
		if(fix_root_pow(m[i], xvar, eo)) {
			m.childUpdated(i + 1);
			changed = true;
		}
	}
	if(changed) m.calculatesub(eo, eo, false);
	return changed;
}
// Yun's square-free factorization: append to `factors` the square-free parts
// f1, f2, ... of `a` (with a = f1 * f2^2 * f3^3 * ...), with respect to xvar.
// Returns false on failure (non-polynomial input or aborted gcd/division).
bool sqrfree_yun(const MathStructure &a, const MathStructure &xvar, MathStructure &factors, const EvaluationOptions &eo) {
	MathStructure w(a);
	MathStructure z;
	if(!sqrfree_differentiate(a, xvar, z, eo)) {
		return false;
	}
	MathStructure g;
	if(!MathStructure::gcd(w, z, g, eo)) {
		return false;
	}
	if(g.isOne()) {
		// gcd(a, a') = 1 means a is already square-free.
		factors.addChild(a);
		return true;
	}
	MathStructure y;
	MathStructure tmp;
	do {
		// w <- w / g;  y <- z / g;  z <- y - w';  g <- gcd(w, z)
		tmp = w;
		if(!MathStructure::polynomialQuotient(tmp, g, xvar, w, eo)) {
			return false;
		}
		if(!MathStructure::polynomialQuotient(z, g, xvar, y, eo)) {
			return false;
		}
		if(!sqrfree_differentiate(w, xvar, tmp, eo)) {
			return false;
		}
		z = y;
		z.calculateSubtract(tmp, eo);
		if(!MathStructure::gcd(w, z, g, eo)) {
			return false;
		}
		factors.addChild(g);
	} while (!z.isZero());
	return true;
}
// Simpler square-free split: repeatedly divide out gcd(w, w') and recurse on
// the gcd, appending the square-free pieces to `factors`.
bool sqrfree_simple(const MathStructure &a, const MathStructure &xvar, MathStructure &factors, const EvaluationOptions &eo) {
	MathStructure w(a);
	while(true) {
		MathStructure z, zmod;
		if(!sqrfree_differentiate(w, xvar, z, eo)) return false;
		// NOTE(review): zmod is computed here but never read afterwards —
		// possibly a leftover from a mod-3 variant; confirm before removing.
		polynomial_smod(z, nr_three, zmod, eo);
		if(z == w) {
			factors.addChild(w);
			break;
		}
		MathStructure mgcd;
		if(!MathStructure::gcd(w, z, mgcd, eo)) return false;
		if(mgcd.isOne() || mgcd == w) {
			// w is square-free (or fully repeated): emit it and stop.
			factors.addChild(w);
			break;
		}
		MathStructure tmp(w);
		if(!MathStructure::polynomialQuotient(tmp, mgcd, xvar, w, eo)) return false;
		if(!sqrfree_simple(mgcd, xvar, factors, eo)) return false;
	}
	return true;
}
// Compute into nlcm the least common multiple of the denominators of all
// rational coefficients in e, combined with the incoming value l
// (lcm with l when l is an integer, otherwise multiplied by l).
void lcmcoeff(const MathStructure &e, const Number &l, Number &nlcm);
void lcmcoeff(const MathStructure &e, const Number &l, Number &nlcm) {
	if(e.isNumber() && e.number().isRational()) {
		nlcm = e.number().denominator();
		if(l.isInteger()) nlcm.lcm(l);
		else nlcm.multiply(l);
	} else if(e.isAddition()) {
		// lcm over all terms of the sum.
		nlcm.set(1, 1, 0);
		for(size_t i = 0; i < e.size(); i++) {
			Number c(nlcm);
			lcmcoeff(e[i], c, nlcm);
		}
		if(l.isInteger()) nlcm.lcm(l);
		else nlcm.multiply(l);
	} else if(e.isMultiplication()) {
		// Product of the factors' individual lcms.
		nlcm.set(1, 1, 0);
		for(size_t i = 0; i < e.size(); i++) {
			Number c(nlcm);
			lcmcoeff(e[i], nr_one, c);
			nlcm *= c;
		}
		if(l.isInteger()) nlcm.lcm(l);
		else nlcm.multiply(l);
	} else if(e.isPower()) {
		if(IS_A_SYMBOL(e[0]) || e[0].isUnit()) {
			// Symbol/unit base contributes no denominator.
			nlcm = l;
		} else {
			lcmcoeff(e[0], l, nlcm);
			nlcm ^= e[1].number();
		}
	} else {
		nlcm = l;
	}
}
// Convenience wrapper: lcm of all coefficient denominators of e, into nlcm.
void lcm_of_coefficients_denominators(const MathStructure &e, Number &nlcm) {
	lcmcoeff(e, nr_one, nlcm);
}
// Multiply e by lcm, distributing the factor so that the result mmul has
// integer coefficients (the counterpart of lcm_of_coefficients_denominators).
void multiply_lcm(const MathStructure &e, const Number &lcm, MathStructure &mmul, const EvaluationOptions &eo) {
	if(e.isMultiplication()) {
		// Distribute lcm over the factors according to each factor's own lcm;
		// any remainder (lcm / product of per-factor lcms) is applied at the end.
		Number lcm_accum(1, 1);
		mmul.clear();
		for(size_t i = 0; i < e.size(); i++) {
			Number op_lcm;
			lcmcoeff(e[i], nr_one, op_lcm);
			if(mmul.isZero()) {
				multiply_lcm(e[i], op_lcm, mmul, eo);
				if(mmul.isOne()) mmul.clear();
			} else {
				mmul.multiply(m_one, true);
				multiply_lcm(e[i], op_lcm, mmul[mmul.size() - 1], eo);
				if(mmul[mmul.size() - 1].isOne()) {
					// NOTE(review): this deletes the child at position i+1, not
					// necessarily the just-appended last child — confirm the
					// index is intended (vs. mmul.size()).
					mmul.delChild(i + 1);
					if(mmul.size() == 1) mmul.setToChild(1);
				}
			}
			lcm_accum *= op_lcm;
		}
		Number lcm2(lcm);
		lcm2 /= lcm_accum;
		if(mmul.isZero()) {
			mmul = lcm2;
		} else if(!lcm2.isOne()) {
			if(mmul.size() > 0 && mmul[0].isNumber()) {
				mmul[0].number() *= lcm2;
			} else {
				mmul.multiply(lcm2, true);
			}
		}
		mmul.evalSort();
	} else if(e.isAddition()) {
		// Multiply each term of the sum by the full lcm.
		mmul.clear();
		for (size_t i = 0; i < e.size(); i++) {
			if(mmul.isZero()) {
				multiply_lcm(e[i], lcm, mmul, eo);
			} else {
				mmul.add(m_zero, true);
				multiply_lcm(e[i], lcm, mmul[mmul.size() - 1], eo);
			}
		}
		mmul.evalSort();
	} else if(e.isPower()) {
		if(IS_A_SYMBOL(e[0]) || e[0].isUnit()) {
			mmul = e;
			if(!lcm.isOne()) {
				mmul *= lcm;
				mmul.evalSort();
			}
		} else {
			// (b^n) * lcm = (b * lcm^(1/n))^n
			mmul = e;
			Number lcm_exp = e[1].number();
			lcm_exp.recip();
			multiply_lcm(e[0], lcm ^ lcm_exp, mmul[0], eo);
			if(mmul[0] != e[0]) {
				mmul.calculatesub(eo, eo, false);
			}
		}
	} else if(e.isNumber()) {
		mmul = e;
		mmul.number() *= lcm;
	} else if(IS_A_SYMBOL(e) || e.isUnit()) {
		mmul = e;
		if(!lcm.isOne()) {
			mmul *= lcm;
			mmul.evalSort();
		}
	} else {
		mmul = e;
		if(!lcm.isOne()) {
			mmul.calculateMultiply(lcm, eo);
			mmul.evalSort();
		}
	}
}
//from GiNaC
// Univariate entry point: collect the symbols in mpoly and delegate to the
// multivariate overload.
bool sqrfree(MathStructure &mpoly, const EvaluationOptions &eo) {
	vector<MathStructure> syms;
	collect_symbols(mpoly, syms);
	return sqrfree(mpoly, syms, eo);
}
// Square-free factorization of mpoly with respect to the given symbols
// (adapted from GiNaC). Rewrites mpoly as a product of square-free factors
// raised to increasing powers; returns false on failure or abort.
bool sqrfree(MathStructure &mpoly, const vector<MathStructure> &symbols, const EvaluationOptions &eo) {
	EvaluationOptions eo2 = eo;
	eo2.assume_denominators_nonzero = true;
	eo2.warn_about_denominators_assumed_nonzero = false;
	eo2.reduce_divisions = true;
	eo2.keep_zero_units = false;
	eo2.do_polynomial_division = false;
	eo2.sync_units = false;
	eo2.expand = true;
	eo2.calculate_functions = false;
	eo2.protected_function = CALCULATOR->getFunctionById(FUNCTION_ID_SIGNUM);
	if(mpoly.size() == 0) {
		return true;
	}
	if(symbols.empty()) return true;
	// Pick the symbol of lowest degree as the main variable.
	size_t symbol_index = 0;
	if(symbols.size() > 1) {
		for(size_t i = 1; i < symbols.size(); i++) {
			if(mpoly.degree(symbols[symbol_index]).isGreaterThan(mpoly.degree(symbols[i]))) symbol_index = i;
		}
	}
	MathStructure xvar(symbols[symbol_index]);
	// Composite symbols are temporarily replaced by a plain unknown variable.
	UnknownVariable *var = NULL;
	if(xvar.size() > 0) {
		var = new UnknownVariable("", format_and_print(xvar));
		var->setAssumptions(xvar);
		mpoly.replace(xvar, var);
		xvar = var;
	}
	// Clear denominators so Yun's algorithm works on integer coefficients.
	Number nlcm;
	lcm_of_coefficients_denominators(mpoly, nlcm);
	MathStructure tmp;
	multiply_lcm(mpoly, nlcm, tmp, eo2);
	MathStructure factors;
	factors.clearVector();
	if(!sqrfree_yun(tmp, xvar, factors, eo2)) {
		// Yun failed: treat the whole polynomial as a single factor.
		if(var) tmp.replace(var, symbols[symbol_index]);
		factors.clearVector();
		factors.addChild(tmp);
	} else {
		if(var) tmp.replace(var, symbols[symbol_index]);
	}
	if(var) {mpoly.replace(var, symbols[symbol_index]); var->destroy();}
	// Recurse on the remaining symbols for each factor.
	vector<MathStructure> newsymbols;
	for(size_t i = 0; i < symbols.size(); i++) {
		if(i != symbol_index) newsymbols.push_back(symbols[i]);
	}
	if(newsymbols.size() > 0) {
		for(size_t i = 0; i < factors.size(); i++) {
			if(!sqrfree(factors[i], newsymbols, eo)) return false;
		}
	}
	// Rebuild mpoly = f1 * f2^2 * f3^3 * ...
	mpoly.set(1, 1, 0);
	for(size_t i = 0; i < factors.size(); i++) {
		if(CALCULATOR->aborted()) return false;
		if(!factors[i].isOne()) {
			if(mpoly.isOne()) {
				mpoly = factors[i];
				if(i != 0) mpoly.raise(MathStructure((long int) i + 1, 1L, 0L));
			} else {
				mpoly.multiply(factors[i], true);
				mpoly[mpoly.size() - 1].raise(MathStructure((long int) i + 1, 1L, 0L));
			}
		}
	}
	if(CALCULATOR->aborted()) return false;
	if(mpoly.isZero()) {
		CALCULATOR->error(true, "mpoly is zero: %s. %s", format_and_print(tmp).c_str(), _("This is a bug. Please report it."), NULL);
		return false;
	}
	// The quotient tmp / mpoly recovers any content lost in the factor product.
	MathStructure mquo;
	MathStructure mpoly_expand(mpoly);
	EvaluationOptions eo3 = eo;
	eo3.expand = true;
	mpoly_expand.calculatesub(eo3, eo3);
	MathStructure::polynomialQuotient(tmp, mpoly_expand, xvar, mquo, eo2);
	if(CALCULATOR->aborted()) return false;
	if(mquo.isZero()) {
		//CALCULATOR->error(true, "quo is zero: %s. %s", format_and_print(tmp).c_str(), _("This is a bug. Please report it."), NULL);
		return false;
	}
	if(newsymbols.size() > 0) {
		if(!sqrfree(mquo, newsymbols, eo)) return false;
	}
	if(!mquo.isOne()) {
		mpoly.multiply(mquo, true);
	}
	// Undo the denominator clearing from the start.
	if(!nlcm.isOne()) {
		nlcm.recip();
		mpoly.multiply(nlcm, true);
	}
	eo3.expand = false;
	mpoly.calculatesub(eo3, eo3, false);
	return true;
}
// Rewrite an integer (or rational, element-wise for vectors) as a product of
// prime factors, with repeated primes collected into powers.
// Returns false if the structure is not a rational number (or number vector).
bool MathStructure::integerFactorize() {
	if(isVector()) {
		// Verify the vector (or matrix) contains only numbers before touching it.
		for(size_t i = 0; i < SIZE; i++) {
			if(CHILD(i).isVector()) {
				for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
					if(!CHILD(i)[i2].isNumber()) return false;
				}
			} else if(!CHILD(i).isNumber()) {
				return false;
			}
		}
		bool b = false;
		for(size_t i = 0; i < SIZE; i++) {
			if(CHILD(i).integerFactorize()) b = true;
		}
		return b;
	}
	if(!isNumber() || !o_number.isRational()) return false;
	if(!o_number.isInteger()) {
		// Rational: factorize numerator and denominator separately.
		MathStructure mnum(o_number.numerator()), mden(o_number.denominator());
		if(mnum.integerFactorize() && mden.integerFactorize()) {
			if(!mnum.isMultiplication() && !mden.isMultiplication()) return true;
			set(mnum);
			divide(mden);
			return true;
		}
		return false;
	}
	vector<Number> factors;
	if(!o_number.factorize(factors)) return false;
	if(factors.size() <= 1) return true;
	clear(true);
	// factors is sorted, so equal primes are adjacent; collect runs into powers.
	bool b_pow = false;
	Number *lastnr = NULL;
	for(size_t i = 0; i < factors.size(); i++) {
		if(lastnr && factors[i] == *lastnr) {
			if(!b_pow) {
				LAST.raise(m_one);
				b_pow = true;
			}
			LAST[1].number()++;
		} else {
			APPEND(factors[i]);
			b_pow = false;
		}
		lastnr = &factors[i];
	}
	m_type = STRUCT_MULTIPLICATION;
	return true;
}
// Count the total "power weight" of an expression tree: each power x^n with
// integer exponent n contributes |n - 1| (so x^2 counts 1, x^3 counts 2,
// x^-1 counts 2, and x^1 counts 0); all other nodes contribute the sum over
// their children. Returns 0 on integer overflow of an exponent.
size_t count_powers(const MathStructure &mstruct) {
	if(mstruct.isPower()) {
		if(mstruct[1].isInteger()) {
			bool overflow = false;
			// Bug fix: read the exponent (mstruct[1]) rather than the power
			// node itself — the guard above checks mstruct[1].isInteger(),
			// and mstruct.number() on a non-number node is not the exponent.
			int c = mstruct[1].number().intValue(&overflow) - 1;
			if(overflow) return 0;
			if(c < 0) return -c;
			return c;
		}
	}
	size_t c = 0;
	for(size_t i = 0; i < mstruct.size(); i++) {
		c += count_powers(mstruct[i]);
	}
	return c;
}
// Decompose mstruct as a polynomial-like expression in x_var.
// With mexp_as_x2 == false: tries to write mstruct = mmul * x_var^mexp + madd,
// where mexp is a single integer exponent shared by every x-term.
// With mexp_as_x2 == true: tries to write mstruct = mexp * x_var^2 + mmul * x_var + madd
// (i.e. mexp becomes the quadratic coefficient instead of an exponent).
// Outputs are cleared/initialized first; returns false when mstruct does not
// match the expected shape (e.g. mixed exponents, or x_var appearing inside
// an unsupported subexpression).
bool gather_factors(const MathStructure &mstruct, const MathStructure &x_var, MathStructure &madd, MathStructure &mmul, MathStructure &mexp, bool mexp_as_x2 = false) {
	madd.clear();
	if(mexp_as_x2) mexp = m_zero;
	else mexp = m_one;
	mmul = m_zero;
	if(mstruct == x_var) {
		// Bare x: coefficient 1, no constant term.
		mmul = m_one;
		return true;
	} else if(mexp_as_x2 && mstruct.isPower()) {
		// Bare x^2 in quadratic mode: quadratic coefficient 1.
		if(mstruct[1].isNumber() && mstruct[1].number().isTwo() && mstruct[0] == x_var) {
			mexp = m_one;
			return true;
		}
	} else if(!mexp_as_x2 && mstruct.isPower() && mstruct[1].isInteger() && mstruct[0] == x_var) {
		// NOTE(review): this inner test repeats the mstruct[0] == x_var check
		// from the else-if condition above — it appears redundant; confirm.
		if(mstruct[0] == x_var) {
			mexp = mstruct[1];
			mmul = m_one;
			return true;
		}
	} else if(mstruct.isMultiplication() && mstruct.size() >= 2) {
		// A single product term: locate exactly one x factor (x, x^2 in
		// quadratic mode, or x^n otherwise); everything else must be free of x.
		bool b_x = false;
		bool b2 = false;
		size_t i_x = 0;
		for(size_t i = 0; i < mstruct.size(); i++) {
			if(!b_x && mstruct[i] == x_var) {
				b_x = true;
				i_x = i;
			} else if(!b_x && mexp_as_x2 && mstruct[i].isPower() && mstruct[i][1].isNumber() && mstruct[i][1].number().isTwo() && mstruct[i][0] == x_var) {
				b_x = true;
				b2 = true;
				i_x = i;
			} else if(!b_x && !mexp_as_x2 && mstruct[i].isPower() && mstruct[i][1].isInteger() && mstruct[i][0] == x_var) {
				b_x = true;
				i_x = i;
				mexp = mstruct[i][1];
			} else if(mstruct[i].containsRepresentativeOf(x_var, true, true) != 0) {
				// x occurs in a non-factorable position (e.g. inside a function).
				return false;
			}
		}
		if(!b_x) return false;
		// NOTE(review): the size() == 1 branch looks unreachable here since
		// this else-if required mstruct.size() >= 2 — verify.
		if(mstruct.size() == 1) {
			if(b2) mexp = m_one;
			else mmul = m_one;
		} else if(mstruct.size() == 2) {
			// Exactly one co-factor: it is the coefficient.
			if(b2) {
				if(i_x == 1) mexp = mstruct[0];
				else mexp = mstruct[1];
			} else {
				if(i_x == 1) mmul = mstruct[0];
				else mmul = mstruct[1];
			}
		} else {
			// Multiple co-factors: copy the product and remove the x factor.
			if(b2) {
				mexp = mstruct;
				mexp.delChild(i_x + 1, true);
			} else {
				mmul = mstruct;
				mmul.delChild(i_x + 1, true);
			}
		}
		return true;
	} else if(mstruct.isAddition()) {
		// Sum of terms: accumulate coefficients of matching x-terms in mmul
		// (and mexp in quadratic mode); x-free terms go into madd.
		mmul.setType(STRUCT_ADDITION);
		if(mexp_as_x2) mexp.setType(STRUCT_ADDITION);
		madd.setType(STRUCT_ADDITION);
		for(size_t i = 0; i < mstruct.size(); i++) {
			if(mstruct[i] == x_var) {
				if(mexp_as_x2 || mexp.isOne()) mmul.addChild(m_one);
				else return false;
			} else if(mexp_as_x2 && mstruct[i].isPower() && mstruct[i][1].isNumber() && mstruct[i][1].number().isTwo() && mstruct[i][0] == x_var) {
				mexp.addChild(m_one);
			} else if(!mexp_as_x2 && mstruct[i].isPower() && mstruct[i][1].isInteger() && mstruct[i][0] == x_var) {
				// All x^n terms must share the same exponent n.
				if(mmul.size() == 0) {
					mexp = mstruct[i][1];
				} else if(mexp != mstruct[i][1]) {
					return false;
				}
				mmul.addChild(m_one);
			} else if(mstruct[i].isMultiplication()) {
				// Product term inside the sum: same single-x-factor scan as above.
				bool b_x = false;
				bool b2 = false;
				size_t i_x = 0;
				for(size_t i2 = 0; i2 < mstruct[i].size(); i2++) {
					if(!b_x && mstruct[i][i2] == x_var) {
						if(!mexp_as_x2 && !mexp.isOne()) return false;
						i_x = i2;
						b_x = true;
					} else if(!b_x && mexp_as_x2 && mstruct[i][i2].isPower() && mstruct[i][i2][1].isNumber() && mstruct[i][i2][1].number().isTwo() && mstruct[i][i2][0] == x_var) {
						b2 = true;
						i_x = i2;
						b_x = true;
					} else if(!b_x && !mexp_as_x2 && mstruct[i][i2].isPower() && mstruct[i][i2][1].isInteger() && mstruct[i][i2][0] == x_var) {
						if(mmul.size() == 0) {
							mexp = mstruct[i][i2][1];
						} else if(mexp != mstruct[i][i2][1]) {
							return false;
						}
						i_x = i2;
						b_x = true;
					} else if(mstruct[i][i2].containsRepresentativeOf(x_var, true, true) != 0) {
						return false;
					}
				}
				if(b_x) {
					if(mstruct[i].size() == 1) {
						if(b2) mexp.addChild(m_one);
						else mmul.addChild(m_one);
					} else {
						// Append the term's co-factor product (with the x
						// factor removed) to the appropriate coefficient sum.
						if(b2) {
							mexp.addChild(mstruct[i]);
							mexp[mexp.size() - 1].delChild(i_x + 1, true);
							mexp.childUpdated(mexp.size());
						} else {
							mmul.addChild(mstruct[i]);
							mmul[mmul.size() - 1].delChild(i_x + 1, true);
							mmul.childUpdated(mmul.size());
						}
					}
				} else {
					madd.addChild(mstruct[i]);
				}
			} else if(mstruct[i].containsRepresentativeOf(x_var, true, true) != 0) {
				return false;
			} else {
				madd.addChild(mstruct[i]);
			}
		}
		// No x-dependence found at all: fail (outputs reset for the caller).
		if(mmul.size() == 0 && (!mexp_as_x2 || mexp.size() == 0)) {
			mmul.clear();
			if(mexp_as_x2) mexp.clear();
			return false;
		}
		// Collapse the accumulated addition wrappers: empty -> 0,
		// single child -> that child.
		if(mmul.size() == 0) mmul.clear();
		else if(mmul.size() == 1) mmul.setToChild(1);
		if(mexp_as_x2) {
			if(mexp.size() == 0) mexp.clear();
			else if(mexp.size() == 1) mexp.setToChild(1);
		}
		if(madd.size() == 0) madd.clear();
		else if(madd.size() == 1) madd.setToChild(1);
		return true;
	}
	return false;
}
// Find a factor common to every term of an addition and divide it out, so
// that (conceptually) mstruct == factor_mstruct * mnew. factor_mstruct is a
// product of a rational gcd (unless only_units) and any non-number
// subexpressions shared by all terms (lowest shared power is used).
// With only_units, only unit (sub)expressions are considered as candidates.
// Returns true and fills mnew/factor_mstruct when a non-trivial factor was
// found; returns false (factor_mstruct left as 1 or partial) otherwise.
bool factorize_find_multiplier(const MathStructure &mstruct, MathStructure &mnew, MathStructure &factor_mstruct, bool only_units) {
	factor_mstruct.set(m_one);
	switch(mstruct.type()) {
		case STRUCT_ADDITION: {
			if(!only_units) {
				// Extract a common rational multiplier first: idm1 classifies
				// terms (integer/fraction), idm2 computes the shared gcd.
				bool bfrac = false, bint = true;
				idm1(mstruct, bfrac, bint);
				if(bfrac || bint) {
					Number gcd(1, 1);
					idm2(mstruct, bfrac, bint, gcd);
					if((bint || bfrac) && !gcd.isOne()) {
						// For a fractional common factor the gcd was computed
						// on denominators, so invert it.
						if(bfrac) gcd.recip();
						factor_mstruct.set(gcd);
					}
				}
			}
			size_t nfac = 0;
			if(mstruct.size() > 0) {
				// Scan the factors of the FIRST term; each candidate factor is
				// accepted only if every other term also contains it (possibly
				// with a different exponent — the smallest one is kept).
				size_t i = 0;
				const MathStructure *cur_mstruct;
				while(true) {
					if(mstruct[0].isMultiplication()) {
						if(i >= mstruct[0].size()) {
							break;
						}
						cur_mstruct = &mstruct[0][i];
					} else {
						cur_mstruct = &mstruct[0];
					}
					// Skip numbers and interval-containing subexpressions;
					// with only_units restrict to unit expressions.
					if(!cur_mstruct->containsInterval(true) && !cur_mstruct->isNumber() && (!only_units || cur_mstruct->isUnit_exp())) {
						const MathStructure *exp = NULL;
						const MathStructure *bas;
						if(cur_mstruct->isPower() && IS_REAL((*cur_mstruct)[1]) && !(*cur_mstruct)[0].isNumber()) {
							exp = cur_mstruct->exponent();
							bas = cur_mstruct->base();
						} else {
							bas = cur_mstruct;
						}
						bool b = true;
						// Require the base in every remaining term.
						for(size_t i2 = 1; i2 < mstruct.size(); i2++) {
							b = false;
							size_t i3 = 0;
							const MathStructure *cmp_mstruct;
							while(true) {
								if(mstruct[i2].isMultiplication()) {
									if(i3 >= mstruct[i2].size()) {
										break;
									}
									cmp_mstruct = &mstruct[i2][i3];
								} else {
									cmp_mstruct = &mstruct[i2];
								}
								if(cmp_mstruct->equals(*bas)) {
									// Term has the bare base: common exponent drops to 1.
									if(exp) {
										exp = NULL;
									}
									b = true;
									break;
								} else if(cmp_mstruct->isPower() && IS_REAL((*cmp_mstruct)[1]) && cmp_mstruct->base()->equals(*bas)) {
									if(exp) {
										// Keep the smallest exponent seen so far.
										if(cmp_mstruct->exponent()->number().isLessThan(exp->number())) {
											exp = cmp_mstruct->exponent();
										}
										b = true;
										break;
									} else {
										b = true;
										break;
									}
								}
								if(!mstruct[i2].isMultiplication()) {
									break;
								}
								i3++;
							}
							if(!b) break;
						}
						if(b) {
							// Candidate accepted: append base (or base^exp) to
							// factor_mstruct. b now flags "there were earlier
							// factors" for the duplicate check below.
							b = !factor_mstruct.isOne();
							if(exp) {
								MathStructure *mpow = new MathStructure(*bas);
								mpow->raise(*exp);
								if(factor_mstruct.isOne()) {
									factor_mstruct.set_nocopy(*mpow);
									mpow->unref();
								} else {
									factor_mstruct.multiply_nocopy(mpow, true);
								}
							} else {
								if(factor_mstruct.isOne()) factor_mstruct.set(*bas);
								else factor_mstruct.multiply(*bas, true);
							}
							nfac++;
							if(b) {
								// Remove the just-added factor again if an
								// equal factor (or a power of the same base)
								// was already collected.
								size_t i3 = 0;
								const MathStructure *cmp_mstruct;
								b = false;
								while(true) {
									if(i3 >= factor_mstruct.size() - 1) {
										break;
									}
									cmp_mstruct = &factor_mstruct[i3];
									if(cmp_mstruct->equals(factor_mstruct.last())) {
										if(exp) {
											exp = NULL;
										}
										b = true;
										break;
									} else if(cmp_mstruct->isPower() && IS_REAL((*cmp_mstruct)[1]) && cmp_mstruct->base()->equals(factor_mstruct.last())) {
										if(exp) {
											if(cmp_mstruct->exponent()->number().isLessThan(exp->number())) {
												exp = cmp_mstruct->exponent();
											}
											b = true;
											break;
										} else {
											b = true;
											break;
										}
									}
									i3++;
								}
								if(b) {
									factor_mstruct.delChild(factor_mstruct.size(), true);
									nfac--;
								}
							}
						}
					}
					if(!mstruct[0].isMultiplication()) {
						break;
					}
					i++;
				}
			}
			if(!factor_mstruct.isOne()) {
				// Division phase: divide every term of mnew by each collected
				// factor in turn. b_mul: 1 = iterate factor children,
				// 0 = single factor, -1 = retry with the whole product after
				// a failed per-child pass (see the STRUCT_MULTIPLICATION case).
				if(&mstruct != &mnew) mnew.set(mstruct);
				MathStructure *mfactor;
				size_t i = 0;
				int b_mul = factor_mstruct.isMultiplication();
				while(true) {
					if(b_mul > 0) {
						if(i >= factor_mstruct.size()) break;
						mfactor = &factor_mstruct[i];
					} else {
						mfactor = &factor_mstruct;
					}
					for(size_t i2 = 0; i2 < mnew.size(); i2++) {
						switch(mnew[i2].type()) {
							case STRUCT_NUMBER: {
								if(mfactor->isNumber()) {
									mnew[i2].number() /= mfactor->number();
								}
								break;
							}
							case STRUCT_POWER: {
								if(!IS_REAL(mnew[i2][1])) {
									// Non-real exponent: can only divide by a
									// number (prepend 1/factor) or by the
									// whole power itself.
									if(mfactor->isNumber()) {
										mnew[i2].transform(STRUCT_MULTIPLICATION);
										mnew[i2].insertChild(MathStructure(1, 1, 0), 1);
										mnew[i2][0].number() /= mfactor->number();
									} else {
										mnew[i2].set(m_one);
									}
								} else if(mfactor->isNumber()) {
									mnew[i2].transform(STRUCT_MULTIPLICATION);
									mnew[i2].insertChild(MathStructure(1, 1, 0), 1);
									mnew[i2][0].number() /= mfactor->number();
								} else if(mfactor->isPower() && IS_REAL((*mfactor)[1])) {
									if(mfactor->equals(mnew[i2])) {
										mnew[i2].set(m_one);
									} else {
										// Subtract exponents; collapse ^1 and
										// repair fractional root powers.
										mnew[i2][1].number() -= mfactor->exponent()->number();
										if(mnew[i2][1].number().isOne()) {
											mnew[i2].setToChild(1, true);
										} else if(factorize_fix_root_power(mnew[i2])) {
											mnew.childUpdated(i2 + 1);
										}
									}
								} else {
									mnew[i2][1].number() -= 1;
									if(mnew[i2][1].number().isOne()) {
										mnew[i2].setToChild(1);
									} else if(mnew[i2][1].number().isZero()) {
										mnew[i2].set(m_one);
									} else if(factorize_fix_root_power(mnew[i2])) {
										mnew.childUpdated(i2 + 1);
									}
								}
								break;
							}
							case STRUCT_MULTIPLICATION: {
								// Find the matching factor inside the product
								// term and divide it out; b stays true if the
								// factor was NOT found in this term.
								bool b = true;
								if(mfactor->isNumber() && (mnew[i2].size() < 1 || !mnew[i2][0].isNumber())) {
									mnew[i2].insertChild(MathStructure(1, 1, 0), 1);
								}
								for(size_t i3 = 0; i3 < mnew[i2].size() && b; i3++) {
									switch(mnew[i2][i3].type()) {
										case STRUCT_NUMBER: {
											if(mfactor->isNumber()) {
												if(mfactor->equals(mnew[i2][i3])) {
													mnew[i2].delChild(i3 + 1);
												} else {
													mnew[i2][i3].number() /= mfactor->number();
												}
												b = false;
											}
											break;
										}
										case STRUCT_POWER: {
											if(!IS_REAL(mnew[i2][i3][1])) {
												if(mfactor->equals(mnew[i2][i3])) {
													mnew[i2].delChild(i3 + 1);
													b = false;
												}
											} else if(mfactor->isPower() && IS_REAL((*mfactor)[1]) && mfactor->base()->equals(mnew[i2][i3][0])) {
												if(mfactor->equals(mnew[i2][i3])) {
													mnew[i2].delChild(i3 + 1);
												} else {
													mnew[i2][i3][1].number() -= mfactor->exponent()->number();
													if(mnew[i2][i3][1].number().isOne()) {
														MathStructure mstruct2(mnew[i2][i3][0]);
														mnew[i2][i3] = mstruct2;
													} else if(mnew[i2][i3][1].number().isZero()) {
														mnew[i2].delChild(i3 + 1);
													} else if(factorize_fix_root_power(mnew[i2][i3])) {
														mnew[i2].childUpdated(i3 + 1);
														mnew.childUpdated(i2 + 1);
													}
												}
												b = false;
											} else if(mfactor->equals(mnew[i2][i3][0])) {
												if(mnew[i2][i3][1].number() == 2) {
													MathStructure mstruct2(mnew[i2][i3][0]);
													mnew[i2][i3] = mstruct2;
												} else if(mnew[i2][i3][1].number().isOne()) {
													mnew[i2].delChild(i3 + 1);
												} else {
													mnew[i2][i3][1].number() -= 1;
													if(factorize_fix_root_power(mnew[i2][i3])) {
														mnew[i2].childUpdated(i3 + 1);
														mnew.childUpdated(i2 + 1);
													}
												}
												b = false;
											}
											break;
										}
										default: {
											if(mfactor->equals(mnew[i2][i3])) {
												mnew[i2].delChild(i3 + 1);
												b = false;
											}
										}
									}
								}
								// Unwrap a product reduced to one factor.
								if(mnew[i2].size() == 1) {
									MathStructure mstruct2(mnew[i2][0]);
									mnew[i2] = mstruct2;
								}
								if(b) {
									// Factor not found in this term: retry once
									// dividing by the whole factor product,
									// otherwise give up.
									if(b_mul > 0 && nfac == 1 && &mstruct != &mnew) {
										b_mul = -1;
										mnew.set(mstruct);
									} else {
										return false;
									}
								}
								break;
							}
							default: {
								if(mfactor->isNumber()) {
									mnew[i2].transform(STRUCT_MULTIPLICATION);
									mnew[i2].insertChild(MathStructure(1, 1, 0), 1);
									mnew[i2][0].number() /= mfactor->number();
								} else {
									mnew[i2].set(m_one);
								}
							}
						}
					}
					if(b_mul > 0) {
						i++;
					} else if(b_mul < 0) {
						// Retry pass: treat factor_mstruct as one factor, then stop.
						b_mul = 0;
					} else {
						break;
					}
				}
				return true;
			}
		}
		default: {}
	}
	return false;
}
bool polynomial_divide_integers(const vector<Number> &vnum, const vector<Number> &vden, vector<Number> &vquotient) {
vquotient.clear();
long int numdeg = vnum.size() - 1;
long int dendeg = vden.size() - 1;
Number dencoeff(vden[dendeg]);
if(numdeg < dendeg) return false;
vquotient.resize(numdeg - dendeg + 1, nr_zero);
vector<Number> vrem = vnum;
while(numdeg >= dendeg) {
Number numcoeff(vrem[numdeg]);
numdeg -= dendeg;
if(!numcoeff.isIntegerDivisible(dencoeff)) break;
numcoeff /= dencoeff;
vquotient[numdeg] += numcoeff;
for(size_t i = 0; i < vden.size(); i++) {
vrem[numdeg + i] -= (vden[i] * numcoeff);
}
while(true) {
if(vrem.back().isZero()) vrem.pop_back();
else break;
if(vrem.size() == 0) return true;
}
numdeg = (long int) vrem.size() - 1;
}
return false;
}
// A structure is "complicated" if it has children; a power is complicated
// only if its base or its exponent is itself complicated.
bool combination_factorize_is_complicated(MathStructure &m) {
	if(!m.isPower()) return m.size() > 0;
	if(combination_factorize_is_complicated(m[0])) return true;
	return combination_factorize_is_complicated(m[1]);
}
// Factor an addition by grouping terms that share a common "key" factor,
// in three passes: (1) shared negative-power (divisor) factors,
// (2) shared "complicated" (non-leaf) factors, (3) shared unit/unknown
// factors. Each pass splits every term into key * rest, merges terms with
// equal keys, and rebuilds the sum. Recurses into children for all types.
// Returns true if anything changed.
bool combination_factorize(MathStructure &mstruct) {
	bool retval = false;
	switch(mstruct.type()) {
		case STRUCT_ADDITION: {
			bool b = false;
			// 5/y + x/y + z = (5 + x)/y + z
			// mstruct_units holds each term's key (here: its negative-power
			// factors, or undefined when the term has none);
			// mstruct_new holds the remaining coefficient part.
			MathStructure mstruct_units(mstruct);
			MathStructure mstruct_new(mstruct);
			for(size_t i = 0; i < mstruct_units.size(); i++) {
				if(mstruct_units[i].isMultiplication()) {
					// Keep only negative-power factors in the key copy...
					for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
						if(!mstruct_units[i][i2].isPower() || !mstruct_units[i][i2][1].hasNegativeSign()) {
							mstruct_units[i].delChild(i2 + 1);
						} else {
							i2++;
						}
					}
					if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
					else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
					// ...and remove them from the coefficient copy.
					for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
						if(mstruct_new[i][i2].isPower() && mstruct_new[i][i2][1].hasNegativeSign()) {
							mstruct_new[i].delChild(i2 + 1);
						} else {
							i2++;
						}
					}
					if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
					else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
				} else if(mstruct_new[i].isPower() && mstruct_new[i][1].hasNegativeSign()) {
					// The whole term is the key; coefficient is 1.
					mstruct_new[i].set(1, 1, 0);
				} else {
					// No key: mark as undefined so it is left alone below.
					mstruct_units[i].setUndefined();
				}
			}
			// Merge terms with equal keys: sum the coefficients, then
			// re-attach the key factor.
			for(size_t i = 0; i < mstruct_units.size(); i++) {
				if(!mstruct_units[i].isUndefined()) {
					for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
						if(mstruct_units[i2] == mstruct_units[i]) {
							mstruct_new[i].add(mstruct_new[i2], true);
							mstruct_new.delChild(i2 + 1);
							mstruct_units.delChild(i2 + 1);
							b = true;
						} else {
							i2++;
						}
					}
					if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
					else mstruct_new[i].multiply(mstruct_units[i], true);
				}
			}
			if(b) {
				if(mstruct_new.size() == 1) {
					mstruct.set(mstruct_new[0], true);
				} else {
					mstruct = mstruct_new;
				}
				b = false;
				retval = true;
			}
			if(mstruct.isAddition()) {
				// y*f(x) + z*f(x) = (y+z)*f(x)
				// Same split/merge scheme, keyed on "complicated" factors.
				MathStructure mstruct_units(mstruct);
				MathStructure mstruct_new(mstruct);
				for(size_t i = 0; i < mstruct_units.size(); i++) {
					if(mstruct_units[i].isMultiplication()) {
						for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
							if(!combination_factorize_is_complicated(mstruct_units[i][i2])) {
								mstruct_units[i].delChild(i2 + 1);
							} else {
								i2++;
							}
						}
						if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
						else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
						for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
							if(combination_factorize_is_complicated(mstruct_new[i][i2])) {
								mstruct_new[i].delChild(i2 + 1);
							} else {
								i2++;
							}
						}
						if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
						else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
					} else if(combination_factorize_is_complicated(mstruct_units[i])) {
						mstruct_new[i].set(1, 1, 0);
					} else {
						mstruct_units[i].setUndefined();
					}
				}
				for(size_t i = 0; i < mstruct_units.size(); i++) {
					if(!mstruct_units[i].isUndefined()) {
						for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
							if(mstruct_units[i2] == mstruct_units[i]) {
								mstruct_new[i].add(mstruct_new[i2], true);
								mstruct_new.delChild(i2 + 1);
								mstruct_units.delChild(i2 + 1);
								b = true;
							} else {
								i2++;
							}
						}
						if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
						else mstruct_new[i].multiply(mstruct_units[i], true);
					}
				}
				if(b) {
					if(mstruct_new.size() == 1) mstruct.set(mstruct_new[0], true);
					else mstruct = mstruct_new;
					retval = true;
				}
			}
			if(mstruct.isAddition()) {
				// 5x + pi*x + 5y + xy = (5 + pi)x + 5y + xy
				// Same scheme once more, keyed on unit/unknown factors.
				MathStructure mstruct_units(mstruct);
				MathStructure mstruct_new(mstruct);
				for(size_t i = 0; i < mstruct_units.size(); i++) {
					if(mstruct_units[i].isMultiplication()) {
						for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
							if(!mstruct_units[i][i2].containsType(STRUCT_UNIT, true) && !mstruct_units[i][i2].containsUnknowns()) {
								mstruct_units[i].delChild(i2 + 1);
							} else {
								i2++;
							}
						}
						if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
						else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
						for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
							if(mstruct_new[i][i2].containsType(STRUCT_UNIT, true) || mstruct_new[i][i2].containsUnknowns()) {
								mstruct_new[i].delChild(i2 + 1);
							} else {
								i2++;
							}
						}
						if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
						else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
					} else if(mstruct_units[i].containsType(STRUCT_UNIT, true) || mstruct_units[i].containsUnknowns()) {
						mstruct_new[i].set(1, 1, 0);
					} else {
						mstruct_units[i].setUndefined();
					}
				}
				for(size_t i = 0; i < mstruct_units.size(); i++) {
					if(!mstruct_units[i].isUndefined()) {
						for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
							if(mstruct_units[i2] == mstruct_units[i]) {
								mstruct_new[i].add(mstruct_new[i2], true);
								mstruct_new.delChild(i2 + 1);
								mstruct_units.delChild(i2 + 1);
								b = true;
							} else {
								i2++;
							}
						}
						if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
						else mstruct_new[i].multiply(mstruct_units[i], true);
					}
				}
				if(b) {
					if(mstruct_new.size() == 1) mstruct.set(mstruct_new[0], true);
					else mstruct = mstruct_new;
					retval = true;
				}
			}
			//if(retval) return retval;
		}
		// NOTE: intentional fall-through — additions also recurse into their
		// (possibly rewritten) children below.
		default: {
			bool b = false;
			for(size_t i = 0; i < mstruct.size(); i++) {
				if(combination_factorize(mstruct[i])) {
					mstruct.childUpdated(i);
					b = true;
				}
			}
			if(b) retval = true;
		}
	}
	return retval;
}
bool MathStructure::factorize(const EvaluationOptions &eo_pre, bool unfactorize, int term_combination_levels, int max_msecs, bool only_integers, int recursive, struct timeval *endtime_p, const MathStructure &force_factorization, bool complete_square, bool only_sqrfree, int max_factor_degree) {
if(CALCULATOR->aborted()) return false;
struct timeval endtime;
if(max_msecs > 0 && !endtime_p) {
#ifndef CLOCK_MONOTONIC
gettimeofday(&endtime, NULL);
#else
struct timespec ts;
clock_gettime(CLOCK_MONOTONIC, &ts);
endtime.tv_sec = ts.tv_sec;
endtime.tv_usec = ts.tv_nsec / 1000;
#endif
endtime.tv_sec += max_msecs / 1000;
long int usecs = endtime.tv_usec + (long int) (max_msecs % 1000) * 1000;
if(usecs >= 1000000) {
usecs -= 1000000;
endtime.tv_sec++;
}
endtime.tv_usec = usecs;
max_msecs = 0;
endtime_p = &endtime;
}
EvaluationOptions eo = eo_pre;
eo.sync_units = false;
eo.structuring = STRUCTURING_NONE;
if(unfactorize) {
unformat(eo_pre);
EvaluationOptions eo2 = eo;
eo2.expand = true;
eo2.combine_divisions = false;
eo2.sync_units = false;
calculatesub(eo2, eo2);
do_simplification(*this, eo, true, false, true);
} else if(term_combination_levels && isAddition()) {
MathStructure *mdiv = new MathStructure;
mdiv->setType(STRUCT_ADDITION);
for(size_t i = 0; i < SIZE; ) {
bool b = false;
if(CHILD(i).isMultiplication()) {
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2].isPower() && CHILD(i)[i2][1].hasNegativeSign()) {
b = true;
break;
}
}
} else if(CHILD(i).isPower() && CHILD(i)[1].hasNegativeSign()) {
b = true;
}
if(b) {
CHILD(i).ref();
mdiv->addChild_nocopy(&CHILD(i));
ERASE(i)
} else {
i++;
}
}
if(mdiv->size() > 0) {
bool b_ret = false;
if(SIZE == 1 && recursive) {
b_ret = CHILD(0).factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
} else if(SIZE > 1) {
b_ret = factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
}
if(mdiv->size() > 1) {
// 5/y + x/y + z = (5 + x)/y + z
MathStructure mstruct_units(*mdiv);
MathStructure mstruct_new(*mdiv);
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(mstruct_units[i].isMultiplication()) {
for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
if(!mstruct_units[i][i2].isPower() || !mstruct_units[i][i2][1].hasNegativeSign()) {
mstruct_units[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
if(mstruct_new[i][i2].isPower() && mstruct_new[i][i2][1].hasNegativeSign()) {
mstruct_new[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
} else if(mstruct_new[i].isPower() && mstruct_new[i][1].hasNegativeSign()) {
mstruct_new[i].set(1, 1, 0);
} else {
mstruct_units[i].setUndefined();
}
}
bool b = false;
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(!mstruct_units[i].isUndefined()) {
for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
if(mstruct_units[i2] == mstruct_units[i]) {
mstruct_new[i].add(mstruct_new[i2], true);
mstruct_new.delChild(i2 + 1);
mstruct_units.delChild(i2 + 1);
b = true;
} else {
i2++;
}
}
if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
else mstruct_new[i].multiply(mstruct_units[i], true);
}
}
if(b) {
if(mstruct_new.size() == 1) {
mdiv->set_nocopy(mstruct_new[0], true);
} else {
mdiv->set_nocopy(mstruct_new);
}
b_ret = true;
}
}
size_t index = 1;
if(isAddition()) index = SIZE;
if(index == 0) {
set_nocopy(*mdiv);
mdiv->unref();
} else if(mdiv->isAddition()) {
for(size_t i = 0; i < mdiv->size(); i++) {
(*mdiv)[i].ref();
add_nocopy(&(*mdiv)[i], true);
}
mdiv->unref();
} else {
add_nocopy(mdiv, true);
}
if(recursive) {
for(; index < SIZE; index++) {
b_ret = CHILD(index).factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree) || b_ret;
}
}
return b_ret;
}
mdiv->unref();
}
MathStructure mden, mnum;
evalSort(true);
if(term_combination_levels >= -1 && isAddition() && isRationalPolynomial()) {
MathStructure msqrfree(*this);
eo.protected_function = CALCULATOR->getFunctionById(FUNCTION_ID_SIGNUM);
if(sqrfree(msqrfree, eo)) {
if((!only_sqrfree || msqrfree.isPower()) && !equals(msqrfree) && (!msqrfree.isMultiplication() || msqrfree.size() != 2 || (!(msqrfree[0].isNumber() && msqrfree[1].isAddition()) && !(msqrfree[1].isNumber() && msqrfree[0].isAddition())))) {
MathStructure mcopy(msqrfree);
EvaluationOptions eo2 = eo;
eo2.expand = true;
eo2.calculate_functions = false;
CALCULATOR->beginTemporaryStopMessages();
mcopy.calculatesub(eo2, eo2);
CALCULATOR->endTemporaryStopMessages();
bool b_equal = equals(mcopy);
if(!b_equal && !CALCULATOR->aborted()) {
MathStructure mcopy2(*this);
CALCULATOR->beginTemporaryStopMessages();
mcopy.calculatesub(eo2, eo2, true);
mcopy2.calculatesub(eo2, eo2, true);
CALCULATOR->endTemporaryStopMessages();
b_equal = mcopy.equals(mcopy2);
}
if(!b_equal) {
eo.protected_function = eo_pre.protected_function;
if(CALCULATOR->aborted()) return false;
CALCULATOR->error(true, "factorized result is wrong: %s != %s. %s", format_and_print(msqrfree).c_str(), format_and_print(*this).c_str(), _("This is a bug. Please report it."), NULL);
} else {
eo.protected_function = eo_pre.protected_function;
set(msqrfree);
if(!isAddition()) {
if(isMultiplication()) flattenMultiplication(*this);
if(isMultiplication() && SIZE >= 2 && CHILD(0).isNumber()) {
for(size_t i = 1; i < SIZE; i++) {
if(CHILD(i).isNumber()) {
CHILD(i).number() *= CHILD(0).number();
CHILD(0).set(CHILD(i));
delChild(i);
} else if(CHILD(i).isPower() && CHILD(i)[0].isMultiplication() && CHILD(i)[0].size() >= 2 && CHILD(i)[0][0].isNumber() && CHILD(i)[0][0].number().isRational() && !CHILD(i)[0][0].number().isInteger() && CHILD(i)[1].isInteger()) {
CHILD(i)[0][0].number().raise(CHILD(i)[1].number());
CHILD(0).number().multiply(CHILD(i)[0][0].number());
CHILD(i)[0].delChild(1);
if(CHILD(i)[0].size() == 1) CHILD(i)[0].setToChild(1, true);
}
}
if(SIZE > 1 && CHILD(0).isOne()) {
ERASE(0);
}
if(SIZE == 1) SET_CHILD_MAP(0);
}
if(isMultiplication() && SIZE >= 2 && CHILD(0).isNumber() && CHILD(0).number().isRational() && !CHILD(0).number().isInteger()) {
Number den = CHILD(0).number().denominator();
for(size_t i = 1; i < SIZE; i++) {
if(CHILD(i).isAddition()) {
bool b = true;
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2].isNumber()) {
if(!CHILD(i)[i2].number().isIntegerDivisible(den)) {b = false; break;}
} else if(CHILD(i)[i2].isMultiplication() && CHILD(i)[i2][0].isNumber()) {
if(!CHILD(i)[i2][0].number().isIntegerDivisible(den)) {b = false; break;}
} else {
b = false;
break;
}
}
if(b) {
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2].isNumber()) {
CHILD(i)[i2].number().divide(den);
} else if(CHILD(i)[i2].isMultiplication()) {
CHILD(i)[i2][0].number().divide(den);
if(CHILD(i)[i2][0].isOne() && CHILD(i)[i2].size() > 1) {
CHILD(i)[i2].delChild(1);
if(CHILD(i)[i2].size() == 1) {
CHILD(i)[i2].setToChild(1, true);
}
}
}
}
CHILD(0).set(CHILD(0).number().numerator(), true);
if(SIZE > 1 && CHILD(0).isOne()) {
ERASE(0);
}
if(SIZE == 1) SET_CHILD_MAP(0);
break;
}
}
}
}
if(isMultiplication()) {
for(size_t i = 0; i < SIZE; i++) {
if(CHILD(i).isPower() && CHILD(i)[1].isInteger()) {
if(CHILD(i)[0].isAddition()) {
bool b = true;
for(size_t i2 = 0; i2 < CHILD(i)[0].size(); i2++) {
if((!CHILD(i)[0][i2].isNumber() || !CHILD(i)[0][i2].number().isNegative()) && (!CHILD(i)[0][i2].isMultiplication() || CHILD(i)[0][i2].size() < 2 || !CHILD(i)[0][i2][0].isNumber() || !CHILD(i)[0][i2][0].number().isNegative())) {
b = false;
break;
}
}
if(b) {
for(size_t i2 = 0; i2 < CHILD(i)[0].size(); i2++) {
if(CHILD(i)[0][i2].isNumber()) {
CHILD(i)[0][i2].number().negate();
} else {
CHILD(i)[0][i2][0].number().negate();
if(CHILD(i)[0][i2][0].isOne() && CHILD(i)[0][i2].size() > 1) {
CHILD(i)[0][i2].delChild(1);
if(CHILD(i)[0][i2].size() == 1) {
CHILD(i)[0][i2].setToChild(1, true);
}
}
}
}
if(CHILD(i)[1].number().isOdd()) {
if(CHILD(0).isNumber()) CHILD(0).number().negate();
else {
PREPEND(MathStructure(-1, 1, 0));
i++;
}
}
}
} else if(CHILD(i)[0].isMultiplication() && CHILD(i)[0].size() >= 2 && CHILD(i)[0][0].isNumber() && CHILD(i)[0][0].number().isNegative()) {
CHILD(i)[0][0].number().negate();
if(CHILD(i)[0][0].isOne() && CHILD(i)[0].size() > 1) {
CHILD(i)[0].delChild(1);
if(CHILD(i)[0].size() == 1) {
CHILD(i)[0].setToChild(1, true);
}
}
if(CHILD(i)[1].number().isOdd()) {
if(CHILD(0).isNumber()) CHILD(0).number().negate();
else {
PREPEND(MathStructure(-1, 1, 0));
i++;
}
}
}
} else if(CHILD(i).isAddition()) {
bool b = true;
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if((!CHILD(i)[i2].isNumber() || !CHILD(i)[i2].number().isNegative()) && (!CHILD(i)[i2].isMultiplication() || CHILD(i)[i2].size() < 2 || !CHILD(i)[i2][0].isNumber() || !CHILD(i)[i2][0].number().isNegative())) {
b = false;
break;
}
}
if(b) {
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2].isNumber()) {
CHILD(i)[i2].number().negate();
} else {
CHILD(i)[i2][0].number().negate();
if(CHILD(i)[i2][0].isOne() && CHILD(i)[i2].size() > 1) {
CHILD(i)[i2].delChild(1);
if(CHILD(i)[i2].size() == 1) {
CHILD(i)[i2].setToChild(1, true);
}
}
}
}
if(CHILD(0).isNumber()) CHILD(0).number().negate();
else {
PREPEND(MathStructure(-1, 1, 0));
i++;
}
}
}
}
if(SIZE > 1 && CHILD(0).isOne()) {
ERASE(0);
}
if(SIZE == 1) SET_CHILD_MAP(0);
}
if(isPower() && CHILD(1).isInteger()) {
if(CHILD(0).isAddition()) {
bool b = true;
for(size_t i2 = 0; i2 < CHILD(0).size(); i2++) {
if((!CHILD(0)[i2].isNumber() || !CHILD(0)[i2].number().isNegative()) && (!CHILD(0)[i2].isMultiplication() || CHILD(0)[i2].size() < 2 || !CHILD(0)[i2][0].isNumber() || !CHILD(0)[i2][0].number().isNegative())) {
b = false;
break;
}
}
if(b) {
for(size_t i2 = 0; i2 < CHILD(0).size(); i2++) {
if(CHILD(0)[i2].isNumber()) {
CHILD(0)[i2].number().negate();
} else {
CHILD(0)[i2][0].number().negate();
if(CHILD(0)[i2][0].isOne() && CHILD(0)[i2].size() > 1) {
CHILD(0)[i2].delChild(1);
if(CHILD(0)[i2].size() == 1) {
CHILD(0)[i2].setToChild(1, true);
}
}
}
}
if(CHILD(1).number().isOdd()) {
multiply(MathStructure(-1, 1, 0));
CHILD_TO_FRONT(1)
}
}
} else if(CHILD(0).isMultiplication() && CHILD(0).size() >= 2 && CHILD(0)[0].isNumber() && CHILD(0)[0].number().isNegative()) {
CHILD(0)[0].number().negate();
if(CHILD(0)[0].isOne() && CHILD(0).size() > 1) {
CHILD(0).delChild(1);
if(CHILD(0).size() == 1) {
CHILD(0).setToChild(1, true);
}
}
if(CHILD(1).number().isOdd()) {
multiply(MathStructure(-1, 1, 0));
CHILD_TO_FRONT(1)
}
}
}
}
evalSort(true);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
}
eo.protected_function = eo_pre.protected_function;
}
switch(type()) {
case STRUCT_ADDITION: {
if(CALCULATOR->aborted()) return false;
if(term_combination_levels >= -1 && !only_sqrfree && max_factor_degree != 0) {
if(SIZE <= 3 && SIZE > 1) {
MathStructure *xvar = NULL;
Number nr2(1, 1);
if(CHILD(0).isPower() && CHILD(0)[0].size() == 0 && CHILD(0)[1].isNumber() && CHILD(0)[1].number().isTwo()) {
xvar = &CHILD(0)[0];
} else if(CHILD(0).isMultiplication() && CHILD(0).size() == 2 && CHILD(0)[0].isNumber()) {
if(CHILD(0)[1].isPower()) {
if(CHILD(0)[1][0].size() == 0 && CHILD(0)[1][1].isNumber() && CHILD(0)[1][1].number().isTwo()) {
xvar = &CHILD(0)[1][0];
nr2.set(CHILD(0)[0].number());
}
}
}
if(xvar) {
bool factorable = false;
Number nr1, nr0;
if(SIZE == 2 && CHILD(1).isNumber()) {
factorable = true;
nr0 = CHILD(1).number();
} else if(SIZE == 3 && CHILD(2).isNumber()) {
nr0 = CHILD(2).number();
if(CHILD(1).isMultiplication()) {
if(CHILD(1).size() == 2 && CHILD(1)[0].isNumber() && xvar->equals(CHILD(1)[1])) {
nr1 = CHILD(1)[0].number();
factorable = true;
}
} else if(xvar->equals(CHILD(1))) {
nr1.set(1, 1, 0);
factorable = true;
}
}
if(factorable) {
Number nr4ac(4, 1, 0);
nr4ac *= nr2;
nr4ac *= nr0;
Number nr2a(2, 1, 0);
nr2a *= nr2;
Number sqrtb24ac(nr1);
sqrtb24ac.raise(nr_two);
sqrtb24ac -= nr4ac;
if(sqrtb24ac.isNegative()) factorable = false;
MathStructure mstructb24(sqrtb24ac);
if(factorable) {
if(!only_integers) {
if(eo.approximation == APPROXIMATION_EXACT && !sqrtb24ac.isApproximate()) {
sqrtb24ac.raise(nr_half);
if(sqrtb24ac.isApproximate()) {
mstructb24.raise(nr_half);
} else {
mstructb24.set(sqrtb24ac);
}
} else {
mstructb24.number().raise(nr_half);
}
} else {
mstructb24.number().raise(nr_half);
if((!sqrtb24ac.isApproximate() && mstructb24.number().isApproximate()) || (sqrtb24ac.isInteger() && !mstructb24.number().isInteger())) {
factorable = false;
}
}
}
if(factorable) {
MathStructure m1(nr1), m2(nr1);
Number mul1(1, 1), mul2(1, 1);
if(mstructb24.isNumber()) {
m1.number() += mstructb24.number();
m1.number() /= nr2a;
if(m1.number().isRational() && !m1.number().isInteger()) {
mul1 = m1.number().denominator();
m1.number() *= mul1;
}
m2.number() -= mstructb24.number();
m2.number() /= nr2a;
if(m2.number().isRational() && !m2.number().isInteger()) {
mul2 = m2.number().denominator();
m2.number() *= mul2;
}
} else {
m1.calculateAdd(mstructb24, eo);
m1.calculateDivide(nr2a, eo);
if(m1.isNumber()) {
if(m1.number().isRational() && !m1.number().isInteger()) {
mul1 = m1.number().denominator();
m1.number() *= mul1;
}
} else {
bool bint = false, bfrac = false;
idm1(m1, bfrac, bint);
if(bfrac) {
idm2(m1, bfrac, bint, mul1);
idm3(m1, mul1, true);
}
}
m2.calculateSubtract(mstructb24, eo);
m2.calculateDivide(nr2a, eo);
if(m2.isNumber()) {
if(m2.number().isRational() && !m2.number().isInteger()) {
mul2 = m2.number().denominator();
m2.number() *= mul2;
}
} else {
bool bint = false, bfrac = false;
idm1(m2, bfrac, bint);
if(bfrac) {
idm2(m2, bfrac, bint, mul2);
idm3(m2, mul2, true);
}
}
}
nr2 /= mul1;
nr2 /= mul2;
if(m1 == m2 && mul1 == mul2) {
MathStructure xvar2(*xvar);
if(!mul1.isOne()) xvar2 *= mul1;
set(m1);
add(xvar2, true);
raise(MathStructure(2, 1, 0));
if(!nr2.isOne()) {
multiply(nr2);
}
} else {
m1.add(*xvar, true);
if(!mul1.isOne()) m1[m1.size() - 1] *= mul1;
m2.add(*xvar, true);
if(!mul2.isOne()) m2[m2.size() - 1] *= mul2;
clear(true);
m_type = STRUCT_MULTIPLICATION;
if(!nr2.isOne()) {
APPEND_NEW(nr2);
}
APPEND(m1);
APPEND(m2);
}
EvaluationOptions eo2 = eo;
eo2.expand = false;
calculatesub(eo2, eo2, false);
evalSort(true);
return true;
}
}
}
}
MathStructure *factor_mstruct = new MathStructure(1, 1, 0);
MathStructure mnew;
if(factorize_find_multiplier(*this, mnew, *factor_mstruct) && !factor_mstruct->isZero() && !mnew.isZero()) {
mnew.factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
factor_mstruct->factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
clear(true);
m_type = STRUCT_MULTIPLICATION;
APPEND_REF(factor_mstruct);
APPEND(mnew);
EvaluationOptions eo2 = eo;
eo2.expand = false;
calculatesub(eo2, eo2, false);
factor_mstruct->unref();
evalSort(true);
return true;
}
factor_mstruct->unref();
if(SIZE > 1 && CHILD(SIZE - 1).isNumber() && CHILD(SIZE - 1).number().isInteger() && max_factor_degree != 0) {
MathStructure *xvar = NULL;
Number qnr(1, 1);
int degree = 1;
bool overflow = false;
int qcof = 1;
if(CHILD(0).isPower() && !CHILD(0)[0].isNumber() && CHILD(0)[0].size() == 0 && CHILD(0)[1].isNumber() && CHILD(0)[1].number().isInteger() && CHILD(0)[1].number().isPositive()) {
xvar = &CHILD(0)[0];
degree = CHILD(0)[1].number().intValue(&overflow);
} else if(CHILD(0).isMultiplication() && CHILD(0).size() == 2 && CHILD(0)[0].isNumber() && CHILD(0)[0].number().isInteger()) {
if(CHILD(0)[1].isPower()) {
if(CHILD(0)[1][0].size() == 0 && !CHILD(0)[1][0].isNumber() && CHILD(0)[1][1].isNumber() && CHILD(0)[1][1].number().isInteger() && CHILD(0)[1][1].number().isPositive()) {
xvar = &CHILD(0)[1][0];
qcof = CHILD(0)[0].number().intValue(&overflow);
if(!overflow) {
if(qcof < 0) qcof = -qcof;
degree = CHILD(0)[1][1].number().intValue(&overflow);
}
}
}
}
int pcof = 1;
if(!overflow) {
pcof = CHILD(SIZE - 1).number().intValue(&overflow);
if(pcof < 0) pcof = -pcof;
}
if(xvar && !overflow && degree <= 1000 && degree > 2 && qcof != 0 && pcof != 0) {
bool b = true, b2 = true;
for(size_t i = 1; b && i < SIZE - 1; i++) {
switch(CHILD(i).type()) {
case STRUCT_NUMBER: {
b = false;
break;
}
case STRUCT_POWER: {
if(!CHILD(i)[1].isNumber() || !xvar->equals(CHILD(i)[0]) || !CHILD(i)[1].number().isInteger() || !CHILD(i)[1].number().isPositive()) {
b = false;
}
break;
}
case STRUCT_MULTIPLICATION: {
if(!(CHILD(i).size() == 2) || !CHILD(i)[0].isNumber()) {
b = false;
} else if(CHILD(i)[1].isPower()) {
if(!CHILD(i)[1][1].isNumber() || !xvar->equals(CHILD(i)[1][0]) || !CHILD(i)[1][1].number().isInteger() || !CHILD(i)[1][1].number().isPositive()) {
b = false;
}
} else if(!xvar->equals(CHILD(i)[1])) {
b = false;
}
if(b && b2 && !CHILD(i)[0].isInteger()) b2 = false;
break;
}
default: {
if(!xvar->equals(CHILD(i))) {
b = false;
}
}
}
}
if(b) {
vector<Number> factors;
factors.resize(degree + 1, Number());
factors[0] = CHILD(SIZE - 1).number();
vector<int> ps;
vector<int> qs;
vector<Number> zeroes;
int curdeg = 1, prevdeg = 0;
for(size_t i = 0; b && i < SIZE - 1; i++) {
switch(CHILD(i).type()) {
case STRUCT_POWER: {
curdeg = CHILD(i)[1].number().intValue(&overflow);
if(curdeg == prevdeg || curdeg > degree || (prevdeg > 0 && curdeg > prevdeg) || overflow) {
b = false;
} else {
factors[curdeg].set(1, 1, 0);
}
break;
}
case STRUCT_MULTIPLICATION: {
if(CHILD(i)[1].isPower()) {
curdeg = CHILD(i)[1][1].number().intValue(&overflow);
} else {
curdeg = 1;
}
if(curdeg == prevdeg || curdeg > degree || (prevdeg > 0 && curdeg > prevdeg) || overflow) {
b = false;
} else {
factors[curdeg] = CHILD(i)[0].number();
}
break;
}
default: {
curdeg = 1;
factors[curdeg].set(1, 1, 0);
}
}
prevdeg = curdeg;
}
while(b && degree > 2) {
for(int i = 1; i <= 1000; i++) {
if(i > pcof) break;
if(pcof % i == 0) ps.push_back(i);
}
for(int i = 1; i <= 1000; i++) {
if(i > qcof) break;
if(qcof % i == 0) qs.push_back(i);
}
Number itest;
int i2;
size_t pi = 0, qi = 0;
if(ps.empty() || qs.empty()) break;
Number nrtest(ps[0], qs[0], 0);
while(true) {
itest.clear(); i2 = degree;
while(true) {
itest += factors[i2];
if(i2 == 0) break;
itest *= nrtest;
i2--;
}
if(itest.isZero()) {
break;
}
if(nrtest.isPositive()) {
nrtest.negate();
} else {
qi++;
if(qi == qs.size()) {
qi = 0;
pi++;
if(pi == ps.size()) {
break;
}
}
nrtest.set(ps[pi], qs[qi], 0);
}
}
if(itest.isZero()) {
itest.clear(); i2 = degree;
Number ntmp(factors[i2]);
for(; i2 > 0; i2--) {
itest += ntmp;
ntmp = factors[i2 - 1];
factors[i2 - 1] = itest;
itest *= nrtest;
}
degree--;
nrtest.negate();
zeroes.push_back(nrtest);
if(degree == 2) {
break;
}
qcof = factors[degree].intValue(&overflow);
if(!overflow) {
if(qcof < 0) qcof = -qcof;
pcof = factors[0].intValue(&overflow);
if(!overflow) {
if(pcof < 0) pcof = -pcof;
}
}
if(overflow || qcof == 0 || pcof == 0) {
break;
}
} else {
break;
}
ps.clear();
qs.clear();
}
if(zeroes.size() > 0) {
MathStructure mleft;
MathStructure mtmp;
MathStructure *mcur;
for(int i = degree; i >= 0; i--) {
if(!factors[i].isZero()) {
if(mleft.isZero()) {
mcur = &mleft;
} else {
mleft.add(m_zero, true);
mcur = &mleft[mleft.size() - 1];
}
if(i > 1) {
if(!factors[i].isOne()) {
mcur->multiply(*xvar);
(*mcur)[0].set(factors[i]);
mcur = &(*mcur)[1];
} else {
mcur->set(*xvar);
}
mtmp.set(i, 1, 0);
mcur->raise(mtmp);
} else if(i == 1) {
if(!factors[i].isOne()) {
mcur->multiply(*xvar);
(*mcur)[0].set(factors[i]);
} else {
mcur->set(*xvar);
}
} else {
mcur->set(factors[i]);
}
}
}
mleft.factorize(eo, false, 0, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
vector<long int> powers;
vector<size_t> powers_i;
int dupsfound = 0;
for(size_t i = 0; i < zeroes.size() - 1; i++) {
while(i + 1 < zeroes.size() && zeroes[i] == zeroes[i + 1]) {
dupsfound++;
zeroes.erase(zeroes.begin() + (i + 1));
}
if(dupsfound > 0) {
powers_i.push_back(i);
powers.push_back(dupsfound + 1);
dupsfound = 0;
}
}
MathStructure xvar2(*xvar);
Number *nrmul;
if(mleft.isMultiplication()) {
set(mleft);
evalSort();
if(CHILD(0).isNumber()) {
nrmul = &CHILD(0).number();
} else if(CHILD(0).isMultiplication() && CHILD(0).size() > 0 && CHILD(0)[0].isNumber()) {
nrmul = &CHILD(0)[0].number();
} else {
PREPEND(m_one);
nrmul = &CHILD(0).number();
}
} else {
clear(true);
m_type = STRUCT_MULTIPLICATION;
APPEND(m_one);
APPEND(mleft);
nrmul = &CHILD(0).number();
}
size_t pi = 0;
for(size_t i = 0; i < zeroes.size(); i++) {
if(zeroes[i].isInteger()) {
APPEND(xvar2);
} else {
APPEND(m_zero);
}
mcur = &CHILD(SIZE - 1);
if(pi < powers_i.size() && powers_i[pi] == i) {
mcur->raise(MathStructure(powers[pi], 1L, 0L));
mcur = &(*mcur)[0];
if(zeroes[i].isInteger()) {
mcur->add(zeroes[i]);
} else {
Number nr(zeroes[i].denominator());
mcur->add(zeroes[i].numerator());
(*mcur)[0] *= xvar2;
(*mcur)[0][0].number() = nr;
nr.raise(powers[pi]);
nrmul->divide(nr);
}
pi++;
} else {
if(zeroes[i].isInteger()) {
mcur->add(zeroes[i]);
} else {
nrmul->divide(zeroes[i].denominator());
mcur->add(zeroes[i].numerator());
(*mcur)[0] *= xvar2;
(*mcur)[0][0].number() = zeroes[i].denominator();
}
}
}
if(CHILD(0).isNumber() && CHILD(0).number().isOne()) {
ERASE(0);
} else if(CHILD(0).isMultiplication() && CHILD(0).size() > 0 && CHILD(0)[0].isNumber() && CHILD(0)[0].number().isOne()) {
if(CHILD(0).size() == 1) {
ERASE(0);
} else if(CHILD(0).size() == 2) {
CHILD(0).setToChild(2, true);
} else {
CHILD(0).delChild(1);
}
}
evalSort(true);
Number dupspow;
for(size_t i = 0; i < SIZE - 1; i++) {
mcur = NULL;
if(CHILD(i).isPower()) {
if(CHILD(i)[0].isAddition() && CHILD(i)[1].isNumber()) {
mcur = &CHILD(i)[0];
}
} else if(CHILD(i).isAddition()) {
mcur = &CHILD(i);
}
while(mcur && i + 1 < SIZE) {
if(CHILD(i + 1).isPower()) {
if(CHILD(i + 1)[0].isAddition() && CHILD(i + 1)[1].isNumber() && mcur->equals(CHILD(i + 1)[0])) {
dupspow += CHILD(i + 1)[1].number();
} else {
mcur = NULL;
}
} else if(CHILD(i + 1).isAddition() && mcur->equals(CHILD(i + 1))) {
dupspow++;
} else {
mcur = NULL;
}
if(mcur) {
ERASE(i + 1);
}
}
if(!dupspow.isZero()) {
if(CHILD(i).isPower()) {
CHILD(i)[1].number() += dupspow;
} else {
dupspow++;
CHILD(i) ^= dupspow;
}
dupspow.clear();
}
}
if(SIZE == 1) {
setToChild(1, true);
} else {
EvaluationOptions eo2 = eo;
eo2.expand = false;
calculatesub(eo2, eo2, false);
}
evalSort(true);
return true;
}
}
if(b && b2 && (max_factor_degree < 0 || max_factor_degree >= 2) && degree > 3 && degree < 50) {
// Kronecker method
vector<Number> vnum;
vnum.resize(degree + 1, nr_zero);
bool overflow = false;
for(size_t i = 0; b && i < SIZE; i++) {
switch(CHILD(i).type()) {
case STRUCT_POWER: {
if(CHILD(i)[0] == *xvar && CHILD(i)[1].isInteger()) {
int curdeg = CHILD(i)[1].number().intValue(&overflow);
if(curdeg < 0 || overflow || curdeg > degree) b = false;
else vnum[curdeg] += 1;
} else {
b = false;
}
break;
}
case STRUCT_MULTIPLICATION: {
if(CHILD(i).size() == 2 && CHILD(i)[0].isInteger()) {
long int icoeff = CHILD(i)[0].number().intValue(&overflow);
if(!overflow && CHILD(i)[1].isPower() && CHILD(i)[1][0] == *xvar && CHILD(i)[1][1].isInteger()) {
int curdeg = CHILD(i)[1][1].number().intValue(&overflow);
if(curdeg < 0 || overflow || curdeg > degree) b = false;
else vnum[curdeg] += icoeff;
} else if(!overflow && CHILD(i)[1] == *xvar) {
vnum[1] += icoeff;
} else {
b = false;
}
} else {
b = false;
}
break;
}
default: {
if(CHILD(i).isInteger()) {
long int icoeff = CHILD(i).number().intValue(&overflow);
if(overflow) b = false;
else vnum[0] += icoeff;
} else if(CHILD(i) == *xvar) {
vnum[1] += 1;
} else {
b = false;
}
break;
}
}
}
long int lcoeff = vnum[degree].lintValue();
vector<int> vs;
if(b && lcoeff != 0) {
degree /= 2;
if(max_factor_degree > 0 && degree > max_factor_degree) degree = max_factor_degree;
for(int i = 0; i <= degree; i++) {
if(CALCULATOR->aborted()) return false;
MathStructure mcalc(*this);
mcalc.calculateReplace(*xvar, Number((i / 2 + i % 2) * (i % 2 == 0 ? -1 : 1), 1), eo);
mcalc.calculatesub(eo, eo, false);
if(!mcalc.isInteger()) break;
bool overflow = false;
int v = ::abs(mcalc.number().intValue(&overflow));
if(overflow) {
if(i > 2) degree = i;
else b = false;
break;
}
vs.push_back(v);
}
}
if(b) {
vector<int> factors0, factorsl;
factors0.push_back(1);
for(int i = 2; i < vs[0] / 3 && i < 1000; i++) {
if(vs[0] % i == 0) factors0.push_back(i);
}
if(vs[0] % 3 == 0) factors0.push_back(vs[0] / 3);
if(vs[0] % 2 == 0) factors0.push_back(vs[0] / 2);
factors0.push_back(vs[0]);
for(int i = 2; i < lcoeff / 3 && i < 1000; i++) {
if(lcoeff % i == 0) factorsl.push_back(i);
}
factorsl.push_back(1);
if(lcoeff % 3 == 0) factorsl.push_back(lcoeff / 3);
if(lcoeff % 2 == 0) factorsl.push_back(lcoeff / 2);
factorsl.push_back(lcoeff);
long long int cmax = 500000LL / (factors0.size() * factorsl.size());
if(term_combination_levels != 0) cmax *= 10;
if(degree >= 2 && cmax > 10) {
vector<Number> vden;
vector<Number> vquo;
vden.resize(3, nr_zero);
long int c0;
for(size_t i = 0; i < factors0.size() * 2; i++) {
c0 = factors0[i / 2];
if(i % 2 == 1) c0 = -c0;
long int c2;
for(size_t i2 = 0; i2 < factorsl.size(); i2++) {
c2 = factorsl[i2];
long int c1max = vs[1] - c0 - c2, c1min;
if(c1max < 0) {c1min = c1max; c1max = -vs[1] - c0 - c2;}
else {c1min = -vs[1] - c0 - c2;}
if(-(vs[2] - c0 - c2) < -(-vs[2] - c0 - c2)) {
if(c1max > -(-vs[2] - c0 - c2)) c1max = -(-vs[2] - c0 - c2);
if(c1min < -(vs[2] - c0 - c2)) c1min = -(vs[2] - c0 - c2);
} else {
if(c1max > -(vs[2] - c0 - c2)) c1max = -(vs[2] - c0 - c2);
if(c1min < -(-vs[2] - c0 - c2)) c1min = -(-vs[2] - c0 - c2);
}
if(c1min < -cmax / 2) c1min = -cmax / 2;
for(long int c1 = c1min; c1 <= c1max && c1 <= cmax / 2; c1++) {
long int v1 = ::labs(c2 + c1 + c0);
long int v2 = ::labs(c2 - c1 + c0);
if(v1 != 0 && v2 != 0 && v1 <= vs[1] && v2 <= vs[2] && (c1 != 0 || c2 != 0) && vs[1] % v1 == 0 && vs[2] % v2 == 0) {
vden[0] = c0; vden[1] = c1; vden[2] = c2;
if(CALCULATOR->aborted()) return false;
if(polynomial_divide_integers(vnum, vden, vquo)) {
MathStructure mtest;
mtest.setType(STRUCT_ADDITION);
if(c2 != 0) {
MathStructure *mpow = new MathStructure();
mpow->setType(STRUCT_POWER);
mpow->addChild(*xvar);
mpow->addChild_nocopy(new MathStructure(2, 1, 0));
if(c2 == 1) {
mtest.addChild_nocopy(mpow);
} else {
MathStructure *mterm = new MathStructure();
mterm->setType(STRUCT_MULTIPLICATION);
mterm->addChild_nocopy(new MathStructure(c2, 1L, 0L));
mterm->addChild_nocopy(mpow);
mtest.addChild_nocopy(mterm);
}
}
if(c1 == 1) {
mtest.addChild(*xvar);
} else if(c1 != 0) {
MathStructure *mterm = new MathStructure();
mterm->setType(STRUCT_MULTIPLICATION);
mterm->addChild_nocopy(new MathStructure(c1, 1L, 0L));
mterm->addChild(*xvar);
mtest.addChild_nocopy(mterm);
}
mtest.addChild_nocopy(new MathStructure(c0, 1L, 0L));
MathStructure mthis(*this);
MathStructure mquo;
if(mtest.size() > 1 && polynomialDivide(mthis, mtest, mquo, eo, false)) {
mquo.factorize(eo, false, 0, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
set(mquo, true);
multiply(mtest, true);
return true;
}
}
}
}
}
}
}
for(int i_d = 3; i_d <= degree; i_d++) {
if(CALCULATOR->aborted()) return false;
long int t1max = ::pow(cmax / (i_d - 1), 1.0 / (i_d - 1));
if(t1max < 1) break;
if(t1max > 1000) t1max = 1000;
long int c2totalmax = t1max;
long int c2cur;
for(int i = 0; i < i_d - 3; i++) {
c2totalmax *= t1max;
}
vector<Number> vden;
vector<Number> vquo;
long int *vc = (long int*) malloc(sizeof(long int) * (i_d + 1));
vden.resize(i_d + 1, nr_zero);
for(size_t i = 0; i < factors0.size() * 2; i++) {
vc[0] = factors0[i / 2] * (i % 2 == 1 ? -1 : 1);
for(size_t i2 = 0; i2 < factorsl.size(); i2++) {
vc[i_d] = factorsl[i2];
for(long int c2p = 0; c2p <= c2totalmax; c2p++) {
c2cur = c2p;
for(int i = 2; i < i_d; i++) {
vc[i] = c2cur % t1max;
if(vc[i] % 2 == 1) vc[i] = -vc[i];
vc[i] = vc[i] / 2 + vc[i] % 2;
c2cur /= t1max;
}
long int c1max = t1max / 2 + t1max % 2, c1min = -t1max / 2 - t1max % 2;
for(size_t i = 1; i < vs.size(); i++) {
long int vsmax = vs[i] - vc[0];
long int vsmin = -vs[i] - vc[0];
int ix = (i / 2 + i % 2) * (i % 2 == 0 ? -1 : 1);
int ixi = ix;
for(int i2 = 2; i2 <= i_d; i2++) {
ixi *= ix;
vsmax -= vc[i2] * ixi;
}
vsmax /= ix;
vsmin /= ix;
if(vsmax < vsmin) {
if(c1max > vsmin) c1max = vsmin;
if(c1min < vsmax) c1min = vsmax;
} else {
if(c1max > vsmax) c1max = vsmax;
if(c1min < vsmin) c1min = vsmin;
}
}
for(long int c1 = c1min; c1 <= c1max; c1++) {
vc[1] = c1;
bool b = true;
for(size_t i = 1; i < vs.size(); i++) {
long int v = vc[0];
int ix = (i / 2 + i % 2) * (i % 2 == 0 ? -1 : 1);
int ixi = 1;
for(int i2 = 1; i2 <= i_d; i2++) {
ixi *= ix;
v += vc[i2] * ixi;
}
if(v < 0) v = -v;
if(v == 0 || v > vs[i] || vs[i] % v != 0) {
b = false;
break;
}
}
if(b) {
if(CALCULATOR->aborted()) return false;
for(size_t iden = 0; iden < vden.size(); iden++) {
vden[iden] = vc[iden];
}
if(polynomial_divide_integers(vnum, vden, vquo)) {
MathStructure mtest;
mtest.setType(STRUCT_ADDITION);
for(int i2 = i_d; i2 >= 2; i2--) {
if(vc[i2] != 0) {
MathStructure *mpow = new MathStructure();
mpow->setType(STRUCT_POWER);
mpow->addChild(*xvar);
mpow->addChild_nocopy(new MathStructure(i2, 1, 0));
if(vc[i2] == 1) {
mtest.addChild_nocopy(mpow);
} else {
MathStructure *mterm = new MathStructure();
mterm->setType(STRUCT_MULTIPLICATION);
mterm->addChild_nocopy(new MathStructure(vc[i2], 1L, 0L));
mterm->addChild_nocopy(mpow);
mtest.addChild_nocopy(mterm);
}
}
}
if(vc[1] == 1) {
mtest.addChild(*xvar);
} else if(vc[1] != 0) {
MathStructure *mterm = new MathStructure();
mterm->setType(STRUCT_MULTIPLICATION);
mterm->addChild_nocopy(new MathStructure(vc[1], 1L, 0L));
mterm->addChild(*xvar);
mtest.addChild_nocopy(mterm);
}
mtest.addChild_nocopy(new MathStructure(vc[0], 1L, 0L));
MathStructure mthis(*this);
MathStructure mquo;
if(mtest.size() > 1 && polynomialDivide(mthis, mtest, mquo, eo, false)) {
mquo.factorize(eo, false, 0, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
free(vc);
set(mquo, true);
multiply(mtest, true);
return true;
}
}
}
}
}
}
}
free(vc);
}
}
}
}
}
if(SIZE == 2 && max_factor_degree != 0) {
Number nr1(1, 1, 0), nr2(1, 1, 0);
bool b = true, b_nonnum = false;
bool b1_neg = false, b2_neg = false;
for(size_t i = 0; i < SIZE && b; i++) {
b = false;
if(CHILD(i).isInteger() && CHILD(i).number().integerLength() < 100) {
b = true;
if(i == 0) nr1 = CHILD(i).number();
else nr2 = CHILD(i).number();
} else if(CHILD(i).isMultiplication() && CHILD(i).size() > 1) {
b_nonnum = true;
b = true;
size_t i2 = 0;
if(CHILD(i)[0].isInteger() && CHILD(i).number().integerLength() < 100) {
if(i == 0) nr1 = CHILD(i)[0].number();
else nr2 = CHILD(i)[0].number();
i2++;
}
for(; i2 < CHILD(i).size(); i2++) {
if(!CHILD(i)[i2].isPower() || !CHILD(i)[i2][1].isInteger() || !CHILD(i)[i2][1].number().isPositive() || !CHILD(i)[i2][1].number().isEven() || CHILD(i)[1].number().integerLength() >= 100 || !CHILD(i)[i2][0].representsNonMatrix()) {
b = false;
break;
}
}
} else if(CHILD(i).isPower() && CHILD(i)[1].isNumber() && CHILD(i)[1].number().isInteger() && CHILD(i)[1].number().isPositive() && CHILD(i)[1].number().isEven() && CHILD(i)[1].number().integerLength() < 100 && CHILD(i)[0].representsNonMatrix()) {
b_nonnum = true;
b = true;
}
}
if(!b_nonnum) b = false;
if(b) {
b1_neg = nr1.isNegative();
b2_neg = nr2.isNegative();
if(b1_neg == b2_neg) b = false;
}
if(b) {
if(b1_neg) b = nr1.negate();
if(b && !nr1.isOne()) {
b = nr1.isPerfectSquare() && nr1.isqrt();
}
}
if(b) {
if(b2_neg) nr2.negate();
if(!nr2.isOne()) {
b = nr2.isPerfectSquare() && nr2.isqrt();
}
}
if(b) {
bool calc = false;
MathStructure *mmul = new MathStructure(*this);
for(size_t i = 0; i < SIZE; i++) {
if(CHILD(i).isNumber()) {
if(i == 0) {
CHILD(i).number() = nr1;
if(b1_neg) nr1.negate();
(*mmul)[i].number() = nr1;
} else {
CHILD(i).number() = nr2;
if(b2_neg) nr2.negate();
(*mmul)[i].number() = nr2;
}
} else if(CHILD(i).isMultiplication() && CHILD(i).size() > 1) {
b = true;
size_t i2 = 0;
if(CHILD(i)[0].isNumber()) {
if(i == 0) {
CHILD(i)[0].number() = nr1;
if(b1_neg) nr1.negate();
(*mmul)[i][0].number() = nr1;
} else {
CHILD(i)[0].number() = nr2;
if(b2_neg) nr2.negate();
(*mmul)[i][0].number() = nr2;
}
i2++;
}
for(; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2][1].number().isTwo()) {
CHILD(i)[i2].setToChild(1, true);
(*mmul)[i][i2].setToChild(1, true);
} else {
CHILD(i)[i2][1].number().divide(2);
(*mmul)[i][i2][1].number().divide(2);
}
CHILD(i).childUpdated(i2 + 1);
(*mmul)[i].childUpdated(i2 + 1);
}
if(CHILD(i)[0].isOne()) CHILD(i).delChild(1, true);
if((*mmul)[i][0].isOne()) (*mmul)[i].delChild(1, true);
} else if(CHILD(i).isPower()) {
if(CHILD(i)[1].number().isTwo()) {
CHILD(i).setToChild(1, true);
(*mmul)[i].setToChild(1, true);
} else {
CHILD(i)[1].number().divide(2);
(*mmul)[i][1].number().divide(2);
}
}
if(CHILD(i).isAddition()) calc = true;
CHILD_UPDATED(i)
mmul->childUpdated(i + 1);
}
if(calc) {
calculatesub(eo, eo, false);
mmul->calculatesub(eo, eo, false);
}
if(recursive) {
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
mmul->factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
}
multiply_nocopy(mmul);
evalSort(true);
return true;
}
}
//x^3-y^3=(x-y)(x^2+xy+y^2)
if(max_factor_degree != 0 && SIZE == 2 && CHILD(0).isPower() && CHILD(0)[1].isNumber() && CHILD(0)[1].number() == 3 && CHILD(1).isMultiplication() && CHILD(1).size() == 2 && CHILD(1)[0].isMinusOne() && CHILD(1)[1].isPower() && CHILD(1)[1][1].isNumber() && CHILD(1)[1][1].number() == 3) {
if(CHILD(0)[0].representsNonMatrix() && CHILD(1)[1][0].representsNonMatrix()) {
MathStructure *m2 = new MathStructure(*this);
(*m2)[0].setToChild(1, true);
(*m2)[1][1].setToChild(1, true);
EvaluationOptions eo2 = eo;
eo2.expand = false;
m2->calculatesub(eo2, eo2, false);
CHILD(0)[1].set(2, 1, 0, true);
CHILD(1).setToChild(2, true);
CHILD(1)[1].set(2, 1, 0, true);
MathStructure *m3 = new MathStructure(CHILD(0)[0]);
m3->calculateMultiply(CHILD(1)[0], eo2);
add_nocopy(m3, true);
calculatesub(eo2, eo2, false);
multiply_nocopy(m2, true);
evalSort(true);
return true;
}
}
//-x^3+y^3=(-x+y)(x^2+xy+y^2)
if(max_factor_degree != 0 && SIZE == 2 && CHILD(1).isPower() && CHILD(1)[1].isNumber() && CHILD(1)[1].number() == 3 && CHILD(0).isMultiplication() && CHILD(0).size() == 2 && CHILD(0)[0].isMinusOne() && CHILD(0)[1].isPower() && CHILD(0)[1][1].isNumber() && CHILD(0)[1][1].number() == 3) {
if(CHILD(1)[0].representsNonMatrix() && CHILD(0)[1][0].representsNonMatrix()) {
MathStructure *m2 = new MathStructure(*this);
(*m2)[1].setToChild(1, true);
(*m2)[0][1].setToChild(1, true);
EvaluationOptions eo2 = eo;
eo2.expand = false;
m2->calculatesub(eo2, eo2, false);
CHILD(1)[1].set(2, 1, 0, true);
CHILD(0).setToChild(2, true);
CHILD(0)[1].set(2, 1, 0, true);
MathStructure *m3 = new MathStructure(CHILD(0)[0]);
m3->calculateMultiply(CHILD(1)[0], eo2);
add_nocopy(m3, true);
calculatesub(eo2, eo2, false);
multiply_nocopy(m2, true);
evalSort(true);
return true;
}
}
if(max_factor_degree != 0 && !only_integers && !force_factorization.isUndefined() && SIZE >= 2) {
MathStructure mexp, madd, mmul;
if(gather_factors(*this, force_factorization, madd, mmul, mexp) && !madd.isZero() && !mmul.isZero() && mexp.isInteger() && mexp.number().isGreaterThan(nr_two)) {
if(!mmul.isOne()) madd.calculateDivide(mmul, eo);
bool overflow = false;
int n = mexp.number().intValue(&overflow);
if(!overflow) {
if(n % 4 == 0) {
int i_u = 1;
if(n != 4) {
i_u = n / 4;
}
MathStructure m_sqrt2(2, 1, 0);
m_sqrt2.calculateRaise(nr_half, eo);
MathStructure m_sqrtb(madd);
m_sqrtb.calculateRaise(nr_half, eo);
MathStructure m_bfourth(madd);
m_bfourth.calculateRaise(Number(1, 4), eo);
m_sqrt2.calculateMultiply(m_bfourth, eo);
MathStructure m_x(force_factorization);
if(i_u != 1) m_x ^= i_u;
m_sqrt2.calculateMultiply(m_x, eo);
MathStructure *m2 = new MathStructure(force_factorization);
m2->raise(Number(i_u * 2, 1));
m2->add(m_sqrtb);
m2->calculateAdd(m_sqrt2, eo);
set(force_factorization, true);
raise(Number(i_u * 2, 1));
add(m_sqrtb);
calculateSubtract(m_sqrt2, eo);
multiply_nocopy(m2);
} else {
int i_u = 1;
if(n % 2 == 0) {
i_u = 2;
n /= 2;
}
MathStructure *m2 = new MathStructure(madd);
m2->calculateRaise(Number(n - 1, n), eo);
for(int i = 1; i < n - 1; i++) {
MathStructure *mterm = new MathStructure(madd);
mterm->calculateRaise(Number(n - i - 1, n), eo);
mterm->multiply(force_factorization);
if(i != 1 || i_u != 1) {
mterm->last().raise(Number(i * i_u, 1));
mterm->childUpdated(mterm->size());
}
if(i % 2 == 1) mterm->calculateMultiply(m_minus_one, eo);
m2->add_nocopy(mterm, true);
}
MathStructure *mterm = new MathStructure(force_factorization);
mterm->raise(Number((n - 1) * i_u, 1));
m2->add_nocopy(mterm, true);
mterm = new MathStructure(force_factorization);
if(i_u != 1) mterm->raise(Number(i_u, 1));
set(madd, true);
calculateRaise(Number(1, n), eo);
add_nocopy(mterm);
multiply_nocopy(m2);
}
if(!mmul.isOne()) multiply(mmul, true);
evalSort(true);
return true;
}
}
}
//-x-y = -(x+y)
bool b = true;
for(size_t i2 = 0; i2 < SIZE; i2++) {
if((!CHILD(i2).isNumber() || !CHILD(i2).number().isNegative()) && (!CHILD(i2).isMultiplication() || CHILD(i2).size() < 2 || !CHILD(i2)[0].isNumber() || !CHILD(i2)[0].number().isNegative())) {
b = false;
break;
}
}
if(b) {
for(size_t i2 = 0; i2 < SIZE; i2++) {
if(CHILD(i2).isNumber()) {
CHILD(i2).number().negate();
} else {
CHILD(i2)[0].number().negate();
if(CHILD(i2)[0].isOne() && CHILD(i2).size() > 1) {
CHILD(i2).delChild(1);
if(CHILD(i2).size() == 1) {
CHILD(i2).setToChild(1, true);
}
}
}
}
multiply(MathStructure(-1, 1, 0));
CHILD_TO_FRONT(1)
}
for(size_t i = 0; i < SIZE; i++) {
if(CHILD(i).isMultiplication() && CHILD(i).size() > 1) {
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2].isAddition()) {
for(size_t i3 = i + 1; i3 < SIZE; i3++) {
if(CHILD(i3).isMultiplication() && CHILD(i3).size() > 1) {
for(size_t i4 = 0; i4 < CHILD(i3).size(); i4++) {
if(CHILD(i3)[i4].isAddition() && CHILD(i3)[i4] == CHILD(i)[i2]) {
MathStructure *mfac = &CHILD(i)[i2];
mfac->ref();
CHILD(i).delChild(i2 + 1, true);
CHILD(i3).delChild(i4 + 1, true);
CHILD(i3).ref();
CHILD(i).add_nocopy(&CHILD(i3));
CHILD(i).calculateAddLast(eo);
CHILD(i).multiply_nocopy(mfac);
CHILD_UPDATED(i)
delChild(i3 + 1, true);
evalSort(true);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
}
}
if(SIZE > 2) {
MathStructure mtest(*this);
mtest.delChild(i + 1);
if(mtest == CHILD(i)[i2]) {
CHILD(i).delChild(i2 + 1, true);
SET_CHILD_MAP(i);
add(m_one, true);
multiply(mtest);
evalSort(true);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
}
}
}
}
}
//complete the square
if(max_factor_degree != 0 && (term_combination_levels != 0 || complete_square)) {
if(only_integers) {
if(SIZE <= 3 && SIZE > 1) {
MathStructure *xvar = NULL;
Number nr2(1, 1);
if(CHILD(0).isPower() && CHILD(0)[0].size() == 0 && CHILD(0)[1].isNumber() && CHILD(0)[1].number().isTwo()) {
xvar = &CHILD(0)[0];
} else if(CHILD(0).isMultiplication() && CHILD(0).size() == 2 && CHILD(0)[0].isNumber()) {
if(CHILD(0)[1].isPower()) {
if(CHILD(0)[1][0].size() == 0 && CHILD(0)[1][1].isNumber() && CHILD(0)[1][1].number().isTwo()) {
xvar = &CHILD(0)[1][0];
nr2.set(CHILD(0)[0].number());
}
}
}
if(xvar) {
bool factorable = false;
Number nr1, nr0;
if(SIZE == 2 && CHILD(1).isNumber()) {
factorable = true;
nr0 = CHILD(1).number();
} else if(SIZE == 3 && CHILD(2).isNumber()) {
nr0 = CHILD(2).number();
if(CHILD(1).isMultiplication()) {
if(CHILD(1).size() == 2 && CHILD(1)[0].isNumber() && xvar->equals(CHILD(1)[1])) {
nr1 = CHILD(1)[0].number();
factorable = true;
}
} else if(xvar->equals(CHILD(1))) {
nr1.set(1, 1, 0);
factorable = true;
}
}
if(factorable && !nr2.isZero() && !nr1.isZero()) {
Number nrh(nr1);
nrh /= 2;
nrh /= nr2;
if(nrh.isInteger()) {
Number nrk(nrh);
if(nrk.square()) {
nrk *= nr2;
nrk.negate();
nrk += nr0;
set(MathStructure(*xvar), true);
add(nrh);
raise(nr_two);
if(!nr2.isOne()) multiply(nr2);
if(!nrk.isZero()) add(nrk);
evalSort(true);
return true;
}
}
}
}
}
} else {
MathStructure m2, m1, m0;
const MathStructure *xvar = NULL;
if(!force_factorization.isUndefined()) {
xvar = &force_factorization;
} else {
if(CHILD(0).isPower() && CHILD(0)[0].size() == 0 && CHILD(0)[1].isNumber() && CHILD(0)[1].number().isTwo()) {
xvar = &CHILD(0)[0];
} else if(CHILD(0).isMultiplication()) {
for(size_t i2 = 0; i2 < CHILD(0).size(); i2++) {
if(CHILD(0).isPower() && CHILD(0)[i2][0].size() == 0 && CHILD(0)[i2][1].isNumber() && CHILD(0)[i2][1].number().isTwo()) {
xvar = &CHILD(0)[0];
}
}
}
}
if(xvar && gather_factors(*this, *xvar, m0, m1, m2, true) && !m1.isZero() && !m2.isZero()) {
MathStructure *mx = new MathStructure(*xvar);
set(m1, true);
calculateMultiply(nr_half, eo);
if(!m2.isOne()) calculateDivide(m2, eo);
add_nocopy(mx);
calculateAddLast(eo);
raise(nr_two);
if(!m2.isOne()) multiply(m2);
if(!m1.isOne()) m1.calculateRaise(nr_two, eo);
m1.calculateMultiply(Number(-1, 4), eo);
if(!m2.isOne()) {
m2.calculateInverse(eo);
m1.calculateMultiply(m2, eo);
}
m0.calculateAdd(m1, eo);
if(!m0.isZero()) add(m0);
if(recursive) {
CHILD(0).factorize(eo, false, 0, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
CHILD(1).factorize(eo, false, 0, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
CHILDREN_UPDATED
}
evalSort(true);
return true;
}
}
}
//Try factorize combinations of terms
if(SIZE > 2 && term_combination_levels > 0) {
bool b = false, b_ret = false;
// 5/y + x/y + z = (5 + x)/y + z
MathStructure mstruct_units(*this);
MathStructure mstruct_new(*this);
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(mstruct_units[i].isMultiplication()) {
for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
if(!mstruct_units[i][i2].isPower() || !mstruct_units[i][i2][1].hasNegativeSign()) {
mstruct_units[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
if(mstruct_new[i][i2].isPower() && mstruct_new[i][i2][1].hasNegativeSign()) {
mstruct_new[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
} else if(mstruct_new[i].isPower() && mstruct_new[i][1].hasNegativeSign()) {
mstruct_new[i].set(1, 1, 0);
} else {
mstruct_units[i].setUndefined();
}
}
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(!mstruct_units[i].isUndefined()) {
for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
if(mstruct_units[i2] == mstruct_units[i]) {
mstruct_new[i].add(mstruct_new[i2], true);
mstruct_new.delChild(i2 + 1);
mstruct_units.delChild(i2 + 1);
b = true;
} else {
i2++;
}
}
if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
else mstruct_new[i].multiply(mstruct_units[i], true);
}
}
if(b) {
if(mstruct_new.size() == 1) {
set(mstruct_new[0], true);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
} else {
set(mstruct_new);
}
b = false;
b_ret = true;
}
// a*y + a*z + x = a(y + z) + x
vector<MathStructure> syms;
vector<size_t> counts;
collect_symbols(*this, syms);
size_t max_count = 0, max_i = 0;
Number min_pow;
for(size_t i = 0; i < syms.size(); i++) {
if(syms[i].containsUnknowns()) {
size_t count = 0;
Number min_pow_i;
for(size_t i2 = 0; i2 < SIZE; i2++) {
if(CHILD(i2).isMultiplication()) {
for(size_t i3 = 0; i3 < CHILD(i2).size(); i3++) {
if(CHILD(i2)[i3].isPower() && CHILD(i2)[i3][1].isNumber() && CHILD(i2)[i3][1].number().isRational() && CHILD(i2)[i3][1].number().isNegative() && CHILD(i2)[i3][0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[i3][1].number() > min_pow_i) min_pow_i = CHILD(i2)[i3][1].number();
count++;
break;
}
}
} else if(CHILD(i2).isPower() && CHILD(i2)[1].isNumber() && CHILD(i2)[1].number().isRational() && CHILD(i2)[1].number().isNegative() && CHILD(i2)[0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[1].number() > min_pow_i) min_pow_i = CHILD(i2)[1].number();
count++;
}
}
if(count > 1 && count > max_count) {
max_count = count;
min_pow = min_pow_i;
max_i = i;
}
}
}
if(!max_count) {
for(size_t i = 0; i < syms.size(); i++) {
if(syms[i].containsUnknowns()) {
size_t count = 0;
Number min_pow_i;
for(size_t i2 = 0; i2 < SIZE; i2++) {
if(CHILD(i2).isMultiplication()) {
for(size_t i3 = 0; i3 < CHILD(i2).size(); i3++) {
if(CHILD(i2)[i3].isPower() && CHILD(i2)[i3][1].isNumber() && CHILD(i2)[i3][1].number().isRational() && CHILD(i2)[i3][1].number().isPositive() && CHILD(i2)[i3][0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[i3][1].number() < min_pow_i) min_pow_i = CHILD(i2)[i3][1].number();
count++;
break;
} else if(CHILD(i2)[i3] == syms[i]) {
if(min_pow_i.isZero() || min_pow_i > 1) min_pow_i = 1;
count++;
break;
}
}
} else if(CHILD(i2).isPower() && CHILD(i2)[1].isNumber() && CHILD(i2)[1].number().isRational() && CHILD(i2)[1].number().isPositive() && CHILD(i2)[0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[1].number() < min_pow_i) min_pow_i = CHILD(i2)[1].number();
count++;
} else if(CHILD(i2) == syms[i]) {
if(min_pow_i.isZero() || min_pow_i > 1) min_pow_i = 1;
count++;
}
}
if(count > 1 && count > max_count) {
max_count = count;
min_pow = min_pow_i;
max_i = i;
}
}
}
}
if(max_count > 0) {
size_t i = max_i;
vector<MathStructure*> mleft;
for(size_t i2 = 0; i2 < SIZE;) {
b = false;
if(CHILD(i2).isMultiplication()) {
for(size_t i3 = 0; i3 < CHILD(i2).size(); i3++) {
if(CHILD(i2)[i3].isPower() && CHILD(i2)[i3][1].isNumber() && CHILD(i2)[i3][1].number().isRational() && (min_pow.isPositive() ? CHILD(i2)[i3][1].number().isPositive() : CHILD(i2)[i3][1].number().isNegative()) && CHILD(i2)[i3][0] == syms[i]) {
if(CHILD(i2)[i3][1] == min_pow) CHILD(i2).delChild(i3 + 1, true);
else if(CHILD(i2)[i3][1] == min_pow + 1) CHILD(i2)[i3].setToChild(1, true);
else {
CHILD(i2)[i3][1].number() -= min_pow;
factorize_fix_root_power(CHILD(i2)[i3]);
}
b = true;
break;
} else if(min_pow.isPositive() && CHILD(i2)[i3] == syms[i]) {
if(min_pow.isOne()) CHILD(i2).delChild(i3 + 1, true);
else {
CHILD(i2)[i3].raise((-min_pow) + 1);
factorize_fix_root_power(CHILD(i2)[i3]);
}
b = true;
break;
}
}
} else if(CHILD(i2).isPower() && CHILD(i2)[1].isNumber() && CHILD(i2)[1].number().isRational() && (min_pow.isPositive() ? CHILD(i2)[1].number().isPositive() : CHILD(i2)[1].number().isNegative()) && CHILD(i2)[0] == syms[i]) {
if(CHILD(i2)[1] == min_pow) CHILD(i2).set(1, 1, 0, true);
else if(CHILD(i2)[1] == min_pow + 1) CHILD(i2).setToChild(1, true);
else {
CHILD(i2)[1].number() -= min_pow;
factorize_fix_root_power(CHILD(i2));
}
b = true;
} else if(min_pow.isPositive() && CHILD(i2) == syms[i]) {
if(min_pow.isOne()) CHILD(i2).set(1, 1, 0, true);
else {
CHILD(i2).raise((-min_pow) + 1);
factorize_fix_root_power(CHILD(i2));
}
b = true;
}
if(b) {
i2++;
} else {
CHILD(i2).ref();
mleft.push_back(&CHILD(i2));
ERASE(i2)
}
}
multiply(syms[i]);
if(!min_pow.isOne()) LAST ^= min_pow;
for(size_t i2 = 0; i2 < mleft.size(); i2++) {
add_nocopy(mleft[i2], true);
}
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
if(LAST.isNumber()) {
MathStructure *mdel = &LAST;
mdel->ref();
delChild(SIZE, true);
b = factorize(eo, false, term_combination_levels - 1, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
add_nocopy(mdel, true);
if(term_combination_levels == 1) return b || b_ret;
if(b) b_ret = true;
}
for(size_t i = 0; !b && i < SIZE; i++) {
MathStructure *mdel = &CHILD(i);
mdel->ref();
delChild(i + 1, true);
b = true;
if(mdel->isMultiplication()) {
for(size_t i2 = 0; i2 < mdel->size(); i2++) {
if((*mdel)[i2].isPower() && (*mdel)[i2][0].containsUnknowns()) {
if(contains((*mdel)[i2][0], false, false, false) > 0) {b = false; break;}
} else if((*mdel)[i2].containsUnknowns()) {
if(contains((*mdel)[i2], false, false, false) > 0) {b = false; break;}
}
}
} else {
b = contains(*mdel, false, false, false) <= 0;
}
if(b) {
b = factorize(eo, false, term_combination_levels - 1, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
if(recursive) mdel->factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
add_nocopy(mdel, true);
if(term_combination_levels == 1) return b || b_ret;
if(b) b_ret = true;
break;
} else {
insertChild_nocopy(mdel, i + 1);
}
}
b = false;
// a*y + a*z + x = a(y + z) + x
syms.clear();
counts.clear();
collect_symbols(*this, syms);
max_count = 0; max_i = 0;
for(size_t i = 0; i < syms.size(); i++) {
size_t count = 0;
Number min_pow_i;
for(size_t i2 = 0; i2 < SIZE; i2++) {
if(CHILD(i2).isMultiplication()) {
for(size_t i3 = 0; i3 < CHILD(i2).size(); i3++) {
if(CHILD(i2)[i3].isPower() && CHILD(i2)[i3][1].isNumber() && CHILD(i2)[i3][1].number().isRational() && CHILD(i2)[i3][1].number().isNegative() && CHILD(i2)[i3][0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[i3][1].number() > min_pow_i) min_pow_i = CHILD(i2)[i3][1].number();
count++;
break;
}
}
} else if(CHILD(i2).isPower() && CHILD(i2)[1].isNumber() && CHILD(i2)[1].number().isRational() && CHILD(i2)[1].number().isNegative() && CHILD(i2)[0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[1].number() > min_pow_i) min_pow_i = CHILD(i2)[1].number();
count++;
}
}
if(count > 1 && count > max_count) {
max_count = count;
min_pow = min_pow_i;
max_i = i;
}
}
if(!max_count) {
for(size_t i = 0; i < syms.size(); i++) {
size_t count = 0;
Number min_pow_i;
for(size_t i2 = 0; i2 < SIZE; i2++) {
if(CHILD(i2).isMultiplication()) {
for(size_t i3 = 0; i3 < CHILD(i2).size(); i3++) {
if(CHILD(i2)[i3].isPower() && CHILD(i2)[i3][1].isNumber() && CHILD(i2)[i3][1].number().isRational() && CHILD(i2)[i3][1].number().isPositive() && CHILD(i2)[i3][0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[i3][1].number() < min_pow_i) min_pow_i = CHILD(i2)[i3][1].number();
count++;
break;
} else if(CHILD(i2)[i3] == syms[i]) {
if(min_pow_i.isZero() || min_pow_i > 1) min_pow_i = 1;
count++;
break;
}
}
} else if(CHILD(i2).isPower() && CHILD(i2)[1].isNumber() && CHILD(i2)[1].number().isRational() && CHILD(i2)[1].number().isPositive() && CHILD(i2)[0] == syms[i]) {
if(min_pow_i.isZero() || CHILD(i2)[1].number() < min_pow_i) min_pow_i = CHILD(i2)[1].number();
count++;
} else if(CHILD(i2) == syms[i]) {
if(min_pow_i.isZero() || min_pow_i > 1) min_pow_i = 1;
count++;
}
}
if(count > 1 && count > max_count) {
max_count = count;
min_pow = min_pow_i;
max_i = i;
}
}
}
if(max_count > 0) {
size_t i = max_i;
vector<MathStructure*> mleft;
for(size_t i2 = 0; i2 < SIZE;) {
b = false;
if(CHILD(i2).isMultiplication()) {
for(size_t i3 = 0; i3 < CHILD(i2).size(); i3++) {
if(CHILD(i2)[i3].isPower() && CHILD(i2)[i3][1].isNumber() && CHILD(i2)[i3][1].number().isRational() && (min_pow.isPositive() ? CHILD(i2)[i3][1].number().isPositive() : CHILD(i2)[i3][1].number().isNegative()) && CHILD(i2)[i3][0] == syms[i]) {
if(CHILD(i2)[i3][1] == min_pow) CHILD(i2).delChild(i3 + 1, true);
else if(CHILD(i2)[i3][1] == min_pow + 1) CHILD(i2)[i3].setToChild(1, true);
else {
CHILD(i2)[i3][1].number() -= min_pow;
factorize_fix_root_power(CHILD(i2)[i3]);
}
b = true;
break;
} else if(min_pow.isPositive() && CHILD(i2)[i3] == syms[i]) {
if(min_pow.isOne()) CHILD(i2).delChild(i3 + 1, true);
else {
CHILD(i2)[i3].raise((-min_pow) + 1);
factorize_fix_root_power(CHILD(i2)[i3]);
}
b = true;
break;
}
}
} else if(CHILD(i2).isPower() && CHILD(i2)[1].isNumber() && CHILD(i2)[1].number().isRational() && (min_pow.isPositive() ? CHILD(i2)[1].number().isPositive() : CHILD(i2)[1].number().isNegative()) && CHILD(i2)[0] == syms[i]) {
if(CHILD(i2)[1] == min_pow) CHILD(i2).set(1, 1, 0, true);
else if(CHILD(i2)[1] == min_pow + 1) CHILD(i2).setToChild(1, true);
else {
CHILD(i2)[1].number() -= min_pow;
factorize_fix_root_power(CHILD(i2));
}
b = true;
} else if(min_pow.isPositive() && CHILD(i2) == syms[i]) {
if(min_pow.isOne()) CHILD(i2).set(1, 1, 0, true);
else {
CHILD(i2).raise((-min_pow) + 1);
factorize_fix_root_power(CHILD(i2));
}
b = true;
}
if(b) {
i2++;
} else {
CHILD(i2).ref();
mleft.push_back(&CHILD(i2));
ERASE(i2)
}
}
multiply(syms[i]);
if(!min_pow.isOne()) LAST ^= min_pow;
for(size_t i2 = 0; i2 < mleft.size(); i2++) {
add_nocopy(mleft[i2], true);
}
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
if(isAddition()) {
b = false;
// y*f(x) + z*f(x) = (y+z)*f(x)
mstruct_units.set(*this);
mstruct_new.set(*this);
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(mstruct_units[i].isMultiplication()) {
for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
if(!combination_factorize_is_complicated(mstruct_units[i][i2])) {
mstruct_units[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
if(combination_factorize_is_complicated(mstruct_new[i][i2])) {
mstruct_new[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
} else if(combination_factorize_is_complicated(mstruct_units[i])) {
mstruct_new[i].set(1, 1, 0);
} else {
mstruct_units[i].setUndefined();
}
}
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(!mstruct_units[i].isUndefined()) {
for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
if(mstruct_units[i2] == mstruct_units[i]) {
mstruct_new[i].add(mstruct_new[i2], true);
mstruct_new.delChild(i2 + 1);
mstruct_units.delChild(i2 + 1);
b = true;
} else {
i2++;
}
}
if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
else mstruct_new[i].multiply(mstruct_units[i], true);
}
}
if(b) {
if(mstruct_new.size() == 1) set(mstruct_new[0], true);
else set(mstruct_new);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
if(isAddition()) {
b = false;
mstruct_units.set(*this);
mstruct_new.set(*this);
// 5x + pi*x + 5y + xy = (5 + pi)x + 5y + xy
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(mstruct_units[i].isMultiplication()) {
for(size_t i2 = 0; i2 < mstruct_units[i].size();) {
if(!mstruct_units[i][i2].containsType(STRUCT_UNIT, true) && !mstruct_units[i][i2].containsUnknowns()) {
mstruct_units[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_units[i].size() == 0) mstruct_units[i].setUndefined();
else if(mstruct_units[i].size() == 1) mstruct_units[i].setToChild(1);
for(size_t i2 = 0; i2 < mstruct_new[i].size();) {
if(mstruct_new[i][i2].containsType(STRUCT_UNIT, true) || mstruct_new[i][i2].containsUnknowns()) {
mstruct_new[i].delChild(i2 + 1);
} else {
i2++;
}
}
if(mstruct_new[i].size() == 0) mstruct_new[i].set(1, 1, 0);
else if(mstruct_new[i].size() == 1) mstruct_new[i].setToChild(1);
} else if(mstruct_units[i].containsType(STRUCT_UNIT, true) || mstruct_units[i].containsUnknowns()) {
mstruct_new[i].set(1, 1, 0);
} else {
mstruct_units[i].setUndefined();
}
}
for(size_t i = 0; i < mstruct_units.size(); i++) {
if(!mstruct_units[i].isUndefined()) {
for(size_t i2 = i + 1; i2 < mstruct_units.size();) {
if(mstruct_units[i2] == mstruct_units[i]) {
mstruct_new[i].add(mstruct_new[i2], true);
mstruct_new.delChild(i2 + 1);
mstruct_units.delChild(i2 + 1);
b = true;
} else {
i2++;
}
}
if(mstruct_new[i].isOne()) mstruct_new[i].set(mstruct_units[i]);
else mstruct_new[i].multiply(mstruct_units[i], true);
}
}
if(b) {
if(mstruct_new.size() == 1) set(mstruct_new[0], true);
else set(mstruct_new);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
return b_ret;
} else if(term_combination_levels != 0 && SIZE > 2) {
int start_index = rand() % SIZE;
int index = start_index;
int best_index = -1;
int run_index = 0;
int max_run_index = SIZE - 3;
if(term_combination_levels < -1) {
run_index = -term_combination_levels - 2;
max_run_index = run_index;
} else if(term_combination_levels > 0 && term_combination_levels - 1 < max_run_index) {
max_run_index = term_combination_levels -1;
}
MathStructure mbest;
do {
if(CALCULATOR->aborted()) break;
if(endtime_p && endtime_p->tv_sec > 0) {
#ifndef CLOCK_MONOTONIC
struct timeval curtime;
gettimeofday(&curtime, NULL);
if(curtime.tv_sec > endtime_p->tv_sec || (curtime.tv_sec == endtime_p->tv_sec && curtime.tv_usec > endtime_p->tv_usec)) {
#else
struct timespec curtime;
clock_gettime(CLOCK_MONOTONIC, &curtime);
if(curtime.tv_sec > endtime_p->tv_sec || (curtime.tv_sec == endtime_p->tv_sec && curtime.tv_nsec / 1000 > endtime_p->tv_usec)) {
#endif
CALCULATOR->error(false, _("Because of time constraints only a limited number of combinations of terms were tried during factorization. Repeat factorization to try other random combinations."), NULL);
break;
}
}
MathStructure mtest(*this);
mtest.delChild(index + 1);
if(mtest.factorize(eo, false, run_index == 0 ? 0 : -1 - run_index, 0, only_integers, false, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree)) {
bool b = best_index < 0 || (mbest.isAddition() && !mtest.isAddition());
if(!b && (mtest.isAddition() == mbest.isAddition())) {
b = mtest.isAddition() && (mtest.size() < mbest.size());
if(!b && (!mtest.isAddition() || mtest.size() == mbest.size())) {
size_t c1 = mtest.countTotalChildren() + CHILD(index).countTotalChildren();
size_t c2 = mbest.countTotalChildren() + CHILD(best_index).countTotalChildren();
b = (c1 < c2);
if(c1 == c2) {
b = (count_powers(mtest) + count_powers(CHILD(index))) < (count_powers(mbest) + count_powers(CHILD(best_index)));
}
}
}
if(b) {
mbest = mtest;
best_index = index;
if(mbest.isPower()) {
break;
}
}
}
index++;
if(index == (int) SIZE) index = 0;
if(index == start_index) {
if(best_index >= 0) {
break;
}
run_index++;
if(run_index > max_run_index) break;
}
} while(true);
if(best_index >= 0) {
mbest.add(CHILD(best_index), true);
set(mbest);
if(term_combination_levels >= -1 && (run_index > 0 || recursive)) {
factorize(eo, false, term_combination_levels, 0, only_integers, true, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
}
return true;
}
}
}
default: {
if(term_combination_levels < -1) break;
bool b = false;
if(isComparison()) {
EvaluationOptions eo2 = eo;
eo2.assume_denominators_nonzero = false;
for(size_t i = 0; i < SIZE; i++) {
if(CHILD(i).factorize(eo2, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree)) {
CHILD_UPDATED(i);
b = true;
}
}
} else if(recursive && (recursive > 1 || !isAddition())) {
for(size_t i = 0; i < SIZE; i++) {
if(CHILD(i).factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree)) {
CHILD_UPDATED(i);
b = true;
}
}
}
if(b) {
EvaluationOptions eo2 = eo;
eo2.expand = false;
calculatesub(eo2, eo2, false);
evalSort(true);
if(isAddition()) {
for(size_t i = 0; i < SIZE; i++) {
if(CHILD(i).isMultiplication() && CHILD(i).size() > 1) {
for(size_t i2 = 0; i2 < CHILD(i).size(); i2++) {
if(CHILD(i)[i2].isAddition()) {
for(size_t i3 = i + 1; i3 < SIZE; i3++) {
if(CHILD(i3).isMultiplication() && CHILD(i3).size() > 1) {
for(size_t i4 = 0; i4 < CHILD(i3).size(); i4++) {
if(CHILD(i3)[i4].isAddition() && CHILD(i3)[i4] == CHILD(i)[i2]) {
MathStructure *mfac = &CHILD(i)[i2];
mfac->ref();
CHILD(i).delChild(i2 + 1, true);
CHILD(i3).delChild(i4 + 1, true);
CHILD(i3).ref();
CHILD(i).add_nocopy(&CHILD(i3));
CHILD(i).calculateAddLast(eo);
CHILD(i).multiply_nocopy(mfac);
CHILD_UPDATED(i)
delChild(i3 + 1, true);
evalSort(true);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
}
}
if(SIZE > 2) {
MathStructure mtest(*this);
mtest.delChild(i + 1);
if(mtest == CHILD(i)[i2]) {
CHILD(i).delChild(i2 + 1, true);
SET_CHILD_MAP(i);
add(m_one, true);
multiply(mtest);
evalSort(true);
factorize(eo, false, term_combination_levels, 0, only_integers, recursive, endtime_p, force_factorization, complete_square, only_sqrfree, max_factor_degree);
return true;
}
}
}
}
}
}
}
return true;
}
}
}
return false;
}
```
|
Tollerton is a former village in central Alberta, Canada within Yellowhead County. It was located on the former Canadian Northern Railway along the north shore of the McLeod River, southwest of the Town of Edson.
History
Tollerton was established as a division point along the Canadian Northern Railway. Its rail facilities included a train station, a timber water tank, an ice house, a bunk house, a steam-heated engine house, and three rail sidings with capacity to hold 249 cars.
The community incorporated as the Village of Tollerton on December 27, 1913. In 1917, a decision was made to close the Canadian Northern Railway line in favour of the Grand Trunk Pacific Railway through Edson to the north, resulting in the demise of Tollerton. Subsequently, the community dissolved from village status on January 26, 1918.
Demographics
In the 1916 Census of Prairie Provinces, Tollerton had a population of 49, although Alberta Municipal Affairs indicated that it had a population of 180 in the same year.
See also
List of communities in Alberta
List of former urban municipalities in Alberta
List of ghost towns in Alberta
References
External links
Canadian Northern Tollerton Townsite – Atlas of Alberta Railways
Sale of lots for Tollerton townsite, Tollerton, Alberta – Glenbow Museum
View of Tollerton, Alberta – Glenbow Museum
Yellowhead County
Former villages in Alberta
Ghost towns in Alberta
|
Jacira Francisco Mendonca (born 7 January 1986 in Bissau) is a female competition wrestler from Guinea-Bissau. She represented Guinea-Bissau in the 2012 Summer Olympics in London, United Kingdom.
Major results
References
1986 births
Living people
Bissau-Guinean female sport wrestlers
Wrestlers at the 2012 Summer Olympics
Olympic wrestlers for Guinea-Bissau
African Wrestling Championships medalists
|
```javascript
/**
* @license Apache-2.0
*
*
*
* path_to_url
*
* Unless required by applicable law or agreed to in writing, software
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
'use strict';
// MODULES //
var resolve = require( 'path' ).resolve;
var bench = require( '@stdlib/bench' );
var uniform = require( '@stdlib/random/base/uniform' ).factory;
var isnan = require( '@stdlib/math/base/assert/is-nan' );
var pow = require( '@stdlib/math/base/special/pow' );
var Float64Array = require( '@stdlib/array/float64' );
var Uint8Array = require( '@stdlib/array/uint8' );
var tryRequire = require( '@stdlib/utils/try-require' );
var pkg = require( './../package.json' ).name;
// VARIABLES //
var dmsktrunc = tryRequire( resolve( __dirname, './../lib/ndarray.native.js' ) );
var opts = {
'skip': ( dmsktrunc instanceof Error )
};
var rand = uniform( -10.0, 10.0 );
// FUNCTIONS //
/**
* Creates a benchmark function.
*
* @private
* @param {PositiveInteger} len - array length
* @returns {Function} benchmark function
*/
function createBenchmark( len ) {
	var mask;
	var out;
	var src;
	var j;

	// Allocate the strided input, mask, and output arrays:
	src = new Float64Array( len );
	mask = new Uint8Array( len );
	out = new Float64Array( len );

	// Fill the input array with random values on [-10,10):
	for ( j = 0; j < len; j++ ) {
		src[ j ] = rand();
	}
	return benchmark;

	/**
	* Benchmark function.
	*
	* @private
	* @param {Benchmark} b - benchmark instance
	*/
	function benchmark( b ) {
		var res;
		var k;

		b.tic();
		for ( k = 0; k < b.iterations; k++ ) {
			res = dmsktrunc( src.length, src, 1, 0, mask, 1, 0, out, 1, 0 );
			// Cheap sanity check which also prevents dead-code elimination:
			if ( isnan( res[ k%len ] ) ) {
				b.fail( 'should not return NaN' );
			}
		}
		b.toc();
		if ( isnan( res[ k%len ] ) ) {
			b.fail( 'should not return NaN' );
		}
		b.pass( 'benchmark finished' );
		b.end();
	}
}
// MAIN //
/**
* Main execution sequence.
*
* @private
*/
function main() {
	var len;
	var exp;
	var f;

	// Register benchmarks for array lengths 10^1 through 10^6:
	for ( exp = 1; exp <= 6; exp++ ) {
		len = pow( 10, exp );
		f = createBenchmark( len );
		bench( pkg+'::native:ndarray:len='+len, opts, f );
	}
}

main();
```
|
Lucifer Box is a fictional character created by Mark Gatiss.
Appearances
The Vesuvius Club (2004)
The Vesuvius Club: Graphic Edition (2005)
The Devil in Amber (2006)
Black Butterfly (2008)
Persona
Box is a flamboyant, dashing and elegant figure who displays a prominent sense of fashion. A portrait painter to the rich and influential, he uses this as cover for his activities as a secret agent (in the world of the novels, the Royal Academy is the front for Britain's secret services). Box is a charming but ruthless figure, who combines a sardonic sense of humour with a cold determination to kill when necessary.
He lives at Number 9 Downing Street (next door to the Prime Minister, famously at Number 10), and presumably two doors down from the Chancellor of the Exchequer. (Historical note: No. 9 Downing Street is behind the Cabinet Office building at 70 Whitehall and has now been absorbed into that office complex.)
Box is also openly bisexual, and is equally comfortable seducing both male and female partners.
Lucifer Box was Charles Dickens's nickname for his daughter Kate, given to her on account of her fiery temper.
Family
He has a sister called Pandora and a servant called Delilah, and in the third novel, The Black Butterfly, has a son called Christmas.
References
External links
Fictional artists
Fictional bisexual men
Fictional British secret agents
Fictional English people
Fictional secret agents and spies
Fictional socialites
Fictional LGBT characters in literature
|
Dolichomitus is a genus of ichneumon wasps in the family Ichneumonidae. There are at least 85 described species in Dolichomitus. The name is derived from the Greek dolicho, meaning long or narrow, and the Greek mitus, meaning a thread.
Species
These 85 species belong to the genus Dolichomitus:
Dolichomitus aciculatus (Hellen, 1915) c g
Dolichomitus agnoscendus (Roman, 1939) c g
Dolichomitus annulicornis (Cameron, 1886) c g
Dolichomitus atratus (Rudow, 1881) c
Dolichomitus baiamarensis (Constantineanu & Pisica, 1970) c g
Dolichomitus billorum Gauld, 1991 c g
Dolichomitus birnovensis (Constantineanu & Pisica, 1970) c g
Dolichomitus bivittatus Townes, 1975 c g
Dolichomitus buccatus Townes, 1960 c g
Dolichomitus californicus Townes, 1960 c g
Dolichomitus cangrejae Gauld, Ugalde & Hanson, 1998 c g
Dolichomitus cantillanoi Gauld, 1991 c g
Dolichomitus cephalotes (Holmgren, 1860) c
Dolichomitus cognator (Thunberg, 1822) c g
Dolichomitus crassus (Morley, 1913) c
Dolichomitus curticornis (Perkins, 1943) c g
Dolichomitus cuspidatus Townes, 1960 c g
Dolichomitus debilis Sheng, 2002 c g
Dolichomitus diversicostae (Perkins, 1943) c g
Dolichomitus dobrogensis Constantineanu & Pisica, 1970 c g
Dolichomitus dolichosoma (Viereck, 1912) c g
Dolichomitus dux (Tschek, 1869) c g
Dolichomitus elongatus (Uchida, 1928) c g
Dolichomitus excavatus Zwakhals, 2010 g
Dolichomitus feralis (Tosquinet, 1903) c g
Dolichomitus flacissimus Gauld, Ugalde & Hanson, 1998 c g
Dolichomitus flexilis Townes, 1960 c g
Dolichomitus fortis Sheng, 2002 c g
Dolichomitus foxleei Townes, 1960 c g
Dolichomitus garudai Gupta & Tikar, 1976 c g
Dolichomitus grilloi Gauld, 1991 c g
Dolichomitus hypermeces Townes, 1975 c g
Dolichomitus imperator (Kriechbaumer, 1854) c g
Dolichomitus iridipennis (Morley, 1913) c g
Dolichomitus irritator (Fabricius, 1775) c g b
Dolichomitus jiyuanensis Lin, 2005 c g
Dolichomitus khasianus Gupta & Tikar, 1976 c g
Dolichomitus koreanus Lee & Choi g
Dolichomitus kriechbaumeri (Schulz, 1906) c g
Dolichomitus lami g
Dolichomitus lateralis (Wollaston, 1858) c g
Dolichomitus longicauda Smith, 1877 c g
Dolichomitus malaisei Gupta & Tikar, 1976 c g
Dolichomitus mandibularis (Uchida, 1932) c g
Dolichomitus mariajosae Araujo & Pádua, 2020
Dolichomitus maruti Gupta & Tikar, 1976 c g
Dolichomitus matsumurai (Uchida, 1926) c g
Dolichomitus megalourus (Morley, 1914) c g
Dolichomitus meii (Di Giovanni & Sääksjärvi, 2021)
Dolichomitus melanomerus (Vollenhoven, 1878) c g
Dolichomitus menai Araujo & Pádua, 2020
Dolichomitus mesocentrus (Gravenhorst, 1829) c g
Dolichomitus messor (Gravenhorst, 1829) c g
Dolichomitus moacyri Loffredo & Penteado-Dias, 2012 g
Dolichomitus mordator (Aubert, 1965) c g
Dolichomitus mucronatus Constantineanu & Pisica, 1970 c g
Dolichomitus nakamurai (Uchida, 1928) c g
Dolichomitus nigritarsis (Cameron, 1899) c g
Dolichomitus nitidus (Haupt, 1954) c g
Dolichomitus orejuelai Araujo & Pádua, 2020
Dolichomitus pallitibia Baltazar, 1961 c g
Dolichomitus pimmi Araujo & Pádua, 2020
Dolichomitus populneus (Ratzeburg, 1848) c g
Dolichomitus pterelas (Say, 1829) c g b
Dolichomitus pygmaeus (Walsh, 1873) c g b
Dolichomitus quercicolus Zwakhals, 2010 g
Dolichomitus rendoni Araujo & Pádua, 2020
Dolichomitus romanicus Constantineanu & Pisica, 1970 c g
Dolichomitus rufescens (Cresson, 1865) c g
Dolichomitus rufinus Lee & Choi g
Dolichomitus saperdus Wang, 2000 c g
Dolichomitus scutellaris (Thomson, 1877) c g
Dolichomitus sericeus (Hartig, 1847) c g
Dolichomitus shenefelti Baltazar, 1961 c g
Dolichomitus sirenkoi Varga, 2012 g
Dolichomitus songxianicus Sheng, 2004 c g
Dolichomitus speciosus (Hellen, 1915) c g
Dolichomitus splendidus Sheng, 2002 c g
Dolichomitus taeniatus Townes, 1960 c g
Dolichomitus terebrans (Ratzeburg, 1844) c g
Dolichomitus triangustus Wang, 1997 c g
Dolichomitus tuberculatus (Geoffroy, 1785) c g
Dolichomitus vitticrus Townes, 1960 c g
Dolichomitus xanthopodus Gupta & Tikar, 1976 c g
Dolichomitus zonatus (Cresson, 1874) c g
Data sources: i = ITIS, c = Catalogue of Life, g = GBIF, b = Bugguide.net
References
Further reading
External links
Pimplinae
Ichneumonidae genera
|
Birds of Prey, also known in the United States as The Perfect Alibi, is a 1930 British mystery film produced and directed by Basil Dean, from a screenplay he co-wrote with A.A. Milne from Milne's play which was known as The Perfect Alibi in the United States and The Fourth Wall in the United Kingdom. The film stars Dorothy Boyd, Robert Loraine, Warwick Ward, C. Aubrey Smith, and Frank Lawton, and was produced at Beaconsfield Studios by Associated Talking Pictures.
Plot
At his country estate, Arthur Hilton (C. Aubrey Smith) is regaling his dinner guests with his exploits as a police officer decades earlier in Africa. He focuses on a case in which he had to track down a gang of three men who were suspected of a series of murders. He does stumble on them, but they actually end up capturing him. Fortunately, he was able to talk his way out of that predicament, and later on tracked them down again and captured them. One was hanged for his crimes, while the other two were sentenced to twenty years in prison.
Little does Hilton know that two of his dinner guests, Edward Laverick (Warwick Ward) and Edward Carter (Robert Loraine), are the two men he sent to prison. They have vowed revenge, and prior to dinner, they exact it, killing Hilton. However, they have planned it to look like a suicide on the part of the elderly aristocrat. Initially, their plot seems successful, as the local constables who arrive to investigate the incident buy into the suicide scenario.
Hilton's nephew, Jimmy Hilton (Frank Lawton), and his girlfriend Mollie (Dorothy Boyd), who also happens to be the ward of the elder Hilton, become suspicious of the story told by the two men, and begin their own investigation. Their questioning leads them to arrive at the truth, and the two murderers are apprehended.
Cast
Dorothy Boyd as Mollie Cunningham
Nigel Bruce as Major Fothergill
Audrey Carten as Jane
David Hawthorne as Sergeant Joe Mallet
Ellis Jeffreys as Elizabeth Green
Robert Loraine as Edward Pontifex Carter
Frank Lawton as Jimmy Hilton
Tom Reynolds as Police Constable Mallet
C. Aubrey Smith as Arthur Hilton
Warwick Ward as Edward Laverick
Jack Hawkins as Alfred
(Cast list as per AFI and BFI databases)
Reception
Mordaunt Hall of The New York Times gave the film a lukewarm review, praising several of the players, particularly Smith and Ward, while questioning the abilities of some of the other actors, such as Loraine. He felt the direction was uneven, stating the overall production, "may not be endowed with imaginative direction, but, because of the author's intriguing story and C. Aubrey Smith's excellent performance, it succeeds in being an entertaining study of a cool, calculating murderer".
Notes
Rupert Downing also contributed to the screenplay.
The film's art direction was by Clifford Pember.
Jack Hawkins made his screen debut in this film.
The play on which this film is based was produced in London at the Haymarket Theatre in 1928. The play, under the title, The Perfect Alibi, was produced on Broadway at the Charles Hopkins Theatre from November 1928 through July 1929.
References
Bibliography
Low, Rachael. Filmmaking in 1930s Britain. George Allen & Unwin, 1985.
Perry, George. Forever Ealing. Pavilion Books, 1994.
Wood, Linda. British Films, 1927-1939. British Film Institute, 1986.
External links
1930 films
British mystery drama films
British crime drama films
1930s mystery drama films
1930 crime drama films
1930s English-language films
British films based on plays
Films based on works by A. A. Milne
Films directed by Basil Dean
Films set in England
Films shot in Buckinghamshire
Associated Talking Pictures
Films shot at Beaconsfield Studios
RKO Pictures films
American mystery drama films
American black-and-white films
British black-and-white films
1930s American films
1930s British films
|
```html
{% extends "!layout.html" -%}
{# Refer to path_to_url #}
{%- block document %}
{#
Adds a warning admonition on the 'latest' (development) version of the docs.

The warning is only rendered on Read the Docs builds (READTHEDOCS) when the
built version is 'latest' (polybar_is_latest — presumably injected by the
Sphinx configuration; verify in conf.py). Pages under the 'dev/' folder are
excluded, since for those the 'latest' version is usually the most
up-to-date documentation available.
#}
{% if READTHEDOCS and polybar_is_latest and not pagename.startswith('dev/') %}
<div class="admonition important">
<p class="admonition-title">Development Version</p>
<p>
This is the <code class="docutils literal notranslate"><span class="pre">latest</span></code>
(unstable) version of this documentation, which may document features
not available in or compatible with released stable versions of polybar.
</p>
<p>
See the <a href="path_to_url{{ pagename }}.html">stable version</a> of this documentation page instead.
</p>
</div>
{% endif %}
{# Render the page body produced by the parent layout: #}
{{ super() }}
{%- endblock %}
```
|
In the Middle Ages, hierocracy or papalism was a current of Latin legal and political thought that argued that the pope held supreme authority over not just spiritual, but also temporal affairs. In its full, late medieval form, hierocratic theory posited that since Christ was lord of the universe and both king and priest, and the pope was his earthly vicar, the pope must also possess both spiritual and temporal authority over everybody in the world. Papalist writers at the turn of the 14th century such as Augustinus Triumphus and Giles of Rome depicted secular government as a product of human sinfulness that originated, by necessity, in tyrannical usurpation, and could be redeemed only by submission to the superior spiritual sovereignty of the pope. At the head of the Catholic Church, responsible to no other jurisdiction except God, the pope, they argued, was the monarch of a universal kingdom whose power extended to Christians and non-Christians alike.
The hierocrats limited their extensive conception of the pope's authority by acknowledging that the day-to-day exercise of temporal power belonged, in general, to secular princes, albeit under the guidance of the pope. Hierocracy was criticised at the time from a pro-royal perspective by John of Paris, in defence of the universal monarchy of the Holy Roman emperor by Dante Alighieri, and by critics of papal supremacy over the Catholic Church itself such as Marsilius of Padua. Though hierocracy continued to influence papal policy until the time of the Reformation, by the 17th century the doctrine was generally rejected by Catholic theologians.
History
Medieval hierocratic theory originated with the Gregorian Reform of the mid-11th century. Since the pontificate of Gregory I at the start of the 7th century, successive popes had rarely felt the need to assert the principle of papal primacy explicitly. The growing frequency of papal intervention in church government incentivised medieval canonists to clarify the relationship between the pope and the bishops, and by the 11th century this articulation of papal primacy had begun to extend to the pope's authority in the secular sphere as well. In his 1075 Dictatus papae, Pope Gregory VII gave the principle a detailed legal form that sought to translate the abstract theory of primacy into concrete government policy. Once the pope's internal monarchy within the church itself had been firmly established under Pope Innocent III at the beginning of the 13th century, the canonists could direct their attention further towards temporal affairs.
The mid-13th century elaboration by the canonist Hostiensis of the pope's plenitudo potestatis, "plenitude of power", was an important milestone in the development of hierocratic theory. Hostiensis noted the traditional Christian argument that all political power derived from God, but argued that the pope had a special status: as God's vicar, the pope, when he exercised his office and did not sin, acted as God. The pope's power was "miraculous"; he could issue dispensations at will from any positive law, rendering bastards legitimate, for example, and restoring the reputation of the infamous. Pope Innocent IV, who reigned from 1243 to 1254, gave papal policy an increasingly hierocratic direction by asserting that the pope had the right to elect a monarch himself if any Christian kingdom should fall vacant.
Hierocratic ideas developed further at the time of the struggle between Pope Boniface VIII and Philip IV of France, and received their ultimate official formulation in Boniface's 1302 bull Unam sanctam, which pronounced that the "spiritual power has to institute the earthly power and judge it" and that "it is entirely necessary for salvation that every human creature be subject to the Roman pontiff". Elaborate expositions of the hierocratic theory were composed at around the same time, such as Giles of Rome's De ecclesiastica potestate ("On Ecclesiastical Power") in 1301 and James of Viterbo's De regimine Christiano ("On Christian Government") in 1302. Boniface's eventual defeat dealt a blow to hierocratic aspirations from which they would not recover; nonetheless, hierocratic theory continued to influence the papacy well into the 16th century, as shown by the Fifth Lateran Council's republishing of Unam sanctam shortly before the Reformation and Pope Pius V's attempt to depose Elizabeth I of England in his 1570 bull Regnans in excelsis.
Despite their sweeping conception of the authority of the papal office, the hierocratic theorists did not believe that the pope should, in the ordinary course of events, directly exercise temporal power himself. Though inferior to the pope, they held that the office of the secular prince was nonetheless ordained by God, and the pope's temporal authority was to be exercised indirectly through his guidance and direction of princes. The hierocratic canonist Augustinus Triumphus, in his 1326 Summa de potestate ecclesiastica ("Summary Account of Ecclesiastical Power"), argued that the pope had universal jurisdiction in both temporal and spiritual matters across the whole world, but his immediate temporal administration extended only to the lands then believed to "have been granted" to him by the Donation of Constantine. Originally this was the Western Roman Empire, but when later monarchs arose and unjustly carved out territories for themselves, the pope had chosen to suffer their claims to sovereignty to avoid schism among the faithful, and subsequently limited his administration in practice to Italy—without, however, renouncing any rights.
Critiques
Medieval opposition to hierocracy, insisting on a clear separation of temporal and spiritual power, is often termed "dualism": in practice hierocratic and dualist positions often overlapped, with hierocrats acknowledging the distinct authority of secular princes while dualists accepted the pope's overall leadership of the Christian community.
Hierocracy was critiqued by other medieval writers on a number of fronts. Writing in the context of the dispute between Boniface and Philip of France, John of Paris argued in his 1303 De potestate regia et papali ("On Royal and Papal Power") that Christ's kingship was not of this world, and could not be interpreted as temporal jurisdiction. Moreover, while spiritual authority was united in the church and its steward the pope, political authority was naturally plural. In his Monarchia, composed roughly around 1310, Dante Alighieri adopted a different line of attack, defending the universal authority of the Holy Roman emperor: it was against nature for the church to exercise temporal power, but also for political authority to be divided. Marsilius of Padua, in his 1324 Defensor pacis ("The Defender of the Peace"), rejected the entire basis of the papacy as a divinely sanctioned office, arguing that it was a political office like any other and that the pope's illegitimate claims to universal authority were a cause of civil discord.
Hierocratic arguments were discredited in later Catholic theology. Cardinal Robert Bellarmine attacked the hierocratic conception in his 1610 work De potestate summi pontificis ("On the Power of the Supreme Pontiff") on the basis that the pope's duties descended from Christ as a mortal man, not as God, and the Jesuit theologian Francisco Suárez argued around the same time that the pope could not infringe, even in extraordinary cases, on the supremacy of secular sovereigns in worldly affairs. Hierocracy had very few notable supporters by the early 17th century, and these critiques were concerned less with refuting it as a live position than with reassuring secular rulers that Catholicism would not undermine their authority.
See also
Ultramontanism
Decretalist
Guelphs and Ghibellines
Sun and Moon allegory
Alvarus Pelagius
Doctrine of the two swords
Notes
References
Sources
Further reading
Catholicism in the Middle Ages
History of the papacy
Medieval philosophy
Papal primacy
Catholicism and politics
Canon law history
|
"Little Children" is a song written by J. Leslie McFarland and Mort Shuman, and was recorded by Billy J. Kramer & the Dakotas.
Background
The lyric concerns a man's entreaties to his girlfriend's young siblings not to reveal his courtship of their elder sister and to leave them alone, at some points, even bribing them with things like "candy and a quarter" and "a movie", on the condition that they "keep a secret". As such, it was a departure from the traditional love songs previously recorded by Kramer (sometimes supplied by Lennon & McCartney). When offered another Lennon and McCartney song, "One and One Is Two", for his next single by the manager of both groups, Brian Epstein, Kramer turned it down and chose "Little Children" instead, after a search for suitable material from music publishers.
Chart performance
"Little Children" reached No. 1 in the UK Singles Chart in March 1964, and No. 7 in the US Hot 100 singles chart later the same year. The B-side of "Little Children" in the U.S., "Bad to Me" (which had previously been an A-side in the UK and which made No. 1 there in August 1963) peaked at No. 9 on the US charts simultaneously to the success of "Little Children".
References
Billy J. Kramer songs
1964 singles
UK Singles Chart number-one singles
Song recordings produced by George Martin
Songs with music by Mort Shuman
1964 songs
Parlophone singles
Songs written by John Leslie McFarland
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.