language stringclasses 1
value | repo stringclasses 346
values | path stringlengths 6 201 | class_span dict | source stringlengths 21 2.38M | target stringlengths 1 96 |
|---|---|---|---|---|---|
python | wandb__wandb | wandb/sdk/artifacts/storage_policies/_multipart.py | {
"start": 1424,
"end": 2211
} | class ____:
"""Signal the end of the multipart chunk queue.
Queue consumers terminate when they receive this item from the queue. Do
not instantiate this class directly; use the `END_CHUNK` constant as a
pseudo-singleton instead.
NOTE: Use this only in multi-threaded (not multi-process) contexts because
it is not guaranteed to be process-safe.
"""
def __repr__(self) -> str:
return "ChunkSentinel"
END_CHUNK: Final[_ChunkSentinel] = _ChunkSentinel()
def is_end_chunk(obj: Any) -> TypeIs[_ChunkSentinel]:
"""Returns True if the object is the terminal queue item for multipart downloads."""
# Needed for type checking, since _ChunkSentinel isn't formally a singleton.
return obj is END_CHUNK
@dataclass(frozen=True)
| _ChunkSentinel |
python | huggingface__transformers | tests/models/sam/test_modeling_sam.py | {
"start": 12308,
"end": 18254
} | class ____:
def __init__(
self,
parent,
hidden_size=36,
intermediate_size=72,
projection_dim=62,
output_channels=32,
num_hidden_layers=2,
num_attention_heads=4,
num_channels=3,
image_size=24,
patch_size=2,
hidden_act="gelu",
layer_norm_eps=1e-06,
dropout=0.0,
attention_dropout=0.0,
initializer_range=0.02,
initializer_factor=1.0,
qkv_bias=True,
mlp_ratio=4.0,
use_abs_pos=True,
use_rel_pos=True,
rel_pos_zero_init=False,
window_size=14,
global_attn_indexes=[2, 5, 8, 11],
num_pos_feats=16,
mlp_dim=None,
batch_size=2,
):
self.parent = parent
self.image_size = image_size
self.patch_size = patch_size
self.output_channels = output_channels
self.num_channels = num_channels
self.hidden_size = hidden_size
self.projection_dim = projection_dim
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.dropout = dropout
self.attention_dropout = attention_dropout
self.initializer_range = initializer_range
self.initializer_factor = initializer_factor
self.hidden_act = hidden_act
self.layer_norm_eps = layer_norm_eps
self.qkv_bias = qkv_bias
self.mlp_ratio = mlp_ratio
self.use_abs_pos = use_abs_pos
self.use_rel_pos = use_rel_pos
self.rel_pos_zero_init = rel_pos_zero_init
self.window_size = window_size
self.global_attn_indexes = global_attn_indexes
self.num_pos_feats = num_pos_feats
self.mlp_dim = mlp_dim
self.batch_size = batch_size
# in ViT, the seq length equals the number of patches + 1 (we add 1 for the [CLS] token)
num_patches = (image_size // patch_size) ** 2
self.seq_length = num_patches + 1
self.prompt_encoder_tester = SamPromptEncoderTester()
self.mask_decoder_tester = SamMaskDecoderTester()
def prepare_config_and_inputs(self):
pixel_values = floats_tensor([self.batch_size, self.num_channels, self.image_size, self.image_size])
config = self.get_config()
return config, pixel_values
def get_config(self):
vision_config = SamVisionConfig(
image_size=self.image_size,
patch_size=self.patch_size,
num_channels=self.num_channels,
hidden_size=self.hidden_size,
projection_dim=self.projection_dim,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=self.num_attention_heads,
intermediate_size=self.intermediate_size,
dropout=self.dropout,
attention_dropout=self.attention_dropout,
initializer_range=self.initializer_range,
initializer_factor=self.initializer_factor,
output_channels=self.output_channels,
qkv_bias=self.qkv_bias,
mlp_ratio=self.mlp_ratio,
use_abs_pos=self.use_abs_pos,
use_rel_pos=self.use_rel_pos,
rel_pos_zero_init=self.rel_pos_zero_init,
window_size=self.window_size,
global_attn_indexes=self.global_attn_indexes,
num_pos_feats=self.num_pos_feats,
mlp_dim=self.mlp_dim,
)
prompt_encoder_config = self.prompt_encoder_tester.get_config()
mask_decoder_config = self.mask_decoder_tester.get_config()
return SamConfig(
vision_config=vision_config,
prompt_encoder_config=prompt_encoder_config,
mask_decoder_config=mask_decoder_config,
)
def create_and_check_model(self, config, pixel_values):
model = SamModel(config=config)
model.to(torch_device)
model.eval()
with torch.no_grad():
result = model(pixel_values)
self.parent.assertEqual(result.iou_scores.shape, (self.batch_size, 1, 3))
self.parent.assertEqual(result.pred_masks.shape[:3], (self.batch_size, 1, 3))
def create_and_check_get_image_features(self, config, pixel_values):
model = SamModel(config=config)
model.to(torch_device)
model.eval()
with torch.no_grad():
result = model.get_image_embeddings(pixel_values)
self.parent.assertEqual(result[0].shape, (self.output_channels, 12, 12))
def create_and_check_get_image_hidden_states(self, config, pixel_values):
model = SamModel(config=config)
model.to(torch_device)
model.eval()
with torch.no_grad():
result = model.vision_encoder(
pixel_values,
output_hidden_states=True,
return_dict=True,
)
# after computing the convolutional features
expected_hidden_states_shape = (self.batch_size, 12, 12, 36)
self.parent.assertEqual(len(result[1]), self.num_hidden_layers + 1)
self.parent.assertEqual(result[1][0].shape, expected_hidden_states_shape)
with torch.no_grad():
result = model.vision_encoder(
pixel_values,
output_hidden_states=True,
return_dict=False,
)
# after computing the convolutional features
expected_hidden_states_shape = (self.batch_size, 12, 12, 36)
self.parent.assertEqual(len(result[1]), self.num_hidden_layers + 1)
self.parent.assertEqual(result[1][0].shape, expected_hidden_states_shape)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, pixel_values = config_and_inputs
inputs_dict = {"pixel_values": pixel_values}
return config, inputs_dict
@require_torch
| SamModelTester |
python | tornadoweb__tornado | tornado/test/web_test.py | {
"start": 72977,
"end": 73528
} | class ____(SimpleHandlerTestCase):
class Handler(RequestHandler):
def get(self):
self.clear_all_cookies()
self.write("ok")
def test_clear_all_cookies(self):
response = self.fetch("/", headers={"Cookie": "foo=bar; baz=xyzzy"})
set_cookies = sorted(response.headers.get_list("Set-Cookie"))
# Python 3.5 sends 'baz="";'; older versions use 'baz=;'
self.assertTrue(set_cookies[0].startswith('baz="";'))
self.assertTrue(set_cookies[1].startswith('foo="";'))
| ClearAllCookiesTest |
python | huggingface__transformers | tests/models/hunyuan_v1_dense/test_modeling_hunyuan_v1_dense.py | {
"start": 1190,
"end": 1596
} | class ____(CausalLMModelTest, unittest.TestCase):
model_tester_class = HunYuanDenseV1ModelTester
def is_pipeline_test_to_skip(
self,
pipeline_test_case_name,
config_class,
model_architecture,
tokenizer_name,
image_processor_name,
feature_extractor_name,
processor_name,
):
return True
@require_torch
| HunYuanDenseV1ModelTest |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/super6.py | {
"start": 637,
"end": 876
} | class ____(FirstLevelMeta):
def __new__(cls, name: str, bases, dct):
new_class = super().__new__(cls, name, bases, dct)
reveal_type(new_class, expected_text="Self@SecondLevelMeta")
return new_class
| SecondLevelMeta |
python | huggingface__transformers | src/transformers/models/glm/modeling_glm.py | {
"start": 12932,
"end": 13651
} | class ____(nn.Module):
def __init__(self, hidden_size, eps=1e-6):
"""
GlmRMSNorm is equivalent to T5LayerNorm
"""
super().__init__()
self.weight = nn.Parameter(torch.ones(hidden_size))
self.variance_epsilon = eps
def forward(self, hidden_states):
input_dtype = hidden_states.dtype
hidden_states = hidden_states.to(torch.float32)
variance = hidden_states.pow(2).mean(-1, keepdim=True)
hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
return self.weight * hidden_states.to(input_dtype)
def extra_repr(self):
return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"
| GlmRMSNorm |
python | django__django | tests/model_package/tests.py | {
"start": 214,
"end": 382
} | class ____(models.Model):
customer = models.CharField(max_length=100)
publications = models.ManyToManyField("model_package.Publication", blank=True)
| Advertisement |
python | milvus-io__pymilvus | pymilvus/bulk_writer/constants.py | {
"start": 3569,
"end": 3711
} | class ____(IntEnum):
NUMPY = 1
NPY = 1 # deprecated
JSON = 2
JSON_RB = 2 # deprecated
PARQUET = 3
CSV = 4
| BulkFileType |
python | getsentry__sentry | src/sentry/models/groupowner.py | {
"start": 1185,
"end": 1280
} | class ____(Enum):
SUSPECT_COMMIT = 0
OWNERSHIP_RULE = 1
CODEOWNERS = 2
| GroupOwnerType |
python | bokeh__bokeh | src/bokeh/events.py | {
"start": 7587,
"end": 8097
} | class ____(Event):
''' Base class for all Bokeh Model events.
This base class is not typically useful to instantiate on its own.
'''
model: Model | None
def __init__(self, model: Model | None) -> None:
''' Create a new base event.
Args:
model (Model) : a Bokeh model to register event callbacks on
'''
self.model = model
def event_values(self) -> dict[str, Any]:
return dict(**super().event_values(), model=self.model)
| ModelEvent |
python | falconry__falcon | falcon/testing/resource.py | {
"start": 4349,
"end": 7852
} | class ____:
"""Mock resource for functional testing of framework components.
This class implements a simple test resource that can be extended
as needed to test middleware, hooks, and the Falcon framework
itself.
Only noop ``on_get()`` and ``on_post()`` responders are implemented;
when overriding these, or adding additional responders in child
classes, they can be decorated with the
:meth:`falcon.testing.capture_responder_args` hook in
order to capture the *req*, *resp*, and *params* arguments that
are passed to the responder. Responders may also be decorated with
the :meth:`falcon.testing.set_resp_defaults` hook in order to
set *resp* properties to default *status*, *body*, and *header*
values.
Keyword Arguments:
status (str): Default status string to use in responses
body (str): Default body string to use in responses
json (JSON serializable): Default JSON document to use in responses.
Will be serialized to a string and encoded as UTF-8. Either
*json* or *body* may be specified, but not both.
headers (dict): Default set of additional headers to include in
responses
"""
captured_req: wsgi.Request | asgi.Request | None
"""The last Request object passed into any one of the responder methods."""
captured_resp: wsgi.Response | asgi.Response | None
"""The last Response object passed into any one of the responder methods."""
captured_kwargs: typing.Any | None
"""The last dictionary of kwargs, beyond ``req`` and ``resp``, that were
passed into any one of the responder methods."""
captured_req_media: typing.Any | None
"""The last Request media provided to any one of the responder methods.
This value is only captured when the ``'capture-req-media'`` header is
set on the request.
"""
captured_req_body: bytes | None
"""The last Request body provided to any one of the responder methods.
This value is only captured when the ``'capture-req-body-bytes'`` header is
set on the request. The value of the header is the number of bytes to read.
"""
def __init__(
self,
status: str | None = None,
body: str | None = None,
json: dict[str, str] | None = None,
headers: HeaderArg | None = None,
) -> None:
self._default_status = status
self._default_headers = headers
if json is not None:
if body is not None:
msg = 'Either json or body may be specified, but not both'
raise ValueError(msg)
self._default_body: str | None = json_dumps(json, ensure_ascii=False)
else:
self._default_body = body
self.captured_req = None
self.captured_resp = None
self.captured_kwargs = None
self.captured_req_media = None
self.captured_req_body = None
@property
def called(self) -> bool:
"""Whether or not a req/resp was captured."""
return self.captured_req is not None
@falcon.before(capture_responder_args)
@falcon.before(set_resp_defaults)
def on_get(
self, req: wsgi.Request, resp: wsgi.Response, **kwargs: typing.Any
) -> None:
pass
@falcon.before(capture_responder_args)
@falcon.before(set_resp_defaults)
def on_post(
self, req: wsgi.Request, resp: wsgi.Response, **kwargs: typing.Any
) -> None:
pass
| SimpleTestResource |
python | eventlet__eventlet | eventlet/green/http/cookies.py | {
"start": 18623,
"end": 23727
} | class ____(dict):
"""A container class for a set of Morsels."""
def value_decode(self, val):
"""real_value, coded_value = value_decode(STRING)
Called prior to setting a cookie's value from the network
representation. The VALUE is the value read from HTTP
header.
Override this function to modify the behavior of cookies.
"""
return val, val
def value_encode(self, val):
"""real_value, coded_value = value_encode(VALUE)
Called prior to setting a cookie's value from the dictionary
representation. The VALUE is the value being assigned.
Override this function to modify the behavior of cookies.
"""
strval = str(val)
return strval, strval
def __init__(self, input=None):
if input:
self.load(input)
def __set(self, key, real_value, coded_value):
"""Private method for setting a cookie's value"""
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
def __setitem__(self, key, value):
"""Dictionary style assignment."""
if isinstance(value, Morsel):
# allow assignment of constructed Morsels (e.g. for pickling)
dict.__setitem__(self, key, value)
else:
rval, cval = self.value_encode(value)
self.__set(key, rval, cval)
def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
"""Return a string suitable for HTTP."""
result = []
items = sorted(self.items())
for key, value in items:
result.append(value.output(attrs, header))
return sep.join(result)
__str__ = output
def __repr__(self):
l = []
items = sorted(self.items())
for key, value in items:
l.append('%s=%s' % (key, repr(value.value)))
return '<%s: %s>' % (self.__class__.__name__, _spacejoin(l))
def js_output(self, attrs=None):
"""Return a string suitable for JavaScript."""
result = []
items = sorted(self.items())
for key, value in items:
result.append(value.js_output(attrs))
return _nulljoin(result)
def load(self, rawdata):
"""Load cookies from a string (presumably HTTP_COOKIE) or
from a dictionary. Loading cookies from a dictionary 'd'
is equivalent to calling:
map(Cookie.__setitem__, d.keys(), d.values())
"""
if isinstance(rawdata, str):
self.__parse_string(rawdata)
else:
# self.update() wouldn't call our custom __setitem__
for key, value in rawdata.items():
self[key] = value
return
def __parse_string(self, str, patt=_CookiePattern):
i = 0 # Our starting point
n = len(str) # Length of string
parsed_items = [] # Parsed (type, key, value) triples
morsel_seen = False # A key=value pair was previously encountered
TYPE_ATTRIBUTE = 1
TYPE_KEYVALUE = 2
# We first parse the whole cookie string and reject it if it's
# syntactically invalid (this helps avoid some classes of injection
# attacks).
while 0 <= i < n:
# Start looking for a cookie
match = patt.match(str, i)
if not match:
# No more cookies
break
key, value = match.group("key"), match.group("val")
i = match.end(0)
if key[0] == "$":
if not morsel_seen:
# We ignore attributes which pertain to the cookie
# mechanism as a whole, such as "$Version".
# See RFC 2965. (Does anyone care?)
continue
parsed_items.append((TYPE_ATTRIBUTE, key[1:], value))
elif key.lower() in Morsel._reserved:
if not morsel_seen:
# Invalid cookie string
return
if value is None:
if key.lower() in Morsel._flags:
parsed_items.append((TYPE_ATTRIBUTE, key, True))
else:
# Invalid cookie string
return
else:
parsed_items.append((TYPE_ATTRIBUTE, key, _unquote(value)))
elif value is not None:
parsed_items.append((TYPE_KEYVALUE, key, self.value_decode(value)))
morsel_seen = True
else:
# Invalid cookie string
return
# The cookie string is valid, apply it.
M = None # current morsel
for tp, key, value in parsed_items:
if tp == TYPE_ATTRIBUTE:
assert M is not None
M[key] = value
else:
assert tp == TYPE_KEYVALUE
rval, cval = value
self.__set(key, rval, cval)
M = self[key]
| BaseCookie |
python | encode__django-rest-framework | tests/test_versioning.py | {
"start": 650,
"end": 795
} | class ____(APIView):
def get(self, request, *args, **kwargs):
return Response({'url': reverse('another', request=request)})
| ReverseView |
python | pyca__cryptography | src/cryptography/hazmat/decrepit/ciphers/modes.py | {
"start": 368,
"end": 795
} | class ____(ModeWithInitializationVector):
name = "OFB"
def __init__(self, initialization_vector: utils.Buffer):
utils._check_byteslike("initialization_vector", initialization_vector)
self._initialization_vector = initialization_vector
@property
def initialization_vector(self) -> utils.Buffer:
return self._initialization_vector
validate_for_algorithm = _check_iv_and_key_length
| OFB |
python | ray-project__ray | python/ray/tests/autoscaler/util.py | {
"start": 122,
"end": 2422
} | class ____(unittest.TestCase):
def setUp(self):
# Create a mock LoadMetricsSummary object with the required attributes
lm_summary_mock_data = {
"e9919752e5e8d757765d97d8bec910a2e78e8826f20bce46fd58f92e": {
"node:172.31.6.57": [0.0, 1.0],
"object_store_memory": [0.0, 13984228147.0],
"memory": [0.0, 27968456295.0],
"node:__internal_head__": [0.0, 1.0],
"CPU": [0.0, 8.0],
}
}
self.lm_summary_mock = Mock()
self.lm_summary_mock.usage_by_node = lm_summary_mock_data
def test_get_per_node_breakdown_as_dict(self):
result = get_per_node_breakdown_as_dict(self.lm_summary_mock)
expected_output = {
"e9919752e5e8d757765d97d8bec910a2e78e8826f20bce46fd58f92e": (
"0.0/8.0 CPU\n0B/26.05GiB memory\n0B/13.02GiB object_store_memory"
)
}
self.assertEqual(result, expected_output)
def test_get_per_node_breakdown_as_dict_empty_summary(self):
# Test with an empty lm_summary
lm_summary_mock_data = {}
self.lm_summary_mock.usage_by_node = lm_summary_mock_data
result = get_per_node_breakdown_as_dict(self.lm_summary_mock)
expected_output = {}
self.assertEqual(result, expected_output)
def test_get_per_node_breakdown_as_dict_missing_usage(self):
# Test with missing usage data for a node
lm_summary_mock_data = {
"e9919752e5e8d757765d97d8bec910a2e78e8826f20bce46fd58f92e": {
"node:172.31.6.57": [0.0, 1.0],
"object_store_memory": [0.0, 13984228147.0],
# 'memory': [0.0, 27968456295.0], # Missing memory data
"node:__internal_head__": [0.0, 1.0],
"CPU": [0.0, 8.0],
}
}
self.lm_summary_mock.usage_by_node = lm_summary_mock_data
result = get_per_node_breakdown_as_dict(self.lm_summary_mock)
expected_output = {
"e9919752e5e8d757765d97d8bec910a2e78e8826f20bce46fd58f92e": "0.0/8.0 CPU\n"
"0B/13.02GiB object_store_memory"
}
self.assertEqual(result, expected_output)
if __name__ == "__main__":
unittest.main()
| TestGetPerNodeBreakdown |
python | doocs__leetcode | solution/1800-1899/1815.Maximum Number of Groups Getting Fresh Donuts/Solution.py | {
"start": 0,
"end": 616
} | class ____:
def maxHappyGroups(self, batchSize: int, groups: List[int]) -> int:
@cache
def dfs(state, mod):
res = 0
x = int(mod == 0)
for i in range(1, batchSize):
if state >> (i * 5) & 31:
t = dfs(state - (1 << (i * 5)), (mod + i) % batchSize)
res = max(res, t + x)
return res
state = ans = 0
for v in groups:
i = v % batchSize
ans += i == 0
if i:
state += 1 << (i * 5)
ans += dfs(state, 0)
return ans
| Solution |
python | pandas-dev__pandas | pandas/core/arrays/floating.py | {
"start": 1721,
"end": 4275
} | class ____(NumericArray):
"""
Array of floating (optional missing) values.
.. warning::
FloatingArray is currently experimental, and its API or internal
implementation may change without warning. Especially the behaviour
regarding NaN (distinct from NA missing values) is subject to change.
We represent a FloatingArray with 2 numpy arrays:
- data: contains a numpy float array of the appropriate dtype
- mask: a boolean array holding a mask on the data, True is missing
To construct a FloatingArray from generic array-like input, use
:func:`pandas.array` with one of the float dtypes (see examples).
See :ref:`integer_na` for more.
Parameters
----------
values : numpy.ndarray
A 1-d float-dtype array.
mask : numpy.ndarray
A 1-d boolean-dtype array indicating missing values.
copy : bool, default False
Whether to copy the `values` and `mask`.
Attributes
----------
None
Methods
-------
None
Returns
-------
FloatingArray
See Also
--------
array : Create an array.
Float32Dtype : Float32 dtype for FloatingArray.
Float64Dtype : Float64 dtype for FloatingArray.
Series : One-dimensional labeled array capable of holding data.
DataFrame : Two-dimensional, size-mutable, potentially heterogeneous tabular data.
Examples
--------
Create a FloatingArray with :func:`pandas.array`:
>>> pd.array([0.1, None, 0.3], dtype=pd.Float32Dtype())
<FloatingArray>
[0.1, <NA>, 0.3]
Length: 3, dtype: Float32
String aliases for the dtypes are also available. They are capitalized.
>>> pd.array([0.1, None, 0.3], dtype="Float32")
<FloatingArray>
[0.1, <NA>, 0.3]
Length: 3, dtype: Float32
"""
_dtype_cls = FloatingDtype
_dtype_docstring = """
An ExtensionDtype for {dtype} data.
This dtype uses ``pd.NA`` as missing value indicator.
Attributes
----------
None
Methods
-------
None
See Also
--------
CategoricalDtype : Type for categorical data with the categories and orderedness.
IntegerDtype : An ExtensionDtype to hold a single size & kind of integer dtype.
StringDtype : An ExtensionDtype for string data.
Examples
--------
For Float32Dtype:
>>> ser = pd.Series([2.25, pd.NA], dtype=pd.Float32Dtype())
>>> ser.dtype
Float32Dtype()
For Float64Dtype:
>>> ser = pd.Series([2.25, pd.NA], dtype=pd.Float64Dtype())
>>> ser.dtype
Float64Dtype()
"""
# create the Dtype
@register_extension_dtype
@set_module("pandas")
| FloatingArray |
python | pytorch__pytorch | test/dynamo/cpython/3_13/test_generators.py | {
"start": 2055,
"end": 2654
} | class ____(__TestCase):
def generator1(self):
return (yield from self.generator2())
def generator2(self):
try:
yield
except KeyboardInterrupt:
return "PASSED"
else:
return "FAILED"
def test_raise_and_yield_from(self):
gen = self.generator1()
gen.send(None)
try:
_testcapi.raise_SIGINT_then_send_None(gen)
except BaseException as _exc:
exc = _exc
self.assertIs(type(exc), StopIteration)
self.assertEqual(exc.value, "PASSED")
| SignalAndYieldFromTest |
python | PyCQA__pylint | doc/data/messages/i/invalid-metaclass/good.py | {
"start": 24,
"end": 53
} | class ____(Plant):
pass
| Apple |
python | spack__spack | lib/spack/spack/modules/lmod.py | {
"start": 15823,
"end": 18539
} | class ____(BaseContext):
"""Context class for lmod module files."""
@tengine.context_property
def has_modulepath_modifications(self):
"""True if this module modifies MODULEPATH, False otherwise."""
return bool(self.conf.provides)
@tengine.context_property
def has_conditional_modifications(self):
"""True if this module modifies MODULEPATH conditionally to the
presence of other services in the environment, False otherwise.
"""
# In general we have conditional modifications if we have modifications
# and we are not providing **only** a compiler
provides = self.conf.provides
provide_compiler_only = "compiler" in provides and len(provides) == 1
has_modifications = self.has_modulepath_modifications
return has_modifications and not provide_compiler_only
@tengine.context_property
def name_part(self):
"""Name of this provider."""
return self.spec.name
@tengine.context_property
def version_part(self):
"""Version of this provider."""
s = self.spec
return "-".join([str(s.version), s.dag_hash(length=7)])
@tengine.context_property
def provides(self):
"""Returns the dictionary of provided services."""
return self.conf.provides
@tengine.context_property
def missing(self):
"""Returns a list of missing services."""
return self.conf.missing
@tengine.context_property
@lang.memoized
def unlocked_paths(self):
"""Returns the list of paths that are unlocked unconditionally."""
layout = make_layout(self.spec, self.conf.name)
return [os.path.join(*parts) for parts in layout.unlocked_paths[None]]
@tengine.context_property
def conditionally_unlocked_paths(self):
"""Returns the list of paths that are unlocked conditionally.
Each item in the list is a tuple with the structure (condition, path).
"""
layout = make_layout(self.spec, self.conf.name)
value = []
conditional_paths = layout.unlocked_paths
conditional_paths.pop(None)
for services_needed, list_of_path_parts in conditional_paths.items():
condition = " and ".join([x + "_name" for x in services_needed])
for parts in list_of_path_parts:
def manipulate_path(token):
if token in self.conf.hierarchy_tokens:
return "{0}_name, {0}_version".format(token)
return '"' + token + '"'
path = ", ".join([manipulate_path(x) for x in parts])
value.append((condition, path))
return value
| LmodContext |
python | nedbat__coveragepy | tests/modules/plugins/a_plugin.py | {
"start": 182,
"end": 358
} | class ____(CoveragePlugin):
pass
def coverage_init(
reg: Plugins,
options: Any, # pylint: disable=unused-argument
) -> None:
reg.add_file_tracer(Plugin())
| Plugin |
python | PrefectHQ__prefect | tests/test_tasks.py | {
"start": 19055,
"end": 25679
} | class ____:
def test_raises_outside_of_flow(self):
@task
def foo(x):
return x
with pytest.raises(RuntimeError):
foo.submit(1)
async def test_sync_task_submitted_inside_sync_flow(self):
@task
def foo(x):
return x
@flow
def bar():
future = foo.submit(1)
assert isinstance(future, PrefectFuture)
return future
task_state = bar()
assert await task_state.result() == 1
async def test_sync_task_with_return_state_true(self):
@task
def foo(x):
return x
@flow
def bar():
state = foo.submit(1, return_state=True)
assert isinstance(state, State)
return state
task_state = bar()
assert await task_state.result() == 1
async def test_async_task_with_return_state_true(self):
@task
async def foo(x):
return x
@flow
async def bar():
state = foo.submit(1, return_state=True)
assert isinstance(state, State)
return state
task_state = await bar()
assert await task_state.result() == 1
async def test_async_task_submitted_inside_async_flow(self):
@task
async def foo(x):
return x
@flow
async def bar():
future = foo.submit(1)
assert isinstance(future, PrefectFuture)
return future
task_state = await bar()
assert await task_state.result() == 1
async def test_sync_task_submitted_inside_async_flow(self):
@task
def foo(x):
return x
@flow
async def bar():
future = foo.submit(1)
assert isinstance(future, PrefectFuture)
return future
task_state = await bar()
assert await task_state.result() == 1
async def test_async_task_submitted_inside_sync_flow(self):
@task
async def foo(x):
return x
@flow
def bar():
future = foo.submit(1)
assert isinstance(future, PrefectFuture)
return future
task_state = bar()
assert await task_state.result() == 1
def test_task_failure_does_not_affect_flow(self):
@task
def foo():
raise ValueError("Test")
@flow
def bar():
foo.submit()
return "bar"
assert bar() == "bar"
async def test_downstream_does_not_run_if_upstream_fails(self):
@task
def fails():
raise ValueError("Fail task!")
@task
def bar(y):
return y
@flow
def test_flow():
f = fails.submit()
b = bar.submit(f)
return b
flow_state = test_flow(return_state=True)
task_state = await flow_state.result(raise_on_failure=False)
assert task_state.is_pending()
assert task_state.name == "NotReady"
def test_downstream_runs_if_upstream_succeeds(self):
@task
def foo(x):
return x
@task
def bar(y):
return y + 1
@flow
def test_flow():
f = foo.submit(1)
b = bar.submit(f)
return b.result()
assert test_flow() == 2
def test_downstream_receives_exception_if_upstream_fails_and_allow_failure(self):
@task
def fails():
raise ValueError("Fail task!")
@task
def bar(y):
return y
@flow
def test_flow():
f = fails.submit()
b = bar.submit(allow_failure(f))
return b.result()
result = test_flow()
assert isinstance(result, ValueError)
assert "Fail task!" in str(result)
def test_downstream_receives_exception_in_collection_if_upstream_fails_and_allow_failure(
self,
):
@task
def fails():
raise ValueError("Fail task!")
@task
def bar(y):
return y
@flow
def test_flow():
f = fails.submit()
b = bar.submit(allow_failure([f, 1, 2]))
return b.result()
result = test_flow()
assert isinstance(result, list), f"Expected list; got {type(result)}"
assert isinstance(result[0], ValueError)
assert result[1:] == [1, 2]
assert "Fail task!" in str(result)
async def test_allow_failure_chained_mapped_tasks(
self,
):
@task
def fails_on_two(x):
if x == 2:
raise ValueError("Fail task")
return x
@task
def identity(y):
return y
@flow
def test_flow():
f = fails_on_two.map([1, 2, 3])
b = identity.map(allow_failure(f))
return b
states = test_flow()
assert isinstance(states, list), f"Expected list; got {type(states)}"
assert await states[0].result(), await states[2].result() == [1, 3]
assert states[1].is_completed()
assert exceptions_equal(await states[1].result(), ValueError("Fail task"))
async def test_allow_failure_mapped_with_noniterable_upstream(
self,
):
@task
def fails():
raise ValueError("Fail task")
@task
def identity(y, z):
return y, z
@flow
def test_flow():
f = fails.submit()
b = identity.map([1, 2, 3], allow_failure(f))
return b
states = test_flow()
assert isinstance(states, list), f"Expected list; got {type(states)}"
assert len(states) == 3
for i, state in enumerate(states):
y, z = await state.result()
assert y == i + 1
assert exceptions_equal(z, ValueError("Fail task"))
async def test_raises_if_depends_on_itself(self):
@task
def say_hello(name):
return f"Hello {name}!"
@flow
def my_flow():
greeting_queue = []
for i in range(3):
if greeting_queue:
wait_for = greeting_queue
else:
wait_for = []
future = say_hello.submit(name=f"Person {i}", wait_for=wait_for)
greeting_queue.append(future)
for fut in greeting_queue:
print(fut.result())
with pytest.raises(ValueError, match="deadlock"):
my_flow()
| TestTaskSubmit |
python | h5py__h5py | h5py/tests/test_h5d_direct_chunk.py | {
"start": 1161,
"end": 5165
} | class ____(TestCase):
def test_read_compressed_offsets(self):
filename = self.mktemp().encode()
with h5py.File(filename, "w") as filehandle:
frame = numpy.arange(16).reshape(4, 4)
frame_dataset = filehandle.create_dataset("frame",
data=frame,
compression="gzip",
compression_opts=9)
dataset = filehandle.create_dataset("compressed_chunked",
data=[frame, frame, frame],
compression="gzip",
compression_opts=9,
chunks=(1, ) + frame.shape)
filter_mask, compressed_frame = frame_dataset.id.read_direct_chunk((0, 0))
# No filter must be disabled
self.assertEqual(filter_mask, 0)
for i in range(dataset.shape[0]):
filter_mask, data = dataset.id.read_direct_chunk((i, 0, 0))
self.assertEqual(compressed_frame, data)
# No filter must be disabled
self.assertEqual(filter_mask, 0)
def test_read_uncompressed_offsets(self):
filename = self.mktemp().encode()
frame = numpy.arange(16).reshape(4, 4)
with h5py.File(filename, "w") as filehandle:
dataset = filehandle.create_dataset("frame",
maxshape=(1,) + frame.shape,
shape=(1,) + frame.shape,
compression="gzip",
compression_opts=9)
# Write uncompressed data
DISABLE_ALL_FILTERS = 0xFFFFFFFF
dataset.id.write_direct_chunk((0, 0, 0), frame.tobytes(), filter_mask=DISABLE_ALL_FILTERS)
# FIXME: Here we have to close the file and load it back else
# a runtime error occurs:
# RuntimeError: Can't get storage size of chunk (chunk storage is not allocated)
with h5py.File(filename, "r") as filehandle:
dataset = filehandle["frame"]
filter_mask, compressed_frame = dataset.id.read_direct_chunk((0, 0, 0))
# At least 1 filter is supposed to be disabled
self.assertNotEqual(filter_mask, 0)
self.assertEqual(compressed_frame, frame.tobytes())
def test_read_write_chunk(self):
filename = self.mktemp().encode()
with h5py.File(filename, "w") as filehandle:
# create a reference
frame = numpy.arange(16).reshape(4, 4)
frame_dataset = filehandle.create_dataset("source",
data=frame,
compression="gzip",
compression_opts=9)
# configure an empty dataset
filter_mask, compressed_frame = frame_dataset.id.read_direct_chunk((0, 0))
dataset = filehandle.create_dataset("created",
shape=frame_dataset.shape,
maxshape=frame_dataset.shape,
chunks=frame_dataset.chunks,
dtype=frame_dataset.dtype,
compression="gzip",
compression_opts=9)
# copy the data
dataset.id.write_direct_chunk((0, 0), compressed_frame, filter_mask=filter_mask)
# checking
with h5py.File(filename, "r") as filehandle:
dataset = filehandle["created"][...]
numpy.testing.assert_array_equal(dataset, frame)
| TestReadDirectChunk |
python | huggingface__transformers | src/transformers/models/granitemoehybrid/modeling_granitemoehybrid.py | {
"start": 55965,
"end": 57236
} | class ____(PreTrainedModel):
config: GraniteMoeHybridConfig
base_model_prefix = "model"
supports_gradient_checkpointing = True
_no_split_modules = ["GraniteMoeHybridDecoderLayer"]
_skip_keys_device_placement = ["past_key_values"]
_supports_flash_attn = True
_supports_sdpa = True
_supports_flex_attn = True
_can_compile_fullgraph = False # MoE models don't work with torch.compile (`torch.where(condition)` not supported)
_supports_attention_backend = True
_can_record_outputs = {
"hidden_states": GraniteMoeHybridDecoderLayer,
"attentions": GraniteMoeHybridAttention,
}
_is_stateful = True
@torch.no_grad()
def _init_weights(self, module):
super()._init_weights(module)
if isinstance(module, GraniteMoeHybridParallelExperts):
init.normal_(module.weight, mean=0.0, std=self.config.initializer_range)
if isinstance(module, GraniteMoeHybridMambaLayer):
init.ones_(module.dt_bias)
init.copy_(module.A_log, torch.log(torch.arange(1, module.num_heads + 1)))
init.ones_(module.D)
elif isinstance(module, GraniteMoeHybridRMSNormGated):
init.ones_(module.weight)
@auto_docstring
| GraniteMoeHybridPreTrainedModel |
python | huggingface__transformers | src/transformers/models/edgetam_video/modeling_edgetam_video.py | {
"start": 75431,
"end": 85944
} | class ____(nn.Module):
def __init__(self, config: EdgeTamVideoMaskDecoderConfig):
super().__init__()
self.config = config
self.hidden_size = config.hidden_size
self.num_multimask_outputs = config.num_multimask_outputs
self.num_mask_tokens = config.num_multimask_outputs + 1
self.iou_token = nn.Embedding(1, self.hidden_size)
self.mask_tokens = nn.Embedding(self.num_mask_tokens, self.hidden_size)
self.transformer = EdgeTamVideoTwoWayTransformer(config)
# should we create a new class for this?
self.upscale_conv1 = nn.ConvTranspose2d(self.hidden_size, self.hidden_size // 4, kernel_size=2, stride=2)
self.upscale_conv2 = nn.ConvTranspose2d(self.hidden_size // 4, self.hidden_size // 8, kernel_size=2, stride=2)
self.upscale_layer_norm = EdgeTamVideoLayerNorm(self.hidden_size // 4, data_format="channels_first")
self.activation = nn.GELU()
mlps_list = []
for _ in range(self.num_mask_tokens):
mlps_list += [EdgeTamVideoFeedForward(self.hidden_size, self.hidden_size, self.hidden_size // 8, 3)]
self.output_hypernetworks_mlps = nn.ModuleList(mlps_list)
self.iou_prediction_head = EdgeTamVideoFeedForward(
self.hidden_size,
config.iou_head_hidden_dim,
self.num_mask_tokens,
config.iou_head_depth,
sigmoid_output=True,
)
self.conv_s0 = nn.Conv2d(config.hidden_size, config.hidden_size // 8, kernel_size=1, stride=1)
self.conv_s1 = nn.Conv2d(config.hidden_size, config.hidden_size // 4, kernel_size=1, stride=1)
self.obj_score_token = nn.Embedding(1, self.hidden_size)
self.pred_obj_score_head = EdgeTamVideoFeedForward(self.hidden_size, self.hidden_size, 1, 3)
self.dynamic_multimask_via_stability = config.dynamic_multimask_via_stability
self.dynamic_multimask_stability_delta = config.dynamic_multimask_stability_delta
self.dynamic_multimask_stability_thresh = config.dynamic_multimask_stability_thresh
def forward(
self,
image_embeddings: torch.Tensor,
image_positional_embeddings: torch.Tensor,
sparse_prompt_embeddings: torch.Tensor,
dense_prompt_embeddings: torch.Tensor,
multimask_output: bool,
high_resolution_features: list[torch.Tensor],
attention_similarity: Optional[torch.Tensor] = None,
target_embedding: Optional[torch.Tensor] = None,
**kwargs: Unpack[TransformersKwargs],
) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]:
"""
Predict masks given image and prompt embeddings.
Args:
image_embeddings (`torch.Tensor`):
The embeddings from the image encoder.
image_positional_embeddings (`torch.Tensor`):
Positional encoding with the shape of image_embeddings.
sparse_prompt_embeddings (`torch.Tensor`):
The embeddings of the points and boxes.
dense_prompt_embeddings (`torch.Tensor`):
The embeddings of the mask inputs.
multimask_output (`bool`):
Whether to return multiple masks or a single mask.
high_resolution_features (`list[torch.Tensor]`, *optional*):
The high-resolution features from the vision encoder.
attention_similarity (`torch.Tensor`, *optional*):
The attention similarity tensor.
target_embedding (`torch.Tensor`, *optional*):
The target embedding.
"""
batch_size, num_channels, height, width = image_embeddings.shape
point_batch_size = sparse_prompt_embeddings.shape[1]
# Concatenate output tokens
output_tokens = torch.cat(
[
self.obj_score_token.weight,
self.iou_token.weight,
self.mask_tokens.weight,
],
dim=0,
)
output_tokens = output_tokens.repeat(batch_size, point_batch_size, 1, 1)
if sparse_prompt_embeddings.shape[0] != 0:
tokens = torch.cat((output_tokens, sparse_prompt_embeddings), dim=2)
else:
tokens = output_tokens
point_embeddings = tokens.to(self.iou_token.weight.dtype)
# Expand per-image data in batch direction to be per-mask
image_embeddings = image_embeddings + dense_prompt_embeddings
image_embeddings = image_embeddings.repeat_interleave(point_batch_size, dim=0)
image_positional_embeddings = image_positional_embeddings.repeat_interleave(point_batch_size, 0)
# Run the transformer
point_embeddings, image_embeddings = self.transformer(
point_embeddings=point_embeddings,
image_embeddings=image_embeddings,
image_positional_embeddings=image_positional_embeddings,
attention_similarity=attention_similarity,
target_embedding=target_embedding,
**kwargs,
)
iou_token_out = point_embeddings[:, :, 1, :]
mask_tokens_out = point_embeddings[:, :, 2 : (2 + self.num_mask_tokens), :]
# Upscale mask embeddings and predict masks using the mask tokens
image_embeddings = image_embeddings.transpose(2, 3).view(
batch_size * point_batch_size, num_channels, height, width
)
feat_s0, feat_s1 = high_resolution_features
feat_s0 = feat_s0.repeat_interleave(point_batch_size, dim=0)
feat_s1 = feat_s1.repeat_interleave(point_batch_size, dim=0)
upscaled_embedding = self.upscale_conv1(image_embeddings) + feat_s1
upscaled_embedding = self.activation(self.upscale_layer_norm(upscaled_embedding))
upscaled_embedding = self.activation(self.upscale_conv2(upscaled_embedding) + feat_s0)
hyper_in_list: list[torch.Tensor] = []
for i in range(self.num_mask_tokens):
current_mlp = self.output_hypernetworks_mlps[i]
hyper_in_list += [current_mlp(mask_tokens_out[:, :, i, :])]
hyper_in = torch.stack(hyper_in_list, dim=2)
_, num_channels, height, width = upscaled_embedding.shape
upscaled_embedding = upscaled_embedding.view(batch_size, point_batch_size, num_channels, height * width)
masks = (hyper_in @ upscaled_embedding).view(batch_size, point_batch_size, -1, height, width)
# Generate mask quality predictions
iou_pred = self.iou_prediction_head(iou_token_out)
object_score_logits = self.pred_obj_score_head(point_embeddings[:, :, 0, :])
# Select the correct mask or masks for output
if multimask_output:
mask_slice = slice(1, None)
masks = masks[:, :, mask_slice, :, :]
iou_pred = iou_pred[:, :, mask_slice]
elif self.dynamic_multimask_via_stability and not self.training:
mask_slice = slice(0, 1)
masks, iou_pred = self._dynamic_multimask_via_stability(masks, iou_pred)
else:
mask_slice = slice(0, 1)
masks = masks[:, :, mask_slice, :, :]
iou_pred = iou_pred[:, :, mask_slice]
sam_tokens_out = mask_tokens_out[:, :, mask_slice] # [b, 3, c] shape
return masks, iou_pred, sam_tokens_out, object_score_logits
def _get_stability_scores(self, mask_logits):
"""
Compute stability scores of the mask logits based on the IoU between upper and
lower thresholds.
"""
mask_logits = mask_logits.flatten(-2)
stability_delta = self.dynamic_multimask_stability_delta
area_i = torch.sum(mask_logits > stability_delta, dim=-1).float()
area_u = torch.sum(mask_logits > -stability_delta, dim=-1).float()
stability_scores = torch.where(area_u > 0, area_i / area_u, 1.0)
return stability_scores
def _dynamic_multimask_via_stability(self, all_mask_logits, all_iou_scores):
"""
When outputting a single mask, if the stability score from the current single-mask
output (based on output token 0) falls below a threshold, we instead select from
multi-mask outputs (based on output token 1~3) the mask with the highest predicted
IoU score. This is intended to ensure a valid mask for both clicking and tracking.
"""
# The best mask from multimask output tokens (1~3)
multimask_logits = all_mask_logits[:, :, 1:, :, :]
multimask_iou_scores = all_iou_scores[:, :, 1:]
best_scores_inds = torch.argmax(multimask_iou_scores, dim=-1) # [B, P]
best_scores_inds_expanded = best_scores_inds.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1)
best_scores_inds_expanded = best_scores_inds_expanded.expand(
-1, -1, 1, multimask_logits.size(-2), multimask_logits.size(-1)
)
best_multimask_logits = torch.gather(multimask_logits, 2, best_scores_inds_expanded) # [B, P, 1, H, W]
best_multimask_iou_scores = torch.gather(multimask_iou_scores, 2, best_scores_inds.unsqueeze(-1)) # [B, P, 1]
# The mask from singlemask output token 0 and its stability score
singlemask_logits = all_mask_logits[:, :, 0:1, :, :]
singlemask_iou_scores = all_iou_scores[:, :, 0:1]
stability_scores = self._get_stability_scores(singlemask_logits)
is_stable = stability_scores >= self.dynamic_multimask_stability_thresh
# Dynamically fall back to best multimask output upon low stability scores.
mask_logits_out = torch.where(
is_stable[..., None, None].expand_as(singlemask_logits),
singlemask_logits,
best_multimask_logits,
)
iou_scores_out = torch.where(
is_stable.expand_as(singlemask_iou_scores),
singlemask_iou_scores,
best_multimask_iou_scores,
)
return mask_logits_out, iou_scores_out
# a large negative value as a placeholder score for missing objects
NO_OBJ_SCORE = -1024.0
def get_1d_sine_pe(pos_inds, dim, temperature=10000):
"""
Get 1D sine positional embedding as in the original Transformer paper.
"""
pe_dim = dim // 2
dim_t = torch.arange(pe_dim, dtype=torch.float32, device=pos_inds.device)
dim_t = temperature ** (2 * (dim_t // 2) / pe_dim)
pos_embed = pos_inds.unsqueeze(-1) / dim_t
pos_embed = torch.cat([pos_embed.sin(), pos_embed.cos()], dim=-1)
return pos_embed
@auto_docstring
| EdgeTamVideoMaskDecoder |
python | getsentry__sentry | tests/sentry/rules/history/endpoints/test_project_rule_stats.py | {
"start": 639,
"end": 1048
} | class ____(TestCase):
def test(self) -> None:
time_series_value = TimeSeriesValue(datetime.now(), 30)
result = serialize([time_series_value], self.user, TimeSeriesValueSerializer())
assert result == [
{
"date": time_series_value.bucket,
"count": time_series_value.count,
}
]
@freeze_time()
| TimeSeriesValueSerializerTest |
python | neetcode-gh__leetcode | python/0994-rotting-oranges.py | {
"start": 0,
"end": 1120
} | class ____:
def orangesRotting(self, grid: List[List[int]]) -> int:
q = collections.deque()
fresh = 0
time = 0
for r in range(len(grid)):
for c in range(len(grid[0])):
if grid[r][c] == 1:
fresh += 1
if grid[r][c] == 2:
q.append((r, c))
directions = [[0, 1], [0, -1], [1, 0], [-1, 0]]
while fresh > 0 and q:
length = len(q)
for i in range(length):
r, c = q.popleft()
for dr, dc in directions:
row, col = r + dr, c + dc
# if in bounds and nonrotten, make rotten
# and add to q
if (
row in range(len(grid))
and col in range(len(grid[0]))
and grid[row][col] == 1
):
grid[row][col] = 2
q.append((row, col))
fresh -= 1
time += 1
return time if fresh == 0 else -1
| Solution |
python | scipy__scipy | scipy/sparse/linalg/_special_sparse_arrays.py | {
"start": 25405,
"end": 27558
} | class ____(LinearOperator):
"""
Construct a mass matrix in various formats of Mikota pair.
The mass matrix `M` is square real diagonal
positive definite with entries that are reciprocal to integers.
Parameters
----------
shape : tuple of int
The shape of the matrix.
dtype : dtype
Numerical type of the array. Default is ``np.float64``.
Methods
-------
toarray()
Construct a dense array from Mikota data
tosparse()
Construct a sparse array from Mikota data
tobanded()
The format for banded symmetric matrices,
i.e., (1, n) ndarray with the main diagonal.
"""
def __init__(self, shape, dtype=np.float64):
self.shape = shape
self.dtype = dtype
super().__init__(dtype, shape)
def _diag(self):
# The matrix is constructed from its diagonal 1 / [1, ..., N+1];
# compute in a function to avoid duplicated code & storage footprint
return (1. / np.arange(1, self.shape[0] + 1)).astype(self.dtype)
def tobanded(self):
return self._diag()
def tosparse(self):
from scipy.sparse import diags_array
return diags_array([self._diag()], offsets=[0],
shape=self.shape, dtype=self.dtype)
def toarray(self):
return np.diag(self._diag()).astype(self.dtype)
def _matvec(self, x):
"""
Construct matrix-free callable banded-matrix-vector multiplication by
the Mikota mass matrix without constructing or storing the matrix itself
using the knowledge of its entries and the diagonal format.
"""
x = x.reshape(self.shape[0], -1)
return self._diag()[:, np.newaxis] * x
def _matmat(self, x):
"""
Construct matrix-free callable matrix-matrix multiplication by
the Mikota mass matrix without constructing or storing the matrix itself
by reusing the ``_matvec(x)`` that supports both 1D and 2D arrays ``x``.
"""
return self._matvec(x)
def _adjoint(self):
return self
def _transpose(self):
return self
| MikotaM |
python | openai__openai-python | src/openai/types/beta/realtime/input_audio_buffer_commit_event_param.py | {
"start": 232,
"end": 503
} | class ____(TypedDict, total=False):
type: Required[Literal["input_audio_buffer.commit"]]
"""The event type, must be `input_audio_buffer.commit`."""
event_id: str
"""Optional client-generated ID used to identify this event."""
| InputAudioBufferCommitEventParam |
python | pytorch__pytorch | torch/ao/quantization/fx/quantize_handler.py | {
"start": 6956,
"end": 7112
} | class ____(QuantizeHandler):
pass
# TODO: not used, can be removed after torch.ao.quantization namespace is deprecated
| GeneralTensorShapeOpQuantizeHandler |
python | jmcnamara__XlsxWriter | xlsxwriter/test/comparison/test_textbox29.py | {
"start": 315,
"end": 848
} | class ____(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename("textbox29.xlsx")
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with textbox(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_textbox("E9", None, {"textlink": "=$A$1"})
workbook.close()
self.assertExcelEqual()
| TestCompareXLSXFiles |
python | redis__redis-py | tests/test_multidb/test_config.py | {
"start": 5198,
"end": 6051
} | class ____:
def test_default_config(self):
config = DatabaseConfig(
client_kwargs={"host": "host1", "port": "port1"}, weight=1.0
)
assert config.client_kwargs == {"host": "host1", "port": "port1"}
assert config.weight == 1.0
assert isinstance(config.default_circuit_breaker(), PBCircuitBreakerAdapter)
def test_overridden_config(self):
mock_connection_pool = Mock(spec=ConnectionPool)
mock_circuit = Mock(spec=CircuitBreaker)
config = DatabaseConfig(
client_kwargs={"connection_pool": mock_connection_pool},
weight=1.0,
circuit=mock_circuit,
)
assert config.client_kwargs == {"connection_pool": mock_connection_pool}
assert config.weight == 1.0
assert config.circuit == mock_circuit
| TestDatabaseConfig |
python | joke2k__faker | faker/providers/job/ro_RO/__init__.py | {
"start": 42,
"end": 170683
} | class ____(BaseProvider):
jobs = [
"Adjunct Al Procurorului General",
"Ambasador",
"Chestor Parlament",
"Comandant Unic Aviatie",
"Comisar General",
"Comisar General Adjunct",
"Senator",
"Guvernator",
"Presedinte Academie",
"Presedinte Inalta Curte De Casatie Si Justitie",
"Presedinte Curte De Apel",
"Presedinte Curtea De Conturi A Romaniei",
"Presedinte De Judecatorie",
"Presedinte Camera Deputatilor",
"Presedinte Sectie (La Inalta Curte De Casatie Si Justitie, La Curtea De Apel, Tribunale Si Judecatorii)",
"Presedinte Tribunal",
"Presedintele Romaniei",
"Prim-Procuror",
"Prim-Procuror Adjunct",
"Prim-Adjunct Al Procurorului General",
"Prim-Ministru",
"Procuror General",
"Procuror Sef De Sectie",
"Procuror Sef De Sectie Adjunct",
"Secretar General Al Guvernului",
"Secretar Parlament",
"Secretar De Stat",
"Vicepresedinte (La Inalta Curte De Casatie Si Justitie, Curtea De Apel, Tribunale Si Judecatorii",
"Inspector-Sef Al Inspectiei Judiciare De Pe Langa Plenul Consiliului Superior Al Magistraturii",
"Membru Al Consiliului Superior Al Magistraturii",
"Secretar General Adjunct Al Consiliului Superior Al Magistraturii",
"Inspector In Cadrul Inspectiei Judiciare Pentru Judecatori/",
"Inspector General Judecatoresc Sef",
"Ministru",
"Ministru Consilier",
"Ministru De Stat",
"Ministru Plenipotentiar",
"Deputat",
"Presedinte Senat",
"Subsecretar De Stat",
"Consilier Diplomatic",
"Consilier Guvernamental",
"Consilier Si Consultant Juridic",
"Consilier Institutii Publice",
"Consilier Al Ministrului",
"Consul General",
"Director Institutie Publica",
"Director Adjunct Institutie Publica",
"Director De Cabinet",
"Director General Institutie Publica",
"Consilier Economic",
"Inspector De Stat Sef",
"Inspector-Sef In Administratia Publica",
"Magistrat-Asistent-Sef",
"Notar-Sef",
"Notar-Sef Adjunct",
"Secretar-Sef Notariat",
"Prefect",
"Secretar General",
"Sef Birou Institutie Publica",
"Sef Cabinet",
"Sef Birou Senatorial",
"Sef Departament",
"Sef Protocol De Stat",
"Sef Serviciu Institutie Publica",
"Subprefect",
"Viceguvernator",
"Presedinte Institutie Publica",
"Consilier Prezidential",
"Consilier Parlamentar",
"Vicepresedinte Institutie Publica",
"Atasat Diplomatic",
"Consul",
"Secretar Diplomatic",
"Viceconsul",
"Consultant Prezidential Si Guvernamental",
"Secretar General Academie",
"Primar",
"Secretar Primarie, Prefectura",
"Viceprimar",
"Consilier Organizatie Politica",
"Presedinte Organizatie Politica",
"Vicepresedinte Organizatie Politica",
"Secretar Organizatie Politica",
"Conducator De Asociatii, Filiale Si Organizatii Obstesti",
"Loctiitor Al Conducatorului De Asociatii, Filiale Si Organizatii Obstesti",
"Secretar Si Secretar Adjunct Ai Asociatiilor, Filialelor Si Organizatiilor Obstesti",
"Presedinte Organizatie Sindicala",
"Vicepresedinte Organizatie Sindicala",
"Secretar Organizatie Sindicala",
"Delegat Sindical",
"Presedinte Asociatie Nationala Cooperatista",
"Vicepresedinte Asociatie Nationala Cooperatista",
"Secretar General Asociatie Nationala Cooperatista",
"Presedinte Asociatie Teritoriala De Organizatii Cooperatiste",
"Vicepresedinte Asociatie Teritoriala De Organizatii Cooperatiste",
"Secretar Asociatie Teritoriala De Organizatii Cooperatiste",
"Sef Departament Organizatie Sindicala",
"Presedinte Organizatie Cooperatista",
"Vicepresedinte Organizatie Cooperatista",
"Presedinte Asociatie Patronala",
"Vicepresedinte Asociatie Patronala",
"Presedinte Organizatie Profesionala Nationala",
"Vicepresedinte Organizatie Profesionala Nationala",
"Secretar National Organizatie Profesionala Nationala",
"Presedinte Organizatie Profesionala, Filiala Judeteana/Municipiu",
"Vicepresedinte Organizatie Profesionala, Filiala Judeteana/Municipiu",
"Secretar Organizatie Profesionala, Filiala Judeteana/Municipiu",
"Consilier Presedinte Organizatie Profesionala Nationala",
"Consilier Presedinte Organizatie Profesionala, Filiala Judeteana/Municipiu",
"Sef Departament/Compartiment/Presedinte Comisie Organizatie Profesionala, Filiala Judeteana/Municipiu",
"Delegat Sindical Local",
"Conducator De Organizatii Umanitare",
"Secretar Al Organizatiilor Umanitare",
"Comandant/Comandant Adjunct Aviatie",
"Comandant Port, Flota",
"Decan, Rector, Prorector, Prodecan",
"Director Societate Comerciala",
"Director Adjunct Societate Comerciala",
"Inspector General Scolar",
"Director Stiintific Cercetare-Dezvoltare",
"Inspector Sanitar Sef",
"Medic (Farmacist) Director",
"Medic (Farmacist) Director Adjunct",
"Director General Societate Comerciala",
"Director General Adjunct Societate Comerciala",
"Director De Program",
"Director General Regie Autonoma",
"Director General Adjunct Regie Autonoma",
"Director Control Risc",
"Director Comercial",
"Director Vanzari",
"Director/Director Adjunct, Inspector-Sef",
"Director Economic",
"Director Magazin",
"Sef Corp Executori Bancari",
"Director Sucursala",
"Director Tehnic",
"Director General Institut National De Cercetaredezvoltare",
"Director Incubator Tehnologic De Afaceri",
"Director Departament Cercetare-Dezvoltare",
"Manager General",
"Manager",
"Sef Cancelarie",
"Director De Societate Comerciala Agricola",
"Antreprenor In Economia Sociala",
"Director Resurse Umane",
"Inspector Scolar General Adjunct",
"Director Casa Corpului Didactic",
"Presedinte Banca/Vicepresedinte/Prim-Vicepresedinte",
"Economist-Sef",
"Inspector General Vanatoare",
"Director Executiv Banca/Director/Director Adjunct",
"Sef Departament Banca/Sef-Adjunct Departament",
"Sef Proiect Banca",
"Sef Serviciu/Sef Birou/Banca/Societate De Leasing",
"Coordonator Compartiment/Colectiv Banca",
"Dealer-Sef (Arbitragist Banca)",
"Director Unitate Bancara Operationala/Director Adjunct Unitate Bancara Operationala",
"Sef Agentie Bancara",
"Contabil-Sef/Director Financiar/Banca/Societate De Leasing",
"Director De Arhiva Banca",
"Director/Director Adjunct Divizie/Directie De Leasing",
"Director/Director Executiv Conformitate",
"Coordonator Conformitate",
"Manager Securitatea Informatiei (Chief Information Security Officer ",
"Comisar Sef Divizie Garda Financiara",
"Contabil-Sef",
"Controlor Financiar",
"Sef Agentie Cec",
"Sef Birou/Serviciu/Sectie Circumscriptie Financiara",
"Sef Birou/Serviciu Financiar-Contabilitate",
"Manager Financiar",
"Manager Relatii Financiare Externe",
"Controlor Revizor Financiar",
"Sef Birou Calificare Si Recalificare",
"Sef Birou Pensii",
"Sef Birou Somaj",
"Sef Oficiu Somaj",
"Sef Serviciu Resurse Umane",
"Sef Serviciu Evaluarea Resurselor De Munca",
"Manager Resurse Umane",
"Sef Centru Perfectionare",
"Sef Birou Organizatie Politica, Obsteasca, Umanitara",
"Sef Serviciu Organizatie Politica, Obsteasca, Umanitara",
"Inspector Protectie Civila",
"Sef Executiv Audit Intern",
"Sef Obiectiv Servicii De Securitate",
"Manager De Securitate",
"Manager Energetic",
"Manager Informatii Pentru Afaceri",
"Sef Serviciu",
"Sef Atelier",
"Sef Sectie",
"Sef Birou",
"Procuror Sef Birou/Serviciu",
"Sef Birou/Serviciu Administrativ",
"Registrator Coordonator",
"Registrator-Sef",
"Grefier-Sef (Judecatorie, Parchet)",
"Grefier-Sef De Sectie (Curte De Apel, Tribunal, Parchete)",
"Prim-Grefier",
"Sef Laborator Criminalistica",
"Sef Serviciu Marketing",
"Sef Birou Marketing",
"Sef Licitatie",
"Director Operatii Tranzactii",
"Sef Casa Compensatie",
"Sef Agentie Bursiera",
"Manager Marketing (Tarife, Contracte, Achizitii)",
"Conducator Firma Mica ",
"Sef Agentie Reclama Publicitara",
"Sef Birou Reclama Publicitara",
"Sef Serviciu Reclama Publicitara",
"Arhitect-Sef",
"Geolog-Sef",
"Secretar Stiintific",
"Sef Formatie Lucrari Geologice",
"Sef Formatie Cercetare-Dezvoltare",
"Meteorolog-Sef",
"Director Filiala Cercetare-Proiectare",
"Sef Atelier Editie, Multiplicare, Expeditie",
"Sef Proiect Cercetare-Proiectare",
"Sef Sectie Cercetare-Proiectare",
"Sef Atelier Cercetare-Proiectare",
"Responsabil Cte (Control Tehnic-Economic) In Cercetare-Proiectare",
"Director Proiect",
"Sef Proiect/Program",
"Inspector-Sef Inspectia Meteorologica Nationala",
"Hidrometeorolog-Sef",
"Inginer-Sef Agricultura Si Silvicultura",
"Medic Veterinar Sef",
"Sef Centru Protectia Plantelor Si Mediului",
"Sef Centru Reproductia Si Selectia Animalelor",
"Sef District, Centru, Ocol Silvic",
"Sef Circumscriptie Sanitar-Veterinara Si Control Al Alimentelor",
"Sef Complex Zootehnic",
"Sef Fazanerie",
"Sef Ferma Agricola (Agrozootehnica)",
"Sef Laborator Analize Pedologice",
"Sef Oficiu Cadastru",
"Sef Parchet",
"Sef Pepiniera Silvicola, Pomicola, Viticola",
"Sef Statie Hidrologica, Meteorologica Si Incubatie",
"Sef Statie Productie, Exploatare, Intretinere In Agricultura",
"Sef Statie Vinificatie",
"Sef Statie Lucrari De Irigatie Si Ameliorare A Solului",
"Inspector Veterinar Sef",
"Sef Sectie Mecanizare",
"Conducator Intreprindere Mica ",
"Sef Pastravarie",
"Inginer-Sef Piscicultura Si Vanatoare",
"Conducator Intreprindere Mica ",
"Inginer-Sef Industria Prelucratoare",
"Sef Atelier Industria Prelucratoare",
"Sef Sector Industria Prelucratoare",
"Sef Laborator In Industria Prelucratoare",
"Sef Modul In Industria Prelucratoare",
"Sef Sectie Industrie Prelucratoare",
"Sef Serviciu Industrie Prelucratoare",
"Sef Birou Industrie Prelucratoare",
"Manager Securitate Instalatii Industria Prelucratoare",
"Conducator Intreprindere Mica ",
"Inginer-Sef Industria Extractiva",
"Sef Atelier Industria Extractiva",
"Sef Sector Industria Extractiva",
"Sef Modul In Industria Extractiva",
"Sef Sectie Industrie Extractiva",
"Sef Serviciu Industrie Extractiva",
"Sef Birou Industrie Extractiva",
"Manager Securitate Instalatii Industria Extractiva",
"Conducator Intreprindere Mica ",
"Inginer-Sef Exploatare Nucleara",
"Inginer-Sef Radioprotectie",
"Sef Unitati Miniere",
"Sef Brigada Exploatare Miniera",
"Inspector-Sef Conservarea Energiei",
"Sef Centru Prelucrare",
"Sef Laborator Control Tehnic De Calitate A Combustibilului Nuclear",
"Sef Serviciu Tehnic Si Componente Nucleare",
"Sef Serviciu Termochimic",
"Sef Uzina, Centrala Electrica, Gaze, Apa",
"Sef Centrala Electrica, Gaze Si Apa",
"Sef Atelier Reparatii Capitale",
"Inspector General Industria Petroliera",
"Sef Formatie Industria Petroliera/Petrochimica",
"Sef Instalatie Petroliera",
"Sef Laborator Industria Petroliera",
"Sef Statie Epurare Ape Reziduale",
"Supervizor Geolog Si Foraj",
"Sef Formatie In Industria De Masini Si Echipamente",
"Sef/Sef Adjunct Statie Electrica",
"Sef/Sef Adjunct Centru Exploatare Retele Electrice",
"Sef Dispecer Energetic Central (Dec)",
"Sef Dispecer Energetic Teritorial (Det)",
"Sef Formatie La Fabricarea Armamentului Si Munitiei",
"Sef Schimb",
"Sef Formatie",
"Ajutor Sef Brigada In Constructii",
"Inginer-Sef In Constructii",
"Conducator Antrepriza Constructii-Montaj",
"Sef Atelier In Constructii",
"Sef Brigada Complexa Sau Specializata",
"Sef Laborator In Constructii",
"Sef Lot",
"Sef Santier",
"Sef Sector (Sectie) Drumuri-Poduri",
"Sef Sectie Productie, Exploatare, Intretinere, Reparatii In Constructii Si Lucrari Publice",
"Sef Serviciu In Constructii",
"Sef Birou In Constructii",
"Sef Sector Exploatare Imbunatatiri Funciare",
"Sef Sistem Exploatare Imbunatatiri Funciare",
"Conducator Intreprindere Mica ",
"Capitan Sef Port",
"Comandant Nave Maritime",
"Comandant Coordonator Grup Mare Pescuit Oceanic",
"Conducator (Director Si Director Adjunct) Administratia Filiala Dunarea De Jos (Afdj)",
"Director Zbor",
"Inginer-Sef Transporturi",
"Picher Sef District",
"Revizor General Siguranta Circulatiei",
"Sef Agentie Navala",
"Sef Atelier Aeroport",
"Sef Atelier Transporturi",
"Sef Autobaza",
"Sef Birou Aeroport",
"Sef Birou/Serviciu Relatii Internationale",
"Sef Birou/Serviciu Transport Maritim Si Fluvial",
"Sef Coloana Auto",
"Sef Depou/Adjunct",
"Sef District Cai Ferate, Poduri, Drumuri",
"Sef Divizie Cai Ferate",
"Sef Laborator Aeroport",
"Sef Port",
"Sef Regulator Circulatie Cai Ferate",
"Sef Revizie Locomotive, Automotoare",
"Sef Revizie Vagoane",
"Sef Sectie/Adjunct (Sector) Transporturi",
"Sef Serviciu, Centru, Statie, Aeroport",
"Sef Serviciu Filiala Administratia Filiala Dunarea De Jos",
"Sef Statie Cai Ferate",
"Sef Statie Teleferic",
"Sef Agentie Pilotaj",
"Sef Cart",
"Diriginte Oficiu Transporturi",
"Sef Garaj",
"Comandant Instructor",
"Sef Mecanic Instructor",
"Sef Mecanic Maritim/Fluvial",
"Sef Electrician Maritim",
"Sef Atelier Reparatii",
"Conducator Activitate De Transport Rutier",
"Sef Trafic Auto Intern",
"Sef Trafic Curierat Intern",
"Sef Departament Logistica",
"Sef Birou Aprovizionare-Desfacere",
"Sef Depozit",
"Sef Serviciu Aprovizionare-Desfacere",
"Sef Siloz",
"Sef Statie Uscare-Conditionare Cereale",
"Manager Achizitii",
"Manager Farmacii",
"Manager Aprovizionare",
"Manager Relatia Cu Furnizorii",
"Conducator Intreprindere Mica ",
"Director Centru De Calcul",
"Sef Oficiu De Calcul",
"Sef Atelier Informatica",
"Sef Laborator Informatica",
"Director Divizie Informatica",
"Director Departament Informatica",
"Manager Tehnologia Informatiilor Si Comunicatii",
"Director Departament Securitate",
"Conducator De Intreprindere Mica ",
"Sef Atelier Telecomunicatii",
"Sef Birou Exploatare Postala",
"Sef Birou Radiotelecomunicatii",
"Sef Centrala Telefonica",
"Sef Centru Control Calitate Emisie Radiofonica",
"Sef Centru Control Calitate Emisie Televiziune",
"Sef Centru Control Local Comunicatii",
"Sef Centru Dirijare Zbor",
"Sef Centru Postal",
"Sef Centru (Sectie, Sector) Radiodifuziune",
"Sef Centru (Sectie, Sector) Telecomunicatii",
"Sef Centru Zonal Interventii Radiorelee",
"Sef Centru Zonal De Interventii Translatare Tv",
"Sef Formatie Comunicatii",
"Sef Formatie Operationala Telecomunicatii",
"Sef Laborator Masuratori Telecomunicatii",
"Sef Laborator Radioteleviziune",
"Sef Laborator Telecomunicatii",
"Sef Lot Telecomunicatii",
"Sef Retea Telecomunicatii",
"Sef Serviciu Control Zonal Comunicatii",
"Sef Serviciu Exploatare Postala",
"Sef Serviciu Informare Zbor",
"Sef Serviciu Navigatie",
"Sef Serviciu Radiotelecomunicatii",
"Sef Statie Comunicatii Prin Satelit",
"Sef Statie Radiorelee",
"Sef Statie Televiziune",
"Sef Studio",
"Telefonist-Sef",
"Telegrafist-Sef",
"Sef Oficiu Zonal Posta",
"Sef Oficiu Special Posta",
"Diriginte Oficiu Telecomunicatii",
"Sef Turn Telecomunicatii",
"Sef Statie Radiotelegrafie (Rtg)",
"Sef Centru National De Telecomunicatii Aeronautice Aviatie Civila",
"Conducator Intreprindere Mica ",
"Asistent Medical Sef",
"Biochimist Sef Sectie, Laborator",
"Biolog Sef Sectie, Laborator",
"Chimist Sef Sectie, Laborator",
"Farmacist Sef Sectie, Laborator",
"Farmacist Diriginte",
"Laborant Medical Sef",
"Medic-Sef (Policlinica, Statie De Salvare, Centru De Recoltare Sange)",
"Medic Sef Sectie, Laborator",
"Moasa-Sefa",
"Oficiant Medical Sef",
"Psiholog Sef Sectie, Laborator",
"Sora Medicala Sefa",
"Tehnician Sanitar Sef",
"Sef Serviciu De Reintegrare Sociala Si Supraveghere",
"Conducator De Intreprindere Mica ",
"Conducator Tabara Scolara",
"Director Unitate De Invatamant",
"Secretar Stiintific Invatamant, Cercetare",
"Sef Lectorat",
"Sef Catedra",
"Conducator De Intreprindere Mica ",
"Director Palate Si Cluburi Ale Elevilor",
"Sef Serviciu/Sef Birou Asigurari",
"Sef Serviciu/Sef Birou Daune",
"Sef Expozitii Si Targuri",
"Sef Vama",
"Inginer-Sef Intreprinderi De Reparatii Obiecte De Uz Casnic, Curatatorii Si Alte Servicii Pentru Populatie",
"Sef Atelier Reparatii Obiecte De Uz Casnic, Curatatorii Si Alte Servicii Pentru Populatie",
"Sef Centru Reparatii",
"Sef Centru Dezinfectie, Deratizare Si Dezinsectie",
"Coordonator Presa",
"Librar-Sef",
"Conducator Intreprindere Mica ",
"Redactor-Sef Presa, Editura",
"Secretar General Agentie Presa, Editura",
"Secretar General Redactie",
"Sef Birou Exploatare, Coordonare Presa",
"Sef Birou Redactie",
"Sef Birou Relatii Unitati Presa",
"Sef Oficiu Juridic",
"Sef Oficiu, Serviciu, Sectie, Redactie",
"Conducator Intreprindere Mica ",
"Sef Complex Hotelier",
"Sef Unitate Balneoclimaterica",
"Administrator Hotel",
"Director De Hotel",
"Director De Motel",
"Director De Hotel Pentru Tineret",
"Director De Camping",
"Director De Sat De Vacanta",
"Director De Popas Turistic",
"Director Restaurant",
"Director Rotiserie",
"Director Crama",
"Director Braserie",
"Director Berarie",
"Director Gradina De Vara",
"Director Bar",
"Director Cafenea",
"Director Disco-Bar",
"Director Unitati Tip Fast-Food",
"Director Cofetarie, Patiserie",
"Director De Club (Hotelier)",
"Director De Cazare",
"Sef Restaurant",
"Director De Departament Alimentatie",
"Director De Departament Catering",
"Presedinte Cooperativa De Consum",
"Sef Baza Receptie",
"Sef Serviciu Comert Cu Ridicata Si Cu Amanuntul",
"Sef Birou Comert Cu Ridicata Si Cu Amanuntul",
"Vicepresedinte Cooperativa De Consum",
"Sef Statie Peco",
"Sef Departament Marfuri Alimentare/Nealimentare",
"Manager De Zona",
"Inginer Sef Firme De Afaceri Si Alte Servicii Comerciale",
"Sef Agentie Comerciala",
"Conducator Intreprindere Mica ",
"Antrenor Federatie Sportiva",
"Comandant Aeroclub",
"Consilier Teritorial Sef Inspectoratul Pentru Cultura",
"Director Asezamant Cultural",
"Manager Al Organizatiei Culturale",
"Presedinte Federatie Sportiva",
"Presedinte Complex, Club Sportiv",
"Secretar General Federatie Sport",
"Sef Agentie Concursuri Hipice",
"Sef Productie Film",
"Sef Sectie Productie Film",
"Sef Atelier Productie Film",
"Sef Oficiu Interjudetean Difuzare Film",
"Conducator De Intreprindere Mica ",
"Sef Atelier Decorator",
"Sef Agentie/Oficiu Turism",
"Sef Unitate Elementara De Lucru",
"Sef Atelier Presa",
"Sef Laborator Conservare-Restaurare Opere De Arta",
"Sef Serviciu Control Tehnic Presa",
"Conducator De Intreprindere Mica ",
"Manager In Activitatea De Turism",
"Director De Agentie De Turism Touroperatoare/ Detailista/Filiala/Sucursala",
"Director Centru Informare Turistica",
"Director De Departament Organizare Evenimente",
"Conducator De Pensiune Turistica (Rurala, Agroturistica, Montana)",
"Sef Serviciu Statie, Tura Meteo",
"Sef Centru Meteo Aeronautic",
"Sef Birou/Statie/Tura Meteo Aeronautic/De Aerodrom",
"Sef Centru National Pentru Protectia Meteorologica A Navigatiei Aeriene",
"Sef Echipa Interventii Si Supraveghere Echipamente In Serviciile De Trafic Aerian",
"Fizician",
"Cercetator In Fizica",
"Asistent De Cercetare In Fizica",
"Cercetator In Fizica-Chimie",
"Asistent De Cercetare In Fizica-Chimie",
"Cercetator In Fizica Tehnologica",
"Asistent De Cercetare In Fizica Tehnologica",
"Cercetator In Astronomie",
"Asistent De Cercetare In Astronomie",
"Cercetator De Aeronave",
"Inginer De Cercetare De Aeronave",
"Asistent De Cercetare De Aeronave",
"Cercetator In Constructii Aerospatiale",
"Inginer De Cercetare In Constructii Aerospatiale",
"Asistent De Cercetare In Constructii Aerospatiale",
"Meteorolog (Studii Superioare)",
"Meteorolog Previzionist",
"Climatolog",
"Meteorolog Aeronautic",
"Consilier/Expert In Meteorologie Si Domenii Conexe",
"Asistent Meteorolog",
"Meteorolog Aeronautic Prognozist",
"Coordonator Interventii Active In Atmosfera",
"Cercetator In Meteorologie",
"Asistent De Cercetare In Meteorologie",
"Chimist",
"Consilier Chimist",
"Expert Chimist",
"Inspector De Specialitate Chimist",
"Referent De Specialitate Chimist",
"Cercetator In Chimie",
"Asistent De Cercetare In Chimie",
"Cercetator In Biochimie Tehnologica",
"Asistent De Cercetare In Biochimie Tehnologica",
"Cercetator In Chimie Fizica",
"Asistent De Cercetare In Chimie Fizica",
"Consilier Geolog",
"Expert Geolog",
"Inspector De Specialitate Geolog",
"Referent De Specialitate Geolog",
"Consilier Geofizician",
"Expert Geofizician",
"Inspector De Specialitate Geofizician",
"Referent De Specialitate Geofizician",
"Consilier Hidrogeolog",
"Expert Hidrogeolog",
"Inspector De Specialitate Hidrogeolog",
"Referent De Specialitate Hidrogeolog",
"Consilier Hidrolog",
"Expert Hidrolog",
"Inspector De Specialitate Hidrolog",
"Referent De Specialitate Hidrolog",
"Consilier Pedolog",
"Expert Pedolog",
"Inspector De Specialitate Pedolog",
"Referent De Specialitate Pedolog",
"Inginer Geolog",
"Geolog",
"Geofizician",
"Hidrolog",
"Pedolog",
"Cercetator In Geologie",
"Asistent De Cercetare In Geologie",
"Cercetator In Geologie Tehnica",
"Asistent De Cercetare In Geologie Tehnica",
"Cercetator In Geofizica",
"Asistent De Cercetare In Geofizica",
"Cercetator In Mineralogia Tehnica Si Experimentala",
"Asistent De Cercetare In Mineralogia Tehnica Si Experimentala",
"Cercetator In Geochimie",
"Asistent De Cercetare In Geochimie",
"Cercetator In Geologie Petroliera",
"Asistent De Cercetare In Geologie Petroliera",
"Cercetator In Geodezie",
"Inginer De Cercetare In Geodezie",
"Asistent De Cercetare In Geodezie",
"Cercetator In Cadastru",
"Inginer De Cercetare In Cadastru",
"Asistent De Cercetare In Cadastru",
"Consilier Matematician",
"Expert Matematician",
"Inspector De Specialitate Matematician",
"Referent De Specialitate Matematician",
"Consilier Actuar",
"Expert Actuar",
"Inspector De Specialitate Actuar",
"Referent De Specialitate Actuar",
"Matematician",
"Actuar (Studii Superioare)",
"Consilier Statistician",
"Expert Statistician",
"Inspector De Specialitate Statistician",
"Referent De Specialitate Statistician",
"Cercetator In Matematica",
"Asistent De Cercetare In Matematica",
"Cercetator In Matematica Mecanica",
"Asistent De Cercetare In Matematica-Mecanica",
"Cercetator In Matematica Aplicata",
"Asistent De Cercetare In Matematica Aplicata",
"Cercetator In Matematica-Fizica",
"Asistent De Cercetare In Matematica-Fizica",
"Cercetator In Matematica Informatica",
"Asistent De Cercetare In Matematica-Informatica",
"Cercetator In Statistica",
"Asistent De Cercetare In Statistica",
"Cercetator In Demografie",
"Asistent De Cercetare In Demografie",
"Consilier Biolog",
"Expert Biolog",
"Inspector De Specialitate Biolog",
"Referent De Specialitate Biolog",
"Consilier Botanist",
"Expert Botanist",
"Inspector De Specialitate Botanist",
"Referent De Specialitate Botanist",
"Consilier Zoolog",
"Expert Zoolog",
"Inspector De Specialitate Zoolog",
"Referent De Specialitate Zoolog",
"Consilier Ecolog",
"Biolog",
"Zoolog",
"Botanist",
"Consilier Bacteriolog",
"Expert Bacteriolog",
"Inspector De Specialitate Bacteriolog",
"Referent De Specialitate Bacteriolog",
"Consilier Biochimist",
"Expert Biochimist",
"Inspector De Specialitate Biochimist",
"Referent De Specialitate Biochimist",
"Consilier Farmacolog",
"Expert Farmacolog",
"Inspector De Specialitate Farmacolog",
"Referent De Specialitate Farmacolog",
"Consilier Microbiolog",
"Expert Microbiolog",
"Inspector De Specialitate Microbiolog",
"Referent De Specialitate Microbiolog",
"Farmacolog",
"Bacteriolog",
"Microbiolog",
"Cercetator In Biologie",
"Asistent De Cercetare In Biologie",
"Cercetator In Microbiologie-Bacteriologie",
"Asistent De Cercetare In Microbiologie-Bacteriologie",
"Cercetator In Biologie Chimie",
"Asistent De Cercetare In Biologie Chimie",
"Cercetator In Botanica",
"Asistent De Cercetare In Botanica",
"Cercetator In Domeniul Zoologic",
"Asistent De Cercetare In Domeniul Zoologic",
"Cercetator In Ecologie Si Protectia Mediului",
"Asistent De Cercetare In Ecologie Si Protectia Mediului",
"Cercetator In Ingineria Genetica",
"Asistent De Cercetare In Ingineria Genetica",
"Consilier Inginer Agronom",
"Expert Inginer Agronom",
"Inspector De Specialitate Inginer Agronom",
"Referent De Specialitate Inginer Agronom",
"Consilier Inginer Horticol",
"Expert Inginer Horticol",
"Inspector De Specialitate Inginer Horticol",
"Referent De Specialitate Inginer Horticol",
"Consilier Inginer Zootehnist",
"Expert Inginer Zootehnist",
"Inspector De Specialitate Inginer Zootehnist",
"Referent De Specialitate Inginer Zootehnist",
"Subinginer Agronom",
"Subinginer Zootehnist",
"Inginer Tehnolog In Zootehnie",
"Proiectant Inginer In Agricultura",
"Proiectant Inginer In Zootehnie",
"Proiectant Inginer In Silvicultura",
"Consilier Inginer Silvic",
"Expert Inginer Silvic",
"Inspector De Specialitate Inginer Silvic",
"Referent De Specialitate Inginer Silvic",
"Inginer Imbunatatiri Funciare",
"Inginer/Subinginer Silvic",
"Inginer Agronom",
"Inginer Zootehnist",
"Consultant Tehnic In Productia De Cereale, Plante Tehnice Si Furaje",
"Subinginer Imbunatatiri Funciare",
"Agent Agricol",
"Brigadier Silvic",
"Padurar",
"Tehnician Agronom ",
"Tehnician Zootehnist ",
"Tehnician Silvic ",
"Clasificator Carcase",
"Operator De Insamantari Artificiale La Animale",
"Administrator Bunuri Agricole",
"Consultant Afaceri In Agricultura",
"Cercetator In Agricultura",
"Inginer De Cercetare In Agricultura",
"Asistent De Cercetare In Agricultura",
"Inginer De Cercetare In Pedologie-Agrochimie",
"Asistent De Cercetare In Pedologie-Agrochimie",
"Cercetator In Pedologie-Agrochimie",
"Cercetator In Horticultura",
"Inginer De Cercetare In Horticultura",
"Asistent De Cercetare In Horticultura",
"Cercetator In Agromontanologie",
"Inginer De Cercetare In Agromontanologie",
"Asistent De Cercetare In Agromontanologie",
"Cercetator In Silvicultura",
"Inginer De Cercetare In Silvicultura",
"Asistent De Cercetare In Silvicultura",
"Cercetator In Zootehnie",
"Asistent De Cercetare In Zootehnie",
"Cercetator In Biotehnologie Pentru Agricultura",
"Asistent De Cercetare In Biotehnologie Pentru Agricultura",
"Expert Ecolog",
"Inspector De Specialitate Ecolog",
"Referent De Specialitate Ecolog",
"Inginer Ecolog",
"Ecolog",
"Inginer Confectii Piele Si Inlocuitori",
"Inginer Textile, Pielarie",
"Inginer Tricotaje, Confectii",
"Subinginer Textile, Pielarie",
"Proiectant Inginer Textile, Pielarie",
"Consilier Inginer Textile, Pielarie",
"Expert Inginer Textile, Pielarie",
"Inspector Specialitate Inginer Textile, Pielarie",
"Referent De Specialitate Inginer Textile, Pielarie",
"Conceptor/Conceptor Cao",
"Specialist Incercari Componente Vehicule/Grup Motopropulsor/Optimizare Energetica/Sisteme De Masurare",
"Specialist Documentatie Studii",
"Instructor Sistem De Productie",
"Metodist",
"Responsabil Afacere",
"Manager De Cladire",
"Inginer Industrializarea Lemnului",
"Subinginer Industrializarea Lemnului",
"Consilier Inginer Industrializarea Lemnului",
"Expert Inginer Industrializarea Lemnului",
"Inspector De Specialitate Inginer Industrializarea Lemnului",
"Referent De Specialitate Inginer Industrializarea Lemnului",
"Cercetator In Tehnologia Prelucrarii Produselor Agricole",
"Inginer De Cercetare In Tehnologia Prelucrarii Produselor Agricole",
"Asistent De Cercetare In Tehnologia Prelucrarii Produselor Agricole",
"Cercetator In Pescuit Si Acvacultura",
"Inginer De Cercetare In Pescuit Si Acvacultura",
"Asistent De Cercetare In Pescuit Si Acvacultura",
"Specialist In Domeniul Calitatii",
"Auditor In Domeniul Calitatii",
"Analist Calitate",
"Analist Masuratori Metrologice",
"Analist Studiul Materialelor",
"Consultant Sistem De Calitate",
"Logistician Gestiune Flux",
"Programator Fabricatie/Lansator Fabricatie",
"Documentarist Ordonantare Logistica",
"Auditor Energetic Pentru Cladiri",
"Inginer Constructii Civile, Industriale Si Agricole",
"Subinginer Constructii Civile, Industriale Si Agricole",
"Inginer Instalatii Pentru Constructii",
"Inginer Cai Ferate, Drumuri Si Poduri",
"Inginer Constructii Hidrotehnice",
"Inginer Constructor Instalatii",
"Proiectant Inginer Instalatii",
"Proiectant Inginer Constructii",
"Consilier Inginer Constructii",
"Expert Inginer Constructii",
"Inspector De Specialitate Inginer Constructii",
"Referent De Specialitate Inginer Constructii",
"Conducator De Lucrari Civile",
"Diriginte Santier (Studii Superioare)",
"Cercetator In Constructii Civile, Industriale Si Agricole",
"Inginer De Cercetare In Constructii Civile, Industriale Si Agricole",
"Asistent De Cercetare In Constructii Civile, Industriale Si Agricole",
"Cercetator In Constructii De Cai Ferate, Drumuri Si Poduri",
"Inginer De Cercetare In Constructii De Cai Ferate, Drumuri Si Poduri",
"Asistent De Cercetare In Constructii De Cai Ferate, Drumuri Si Poduri",
"Cercetator In Constructii Hidrotehnice",
"Inginer De Cercetare In Constructii Hidrotehnice",
"Asistent De Cercetare In Constructii Hidrotehnice",
"Inginer De Cercetare In Ingineria Sanitara Si Protectia Mediului",
"Cercetator In Constructii Miniere",
"Inginer De Cercetare In Constructii Miniere",
"Asistent De Cercetare In Constructii Miniere",
"Cercetator In Instalatii",
"Inginer De Cercetare In Instalatii",
"Asistent De Cercetare In Instalatii",
"Cercetator In Stiinta Si Ingineria Materialelor Oxidice",
"Inginer De Cercetare In Ingineria Materialelor Oxidice",
"Asistent De Cercetare In Ingineria Materialelor Oxidice",
"Cercetator In Centrale Hidroelectrice In Ingineria Mediului",
"Inginer De Cercetare In Centrale Hidroelectrice In Ingineria Mediului",
"Asistent De Cercetare In Centrale Hidroelectrice In Ingineria Mediului",
"Inginer Mecanic",
"Subinginer Mecanic",
"Inginer Electromecanic Minier",
"Inginer Material Rulant Cale Ferata",
"Inginer Mecanica Agricola",
"Inginer Aviatie",
"Inginer Nave",
"Inginer Masini-Unelte",
"Inginer Mecanica Fina",
"Inginer Masini Termice",
"Inginer Masini Hidraulice Si Pneumatice",
"Inginer Autovehicule Rutiere",
"Inginer Mecanic Utilaj Tehnologic Chimic",
"Inginer Mecanic Utilaj Tehnologic Petrolier",
"Inginer Mecanic Utilaj Tehnologic Masini Agricole",
"Inginer Mecanic Utilaj Tehnologic Textil",
"Inginer Mecanic Utilaj Tehnologic Pentru Constructii",
"Inginer Mecanic Utilaj Tehnologic Pentru Prelucrare La Cald",
"Inginer Mecanic Masini Instalatii Miniere",
"Subinginer Mecanic Tehnologia Constructiilor De Masini",
"Subinginer Mecanic Utilaje Si Tehnica Sudurii",
"Subinginer Mecanic, Mecanica Fina",
"Subinginer Mecanic Material Rulant De Cale Ferata",
"Subinginer Mecanic Mecanica Agricola",
"Subinginer Mecanic Utilaj Tehnologic Pentru Chimie",
"Subinginer Mecanic Utilaje Pentru Constructii",
"Subinginer Mecanic Avioane Si Motoare De Aviatie",
"Subinginer Mecanic Constructii Corp De Nava",
"Subinginer Mecanic Instalatii Navale De Bord",
"Subinginer Mecanic Automobile",
"Subinginer Mecanic Utilaje Pentru Industria Lemnului",
"Subinginer Mecanic Utilaje Pentru Materiale De Constructie",
"Consilier Inginer Mecanic",
"Expert Inginer Mecanic",
"Inspector De Specialitate Inginer Mecanic",
"Referent De Specialitate Inginer Mecanic",
"Proiectant Inginer Aeronave",
"Proiectant Inginer Mecanic",
"Inginer Pilot De Incercare",
"Subinginer Proiectant Mecanic",
"Specialist Reglementari/Carti De Identitate Vehicule/Verificari Tehnice Inmatriculare/Inspectii Tehnice",
"Specialist Prestatii Vehicule",
"Specialist Mentenanta Mecanica Echipamente Industriale",
"Inginer/Subinginer Tehnolog Prelucrari Mecanice",
"Inginer Tehnolog In Fabricarea Armamentului Si Munitiei",
"Subinginer Tehnolog In Fabricarea Armamentului Si Munitiei",
"Inginer Pentru Protectia Navigatiei Aeriene (Comunicatii, Navigatie, Supraveghere)",
"Cercetator In Sisteme De Propulsie",
"Inginer De Cercetare In Sisteme De Propulsie",
"Asistent De Cercetare In Sisteme De Propulsie",
"Cercetator In Echipamente Si Instalatii De Bord",
"Inginer De Cercetare In Echipamente Si Instalatii De Bord",
"Asistent De Cercetare In Echipamente Si Instalatii De Bord",
"Cercetator In Masini Si Echipamente Termice",
"Inginer De Cercetare In Masini Si Echipamente Termice",
"Asistent De Cercetare In Masini Si Echipamente Termice",
"Cercetator In Masini Hidraulice Si Pneumatice",
"Inginer De Cercetare In Masini Hidraulice Si Pneumatice",
"Asistent De Cercetare In Masini Hidraulice Si Pneumatice",
"Cercetator In Echipamente De Proces",
"Inginer De Cercetare In Echipamente De Proces",
"Asistent De Cercetare In Echipamente De Proces",
"Cercetator In Mecanica Fina",
"Inginer De Cercetare In Mecanica Fina",
"Asistent De Cercetare In Mecanica Fina",
"Cercetator In Tehnologia Constructiilor De Masini",
"Inginer De Cercetare In Tehnologia Constructiilor De Masini",
"Asistent De Cercetare In Tehnologia Constructiilor De Masini",
"Cercetator In Constructii De Masini Agricole",
"Inginer De Cercetare In Constructii De Masini Agricole",
"Asistent De Cercetare In Constructii De Masini Agricole",
"Cercetator In Autovehicule Rutiere",
"Inginer De Cercetare In Autovehicule Rutiere",
"Asistent De Cercetare In Autovehicule Rutiere",
"Cercetator In Utilaje Si Instalatii Portuare",
"Inginer De Cercetare In Utilaje Si Instalatii Portuare",
"Asistent De Cercetare In Utilaje Si Instalatii Portuare",
"Cercetator In Utilaje Si Tehnologia Ambalarii",
"Inginer De Cercetare In Utilaje Si Tehnologia Ambalarii",
"Asistent De Cercetare In Utilaje Si Tehnologia Ambalarii",
"Cercetator In Creatia Tehnica In Constructia De Masini",
"Inginer De Cercetare In Creatia Tehnica In Constructia De Masini",
"Asistent De Cercetare In Creatia Tehnica In Constructia De Masini",
"Cercetator In Masini Si Instalatii Mecanice",
"Inginer De Cercetare In Masini Si Instalatii Mecanice",
"Asistent De Cercetare In Masini Si Instalatii Mecanice",
"Cercetator In Instalatii Si Utilaje Pentru Transportul Si Depozitarea Produselor Petroliere",
"Inginer Petrochimist",
"Subinginer Petrochimist",
"Proiectant Inginer Chimist",
"Consilier Inginer Chimist",
"Expert Inginer Chimist",
"Inspector De Specialitate Inginer Chimist",
"Referent De Specialitate Inginer Chimist",
"Consilier Inginer Petrochimist",
"Expert Inginer Petrochimist",
"Inspector De Specialitate Inginer Petrochimist",
"Referent De Specialitate Petrochimist",
"Biochimist",
"Inginer Chimist",
"Inginer In Industria Alimentara",
"Subinginer In Industria Alimentara",
"Proiectant Inginer Produse Alimentare",
"Consilier Inginer Industria Alimentara",
"Expert Inginer Industria Alimentara",
"Inspector De Specialitate Inginer Industria Alimentara",
"Referent De Specialitate Inginer Industria Alimentara",
"Cercetator In Tehnologia Substantelor Anorganice",
"Inginer De Cercetare In Tehnologia Substantelor Anorganice",
"Asistent De Cercetare In Tehnologia Substantelor Anorganice",
"Cercetator In Tehnologia Substantelor Organice",
"Inginer De Cercetare In Tehnologia Substantelor Organice",
"Asistent De Cercetare In Tehnologia Substantelor Organice",
"Cercetator In Petrochimie Si Carbochimie",
"Inginer De Cercetare In Petrochimie Si Carbochimie",
"Asistent De Cercetare In Petrochimie Si Carbochimie",
"Cercetator In Tehnologia Compusilor Macromoleculari",
"Inginer De Cercetare In Tehnologia Compusilor Macromoleculari",
"Asistent De Cercetare In Tehnologia Compusilor Macromoleculari",
"Cercetator In Controlul Calitatii Produselor Alimentare",
"Inginer De Cercetare In Controlul Calitatii Produseloralimentare",
"Asistent De Cercetare In Controlul Calitatii Produselor Alimentare",
"Inginer Metalurgie Extractiva",
"Inginer Minier",
"Subinginer Metalurgist",
"Subinginer Minier",
"Inginer Preparator Minier",
"Consilier Inginer Metalurg",
"Inspector De Specialitate Inginer Metalurg",
"Referent De Specialitate Inginer Metalurg",
"Consilier Inginer Minier",
"Expert Inginer Minier",
"Inspector De Specialitate Inginer Minier",
"Referent De Specialitate Inginer Minier",
"Inginer Prelucrari Metalurgice",
"Inginer Metalurgie Neferoasa",
"Inginer Petrolist",
"Subinginer Petrolist",
"Consilier Inginer Petrolist",
"Expert Inginer Petrolist",
"Referent Inginer Petrolist",
"Proiectant Inginer Petrolist",
"Inginer Tehnolog Metalurg",
"Proiectant Inginer Metalurg",
"Proiectant Inginer In Minerit",
"Inginer Mineralurg",
"Cercetator In Exploatari Miniere",
"Inginer De Cercetare In Exploatari Miniere",
"Asistent De Cercetare In Exploatari Miniere",
"Cercetator In Prepararea Substantelor Minerale Utile",
"Inginer De Cercetare In Prepararea Substantelor Minerale Utile",
"Asistent De Cercetare In Prepararea Substantelor Minerale Utile",
"Cercetator In Petrol (Extractie-Prospectiune)",
"Inginer De Cercetare In Petrol (Extractie-Prospectiune)",
"Asistent De Cercetare In Petrol (Extractieprospectiune)",
"Cercetator In Topografie Miniera",
"Inginer De Cercetare In Topografie Miniera",
"Asistent De Cercetare In Topografie Miniera",
"Cercetator In Ingineria Proceselor Siderurgice",
"Inginer De Cercetare In Ingineria Proceselor Siderurgice",
"Asistent De Cercetare In Ingineria Proceselor Siderurgice",
"Cercetator In Metalurgia Neferoasa",
"Inginer De Cercetare In Metalurgia Neferoasa",
"Asistent De Cercetare In Metalurgia Neferoasa",
"Inginer De Cercetare In Turnarea Metalelor",
"Asistent De Cercetare In Turnarea Metalelor",
"Cercetator In Prelucrari Plastice Si Tratamente Termice",
"Inginer De Cercetare In Prelucrari Plastice Si Tratamente Termice",
"Asistent De Cercetare In Prelucrari Plastice Si Tratamente Termice",
"Cercetator In Stiinta Materialelor",
"Inginer De Cercetare In Stiinta Materialelor",
"Asistent De Cercetare In Stiinta Materialelor",
"Cercetator In Tehnologii Carbochimice",
"Inginer De Cercetare In Tehnologii Carbochimice",
"Asistent De Cercetare In Tehnologii Carbochimice",
"Inginer De Cercetare In Instalatii Si Utilaje Pentru Transportul Si Depozitarea Produselor Petroliere",
"Asistent De Cercetare In Instalatii Si Utilaje Pentru Transportul Si Depozitarea Produselor Petroliere",
"Expert Inginer Metalurg",
"Inginer Prelucrarea Sticlei Si Ceramicii",
"Subinginer Prelucrarea Sticlei Si Ceramicii",
"Inginer Materiale De Constructii",
"Subinginer Materiale De Constructii",
"Consilier Inginer Prelucrarea Sticlei Si Ceramicii",
"Expert Inginer Prelucrarea Sticlei Si Ceramicii",
"Inspector De Specialitate Inginer Prelucrarea Sticlei Si Ceramicii",
"Referent De Specialitate Inginer Prelucrarea Sticlei Si Ceramicii",
"Proiectant Inginer Ceramica, Sticla",
"Chimist In Materiale Oxidice (Sticla, Ceramica)",
"Subinginer Tehnologia Celulozei Si Hartiei",
"Proiectant Inginer Celuloza Si Hartie",
"Consilier Inginer Tehnologia Celulozei Si Hartiei",
"Expert Inginer Tehnologia Celulozei Si Hartiei",
"Inspector De Specialitate Inginer Tehnologia Celulozei Si Hartiei",
"Referent De Specialitate Inginer Tehnologia Celulozei Si Hartiei",
"Cercetator In Informatica",
"Asistent De Cercetare In Informatica",
"Cercetator In Filatura-Tesatorie",
"Inginer De Cercetare In Filatura-Tesatorie",
"Asistent De Cercetare In Filatura-Tesatorie",
"Cercetator In Tricotaje-Confectii Textile",
"Inginer De Cercetare In Tricotaje-Confectii Textile",
"Asistent De Cercetare In Tricotaje-Confectii Textile",
"Cercetator In Tehnologia Chimica A Produselor Textile, Pieii, Blanurilor Si Inlocuitorilor",
"Inginer De Cercetare In Tehnologia Chimica A Produselor Textile, Pieii, Blanurilor Si Inlocuitorilor",
"Asistent De Cercetare In Tehnologia Chimica A Produselor Textile, Pieii, Blanurilor Si Inlocuitorilor",
"Cercetator In Confectii Din Piele Si Inlocuitori",
"Inginer De Cercetare In Confectii Din Piele Si Inlocuitori",
"Asistent De Cercetare In Confectii Din Piele Si Inlocuitori",
"Cercetator In Exploatari Forestiere",
"Inginer De Cercetare In Exploatari Forestiere",
"Asistent De Cercetare In Exploatari Forestiere",
"Inginer De Cercetare In Proiectarea Mobilei Si Produselor Finite Din Lemn",
"Cercetator In Prelucrarea Lemnului",
"Inginer De Cercetare In Prelucrarea Lemnului",
"Asistent De Cercetare In Prelucrarea Lemnului",
"Cercetator In Tehnologie Si Echipamente Neconventionale",
"Inginer De Cercetare In Tehnologie Si Echipamente Neconventionale",
"Asistent De Cercetare In Tehnologie Si Echipamente Neconventionale",
"Cercetator In Tehnologia Celulozei, Hartiei, Poligrafiei Si Fibrelor",
"Inginer De Cercetare In Tehnologia Celulozei, Hartiei, Poligrafiei Si Fibrelor",
"Asistent De Cercetare In Tehnologia Celulozei, Hartiei, Poligrafiei Si Fibrelor",
"Cercetator In Turnarea Metalelor",
"Administrator Societate Comerciala",
"Expert Achizitii Publice",
"Expert Elaborare Documentatii Tehnice De Montaj",
"Expert Monitorizare Si Control Lucrari De Montaj",
"Expert Prevenire-Reducere Riscuri Tehnologice",
"Expert Urmarire Comportare In Exploatare Lucrari Montaj",
"Dispecer Energetic Feroviar",
"Dispecer Centrala, Hidrocentru, Cascada, Dispecerate Teritoriale",
"Dispecer Retea Distributie",
"Dispecer Retele De Inalta Tensiune",
"Inginer Electroenergetica",
"Radiochimist",
"Subinginer Electroenergetica",
"Inginer Energetica Industriala",
"Inginer Termoenergetica",
"Proiectant Inginer Electrotehnic",
"Proiectant Inginer Energetician",
"Inginer Retele Electrice",
"Subinginer Retele Electrice",
"Inginer Hidroenergetica",
"Inginer Centrale Nuclearoelectrice",
"Subinginer Centrale Termoelectrice",
"Inginer Exploatare Instalatii Nucleare",
"Proiectant Subinginer Electrotehnic",
"Proiectant Sisteme De Securitate",
"Sef Tura Dispecer Energetic",
"Cercetator In Electrotehnica",
"Inginer De Cercetare In Electrotehnica",
"Asistent De Cercetare In Electrotehnica",
"Cercetator In Electrofizica",
"Inginer De Cercetare In Electrofizica",
"Asistent De Cercetare In Electrofizica",
"Cercetator In Metrologie",
"Inginer De Cercetare In Metrologie",
"Asistent De Cercetare In Metrologie",
"Cercetator In Electromecanica",
"Inginer De Cercetare In Electromecanica",
"Asistent De Cercetare In Electromecanica",
"Cercetator Roboti Industriali",
"Inginer De Cercetare Roboti Industriali",
"Asistent De Cercetare Roboti Industriali",
"Cercetator In Centrale Termoelectrice",
"Inginer De Cercetare In Centrale Termoelectrice",
"Asistent De Cercetare In Centrale Termoelectrice",
"Cercetator In Centrale Nuclearoelectrice",
"Inginer De Cercetare In Centrale Nuclearoelectrice",
"Asistent De Cercetare In Centrale Nuclearoelectrice",
"Cercetator In Electroenergetica",
"Inginer De Cercetare In Electroenergetica",
"Asistent De Cercetare In Electroenergetica",
"Cercetator In Energetica Industriala",
"Inginer De Cercetare In Energetica Industriala",
"Asistent De Cercetare In Energetica Industriala",
"Inginer Electromecanic Scb",
"Inginer Automatist",
"Inginer Navigatie",
"Inginer Electronist Transporturi, Telecomunicatii",
"Inginer Productie",
"Instructor Instalatii",
"Instructor Linii",
"Revizor Siguranta Circulatiei Feroviare",
"Subinginer Automatist",
"Subinginer Electronist Transporturi, Telecomunicatii",
"Subinginer Reglaje Subansamble",
"Inginer De Receptie Si Control Aeronave",
"Proiectant Inginer Electronist",
"Proiectant Inginer De Sisteme Si Calculatoare",
"Proiectant Inginer Electromecanic",
"Inginer Electromecanic",
"Subinginer Electromecanic",
"Capitan Secund",
"Capitan Port (Studii Superioare)",
"Specialist Mentenanta Electromecanica-Automatica Echipamente Industriale",
"Inspector Aeronautic",
"Inginer Sisteme De Securitate",
"Cercetator In Electronica Aplicata",
"Inginer De Cercetare In Electronica Aplicata",
"Asistent De Cercetare In Electronica Aplicata",
"Cercetator In Comunicatii",
"Inginer De Cercetare In Comunicatii",
"Asistent De Cercetare In Comunicatii",
"Cercetator In Microelectronica",
"Inginer De Cercetare In Microelectronica",
"Asistent De Cercetare In Microelectronica",
"Cercetator In Telecomenzi Si Electronica In Transporturi",
"Inginer De Cercetare In Telecomenzi Si Electronica In Transporturi",
"Asistent De Cercetare In Telecomenzi Si Electronica In Transporturi",
"Cercetator In Calculatoare",
"Inginer De Cercetare In Calculatoare",
"Asistent De Cercetare In Calculatoare",
"Cercetator In Automatica",
"Inginer De Cercetare In Automatica",
"Asistent De Cercetare In Automatica",
"Inginer Emisie",
"Inginer Montaj",
"Inginer Electrotehnist",
"Inginer Imagine",
"Inginer Sunet",
"Inginer-Sef Car Reportaj",
"Subinginer-Sef Car Reportaj",
"Inginer-Sef Schimb Emisie",
"Subinginer Iluminat Tehnologic",
"Inginer Proiectant Comunicatii",
"Subinginer Proiectant Comunicatii",
"Inginer/Inspector De Specialitate/Referent De Specialitate/Expert In Serviciile De Trafic Aerian",
"Consilier Tehnic",
"Inginer Iluminare",
"Sef Studio Rtv",
"Arhitect Cladiri",
"Conductor Arhitect",
"Arhitect Restaurari",
"Consilier Arhitect",
"Expert Arhitect",
"Inspector De Specialitate Arhitect",
"Referent De Specialitate Arhitect",
"Proiectant Arhitect",
"Cercetator In Arhitectura",
"Asistent De Cercetare In Arhitectura",
"Arhitect Peisagistica Si Amenajarea Teritoriului",
"Designer Industrial",
"Grafician Industrial",
"Lucrator In Ateliere De Modele",
"Costumier",
"Pictor Creator Costume",
"Modelier Confectii",
"Designer Vestimentar",
"Cercetator In Arte Plastice ",
"Asistent De Cercetare In Arte Plastice ",
"Cercetator In Arte Plastice ",
"Asistent De Cercetare In Arte Plastice-Textile (Tapiserie, Contexturi, Moda,Imprimeuri)",
"Cercetator In Arte Plastice ",
"Asistent De Cercetare In Arte Plastice ",
"Urbanist",
"Arhitect Urbanism",
"Cartograf",
"Inginer Geodez",
"Subinginer Geodez",
"Inginer Topograf",
"Inginer Topograf Minier",
"Proiectant Inginer Geodez",
"Designer Grafica (Studii Medii)",
"Animator Film De Animatie (Studii Medii)",
"Intermediarist Film Desene Animate (Studii Medii)",
"Stilizator Film Desene Animate (Studii Medii)",
"Asistent Regizor Studio",
"Asistent Regizor Emisie",
"Designer Floral",
"Videojurnalist",
"Art Director",
"Designer Pagini Web (Studii Superioare)",
"Designer Grafica (Studii Superioare)",
"Grafician Calculator (Studii Medii)",
"Designer Pagini Web (Studii Medii)",
"Medic Medicina Generala",
"Medic Rezident",
"Consilier Medic",
"Expert Medic",
"Inspector De Specialitate Medic",
"Referent De Specialitate Medic",
"Medic Primar",
"Medic Medicina In Familie",
"Cercetator In Medicina Generala",
"Asistent De Cercetare In Medicina Generala",
"Medic Specialist",
"Asistent Medical Generalist",
"Moasa",
"Cercetator In Medicina Traditionala",
"Asistent De Cercetare In Medicina Traditionala",
"Instructor Ergoterapie",
"Tehnician Homeopat",
"Instructor De Educatie Sanitara",
"Bioenergetician",
"Infoenergetician Radiestezist",
"Terapeut In Terapii Complementare",
"Paramedic",
"Epizotolog",
"Medic Veterinar",
"Medic Veterinar ",
"Cercetator In Medicina Veterinara",
"Asistent De Cercetare In Medicina Veterinara",
"Medic Stomatolog",
"Medic Stomatolog Rezident",
"Medic Stomatolog De Specialitate",
"Cercetator In Medicina Stomatologica",
"Asistent De Cercetare In Medicina Stomatologica",
"Farmacist",
"Farmacist Rezident",
"Farmacist De Specialitate",
"Inginer Clinic",
"Specialist In Domeniul Securitatii Si Sanatatii In Munca",
"Coordonator In Materie De Securitate Si Sanatate In Munca (Studii Superioare)",
"Cercetator Stiintific In Bacteriologie, Microbiochimie, Farmacologie",
"Asistent De Cercetare In Bacteriologie, Microbiologie, Biochimie, Farmacologie",
"Igienist",
"Evaluator De Risc Si Auditor In Domeniul Securitatii Sisanatatii In Munca",
"Fiziokinetoterapeut",
"Fizioterapeut",
"Cercetator In Fiziokinetoterapie",
"Asistent De Cercetare In Fiziokinetoterapie",
"Kinetoterapeut",
"Profesor De Cultura Fizica Medicala",
"Asistent De Nutritie",
"Dietetician",
"Nutritionist Si Dietetician",
"Instructor Logoped",
"Interpret In Limbaj Mimico-Gestual (Studii Medii)",
"Logoped",
"Audiolog",
"Optometrist (Studii Superioare)",
"Medic Igienist",
"Medic Expertiza A Capacitatii De Munca",
"Medic Legist",
"Bioinginer Medical",
"Asistent Medical (Studii Superioare)",
"Fizician Medical",
"Medic Specialist Psihiatru",
"Medic De Familie Cu Competente In Sanatatea Mintala",
"Cercetator In Educatie Fizica Si Sport",
"Asistent De Cercetare In Educatie Fizica Si Sport",
"Asistent Universitar",
"Conferentiar Universitar",
"Lector Universitar",
"Preparator Invatamantul Universitar",
"Profesor Universitar",
"Expert Centre De Perfectionare",
"Profesor In Invatamantul Profesional Si De Maistri",
"Profesor In Invatamantul Liceal, Postliceal",
"Profesor In Invatamantul Gimnazial",
"Profesor In Invatamantul Primar",
"Invatator",
"Institutor",
"Profesor In Invatamantul Prescolar",
"Educatoare",
"Educator Puericultor",
"Cercetator In Pedagogie",
"Asistent De Cercetare In Pedagogie",
"Consilier Invatamant",
"Expert Invatamant",
"Inspector Scolar",
"Referent De Specialitate Invatamant",
"Defectolog",
"Interpret In Limbaj Mimico-Gestual (Studii Superioare)",
"Educator In Unitati De Handicapati",
"Instructor-Educator In Unitati De Handicapati",
"Pedagog De Recuperare",
"Secretar Institut, Facultate",
"Mentor",
"Consilier Scolar",
"Designer Instructional",
"Dezvoltator De E-Learning",
"Laborant In Invatamant",
"Maistru Instructor",
"Pedagog Scolar",
"Secretar Scoala",
"Sef Atelier Scoala",
"Mediator Scolar",
"Inspector Scolar Pentru Implementarea Descentralizarii Institutionale",
"Inspector Scolar Pentru Managementul Resurselor Umane",
"Inspector Scolar Pentru Mentorat",
"Inspector Scolar Pentru Dezvoltarea Resursei Umane",
"Inspector Scolar Pentru Educatie Permanenta",
"Inspector Scolar Pentru Invatamant Particular Si Lternative Educationale",
"Inspector Scolar Pentru Invatamantul Special",
"Director Centrul Judetean De Asistenta Si Resurse Educationale (Cjare)",
"Consilier Pentru Tineret",
"Controlor Tezaur",
"Expert Contabil-Verificator",
"Revizor Contabil",
"Referent De Specialitate Financiar-Contabilitate",
"Auditor Intern",
"Controlor De Gestiune",
"Auditor Financiar",
"Cenzor",
"Comisar Garda Financiara",
"Consilier Financiar-Bancar",
"Expert Financiar-Bancar",
"Inspector Financiar-Bancar",
"Inspector Asigurari",
"Comisar Principal",
"Consultant Bugetar",
"Dealer",
"Evaluator",
"Analist Investitii",
"Manager De Fond Actiuni/Obligatiuni",
"Consultant Plasamente Valori Mobiliare",
"Agent Capital De Risc",
"Administrator Credite",
"Specialist Control Risc",
"Specialist Evaluare Daune",
"Lichidator",
"Administrator Judiciar",
"Analist Pret De Revenire/Costuri",
"Expert Fiscal",
"Consultant Fiscal",
"Inspector General De Banca",
"Economist Banca",
"Manager Banca",
"Manager De Operatiuni/Produs",
"Manager Relatii Cu Clientii Bancii/Societate De Leasing",
"Trezorier (Studii Superioare)",
"Analist Bancar/Societate De Leasing",
"Ofiter Bancar (Credite, Marketing, Produse Si Servicii Bancare)",
"Administrator Bancar/Produs Leasing",
"Operator Cifru (Mesaje Cifrate)",
"Proiectant Produse Bancare",
"Consultant Bancar",
"Agent Compensare (Interbancara)",
"Referent Bancar/Societate De Leasing",
"Ofiter Conformitate",
"Expert Conformitate",
"Ofiter Securitatea Informatiei (Security Officer ",
"Administrator De Risc",
"Analist Credite",
"Inspector De Specialitate Asigurari",
"Inspector De Specialitate Subscriere",
"Referent De Specialitate Asigurari",
"Consilier Vanzari Asigurari",
"Inspector Coordonator Asigurari",
"Inspector De Risc",
"Inspector De Specialitate Daune",
"Inspector Coordonator Daune",
"Specialist Sistem Asigurari",
"Expert Evaluator De Intreprinderi",
"Expert Evaluator De Proprietati Imobiliare",
"Expert Evaluator De Bunuri Mobile",
"Expert Evaluator De Active Financiare",
"Planificator/Specialist Plan Sinteze",
"Expert In Ingineria Costurilor Investitionale",
"Expert Contractare Activitati Investitionale",
"Expert Receptie Investitii Industriale",
"Expert Eficientizare Investitii",
"Expert Evaluare-Actualizare Devize Generale Investitii",
"Expert Elaborare-Evaluare Documentatii Achizitii Investitionale",
"Expert In Management Activitati Investitionale",
"Evaluator Proiecte",
"Inspector Casier",
"Broker De Tehnologii",
"Cercetator In Finante-Banci",
"Asistent De Cercetare In Finante-Banci",
"Cercetator In Gestiune, Contabilitate, Control Financiar",
"Asistent De Cercetare In Gestiune, Contabilitate, Control Financiar",
"Analist Financiar",
"Auditor Intern In Sectorul Public",
"Asistent Analist",
"Specialist Bancar",
"Analist Financiar Bancar",
"Asistent Bancar",
"Specialist/Analist Organizare",
"Manager Proiect",
"Specialist Imbunatatire Procese",
"Specialist Strategie Industriala",
"Responsabil Proces",
"Coordonator Secretariat Studiouri Teritoriale",
"Manager De Inovare",
"Expert In Conducerea Si Organizarea Activitatilor De Mentenanta",
"Manager Imbunatatire Procese",
"Specialist Plan Progres",
"Specialist In Planificarea, Controlul Si Raportarea Performantei Economice",
"Consilier Administratia Publica",
"Expert Administratia Publica",
"Inspector De Specialitate In Administratia Publica",
"Referent De Specialitate In Administratia Publica",
"Consultant In Administratia Publica",
"Reglementator",
"Agent De Dezvoltare",
"Administrator Public",
"Inspector De Integritate",
"Examinator De Stat De Specialitate",
"Administrator Publicatii",
"Agent Consular",
"Expert Accesare Fonduri Structurale Si De Coeziune Europene",
"Consilier Afaceri Europene",
"Referent Relatii Externe",
"Inspector De Trafic A.R.R. (Studii Superioare)",
"Expert Informatii Pentru Afaceri",
"Administrator Editura",
"Expert Aplicare Legislatie Armonizata In Domeniul Industriei Si Comertului",
"Expert Legislatia Muncii",
"Consilier Forta De Munca Si Somaj",
"Expert Forta De Munca Si Somaj",
"Inspector De Specialitate Forta De Munca Si Somaj",
"Expert In Securitate Si Sanatate In Munca",
"Referent De Specialitate Forta De Munca Si Somaj",
"Consilier Orientare Privind Cariera",
"Consultant In Domeniul Fortei De Munca",
"Analist Piata Muncii",
"Analist Recrutare/Integrare Salariati",
"Analist Sisteme Salarizare",
"Consultant Reconversie-Mobilitate Personal",
"Consultant Conditii De Munca",
"Specialist Sisteme De Calificare",
"Specialist Resurse Umane",
"Consilier Vocational",
"Consultant In Standardizare",
"Consultant In Resurse Umane",
"Consultant Intern In Resurse Umane",
"Specialist In Formare",
"Specialist In Recrutare",
"Specialist In Compensatii Si Beneficii",
"Specialist In Dezvoltare Organizationala",
"Specialist In Relatii De Munca",
"Formator",
"Formator De Formatori",
"Organizator/Conceptor/Consultant Formare",
"Inspector De Specialitate Formare, Evaluare Si Selectie Profesionala",
"Evaluator De Competente Profesionale",
"Manager De Formare",
"Administrator De Formare",
"Art Director Publicitate (Studii Medii)",
"Organizator Activitate Turism (Studii Superioare)",
"Specialist Marketing",
"Manager De Produs",
"Specialist In Relatii Publice",
"Mediator",
"Referent De Specialitate Marketing",
"Specialist Protocol Si Ceremonial",
"Consultant Cameral",
"Purtator De Cuvant",
"Brand Manager",
"Organizator Protocol",
"Organizator Relatii",
"Organizator Targuri Si Expozitii",
"Prezentator Expozitii",
"Specialist Relatii Sociale",
"Expert Relatii Externe",
"Curier Diplomatic",
"Specialist Garantii Auto",
"Analist Servicii Client",
"Asistent Director/Responsabil De Functiune (Studii Superioare)",
"Corespondent Comercial",
"Asistent Comercial",
"Specialist In Activitatea De Lobby",
"Analist Cumparari/Consultant Furnizori",
"Reprezentant Medical",
"Proiectant Sisteme Informatice",
"Analist",
"Programator",
"Inginer De Sistem In Informatica",
"Programator De Sistem Informatic",
"Inginer De Sistem Software",
"Manager Proiect Informatic",
"Specialist In Domeniul Proiectarii Asistate Pe Calculator",
"Specialist In Proceduri Si Instrumente De Securitate A Istemelor Informatice",
"Consultant In Informatica",
"Administrator Baze De Date",
"Administrator Sistem De Securitate Bancara",
"Administrator De Retea De Calculatoare",
"Administrator De Retea De Telefonie Voip",
"Avocat",
"Jurisconsult",
"Consilier Juridic",
"Procuror",
"Judecator",
"Magistrat-Asistent",
"Judecator Inspector",
"Magistrat Consultant",
"Consilier De Probatiune",
"Inspector Probatiune",
"Executor Judecatoresc",
"Inspector Justitie",
"Expert Jurist",
"Consilier De Justitie",
"Referent De Specialitate In Justitie",
"Notar",
"Inspector General Judecatoresc",
"Inspector General Notarial",
"Inspector General Penitenciare",
"Consilier Armonizare Legislativa",
"Expert Armonizare Legislativa",
"Analist Armonizare Legislativa",
"Registrator Carte Funciara",
"Revizor Jurist",
"Cercetator In Domeniul Stiintelor Juridice",
"Asistent De Cercetare In Domeniul Stiintelor Juridice",
"Executor Bancar",
"Consilier Proprietate Industriala Autorizat",
"Specialist Proprietate Intelectuala",
"Expert Prevenire Si Combatere A Coruptiei",
"Arhivist",
"Conservator Opere De Arta Si Monumente Istorice (Studii Superioare)",
"Muzeograf",
"Restaurator Opere De Arta Si Monumente Istorice (Studii Superioare)",
"Conservator Arhiva (Studii Superioare)",
"Restaurator Arhiva (Studii Superioare)",
"Restaurator Bunuri Culturale (Studii Superioare)",
"Bibliograf",
"Bibliotecar (Studii Superioare)",
"Documentarist (Studii Superioare)",
"Referent Difuzare Carte",
"Lector Carte",
"Bibliotecar Arhivist",
"Referent De Specialitate Asezamant Cultural",
"Consilier/Expert/Inspector/Referent/Economist In Management",
"Consilier/Expert/Inspector/Referent/Economist In Economie Generala",
"Consilier/Expert/Inspector/Referent/Economist In Economia Mediului",
"Consilier/Expert/Inspector/Referent/Economist In Comert Si Marketing",
"Consilier/Expert/Inspector/Referent/Economist In Relatii Economice Internationale",
"Consilier/Expert/Inspector/Referent/Economist In Gestiunea Economica",
"Consultant In Management",
"Tehnician Economist",
"Inginer Economist",
"Inspector De Concurenta",
"Administrator Financiar (Patrimoniu) (Studii Superioare)",
"Cercetator Economist In Management",
"Asistent De Cercetare Economist In Management",
"Cercetator Economist In Economia Mediului",
"Asistent De Cercetare Economist In Economia Mediului",
"Cercetator Economist In Economia Generala",
"Asistent De Cercetare Economist In Economia Generala",
"Cercetator Economist In Economie Agroalimentara",
"Asistent De Cercetare Economist In Economie Agroalimentara",
"Cercetator Economist In Marketing",
"Asistent De Cercetare Economist In Marketing",
"Cercetator Economist In Relatii Economice Internationale",
"Asistent De Cercetare Economist In Relatii Economice Internationale",
"Cercetator Economist In Gestiunea Economica",
"Asistent De Cercetare Economist In Gestiuneaeconomica",
"Secretar Economic (Studii Superioare)",
"Sociolog",
"Geograf",
"Analist De Mediu",
"Analist In Turism",
"Analist Teritorial",
"Arheolog",
"Cercetator De Dezvoltare Comunitara",
"Cercetator In Sociologie",
"Asistent De Cercetare In Sociologie",
"Cercetator In Antropologie",
"Asistent De Cercetare In Antropologie",
"Cercetator In Geografie",
"Asistent De Cercetare In Geografie",
"Cercetator In Arheologie",
"Asistent De Cercetare In Arheologie",
"Cercetator In Etnologie",
"Asistent De Cercetare In Etnologie",
"Filozof",
"Istoric",
"Istoriograf",
"Politolog",
"Cercetator In Filozofie",
"Asistent De Cercetare In Filozofie",
"Cercetator In Istorie",
"Asistent De Cercetare In Istorie",
"Cercetator In Stiintele Politice",
"Asistent De Cercetare In Stiintele Politice",
"Psiholog In Specialitatea Psihologie Clinica",
"Psiholog In Specialitatea Consiliere Psihologica",
"Psiholog In Specialitatea Psihoterapie",
"Psiholog In Specialitatea Psihologia Muncii Si Organizationala",
"Psiholog In Specialitatea Psihologia Transporturilor",
"Psiholog In Specialitatea Psihologia Aplicata In Servicii",
"Psiholog In Specialitatea Psihologie Educationala, Consiliere Scolara Si Vocationala",
"Psiholog In Specialitatea Psihopedagogie Speciala",
"Psiholog In Specialitatea Psihologie Aplicata In Domeniul Securitatii Nationale",
"Psiholog In Specialitatea Psihologie Judiciara ",
"Psiholog",
"Psihopedagog",
"Expert Poligraf",
"Psiholog Scolar",
"Cercetator In Psihologie",
"Asistent De Cercetare In Psihologie",
"Cercetator In Psihopedagogie Speciala",
"Asistent De Cercetare In Psihopedagogie Speciala",
"Terapeut Ocupational",
"Asistent Social Nivel Superior",
"Consilier In Domeniul Adictiilor",
"Ofiter Control Doping",
"Art-Terapeut",
"Asistent Social Cu Competenta In Sanatatea Mintala",
"Specialist In Evaluarea Vocationala A Persoanelor Cu Dizabilitati",
"Specialist In Angajare Asistata",
"Instructor-Educator Pentru Activitati De Resocializare",
"Asistent Pentru Ingrijirea Persoanelor Varstnice",
"Cercetator In Asistenta Sociala",
"Asistent De Cercetare In Asistenta Sociala",
"Arhiepiscop",
"Arhiereu-Vicar",
"Arhondar",
"Cantor",
"Capelan",
"Cardinal",
"Chevrasames",
"Cantaret Bisericesc",
"Consilier Culte",
"Conducator Arhiepiscopal",
"Diacon",
"Episcop",
"Exarh",
"Haham",
"Harmonist",
"Hatip",
"Imam",
"Inspector Culte",
"Majghian",
"Melamed",
"Mitropolit",
"Muezin",
"Muftiu",
"Organist",
"Pastor",
"Patriarh",
"Preot",
"Presedinte Culte",
"Protopop",
"Provicar",
"Rabin",
"Secretar Culte",
"Staret ",
"Treibar",
"Vestitor",
"Vicar",
"Poet",
"Scriitor",
"Comentator Publicist",
"Corector (Studii Superioare)",
"Corespondent Special (Tara Si Strainatate)",
"Corespondent Radio",
"Corespondent Presa",
"Critic De Arta",
"Editorialist",
"Fotoreporter",
"Lector Presa/Editura",
"Publicist Comentator",
"Redactor",
"Reporter (Studii Superioare)",
"Reporter Operator",
"Secretar De Emisie (Studii Superioare)",
"Secretar De Redactie (Studii Superioare)",
"Secretar Responsabil De Agentie",
"Sef Agentie Publicitate",
"Tehnoredactor",
"Ziarist",
"Critic Literar",
"Critic Muzical",
"Comentator Radio Tv",
"Redactor Rubrica",
"Filolog",
"Interpret",
"Interpret Relatii Diplomatice",
"Referent Literar",
"Secretar Literar",
"Traducator (Studii Superioare)",
"Translator",
"Grafolog",
"Revizor Lingvist",
"Terminolog",
"Translator Emisie",
"Cercetator In Lingvistica",
"Asistent De Cercetare In Lingvistica",
"Cercetator In Filologie",
"Asistent De Cercetare In Filologie",
"Caricaturist (Studii Superioare)",
"Artist Plastic",
"Desenator Film Animatie",
"Grafician",
"Machetist",
"Pictor",
"Pictor Scenograf",
"Sculptor",
"Sculptor Papusi",
"Restaurator Tablouri",
"Acompaniator",
"Artist Liric",
"Concert Maestru",
"Corepetitor",
"Corist",
"Dirijor",
"Ilustrator Muzical (Studii Superioare)",
"Maestru Studii Canto",
"Instrumentist",
"Maestru Cor",
"Referent Muzical",
"Secretar Muzical",
"Sef Orchestra",
"Solist Instrumentist",
"Solist Vocal",
"Sufleur Opera",
"Copiator Note Muzicale",
"Specialist Instrumente De Suflat",
"Artist Instrumentist",
"Solist Concertist",
"Dirijor Cor",
"Maestru Corepetitor",
"Artist Liric Opera",
"Corist Opera",
"Maestru Acordor Pian Clavecin",
"Maestru Lutier",
"Specialist Orga",
"Regizor Muzical",
"Cantaret",
"Instrumentist Muzicant",
"Disc-Jockey",
"Video-Jockey",
"Maestru De Ceremonii",
"Instrumentist (Studii Medii)",
"Balerin",
"Coregraf",
"Maestru Studii De Balet",
"Maestru De Balet",
"Solist Balet",
"Maestru Dans",
"Dansator",
"Instructor De Dans",
"Consultant Artistic",
"Corector Transmisie",
"Instructor Film",
"Instructor Retea Cinematografica",
"Lector Scenarii",
"Intermediarist Film De Desene Animate (Studii Superioare)",
"Stilizator Film De Desene Animate (Studii Superioare)",
"Producator Delegat Film",
"Realizator Emisiuni Rtv",
"Regizor Artistic",
"Regizor Emisie",
"Regizor Studio",
"Regizor Sunet",
"Regizor Tehnic",
"Secretar Sef Productie Film",
"Sufleur Teatru",
"Maestru Artist Circ",
"Producator Rtv (Stiri)",
"Editor Rtv (Stiri)",
"Director Imagine",
"Referent De Specialitate Selectie Programe Tv",
"Copywriter Publicitate (Studii Superioare)",
"Mediaplanner",
"Producator Delegat Evenimente De Marketing",
"Redactor Prezentator De Televiziune",
"Animator Film De Animatie (Studii Superioare)",
"Director Productie Film",
"Coordonator Productie Film",
"Asistent Productie Film",
"Producator Audiovideo",
"Editor Coordonator Programe Tv",
"Director De Creatie",
"Organizator Productie (Studii Superioare)",
"Scenograf",
"Asistent Scenograf",
"Videojurnalist (Studii Superioare)",
"Producator Delegat Pentru Teatru",
"Regizor Culise",
"Regizor Scena",
"Secretar Platou",
"Actor",
"Actor Manuitor De Papusi",
"Artist Circ",
"Prezentator (Crainic) Radio",
"Prezentator (Crainic) Televiziune",
"Acrobat",
"Clovn",
"Magician",
"Hipnotizator",
"Trapezist",
"Cascador",
"Figurant",
"Dresor",
"Laborant Chimist",
"Tehnician Chimist",
"Laborant Determinari Fizico-Mecanice",
"Tehnician Determinari Fizico-Mecanice",
"Laborant Determinari Geologice Si Geotehnice",
"Laborant Tehnica Nucleara",
"Tehnician Meteorolog",
"Tehnician Geolog",
"Tehnician Hidrometru",
"Prospector ",
"Tehnician Hidrolog",
"Tehnician Hidrogeolog",
"Laborant Operator Centrale Termice",
"Metrolog",
"Tehnician Metrolog",
"Asistent Fizica Si Chimie",
"Operator Meteorolog",
"Meteorolog Aeronautic Tehnician",
"Operator Specialist Curatare Chimica La Schimbatoarele De Caldura Cu Placi",
"Maistru Constructii Civile, Industriale Si Agricole",
"Maistru Normator",
"Tehnician Constructor",
"Tehnician Hidroamelioratii",
"Tehnician Hidrotehnic",
"Tehnician Topometrist",
"Tehnician Proiectant In Constructii",
"Maistru Instalator In Constructii",
"Tehnician Instalatii In Constructii",
"Diriginte Santier",
"Tehnician Laborant Pentru Lucrari De Drumuri Si Poduri",
"Tehnician In Industria Materialelor De Constructii",
"Maistru In Industria Materialelor De Constructii",
"Tehnician Proiectant In Industria Materialelor De Constructii",
"Tehnician Devize Si Masuratori In Constructii",
"Tehnician Devizier",
"Tehnician Atasamentist",
"Dispecer Gestiune Uraniu",
"Maistru Electromecanic",
"Maistru Energetician/Electrician",
"Tehnician Electroenergetician, Termoenergetician",
"Tehnician Electromecanic",
"Tehnician Energetician/Electrician",
"Tehnician Proiectant Energetician/Electrician",
"Maistru Electrician In Constructii",
"Tehnician Mentenanta Electromecanica ",
"Sef/Sef Adjunct Tura Statie Electrica (Studii Medii)",
"Maistru Electronica",
"Tehnician Electronica",
"Tehnician Proiectant Electronica",
"Maistru Cazangerie",
"Maistru Instalatii Navale",
"Maistru Intretinere Si Reparatii Masini-Unelte, Utilitati, Service, Prototipuri",
"Maistru Lacatus, Constructii Metalice",
"Maistru Lacatus Mecanic",
"Tehnician Proiectant Mecanic",
"Mecanic Pentru Intretinerea Aparatelor De Lansare La Zbor",
"Maistru Mecanic",
"Maistru Mecanic Auto",
"Maistru Mecanica Agricola",
"Maistru Mecanica Fina",
"Maistru Montaj",
"Maistru Prelucrari Mecanice",
"Maistru Sculer-Matriter",
"Maistru Sudura",
"Tehnician Constructii Navale",
"Tehnician Instalatii De Bord (Avion)",
"Tehnician Masini Si Utilaje",
"Tehnician Mecanic",
"Tehnician Prelucrari Mecanice",
"Tehnician Sudura",
"Tehnician Tehnolog Mecanic",
"Maistru Mecanic Masini Si Utilaje Pentru Constructii",
"Tehnician Mentenanta Mecanica Echipamente Industriale",
"Tehnician Incercari Componente Vehicule/Grup Motopropulsor/Optimizare Energetica/Sisteme De Masurare",
"Tehnician Documentatie Studii",
"Tehnician Prestatii Vehicule",
"Tehnician Reglementari/Omologari Oficiale",
"Tehnician/Tehnician Responsabil Afacere, Metode Implantare",
"Tehnician/Tehnician Responsabil Afacere, Metode Gestiune Mijloace Si Utilaje",
"Tehnician/Tehnician Responsabil Afacere, Metode Pregatire De Industrializare",
"Tehnician/Tehnician Responsabil Afacere, Metode Logistica",
"Tehnician/Tehnician Responsabil Afacere, Metode Organizarea Si Masurarea Muncii",
"Maistru Fabricarea Armamentului",
"Inspector Cu Supravegherea Si Verificarea Tehnica A Instalatiilor",
"Inspector Iscir",
"Decontaminator",
"Laborant Apa Si Apa Grea",
"Laborant Control Dozimetrie",
"Laborant Petrolist/Industria Chimica",
"Maistru Petrolist/Industria Chimica",
"Laborant Apa Potabila",
"Tehnician Petrolist Chimie Industriala",
"Laborant Petrochimist",
"Maistru La Fabricarea Munitiei",
"Laborant Structura Macroscopica Si Microscopica",
"Maistru Metalurgie",
"Maistru Minier",
"Maistru Presator Metale",
"Maistru Termotehnist",
"Tehnician Metalurgie",
"Tehnician Minier",
"Tehnician Proiectant Minier",
"Tehnician Proiectant Metalurg",
"Tehnician Mineralurg",
"Maistru Mineralurg",
"Maistru Termist-Tratamentist",
"Probator Hidraulic Piese Turnate",
"Desenator Tehnic",
"Trasator",
"Desenator",
"Topograf",
"Trasator Naval ",
"Trasator Optic",
"Tehnician Proiectant",
"Maistru In Industria Celulozei Si Hartiei",
"Maistru Tipograf",
"Paginator Tipograf",
"Tehnician Normare, Salarizare, Organizare",
"Tehnician Pret De Cost",
"Tehnician Programare, Lansare, Urmarirea Productiei",
"Tehnician Pret De Revenire/Costuri",
"Tehnician Gestiune Salariala",
"Tehnician Gestiunea Productiei",
"Tehnician Gestiune Stoc",
"Maistru In Industriile Textila, Pielarie",
"Tehnician In Industria Confectiilor Din Piele Si Inlocuitori",
"Tehnician In Industria Confectiilor Si Tricotajelor",
"Tehnician In Industria Incaltamintei",
"Tehnician In Industria Pielariei",
"Tehnician In Industria Textila",
"Tehnician Proiectant Textile, Pielarie",
"Laborant In Industriile Textila, Pielarie",
"Sef Formatie Industria Confectiilor Imbracaminte",
"Expert Tehnic Extrajudiciar",
"Tehnician In Industria Sticlei Si Ceramicii",
"Maistru In Industria Sticlei Si Ceramicii",
"Maistru Frigotehnist",
"Tehnician Frigotehnist",
"Tehnician In Industria Alimentara",
"Tehnician Laborant Analize Produse Alimentare",
"Tehnician In Industria Alimentara Extractiva",
"Tehnician In Industria Alimentara Fermentativa",
"Tehnician In Industria Carnii, Laptelui Si Conservelor",
"Tehnician In Morarit Si Panificatie",
"Tehnician Proiectant In Industria Alimentara",
"Maistru In Industria Alimentara",
"Tehnolog Alimentatie Publica",
"Operator Control Nedistructiv",
"Operator Control Nedistructiv Cu Radiatii Penetrante",
"Operator Control Nedistructiv Cu Ultrasunete",
"Operator Control Nedistructiv Cu Lichide Penetrante",
"Operator Control Nedistructiv Cu Particule Magnetice",
"Operator Control Nedistructiv Cu Curenti Turbionari",
"Operator Control Nedistructiv Pentru Verificarea Etanseitatii",
"Operator Responsabil Cu Supravegherea Tehnica A Instalatiilor",
"Masurator De Gaze, Temperatura Si Radiatii",
"Controlor De Productie La Minele De Aur Nativ",
"Salvator Minier",
"Controlor Calitate Dupa Efectuarea Probelor La Armament Si Munitie",
"Controlor Calitate Pentru Executia Elementelor La Armament Si Munitie",
"Controlor De Calitate La Protejari Metalice",
"Operator La Instalatiile Din Centrale Electrice",
"Masinist La Instalatiile Din Centrale Electrice",
"Operator La Instalatiile De Cazane Din Centrale Electrice",
"Operator La Instalatiile De Turbine Cu Abur Sau Gaze",
"Operator La Camera De Comanda Termica",
"Masinist La Instalatiile Hidrotehnice Din Centraleelectrice",
"Masinist La Instalatiile De Turbine Hidraulice",
"Masinist La Centrale Diesel",
"Operator Punct Termic",
"Operator Centrala Termica",
"Automatist Pentru Supraveghere Si Intretinere Cazane",
"Operator Curatare Chimica La Schimbatoarele De Caldura Cu Placi",
"Operator Surse Regenerabile De Energie",
"Operator La Instalatii De Incinerare",
"Operator Hidraulic In Alimentarile Cu Apa",
"Operator Circuite Retea Apa",
"Masinist La Conditionarea Aerului",
"Operator La Tratarea Apei Tehnologice",
"Operator Masini Refrigeratoare (Conservare Prin Frig)",
"Operator Chimist La Chimizarea Metanului",
"Rafinor",
"Distilator La Prelucrarea Titeiului",
"Operator Instalatii Imbuteliere Gaz Petrol Lichefiat",
"Maistru-Operator La Roboti Industriali",
"Tehnician-Operator La Roboti Industriali",
"Tehnician In Industria Celulozei Si Hartiei",
"Controlor De Conformitate In Industria De Masini",
"Tehnician Asigurarea Calitatii",
"Tehnician Analist Calitate",
"Tehnician Cotator Calitate",
"Tehnician In Bacteriologie",
"Tehnician In Biochimie",
"Tehnician In Hematologie",
"Tehnician In Serologie",
"Tehnician In Biologie",
"Tehnician In Protectia Mediului (Tehnician Ecolog)",
"Evaluator Si Auditor De Mediu",
"Tehnician Agronom ",
"Tehnician Zootehnist ",
"Tehnician Pedolog",
"Tehnician In Industrializarea Lemnului",
"Tehnician Proiectant In Industrializarea Lemnului",
"Maistru In Industrializarea Lemnului",
"Tehnician Silvic ",
"Tehnician In Reconstructia Ecologica",
"Tehnician Cadastru Forestier",
"Tehnician Amenajist",
"Tehnician Proiectant In Reconstructie Ecologica",
"Ofiter Ajutor Fluvial/Portuar",
"Ofiter Rtg",
"Ofiter Electrician Fluvial/Portuar",
"Agent De Nava",
"Ofiter Electrician Maritim",
"Ofiter Mecanic Maritim",
"Ajutor Ofiter Mecanic Fluvial",
"Capitan Fluvial",
"Capitan Port",
"Ofiter Intendent",
"Ofiter Port",
"Ofiter De Punte Fluvial/Portuar",
"Pilot De Dunare Maritima",
"Sef Echipaj Maritim/Fluvial",
"Pilot De Mare Larga, Pilot De Port Maritim",
"Dragor Maritim/Fluvial",
"Pilot De Port Maritim Aspirant/Pilot De Dunare Aspirant",
"Ofiter De Punte Maritim",
"Ofiter De Punte Maritim Aspirant/Ofiter Mecanic Maritim Aspirant/Ofiter Electrician Maritim Aspirant",
"Comandant Detasament Zbor",
"Comandant Insotitor De Bord",
"Copilot",
"Inspector Pilotaj",
"Mecanic Navigant Aviatie",
"Pilot Aeronave",
"Pilot Comandant Avion",
"Pilot Incercare",
"Pilot Receptie Si Control Aeronave",
"Parasutist Receptie Si Control",
"Parasutist Incercator",
"Pilot Parasutism Incercator",
"Pilot Instructor Aeronave",
"Instructor Parasutism",
"Mecanic Navigant Instructor",
"Maistru Aviatie",
"Tehnician Aviatie",
"Controlor Dirijare Nonradar",
"Controlor Sol",
"Controlor Trafic Aviatia Civila",
"Dispecer Sol",
"Navigator Dirijare Radar",
"Navigator Aviatia Civila",
"Navigator Dirijare Nonradar",
"Navigator Dirijare Zbor",
"Navigator Instructor Dirijare Radar Si Nonradar",
"Navigator Sol",
"Operator Radar",
"Operator Radiotelecomunicatii Aeronautice",
"Controlor Trafic Aerian Dirijare Nonradar",
"Controlor Trafic Aerian Dirijare Radar",
"Controlor Trafic Aerian Informare",
"Navigator Informare",
"Operator/Specialist/Instructor Telecomunicatii Aeronauticeaviatie Civila",
"Sef Tura Telecomunicatii Aeronautice Aviatie Civila",
"Controlor Trafic Aerian (Simulator Trafic Aerian)",
"Navigator Instructor Informare",
"Agent Salvare Aeroportuara Si Instalatii De Stinsincendii",
"Masinist Agregate Aerodrom",
"Operator Instalatii Control Antiterorist/Antideturnare",
"Operator Radionavigant Aviatie",
"Operator Radionavigant Instructor Aviatie",
"Tehnician Securitate Aeriana",
"Operator De Handling",
"Inspector Siguranta Operationala",
"Agent De Securitate Aeroportuara",
"Dispecer Operatiuni De Zbor",
"Referent/Inspector In Serviciile De Trafic Aerian",
"Operator Dispecerat Operational De Supraveghere In Serviciile De Trafic Aerian",
"Tehnician Protectia Navigatiei Aeriene (Comunicatii, Navigatie, Supraveghere)",
"Maistru Protectia Navigatiei Aeriene (Comunicatii, Navigatie, Supraveghere)",
"Sef Tura Protectia Navigatiei Aeriene (Comunicatii, Navigatie, Supraveghere)",
"Tehnician In Serviciile De Trafic Aerian",
"Maistru Aparate Electromedicale",
"Tehnician Aparate Electromedicale",
"Autopsier",
"Asistent Farmacist",
"Laborant Farmacie",
"Tehnician Protezist-Ortezist",
"Tehnician Acustician-Audioprotezist",
"Evaluator Asigurari",
"Laborant In Ocrotirea Sanatatii",
"Sora Medicala",
"Asistenta Puericultoare",
"Mercantizor",
"Agent Veterinar",
"Asistent Veterinar",
"Autopsier La Ecarisaj",
"Tehnician Veterinar",
"Tehnician Dentar",
"Asistent Medical Comunitar",
"Optician Medical",
"Optometrist (Studii Medii)",
"Maseur",
"Asistent Medical Fizioterapie",
"Ergoterapeut",
"Reflexoterapeut",
"Oficiant Medical",
"Tehnician Sanitar",
"Asistent Medical Consiliere Hiv/Sida",
"Manager Al Sistemelor De Management Al Calitatii",
"Manager Al Sistemelor De Management De Mediu",
"Auditor De Mediu",
"Expert/Specialist Standardizare",
"Monitor Mediul Inconjurator",
"Inspector Pentru Conformare Ecologica",
"Asistent Standardizare",
"Manager Al Sistemului De Management Al Riscului",
"Manager Al Sistemului De Management Securitate Si Sanatate In Munca",
"Responsabil De Mediu",
"Auditor De Sistem De Management Pentru Sanatate Si Securitate Ocupationala",
"Inspector Protectia Mediului",
"Specialist In Managementul Deseurilor",
"Auditor In Domeniul Sigurantei Alimentare",
"Manager In Domeniul Sigurantei Alimentare",
"Auditor Responsabilitate Sociala",
"Responsabil Al Managementului Responsabilitatii Sociale",
"Manager De Responsabilitate Sociala",
"Inspector Sanitar",
"Inspector Protectie Sociala",
"Tehnician In Securitate Si Sanatate In Munca",
"Tehnician Conditii De Munca Si Securitate",
"Inspector In Domeniul Securitatii Si Sanatatii In Munca",
"Coordonator In Materie De Securitate Si Sanatate In Munca (Studii Medii)",
"Operator Control Nedistructiv Pentru Examinare Vizuala",
"Brancardier",
"Cambist (Broker Valori)",
"Agent De Schimb",
"Intermediar In Activitatea Financiara Si Comerciala (Broker)",
"Broker Bursa De Marfuri",
"Agent De Vanzari Directe (Produse Financiar-Bancare)",
"Teleoperator Financiar-Bancar",
"Agent Marketing Pensii Private",
"Analist Tehnic Piete Financiare",
"Ofiter Operatiuni Financiar-Bancare",
"Calculator Devize",
"Contabil",
"Tehnician Merceolog",
"Planificator",
"Revizor Gestiune",
"Contabil Bugetar",
"Secretar Economic (Studii Medii)",
"Merceolog",
"Referent",
"Referent Statistician",
"Statistician",
"Statistician Medical",
"Actuar",
"Tehnician Planificare/Urmarire Sinteze",
"Estimator Licitatii",
"Evaluator Tehnic Daune Auto",
"Agent De Asigurare",
"Broker In Asigurari",
"Reprezentant Tehnic",
"Reprezentant Comercial",
"Agent De Vanzari",
"Agent Comercial",
"Agent Vanzari Standarde Si Produse Conexe",
"Agent Contractari Si Achizitii (Broker Marfuri)",
"Receptioner Contractari-Achizitii",
"Administrator Cumparari",
"Agent Cumparari",
"Declarant Vamal",
"Agent Tranzit",
"Agent Maritim",
"Agent Repartizare A Fortei De Munca",
"Agent Orientare Profesionala A Somerilor/Agent Informare Privind Cariera",
"Agent Evidenta Si Plata A Ajutorului De Somaj",
"Inspector/Referent Resurse Umane",
"Agent Ocupare",
"Analist Resurse Umane",
"Tehnician Mobilitate Personal",
"Tehnician Reconversie Personal",
"Instructor/Preparator Formare",
"Tehnician Calificare Gestiune Competente",
"Tehnician Resurse Umane",
"Agent Imobiliar (Broker Imobiliar)",
"Agent Reclama Publicitara",
"Agent Literar",
"Impresar Muzical",
"Impresar Teatru",
"Manager Sportiv",
"Asistent Relatii Publice Si Comunicare (Studii Medii)",
"Agent Servicii Client",
"Impresar Artistic",
"Organizator Spectacole",
"Operator De Interviu",
"Referent Comert Exterior",
"Operator Vanzari Prin Telefon",
"Secretar Procuratura",
"Secretar Administrativ",
"Secretar Asistent Director",
"Asistent Manager",
"Asistent De Cabinet",
"Registrator Medical",
"Controlor Vamal, Controlor Pentru Datoria Vamala (Studii Medii)",
"Revizor Vamal",
"Referent Tir Si Tranzite (Studii Medii)",
"Referent Vamal (Studii Medii)",
"Expert/Inspector Vamal",
"Controlor Vamal, Controlor Pentru Datoria Vamala, Agent Vamal (Studii Superioare)",
"Inspector De Trafic A.R.R. (Studii Medii)",
"Inspector Taxe Si Impozite",
"Operator Rol",
"Perceptor",
"Inspector Pensii, Asigurari Sociale Si Asistenta Sociala",
"Referent Pensii, Asigurari Sociale Si Asistenta Sociala",
"Inspector Pentru Acordarea De Permise, Licente Sauautorizatii",
"Inspector De Politie",
"Detectiv",
"Anchetator Politie",
"Detectiv Particular",
"Inspector Metrolog",
"Inspector Preturi",
"Inspector Salarii",
"Comisar",
"Agent Procedural",
"Conducator Carte Funciara",
"Grefier",
"Secretar Notariat",
"Tehnician Criminalist",
"Functionar In Activitati Comerciale, Administrative Si Preturi",
"Executor Judecatoresc (Tribunal, Judecatorie)",
"Arhivar Notariat",
"Grefier Dactilograf (Curtea De Apel, Tribunal, Judecatorie)",
"Grefier Statistician",
"Grefier Documentarist",
"Secretar Dactilograf Laborator Expertize Criminalistice",
"Asistent Registrator",
"Grefier Arhivar",
"Grefier Registrator",
"Ofiter Antifrauda Financiar-Bancara (Studii Medii)",
"Asistent Social Nivel Mediu",
"Pedagog Social",
"Lucrator Social Pentru Persoane Cu Probleme De Dependenta",
"Facilitator De Dezvoltare Comunitara",
"Lucrator De Tineret",
"Calugar",
"Calugarita",
"Predicator",
"Fotbalist Profesionist",
"Sportiv Profesionist In Alte Discipline Sportive",
"Jucator De Rugbi",
"Antrenor",
"Instructor Sportiv",
"Secretar Federatie",
"Antrenor De Fotbal Profesionist",
"Instructor Arte Martiale",
"Instructor (Monitor) Schi/Calarie/Golf/Tenis/ Inot/Sporturi Extreme",
"Antrenor Coordonator",
"Arbitru Judecator Sportiv",
"Preparator Sportiv",
"Impresar Sportiv",
"Oficial Sportiv Acreditat",
"Instructor In Poligonul De Tir",
"Supraveghetor In Poligonul De Tir",
"Monitor De Schi, Snow-Board Si Sporturi De Alunecare Pe Zapada",
"Instructor Educatie Acvatica",
"Animator Sportiv",
"Instructor De Fitness",
"Antrenor De Fitness",
"Instructor De Aerobic-Fitness",
"Instructor Educatie Fizica",
"Fotograf",
"Laborant Foto",
"Retusor Foto",
"Operator Prelucrare Pelicula",
"Fotograf Si Tehnician La Echipamente De Inregistrare Imagine Si Sunet",
"Butafor",
"Decorator Interioare",
"Desenator Artistic (Studii Medii)",
"Decorator Vitrine",
"Desenator Artistic (Studii Superioare)",
"Sef De Sala Restaurant",
"Barman-Sef",
"Bucatar-Sef",
"Cofetar-Sef",
"Inspector Calitate Productie Culinara",
"Manuitor, Montator Decor",
"Tehnician Machetist",
"Tehnician Reclame (Decorator)",
"Maestru De Lumini",
"Maestru De Sunet",
"Caricaturist (Studii Medii)",
"Tehnoredactor",
"Secretar De Redactie (Studii Medii)",
"Organizator De Productie",
"Asistent Regizor Artistic",
"Reporter (Studii Medii)",
"Machior Spectacole",
"Peruchier",
"Secretar De Emisie (Studii Medii)",
"Ghid De Animatie",
"Documentarist (Studii Medii)",
"Traducator (Studii Medii)",
"Videojurnalist (Studii Medii)",
"Copywriter Publicitate (Studii Medii)",
"Corector (Studii Medii)",
"Electrician Iluminare Scena",
"Secretar Artistic",
"Operator Calculator Electronic Si Retele",
"Sef Tura Exploatare In Centre Sau Oficii De Calcul",
"Tehnician Echipamente De Calcul Si Retele",
"Operator In Domeniul Proiectarii Asistate Pe Calculator",
"Administrator Sistem Documentar",
"Operator Prompter",
"Operator Suport Tehnic Pentru Servicii De Comunicatii Electronice",
"Programator Ajutor",
"Analist Ajutor",
"Acustician Cinematografic",
"Controlor Si Reconditioner Filme",
"Electrician Iluminare Filmare",
"Etaloner",
"Maistru Aparate Video Si Sunet",
"Masinist Mecanic Traweling",
"Mecanic Camera Filmare",
"Montor Imagine",
"Montor Negative Si De Pregatire A Peliculei",
"Montor Pozitive",
"Operator Camera Diafilm, Diapozitive",
"Operator Emisie-Receptie",
"Operator Productie Rtv",
"Preparator Filmare",
"Proiectionist",
"Senzitometrist",
"Stantator De Filme",
"Operator Truka",
"Editor Imagine",
"Tehnician Iluminat Tehnologic",
"Ilustrator Muzical",
"Controlor Emisii Rtv",
"Montor Emisie",
"Operator Imagine",
"Operator Radio-Radioficare",
"Operator Sunet",
"Tehnician Radioelectronist",
"Tehnician Catv",
"Operator Dispecer Sisteme De Monitorizare Si Aparatura De Control",
"Tehnician Pentru Sisteme De Detectie, Supraveghere Video, Control Acces",
"Cameraman",
"Tehnician De Echipamente Tv",
"Radioelectronist Statii De Emisie Radio-Tv",
"Tehnician La Echipamente De Inregistrare Imagine Si Sunet",
"Designer Video",
"Sef Formatie Sisteme Radiante (Antene)",
"Tehnician Constructii Telefonice",
"Tehnician Radiolocatii",
"Tehnician Turn Parasutism",
"Inspector Exploatare Postala",
"Tehnician Statii De Emisie Radio-Tv",
"Tehnician Statii Radiorelee Si Satelit",
"Maistru Materiale Emisie Rtv Si Telecomunicatii",
"Pilonist Antenist",
"Tehnician Retele De Telecomunicatii",
"Operator Retele De Telecomunicatii",
"Radioelectronist Statii Radiorelee Si Satelit",
"Maistru Transporturi, Posta Si Telecomunicatii",
"Tehnician Transporturi, Posta Si Telecomunicatii",
"Tehnician Proiectant Transporturi Si Comunicatii",
"Functionar Administrativ",
"Inspector Documente Secrete",
"Secretara",
"Secretara Dactilografa",
"Secretara Prelucrare Texte",
"Dactilografa",
"Stenodactilografa",
"Referent Transmitere",
"Telefaxist",
"Teletipist",
"Telexist",
"Telebanker",
"Operator Introducere, Validare Si Prelucrare Date",
"Operator Tehnica Poligraf",
"Operator Procesare Text Si Imagine",
"Registrator De Arhiva Electronica De Garantii Reale Mobiliare",
"Operator Masina Contabilizat",
"Operator Masina De Calculat",
"Casier Tezaur",
"Casier Valuta",
"Manuitor Valori (Presa, Posta)",
"Numarator Bani",
"Verificator Bani",
"Verificator Valori",
"Casier Trezorier",
"Sef Casierie Centrala",
"Sef Supraveghere Case",
"Operator Ghiseu Banca",
"Operator Ghiseu Birouri De Schimb",
"Administrator Cont",
"Referent Operatii Intre Sedii",
"Referent Casier",
"Crupier",
"Schimbator Fise ",
"Supraveghetor Jocuri (Cazino)",
"Sef De Masa (Cazino)",
"Cap De Masa (Cazino)",
"Amanetar",
"Agent Fiscal",
"Colector (Recuperator) Creante/Debite",
"Agent De Voiaj",
"Agent De Turism",
"Agent De Turism Tour-Operator",
"Agent De Transport Turistic Intern",
"Agent De Transport International",
"Agent De Asistenta Turistica",
"Agent Turism De Afaceri",
"Agent Transporturi Externe",
"Agent Transporturi Interne",
"Functionar Agentie Voiaj",
"Oficiant Telefoane",
"Oficiant Telegraf",
"Radiotelegrafist",
"Telefonist",
"Telefonist Instructor",
"Telegrafist (Teleimprimatorist)",
"Receptioner De Hotel",
"Lucrator Concierge",
"Sef De Receptie Hotel",
"Tehnician Compartiment Securitate Hotel",
"Responsabil Cazare",
"Impiegat Informatii",
"Functionar Informatii Clienti",
"Receptionist",
"Calculator Pret Cost",
"Functionar Economic",
"Operator Devize",
"Sef Sectie Inventar",
"Agent Bursa",
"Contabil Financiar Bancar",
"Administrator Financiar (Patrimoniu) ",
"Pontator",
"Gestionar Depozit",
"Magaziner",
"Operator Siloz (Silozar)",
"Primitor-Distribuitor Materiale Si Scule",
"Recuziter",
"Sortator Produse",
"Trezorier (Studii Medii)",
"Gestionar Custode Sala",
"Pivnicer",
"Primitor-Distribuitor Benzina Si Motorina",
"Lucrator Gestionar",
"Sef Raion/Adjunct Marfuri Alimentare/Nealimentare",
"Dispecer",
"Facturist",
"Lansator Produse",
"Programator Productie",
"Dispecer Operatiuni Salubrizare",
"Agent Transporturi",
"Functionar Informatii",
"Controlor Trafic",
"Impiegat Auto",
"Impiegat Informatii Aviatie",
"Impiegat Registru Miscare",
"Insotitor Vagoane",
"Inspector Rnr (Registru Naval Roman)",
"Inspector Exploatare Trafic",
"Instructor Depou",
"Instructor Revizie Vagoane",
"Instructor Statie",
"Operator Circulatie Miscare",
"Operator Comercial",
"Operator Dana",
"Operator Programare",
"Picher",
"Reditionar",
"Revizor Tehnic Vagoane",
"Scriitor Vagoane",
"Sef Agentie Colectare Si Expeditie Marfuri",
"Sef Autogara",
"Avizier Cai Ferate",
"Sef Halta",
"Sef Statie Taxare",
"Sef Tura La Comanda Personalului De Tren",
"Sef Tura Pregatirea Personalului La Vagon-Restaurant Si De Dormit",
"Sef Tura Revizie Vagoane",
"Veghetor Incarcare-Descarcare",
"Verificator Documente Expeditie",
"Expeditor International",
"Operator Receptie",
"Agent Curier",
"Agent Statie Metrou",
"Impiegat De Miscare Metrou",
"Operator Miscare Metrou",
"Operator Portuar Stivator",
"Operator Portuar Expeditor",
"Operator Portuar Dispecer/Planificator",
"Grafician Mers De Tren",
"Referent De Specialitate Tir Si Tranzite (Studii Superioare)",
"Agent Feroviar Marfa",
"Bibliotecar (Studii Medii)",
"Discotecar",
"Filmotecar",
"Fonotecar",
"Fototecar",
"Manuitor Carte",
"Videotecar",
"Agent Postal",
"Cartator Postal",
"Cartator Presa",
"Cartator Telegrame",
"Diriginte Posta",
"Factor Postal",
"Inspector Telegrame",
"Oficiant Posta Telegrame",
"Oficiant Presa",
"Prelucrator Presa Scrisa",
"Responsabil Tura Expeditie",
"Sef Vagon Postal",
"Codificator",
"Corector Editura Presa",
"Corector-Revizor Poligrafie",
"Arhivar",
"Functionar Documentare",
"Restaurator Arhiva (Studii Medii)",
"Conservator Arhiva (Studii Medii)",
"Restaurator Opere De Arta Si Monumente Istorice (Studii Medii)",
"Conservator Opere De Arta Si Monumente Istorice (Studii Medii)",
"Restaurator Bunuri Culturale (Studii Medii)",
"Conservator Bunuri Culturale",
"Referent Evidenta Persoanelor",
"Functionar Ghiseu Servicii Publice",
"Expert Local Pe Problemele Romilor",
"Insotitor De Bord",
"Stewardesa",
"Conductor Tren",
"Revizor Bilete",
"Controlor Bilete",
"Conductor Vagon De Dormit Si Cuseta",
"Controlor Acces Metrou",
"Sef Tura Comanda Vagon De Dormit ",
"Ghid De Turism",
"Ghid De Turism Intern (Local)",
"Ghid National De Turism (Tour-Operator)",
"Ghid De Turism Montan, Drumetie Montana",
"Ghid Galerii De Arta/Interpret",
"Ghid Habitat Natural Flora, Fauna",
"Ghid Turism Ornitologic",
"Ghid Turism Speologic",
"Ghid Turism Ecvestru",
"Ghid De Turism Sportiv ",
"Ghid Montan",
"Ghid Obiectiv Cultural",
"Insotitor Grup Turistic",
"Organizator Activitate Turism (Studii Medii)",
"Ranger",
"Custode Pentru Arii Protejate",
"Animator De Hotel",
"Bucatar",
"Pizzar",
"Bucatar Specialist/Vegetarian/Dietetician",
"Maestru In Arta Culinara",
"Ajutor Ospatar",
"Ospatar (Chelner)",
"Somelier",
"Barman",
"Barman Preparator",
"Coafor",
"Frizer",
"Cosmetician",
"Manichiurist",
"Pedichiurist",
"Maseur De Intretinere Si Relaxare",
"Machior",
"Tatuator",
"Montator Bijuterii Pe Corp",
"Stilist Protezist De Unghii",
"Cabanier",
"Guvernanta De Hotel/Etaj",
"Lenjereasa De Hotel",
"Administrator",
"Administrator Piete Si Targuri",
"Intendent",
"Sef Cantina",
"Dispecer Pentru Servire In Camera (Hotel)",
"Gospodar",
"Ingrijitor Vila",
"Administrator Pensiune Turistica",
"Lucrator In Gospodaria Agroturistica",
"Ingrijitor Cladiri",
"Agent Curatenie Cladiri Si Mijloace De Transport",
"Administrator Imobile",
"Astrolog",
"Camerista Hotel",
"Insotitor",
"Valet",
"Antreprenor Servicii Funerare",
"Decorator Servicii Funerare",
"Imbalsamator",
"Ingrijitor Farmacii, Cabinete Veterinare",
"Coafor Canin",
"Instructor Scolar Auto",
"Instructor Auto",
"Agent Dezinfectie, Deratizare, Dezinsectie",
"Gazda Club",
"Organizator Prestari Servicii",
"Agent Ecolog",
"Raportor Ecolog",
"Intretinator Textile-Piele",
"Animator Socioeducativ",
"Animator Centre De Vacanta",
"Lucrator Pensiune Turistica",
"Operator Partie De Schi",
"Vanzator Ambulant De Produse Alimentare",
"Vanzator",
"Anticar",
"Librar",
"Lucrator Controlor Final",
"Lucrator Comercial",
"Lucrator Produse Naturiste",
"Vanzator De Produse Naturiste",
"Taxator",
"Vanzator De Bilete",
"Casier",
"Casier Metrou",
"Manechin",
"Model ",
"Prezentator Moda",
"Vanzator La Domiciliul Clientului Pe Baza De Comanda",
"Bufetier",
"Ingrijitor De Copii",
"Guvernanta",
"Babysitter",
"Asistent Maternal",
"Parinte Social",
"Educator Specializat",
"Baies",
"Gipsar",
"Infirmier/Infirmiera",
"Ingrijitoare La Unitati De Ocrotire Sociala Si Sanitara",
"Lacar",
"Namolar",
"Ingrijitor Batrani La Domiciliu",
"Ingrijitor Bolnavi La Domiciliu",
"Asistent Personal Al Persoanei Cu Handicap Grav",
"Ingrijitor La Domiciliu",
"Mediator Sanitar",
"Mediator Social",
"Lucrator Prin Arte Combinate",
"Asistent Personal Profesionist",
"Asistent Personal De Ingrijire",
"Operator Prestatii Sociale",
"Supraveghetor De Noapte Servicii Sociale",
"Lucrator Social",
"Sef Compartiment Pentru Prevenire",
"Sef Formatie Interventie, Salvare Si Prim Ajutor",
"Specialisti Pentru Prevenire",
"Servant Pompier",
"Sef Grupa Interventie",
"Sef Echipa Specializata",
"Agent Politie Comunitara",
"Gardian De Inchisoare",
"Educator In Penitenciare",
"Agent De Securitate",
"Agent Control Acces",
"Agent De Securitate Incinta (Magazin, Hotel, Intreprindere Etc.)",
"Agent Garda De Corp",
"Sef Formatie Paza Si Ordine",
"Agent De Interventie Paza Si Ordine",
"Agent Transport Valori",
"Dispecer Centru De Alarma",
"Sef Tura Servicii Securitate",
"Inspector De Securitate",
"Evaluator De Risc De Efractie",
"Consultant De Securitate",
"Agent Cu Atributii Pe Linia Ordinii Si Sigurantei Publice In Incinta Arenelor Sportive",
"Sef Serviciu Voluntar/Privat Pentru Situatii De Urgenta",
"Cadru Tehnic Cu Atributii In Domeniul Prevenirii Si Stingerii Incendiilor",
"Salvator La Strand",
"Salvator Montan",
"Salvamar",
"Gardian Feroviar",
"Agent Conducator Caini De Serviciu",
"Agricultor",
"Gradinar",
"Legumicultor",
"Lucrator Calificat In Culturi De Camp Si Legumicultura",
"Agricultor Pentru Culturi De Camp Ecologice",
"Arboricultor",
"Ciupercar",
"Florar-Decorator",
"Floricultor",
"Peisagist-Floricultor",
"Lucrator Calificat In Floricultura Si Arboricultura",
"Pomicultor",
"Viticultor",
"Fermier In Horticultura",
"Cioban (Oier)",
"Crescator-Ingrijitor De Animale Domestice Pentru Productia De Lapte Si Carne",
"Tocator De Furaje",
"Lucrator Calificat In Cresterea Animalelor",
"Crescator Bovine",
"Crescator Porcine",
"Mamos Porcine",
"Baci Montan",
"Cioban Montan",
"Crescator De Oi Montan",
"Oier Montan",
"Crescator De Pasari",
"Fazanier",
"Apicultor",
"Sericicultor",
"Crescator De Animale Mici",
"Crescator-Ingrijitor Animale Salbatice Captive",
"Crescator-Ingrijitor De Animale De Laborator",
"Crescator De Melci",
"Antrenor Cabaline",
"Crescator-Ingrijitor De Cabaline",
"Herghelegiu",
"Fermier In Productia Vegetala",
"Fermier In Productia Animala",
"Agricultor In Culturi Vegetale Si Crescator De Animale",
"Cioplitor In Lemn",
"Carbonitor",
"Fasonator Mecanic (Cherestea)",
"Muncitor Plantatii Si Amenajare Zona Verde",
"Pepinierist",
"Presator Stuf",
"Protectionist Silvic",
"Recoltator Stuf",
"Rezinator",
"Sef Coloana Exploatare Stuf",
"Stivuitor Si Receptioner Silvic",
"Taietor Silvic",
"Preparator Mangal",
"Mangalizator",
"Lucrator In Culturi Acvatice",
"Piscicultor",
"Pescar In Ape Interioare Si De Coasta",
"Pescar In Mari Si Oceane",
"Paznic De Vanatoare",
"Muncitor Constructor Barne, Chirpici, Piatra",
"Confectioner Placi Din Diverse Materiale",
"Confectioner Plase Si Panze Rabit Din Stuf",
"Sobar",
"Zidar Cosuri Fabrica",
"Zidar Pietrar",
"Zidar Samotor",
"Zidar Rosar-Tencuitor",
"Zidar Restaurator",
"Cioplitor In Piatra Si Marmura",
"Cioplitor-Montator Piatra, Marmura",
"Gaterist La Taiat Blocuri De Piatra, Marmura",
"Taietor, Slefuitor, Lustruitor Piatra, Marmura",
"Restaurator Piatra",
"Betonist",
"Fierar Betonist",
"Montator Elemente Prefabricate Din Beton Armat",
"Constructor Structuri Monolite",
"Operator Injectorist",
"Injectorist In Constructii",
"Dulgher (Exclusiv Restaurator)",
"Dulgher Restaurator",
"Muncitor Hidrometru",
"Pavator",
"Sapator Fantani",
"Asfaltator",
"Cantonier",
"Chesonier",
"Constructor Cai Ferate",
"Constructor Linii Tramvai",
"Drenor Canalist",
"Fascinar",
"Finisor Terasamente",
"Muncitor Hidrogeolog",
"Muncitor Constructor Senal Navigabil, Lucrari Hidrotehnice Si Portuare",
"Sef Echipa Intretinere Poduri Metalice, Viaducte Si Tuneluri",
"Agent Hidrotehnic",
"Revizor Cale Sau Puncte Periculoase",
"Meserias Intretinere Cale",
"Sef Echipa Intretinere Cale",
"Meserias Intretinere Poduri Metalice, Viaducte Si Tuneluri",
"Alpinist Utilitar",
"Laborant Determinari Fizico-Mecanice Pentru Lucrari De Drumuri Si Poduri",
"Sef Echipa Intretinere Cale Metrou",
"Sef Echipa Lucrari Arta Metrou",
"Lucrator Pentru Drumuri Si Cai Ferate",
"Muncitor In Taieri Structuri Cu Scule Diamantate",
"Acoperitor-Invelitor Tigla, Azbociment, Tabla",
"Constructor De Acoperisuri",
"Faiantar",
"Montator Placaje Interioare Si Exterioare",
"Mozaicar (Exclusiv Restaurator)",
"Parchetar",
"Mozaicar Restaurator",
"Linolist",
"Montator Placaje Uscate",
"Ipsosar (Exclusiv Restaurator)",
"Turnator Ornamentalist",
"Ipsosar Restaurator Ornamente Din Ipsos",
"Izolator Fonic",
"Izolator Frigorific",
"Izolator Hidrofug",
"Izolator Lucrari Speciale (Antiacide Si De Protectie)",
"Izolator Termic",
"Montator Pereti Si Plafoane Din Ghips-Carton",
"Asamblator-Montator Profile Aluminiu Si Geam Termopan",
"Confectioner Geam Termoizolator",
"Confectioner Tamplarie Din Aluminiu Si Mase Plastice",
"Montator Tamplarie Din Aluminiu Si Mase Plastice",
"Confectioner-Montator Tamplarie Din Aluminiu Si Mase Plastice Cu Geam Termoizolator",
"Montator Materiale Geosintetice",
"Sudor Geomembrana",
"Montator Geogrile",
"Montator Materiale Geotextile Si Geocompozite",
"Geamgiu",
"Detector Pierderi Apa Si Gaze",
"Instalator Apa, Canal",
"Instalator Frigotehnist",
"Instalator Incalzire Centrala Si Gaze",
"Instalator Retele De Distributie/Transport Fluide",
"Instalator Ventilare Si Conditionare Apa",
"Verificator Canale Subterane",
"Instalator Centrale Termice",
"Instalator Instalatii Tehnico-Sanitare Si De Gaze",
"Instalator Autorizat Proiectare Executie Si/Sau Exploatare Obiectiv/Sisteme De Transport",
"Instalator Autorizat Proiectare Executie Si/Sau Exploatare Obiectiv/Sisteme De Distributie",
"Instalator Retele Termice Si Sanitare",
"Operator Instalatii Apa Si Canalizare",
"Frigoriferist (Frigotehnist)",
"Tapetar",
"Zugrav",
"Stucaturist",
"Ignifugator",
"Operator Termoprotectie",
"Lacuitor Lemn",
"Vopsitor Industrial",
"Finisor-Lacuitor Lemn",
"Vopsitor",
"Vopsitor Auto",
"Cosar",
"Curatitor De Fatade",
"Modelier Lemn",
"Modelier Metal",
"Modelator-Miezuitor",
"Modelier Naval",
"Operator La Masini De Brichetat Span",
"Pregatitor Metale Vechi Pentru Retopire",
"Recuperator Metale Vechi",
"Topitor Aliaje Tipografie",
"Topitor Fonta Si Neferoase",
"Topitor, Turnator Metale Si Aliaje Neferoase",
"Turnator Fonta Pe Banda",
"Turnator Formator",
"Turnator Pregatitor Otelarie",
"Turnator Modelier",
"Turnator Metale Si Neferoase",
"Modelor Prototipuri Auto",
"Brazor",
"Sudor Manual Cu Flacara De Gaze",
"Sudor Manual Cu Arc Electric",
"Sudor Cu Arc Electric Acoperit Sub Strat De Flux",
"Operator Taiere",
"Sudor Cu Arc Electric Cu Electrod Fuzibil In Mediu De Gaz Protector",
"Sudor Cu Arc Electric Cu Electrod Nefuzibil In Mediu De Gaz Protector",
"Sudor",
"Cazangiu Recipiente",
"Probator Hidraulic Cazane, Tevi, Recipiente",
"Tinichigiu Carosier",
"Tinichigiu Industrial",
"Tinichigiu De Santier",
"Tinichigiu Structurist De Aviatie",
"Cazangiu Tevar",
"Cazangiu Formator",
"Tinichigiu Restaurator",
"Tinichigiu Sisteme De Acoperisuri Si Invelitori",
"Tinichigiu Sisteme De Ventilatie",
"Tinichigiu In Constructii",
"Finisor Cocleti",
"Finisor Ace Si Accesorii",
"Confectioner Capace De Carde",
"Confectioner Cocleti",
"Confectioner Plase Din Sarma",
"Formator Tevi Prin Sudare",
"Lacatus Constructii Metalice Si Navale",
"Lacatus De Mina",
"Lacatus Revizie Vagoane",
"Lacatus Mecanic",
"Lacatus-Montator",
"Presator Metale La Rece",
"Reconditioner Scule Si Utilaje Petroliere",
"Sanfrenator",
"Pregatitor, Montator, Reparator Ite, Cocleti, Lamele, Spete",
"Repasator Garnituri Carde",
"Tubulator Naval",
"Masinist La Litografiat Si Vernisat Tabla",
"Masinist La Confectionarea Ambalajelor Metalice",
"Masinist La Confectionarea Tuburilor De Aluminiu",
"Constructor-Montator De Structuri Metalice",
"Masinist La Fabricarea Acelor Si Accesoriilor",
"Nituitor",
"Lacatus Mecanic De Intretinere Si Reparatii Universale",
"Masinist La Confectionarea Spetelor Si Spiralelor",
"Montator-Ajustor Spete",
"Lipitor Si Protejator Spete",
"Taietor Garnituri De Carde",
"Masinist Mecanic La Confectionare Garnituri Carde",
"Lacatus-Depanator Utilaje Calcul",
"Operator La Montarea Si Conservarea Produselor Dupa Probe",
"Schelar",
"Confectioner-Montator Structuri Metalice Pentru Constructii",
"Montator Fatade Si Pereti Cortina",
"Mecanic-Montator Instalatii Cu Cablu In Silvicultura Si Exploatari Forestiere",
"Forjor-Matriter",
"Prelucrator Mecanic Metale Pretioase",
"Presator Piese Din Pulberi Metalice",
"Stantator",
"Presator, Ambutisor La Cald",
"Forjor Manual",
"Forjor-Arcurar",
"Forjor Mecanic",
"Confectioner Ferodouri",
"Preparator Pulberi",
"Cuptorar-Termist Pentru Ferite",
"Fierar/Potcovar",
"Formator-Presator Ferite",
"Finisor Ferite",
"Controlor De Calitate La Forjare",
"Controlor De Calitate La Turnare",
"Debitator-Ebosator",
"Dusisator-Polizator",
"Lacatus Sdv",
"Sculer-Matriter",
"Lacatus Amc",
"Lacatus Mecanica Fina",
"Prelucrator Prin Electroeroziune",
"Lacatus La Prelucrarea Si Indreptarea Tevilor Ghintuite",
"Reglor La Masini Pentru Fabricarea Cablurilor, Conductorilor Electrici Si Materialelor Electrice",
"Reglor La Masini Pentru Confectionarea Elementelor Galvanice",
"Reglor Benzi Montaj",
"Masinist La Linii Automate Aschietoare",
"Reglor Masini De Bobinat Si Platinat",
"Reglor La Masini De Prelucrare Mase Plastice",
"Reglor Masini-Unelte",
"Reglor-Montator",
"Reglor La Masini Pentru Fabricarea Lampilor Electrice",
"Reglor Si Reglor-Conductor La Masini-Unelte",
"Conductor De Instalatii",
"Operator La Masini-Unelte Semiautomate Si Automate",
"Dozator La Fabricarea Electrozilor De Sudura",
"Masinist La Lame De Masini Pentru Automate Aschietoare",
"Degresator-Imersioner",
"Uscator Electrozi De Sudura",
"Masinist La Masini Speciale Fara Aschiere",
"Preparator Amestec De Invelis",
"Pregatitor Sarma",
"Finisator Electrozi De Sudura",
"Masinist La Masini Speciale De Aschiere",
"Masinist La Confectionarea Tuburilor De Protectie Si A Dozelor De Ramificatie",
"Operator La Masini-Unelte Cu Comanda Numerica",
"Operator La Masini De Electroeroziune Automate",
"Ascutitor Laminate La Cald",
"Ascutitor Laminate La Rece",
"Ascutitor-Calitor Garnituri De Carde",
"Ascutitor Scule, Instrumente Medicale Si Obiecte De Uz Casnic",
"Debitator-Slefuitor Perii De Masini Electrice",
"Polizator",
"Slefuitor Metale",
"Frezor Universal",
"Gauritor-Filetator",
"Honuitor, Rodator-Lepuitor",
"Rabotor-Mortezor Universal",
"Rectificator Universal",
"Strungar Universal",
"Brosator",
"Frezor La Masini Roti Dintate",
"Gravor Mecanic",
"Rabotor, Mortezor Roti Dintate",
"Rectificator Dantura Caneluri",
"Strungar La Strung Paralel Si De Detalonat",
"Strungar La Strung Revolver",
"Strungar La Strung Carusel",
"Strungar La Masini Orizontale",
"Strungar La Masini De Alezat",
"Strungar La Masini De Prelucrat In Coordonate",
"Strungar La Masini De Strunjit Roti Cai Ferate",
"Rectificator Piese Producatoare De Ochiuri",
"Slefuitor Metale Cu Plumb Industria De Armament",
"Debitator Semifabricate",
"Curatitor-Sablator",
"Electrician Auto",
"Electromecanic Auto",
"Mecanic Auto",
"Operator Standuri Incercari",
"Operator Pregatire Incercari Vehicule",
"Mecanic Aviatie",
"Lacatus-Montator Agregate Energetice Si De Transport",
"Mecanic Utilaj",
"Mecanic Agricol",
"Motorist",
"Ungator-Gresor",
"Operator In Verificarea, Reincarcarea Si Repararea Stingatoarelor De Incendiu",
"Mecanic Intretinere Si Reparatii Masini De Cusut Industriale",
"Operator In Verificarea, Intretinerea Si Repararea Autospecialelor Destinate Apararii Impotriva Incendiilor",
"Mecanic Masini Agricole",
"Mecanic Trolist",
"Amc-Ist",
"Armurier",
"Blocator, Chituitor, Deblocator",
"Ceasornicar",
"Centrator, Debordator Piese Optice",
"Centrator, Finisor Aparate Optice",
"Degresator, Curatator Piese Si Aparate Optice",
"Lipitor Lentile Si Prisme",
"Montator Aparatura Optica",
"Optician",
"Optician Armament",
"Confectioner Seringi",
"Presator Piese Optice",
"Reparator Aparate Foto",
"Reparator Stilouri, Brichete",
"Reparator Umbrele",
"Gravor Piese Optice",
"Tratamentist Piese Optice",
"Metrolog Si Depanator Mecanica Fina, Tehnica Digitala Si Analogica (Mftda)",
"Metrolog Verificator",
"Acordor Acordeoane, Armonici",
"Acordor Piane, Pianine, Orga, Tambal",
"Caserator Si Slefuitor De Celuloid Pentru Carcase Acordeoane",
"Ceruitor De Piastrine Pentru Acordeoane",
"Constructor Claviatura",
"Constructor-Reparator De Acordeoane Si Armonici",
"Constructor-Reparator De Alte Instrumente Muzicale (Suflat, Percutie)",
"Filator Corzi Pentru Piane",
"Montator Corp Sonor La Piane",
"Montator-Reglor Piane",
"Montator-Ajustor De Acordeoane",
"Lutier",
"Constructor Restaurator De Orgi",
"Argintar",
"Bijutier Metale Pretioase",
"Cizelator",
"Cizelator Clisee Galvanice",
"Confectioner Stampile De Cauciuc, Metal, Facsimile",
"Gravor Manual",
"Tintuitor",
"Bijutier Metale Comune",
"Giuvaergiu",
"Slefuitor Diamante Naturale",
"Aplicator De Detalii La Produse Din Ceramica",
"Debavurator-Retusor La Produse Din Ceramica Fina",
"Turnator Produse Ceramice",
"Fasonator Produse Ceramice",
"Glazurator Produse Din Ceramica Fina",
"Modelator Ceramica",
"Olar Ceramica (Artizanat)",
"Preparator Mase Ceramice",
"Presator Produse Ceramice",
"Rasnitor Smalt",
"Slefuitor Produse Din Ceramica Fina",
"Brigadier La Fabricarea Sticlei",
"Modelator Tuburi Spectrale",
"Tragator, Slefuitor, Gradator Nivele",
"Prelucrator Topitura Sticla La Presa",
"Prelucrator Topitura Sticla La Teava",
"Formator Tuburi Spectrale",
"Pictor Decor",
"Gradator Vase Si Aparate De Laborator",
"Gravor Produse De Sticla",
"Inscriptioner Pe Produse De Sticla Si Ceramica",
"Pictor Pe Sticla Si Ceramica",
"Oglindar",
"Confectioner Piese, Linguri, Spite, Albii, Donite, Cozi De Unelte, Sindrila, Ciubere",
"Confectioner Jucarii",
"Confectioner Obiecte Artizanale Din Lemn",
"Confectioner Plute",
"Confectioner Garnituri Pentru Etansare",
"Pirogravor",
"Ramar Poleitor",
"Sculptor In Lemn",
"Confectioner Creta Scolara",
"Traforator Manual Lemn",
"Dogar Manual",
"Rotar Caretas",
"Lumanarar",
"Confectioner Cutite, Brice, Bratari, Andrele, Agrafe, Inele",
"Confectioner Nasturi, Piepteni",
"Confectioner Obiecte Casnice Din Deseuri De Aluminiu Si Alte Metale",
"Confectioner Obiecte Din Ipsos",
"Confectioner Obiecte Din Os, Scoica, Mica Etc.",
"Confectioner Corzi Din Intestine",
"Incadrator Tablouri",
"Confectioner Materiale Didactice Pentru Stiintele Naturii",
"Confectioner Bidinele, Pensule, Perii",
"Confectioner Maturi",
"Impletitor De Nuiele",
"Impletitor Obiecte Din Foi De Porumb",
"Impletitor Papura",
"Legator De Par",
"Sortator, Spalator Par",
"Prelucrator De Par La Masina",
"Prelucrator Manual De Par",
"Impletitor Din Panglica Impletita",
"Pieptanator De Par La Masina",
"Fierbator-Uscator De Par",
"Confectioner Plase Pescaresti",
"Confectioner Articole Hartie",
"Confectioner Bibelouri Din Jenille",
"Decorator In Piele",
"Velator-Matisor",
"Confectioner Manual De Produse Din Sfori Sau Franghii",
"Ghemuitor",
"Polierator Franghii",
"Cablator Franghii",
"Saluzitor Franghii",
"Confectioner Unelte Pescuit Din Plase",
"Confectioner Imbracare Volane In Piele",
"Prelucrator De Fulgi Si Pene",
"Filator",
"Ajutor Maistru Filator",
"Tesator",
"Tricoter Manual",
"Ajutor Maistru Tesator, Tricoter",
"Crosetor",
"Impletitor Textile",
"Confectioner Presuri",
"Tesator Restaurator Manual Covoare",
"Pregatitor Si Confectioner Cataloage Mostre",
"Confectioner Tricotaje Dupa Comanda",
"Finisor Textile (Vopsitor, Imprimeur)",
"Repasator",
"Aburitor Textile",
"Reparator Covoare",
"Tesator Manual",
"Cusator Manusi Piele",
"Croitor Manusi Piele",
"Finisor Manusi Piele",
"Croitor-Stantator Articole Marochinarie",
"Cusator Articole Marochinarie",
"Pregatitor Articole Marochinarie",
"Asamblator-Montator Articole Marochinarie",
"Confectioner Manual In Metaloplastie",
"Confectioner Proteze Dentare",
"Confectioner Proteze Ortopedice",
"Confectioner Jaluzele",
"Impletitor Fibre Plastice",
"Confectioner Flori Artificiale",
"Culegator La Masina De Cules Si Turnat Randuri (Linotipist)",
"Culegator La Masina De Perforat Programe Pentru Masinile De Turnat Text (Monotastor)",
"Culegator La Masina De Turnat Randuri Pentru Titluri (Ludlov)",
"Culegator Manual (Zetar)",
"Frezor-Montator Clisee",
"Stereotipar",
"Galvanotipist",
"Corodor Prin Procedee Chimice (Tiefdruc Si Zinco)",
"Zincograf",
"Copist Formare Tipar Plan",
"Desenator Cromolitograf",
"Slefuitor-Granulator",
"Gravor Placi Metalice",
"Gravor Placi Litografice",
"Fotogravor",
"Turnator Valuri",
"Manipulant Cutter-Plotter",
"Tipograf Turnator La Masinile De Turnat Text",
"Imprimeur Textil",
"Imprimator Serigraf",
"Taietor Matrite Serigrafie",
"Serigraf",
"Operator Presa De Transfer Termic",
"Tipograf-Tiparitor",
"Dactilo-Rotaprint",
"Operator La Masina Electronica De Gravat",
"Operator La Masinile De Fotoculegere (Monofoto)",
"Operator Tiparituri Braille",
"Heliografist",
"Operator Xerox",
"Operator Masini Multiplicat",
"Tipograf Print Digital Si Offset",
"Operator La Masina De Gravat Si Decupat Cu Laser",
"Operator La Masina De Tampografiat",
"Tipograf Flexograf",
"Legator Manual (In Poligrafie Si Ateliere Speciale)",
"Colator Publicitar",
"Masinist In Legatorie Mecanica",
"Strungar Slefuitor Tipografie",
"Electrician In Constructii",
"Electrician De Intretinere In Constructii",
"Instalator Pentru Sisteme Fotovoltaice Solare",
"Instalator Pentru Sisteme Termice Solare",
"Montator Instalatii Solare",
"Electrician Constructor Montator Aparataj Si Cabluri De Joasa Tensiune",
"Electrician Constructor Montator Aparataj Si Cabluri De Medie Si Inalta Tensiune",
"Electrician Constructor Instalator Aparatura De Masura Si Control",
"Electrician Constructor Pentru Probe Si Incercari Functionale",
"Electrician In Constructii Civile Si Industriale",
"Electrician Echipamente Electrice Si Energetice",
"Bobinator Aparataj Electric",
"Electromecanic Reparator Obiecte De Uz Casnic",
"Bobinator Masini Electrice Rotative",
"Electrician Aparate Masura-Control Si Automatizare In Centrale Termoelectrice Si Nuclearoelectrice",
"Electrician Montare Si Reparatii Aparataj Electric De Protectie, Relee, Automatizare",
"Bobinator Condensatori Pentru Instalatii Electrice",
"Electrician Verificari Si Masuratori Electrice In Centrale Si Retele Electrice",
"Bobinator Transformatoare",
"Montator/Reglor/Depanator De Aparataj Electric",
"Montator, Reglor Si Depanator Pentru Aparate De Masura Electrice Si Relee",
"Montator, Reglor Si Depanator De Ascensoare",
"Electrician Nave",
"Confectioner Cablaje Auto",
"Electromecanic Masini Si Echipamente Electrice",
"Electromecanic Statie Pompare Apa-Canal",
"Electrician Exploatare Centrale Si Statii Electrice",
"Electrician Exploatare Retele Electrice",
"Electrician Montare Si Reparatii Cabluri Electrice Subterane",
"Electrician Montare Si Reparatii Linii Electrice Aeriene",
"Electrician Montare Si Reparatii Echipament Electric Din Centrale, Statii Si Posturi De Transformare",
"Electrician Protectie Relee, Automatizari Si Masuratori Electrice",
"Electrician De Intretinere Si Reparatii",
"Electrician Montator De Instalatii Automatizate",
"Electrician Montator De Instalatii Electrice La Mijloace De Transport",
"Electrician Pentru Protectia Catodica",
"Electrician Rural",
"Electrician De Mina",
"Electrician Pentru Utilizarea Energiei Electrice",
"Electrician Depanator Utilaje Calcul",
"Electronist Depanator Utilaje Calcul",
"Plantator Elemente Electronice",
"Operator In Verificarea, Intretinerea Si Repararea Instalatiilor Speciale De Prevenire A Incendiilor",
"Tehnician Pentru Sisteme Si Instalatii De Semnalizare, Alarmare Si Alertare In Caz De Incendiu",
"Tehnician Pentru Sisteme Si Instalatii De Limitare Si Stingere A Incendiilor",
"Automatist",
"Electromecanic Scb (Semnalizare, Centralizare, Blocare)",
"Electromecanic Radio-Radioficare",
"Electromecanic Retele Cabluri",
"Electromecanic Retele Linii",
"Electromecanic Telegrafie, Telefonie",
"Electronist Telecomunicatii",
"Jonctor",
"Linior",
"Montator, Reglor, Testor Aparatura De Telecomunicatii Si Instalatii De Semnalizare, Centralizare Si Blocare",
"Electromecanic Electroalimentare",
"Muncitor Radioelectronist",
"Electromecanic Automatizari Si Telecomunicatii",
"Electromecanic",
"Carmangier",
"Ciontolitor Transator Carne",
"Macelar",
"Sterilizator",
"Taietor Pasari",
"Lucrator La Prelucrarea Pestelui",
"Afumator Carne",
"Brutar",
"Cofetar",
"Patiser",
"Preparator De Semifabricate Si Preparate Culinare",
"Operator La Fabricarea Produselor Congelate De Patiserie Si Panificatie",
"Pasteurizator Produse Lactate",
"Preparator Produse Lactate",
"Smantanitor",
"Preparator Conserve, Legume Si Fructe",
"Uscator-Deshidrator Legume, Fructe",
"Preparator Castane, Dovleac, Porumb",
"Lucrator In Procesarea De Fructe De Padure Si Ciuperci De Padure",
"Degustator",
"Conditioner Tutun Pentru Fabricarea Tigaretelor",
"Vopsitor Lemn",
"Pregatitor Paste Chimice",
"Pregatitor Placi Fibrolemnoase Si Hartie Pentru Filme",
"Uscator, Aburitor Material Lemnos",
"Tamplar Universal",
"Tamplar Carosier",
"Tamplar Manual/Artizanal",
"Marangoz-Calafatuitor",
"Asamblator Lazi",
"Confectioner-Montator Produse Din Lemn",
"Curbator-Montator Butoaie Din Lemn",
"Tamplar Manual La Presare Si Incleiere",
"Corhanitor",
"Tamplar Manual Ajustor Montator",
"Tamplar Manual La Imbinarea Furnirelor",
"Marangoz Cala-Tachelagiu",
"Slefuitor, Lustruitor",
"Pregatitor Suprafete Pentru Lacuit",
"Gardinator",
"Confectioner-Montator Cercuri La Butoaie",
"Decupator Lamele Din Lemn Pentru Lazi",
"Preparator-Dozator Adezive, Rasini, Lacuri Si Emailuri In Industria Lemnului",
"Tamplar Restaurator",
"Restaurator Sarpante Si Structuri Din Lemn",
"Tamplar Binale",
"Strungar In Lemn",
"Reglor Masini De Prelucrat Lemn",
"Taietor De Precizie In Lemn",
"Confectioner Articole Speciale Din Lemn",
"Confectioner Parchete",
"Confectioner Cutii Chibrituri Din Furnir",
"Impregnator-Uscator Chibrituri",
"Confectioner Gamalii Chibrituri",
"Masinist La Umplerea Si Inchiderea Cutiilor De Chibrituri",
"Pastator Cutii De Chibrituri",
"Fasonator Calapoade",
"Montator Accesorii Pentru Calapoade",
"Finisor Calapoade",
"Circularist La Taiat Lemne De Foc",
"Curbator Lemn",
"Gradator Rechizite Si Articole Tehnice Din Lemn",
"Tamplar Mecanic La Croit Si Dimensionat",
"Tamplar Mecanic La Rindeluit",
"Tamplar Mecanic La Frezat Si Gaurit",
"Tamplar Mecanic La Strunjit",
"Tamplar Mecanic La Slefuit",
"Confectioner Mine Pentru Creioane",
"Innobilator Scandurele Pentru Creioane",
"Fasonator Creioane Si Tocuri",
"Finisor Creioane Si Tocuri",
"Preparator Paste Chimice Pentru Chibrituri",
"Confectioner Cutii Chibrituri Din Carton",
"Operator La Masini Unelte Cu Comanda Numerica In Prelucrarea Lemnului",
"Croitor",
"Lenjer, Confectioner Lenjerie Dupa Comanda",
"Confectioner Palarii",
"Ajutor Maistru Croitor",
"Plior Confectii",
"Modista",
"Ceaprazar-Sepcar",
"Curatitor-Reparator Palarii",
"Retusier Confectii",
"Blanar-Confectioner Imbracaminte Din Blana, Dupa Comanda",
"Confectioner Imbracaminte Din Piele Si Inlocuitori, Dupa Comanda",
"Cojocar",
"Confectioner, Prelucrator In Industria Textila",
"Croitor-Confectioner Imbracaminte, Dupa Comanda",
"Multiplicator Sabloane Croitorie",
"Confectioner Corsete",
"Confectioner Reparator Cravate",
"Planimetror Sabloane",
"Croitor Confectioner Costume Teatru",
"Broder Manual",
"Stopeur",
"Remaieur Ciorapi",
"Broder Manual-Mecanic",
"Broder La Gherghef",
"Tapiter",
"Saltelar",
"Plapumar",
"Mestesugar Argasitor",
"Mestesugar Cenuseritor",
"Mestesugar Finisor Mineral",
"Mestesugar Finisor Vegetal",
"Mestesugar Sortator In Industria Pielariei",
"Cizmar-Confectioner Incaltaminte, Dupa Comanda",
"Confectioner Articole Din Piele Si Inlocuitori",
"Confectioner Incaltaminte Ortopedica",
"Curelar, Confectioner Harnasamente",
"Marochiner-Confectioner Marochinarie, Dupa Comanda",
"Opincar",
"Talpuitor (Confectioner-Reparatii Incaltaminte)",
"Scafandru",
"Scafandru Lucrator Subacvatic",
"Scafandru Sef Grup",
"Scafandru Sef Utilaj",
"Scafandru Greu",
"Operator Barocamera",
"Scafandru Salvator",
"Sef De Scufundare",
"Tehnician De Scufundare",
"Artificier De Mina",
"Artificier La Lucrari De Suprafata",
"Pirotehnician Cinematografie Si Teatru",
"Controlor Calitate",
"Miner In Subteran",
"Miner La Suprafata",
"Miner In Subteran Pentru Constructii",
"Masinist Pentru Utilaje Specifice La Extractie Si Executia Tunelurilor",
"Semnalist-Cuplator",
"Excavatorist Pentru Excavatoare Cu Rotor De Mare Capacitate",
"Trolist",
"Brichetator Carbune",
"Distilator La Prepararea Carbunelui",
"Operator La Prepararea Minereurilor",
"Operator La Sfaramarea Minereurilor",
"Prajitor Minereu",
"Prelucrator Mica",
"Spalator La Prepararea Carbunilor",
"Flotator La Prepararea Carbunilor",
"Separator La Prepararea Carbunilor",
"Morar La Masini De Maruntit Roci",
"Tocator La Masini De Maruntit Roci",
"Concasorist",
"Operator Mineralurg",
"Operator Extractie Titei",
"Sondor La Foraj Manual",
"Operator-Prospector Lucrari Geologice Si Geofizice",
"Operator Transport Pe Conducte Singulare Gaze",
"Operator Extractie Gaze",
"Operator Extractie Titei In Subteran",
"Operator Extractie Sare In Salina",
"Operator Masuratori Speciale Sonde",
"Operator Lucrari Speciale Sonde",
"Sondor La Forajul Mecanizat Si Reparatii Sonde",
"Sondor La Interventii De Sonde",
"Sondor La Punerea In Productie",
"Primitor-Preparator Produse Fluide",
"Operator Flotare Produse Fluide",
"Cuptorar Lianti",
"Fasonator Produse Din Azbociment",
"Finisor Produse Din Azbociment",
"Morar Lianti",
"Preparator Pasta De Azbociment",
"Tratamentist Produse Din Azbociment",
"Operator La Impregnarea Produselor Hidroizolatoare",
"Masinist Pentru Prefabricate Din Beton Si Beton Armat",
"Operator La Fabricarea Vatei Si Produselor Din Vata Minerala",
"Confectioner Garnituri Din Azbest",
"Cocsar",
"Furnalist",
"Otelar",
"Pregatitor Materiale De Sarje",
"Melanjorist",
"Operator Oxizi De Plumb",
"Dezbatator Lingouri",
"Metalurgist Pulberi Din Oxid De Fier",
"Curatitor Lingouri",
"Preparator La Concentratele Miniere",
"Topitor La Concentrate Miniere",
"Rafinator Metale Neferoase",
"Electrometalurgist",
"Conditioner-Finisor",
"Turnator Fonta",
"Granulator Zgura",
"Epurator Gaze",
"Masinist Suflante",
"Pregatitor De Sarje",
"Dozator La Producerea Aglomeratului",
"Aglomeratorist",
"Masinist Exhaustor",
"Operator Separare Magnetica",
"Laminator Semifabricate, Profiluri Tabla Si Platbanda",
"Laminator, Presator Tevi Plumb",
"Topitor, Turnator Metale Pretioase",
"Laminator Sarma",
"Laminator Tabla Subtire",
"Laminator De Bandaje Si Discuri",
"Laminator De Tevi",
"Laminator Pe Laminoare Continue",
"Laminator De Benzi La Rece",
"Presator De Tevi La Cald Si Profiluri Prin Extruziune",
"Alimentator-Incalzitor De Materiale",
"Operator La Cuptoare Si Instalatii Pentru Turnarea Si Laminarea Metalelor",
"Laminator",
"Termist-Tratamentist De Produse Brute, Forjate, Turnate Sau Laminate",
"Termist Tratamentist De Piese Semifabricate, Finite",
"Calitor Prin Inductie Sau Cu Flacara",
"Calitor Scule",
"Termist-Tratamentist",
"Operator La Instalatii De Tratament Termic Cu Procesare",
"Operator La Pregatirea Sarjelor Pentru Tratament Termic",
"Finisor Laminate Si Trefilate",
"Decapator",
"Regulator Tevi",
"Trefilator, Tragator",
"Galvanizator",
"Metalizator Prin Pulverizare",
"Metalizator Prin Cufundare In Metal Topit",
"Confectioner Protectii Si Obiecte Anticorozive",
"Emailator",
"Operator La Confectionarea Materialelor Electroizolante",
"Matisor Cabluri",
"Metalizator-Termist",
"Arzator Email",
"Emailator Insigne Si Decoratii",
"Emailator Firme Si Decoruri",
"Emailator Prin Pudrare",
"Emailator Prin Pulverizare",
"Preparator Email",
"Emailator Prin Imersiune",
"Acoperitor Metale",
"Poleitor Filiere",
"Protejator Conductori Cabluri Si Condensatori Statici De Forta",
"Confectioner Izolatii La Conductori Electrici",
"Confectioner Mantale De Plumb Prin Presare La Cabluri",
"Confectioner Toroane Si Cablaje La Conductori Electrici",
"Pregatitor Seturi De Cabluri Electrice Pentru Autotrac Si Accesorii",
"Preparator Electrolit Si Amestec Depolarizator",
"Confectioner Si Legator Depolarizator",
"Asamblator Elemente Si Baterii Galvanice",
"Confectioner De Elemente Galvanice",
"Confectioner Celule De Electroliza",
"Operator La Masini De Macinare Fina (Produse Chimice)",
"Operator La Masini De Fragmentare (Produse Chimice)",
"Operator La Masini De Amestecare (Produse Chimice)",
"Operator La Instalatii De Ardere",
"Uscator In Industria Chimica",
"Preparator In Industria Chimica",
"Sinterizator",
"Operator La Fabricarea Sticlei",
"Impaslitor Panza Sticla",
"Filator Fibre Sticla",
"Preparator Amestec Si Topitor Sticla",
"Operator Poliesteri Armati Cu Fibra De Sticla",
"Extractorist In Chimie",
"Fermentator In Chimie",
"Concentrator-Purificator In Chimie",
"Extractorist Uleiuri Volatile Naturale Si Colesterina",
"Operator Chimist La Producerea Compusilor Organici Ai Sulfului Si Ingrasamintelor Fosfatice",
"Distilator In Industria Chimica",
"Operator Chimist La Producerea Diverselor Produse Anorganice",
"Operator Chimist La Fabricarea Lacurilor, Vopselelor Si Uleiurilor",
"Operator Chimist La Fabricarea Colorantilor",
"Operator La Obtinerea Produselor Din Spume Poliuretanice Si Latex",
"Operator Chimist La Producerea Compusilor Anorganici Ai Azotului Si Ingrasamintelor Azotoase",
"Operator Lacuri Electroizolante",
"Preparator Lacuri, Vopsele, Paste De Fludor Folosite La Aparataj Electric",
"Operator Chimist La Chimizarea Gazelor De Rafinarie",
"Operator Cracare, Deformare Si Fabricare Bitum",
"Operator Chimist La Fabricarea Altor Produse Organice",
"Operator Chimist La Chimizarea Gazului De Cocs",
"Producator De Fire Si Fibre Sintetice",
"Operator La Fabricarea Pieii Sintetice",
"Preparator Ferodouri",
"Finisor Universal Ferodouri",
"Operator La Fabricarea Glicerinei Si Acizilor Grasi",
"Operator La Fabricarea Sapunurilor",
"Operator La Produse Odorante Sintetice",
"Operator La Fabricarea Detergentilor",
"Operator Chimist La Produsele Farmaceutice Si Chimice Pure",
"Preparator Benzi Cauciucate Si Compozitii Emplastre",
"Preparator Prafuri De Spalat Si Curatat",
"Preparator La Prepararea Produselor Cosmetice Si De Parfumerie",
"Preparator Ser Vaccin",
"Conditioner Finisor Produse Explozive",
"Confectioner Fitile",
"Confectioner Produse Pirotehnice",
"Nitrator",
"Pregatitor La Produse Explozive",
"Preparator La Produse Explozive",
"Confectioner Cartuse De Vanatoare",
"Operator La Tragere Si Munitie",
"Operator La Pregatirea, Conservarea Si Ambalarea Armamentului Si Munitiei",
"Pregatitor, Completator De Echipamente Tehnice Si Sdv-Uri",
"Delaborator Munitie",
"Operator La Producerea Sodei Si Produselor Clorosodice",
"Operator La Fabricarea Altor Produse Chimice",
"Fotoceramist",
"Fotocopist",
"Fotopoligraf",
"Fotoreproducator",
"Montator Filme",
"Retusor Clisee",
"Pregatitor Hartie Fotosensibila",
"Operator La Fabricarea Filmelor Fotografice",
"Preparator La Confectionarea Produselor Industriale Din Cauciuc",
"Pregnator Prize Tehnice Si Banda Izolatoare",
"Confectioner De Produse Industriale Din Cauciuc",
"Vulcanizator De Produse Industriale Din Cauciuc",
"Finisor-Reparator De Produse Industriale Din Cauciuc",
"Operator La Prelucrarea Cauciucului",
"Confectioner Garnituri De Etansare Din Cauciuc",
"Finisor Incaltaminte Si Articole Tehnice Din Cauciuc",
"Calandror La Finisarea Cauciucului",
"Pregatitor Regenerare Cauciuc",
"Devulcanizator Regenerare Cauciuc",
"Rafinator Regenerare Cauciuc",
"Dozator Prelucrare Cauciuc",
"Impregnator Produse Din Cauciuc",
"Profilator Produse Din Cauciuc",
"Stantator Piese Pentru Incaltaminte Din Piele Si Cauciuc",
"Cusator Piese La Incaltaminte Din Cauciuc",
"Vulcanizator Piese Din Cauciuc La Prese",
"Vulcanizator La Autoclava",
"Preparator Cauciuc Electroizolant",
"Valtar Cauciuc Electroizolant",
"Masinist La Confectionarea Materialelor Electroizolante Impregnate",
"Masinist La Confectionarea Materialelor Electroizolante Stratificate (Mica)",
"Masinist La Confectionarea Foliilor De Cauciuc Electroizolante",
"Presator-Formator Materiale Stratificate, Pregnator Prize Tehnice Si Banda Izolatoare",
"Conditioner-Finisor Produse Din Cauciuc",
"Croitor Pentru Incaltaminte Si Articole Tehnice Din Cauciuc",
"Pregatitor Pentru Incaltaminte Si Articole Tehnice Din Cauciuc",
"Preparator Placi De Etansare Comprimate",
"Operator Fabricarea Si Prelucrarea Polimerilor",
"Preparator Mase Plastice",
"Valtar Calandru Mase Plastice",
"Operator La Prelucrarea Maselor Plastice",
"Presator Mase Plastice",
"Finisor-Asamblator Obiecte Din Mase Plastice",
"Creator, Modelier Mase Plastice",
"Operator La Confectionarea Discurilor Fonografice",
"Operator Sudare Tevi Si Fitinguri Din Polietilena De Inalta Densitate Pehd",
"Operator Mase Plastice",
"Operator Masini De Termoformatare",
"Cartonagist",
"Operator La Masina De Laminat",
"Decapsulator",
"Decuscutor",
"Topitor In Si Canepa",
"Melitator In Si Canepa",
"Innobilator In Si Canepa",
"Scuturator In Si Canepa",
"Lucrator In Bataje (Batator)",
"Destramator",
"Carbonizator Textile",
"Scuturator",
"Pregatitor Amestecuri In Filaturi",
"Cardator",
"Laminator Benzi Din Fibre",
"Pieptanator",
"Zdrobitor Si Uleiator Textile",
"Operator Masini Puitoare",
"Formator Manusi Fibre Liberiene",
"Montator La Lipit Ace",
"Curatitor-Slefuitor Garnituri Carde",
"Cilindror In Filaturi",
"Disponent In Filaturi",
"Pregatitor Gogosi De Matase (Fierbator)",
"Prelucrator Fire De Matase Din Gogosi (Filator)",
"Innodator Fire De Matase Naturala",
"Finisor Fire De Matase Naturala",
"Prelucrator Deseuri Gogosi De Matase Naturala",
"Operator La Masini De Tricotat Rectiliniu",
"Operator La Deservirea Razboaielor De Tesut",
"Snuruitor Jaqard",
"Operator La Masini De Tricotat Circular",
"Operator Sculuitor",
"Operator Canelator",
"Operator La Masini De Urzit",
"Operator Incheietor Fire",
"Operator Navaditor, Lipitor, Innodator Fire",
"Operator Bobinator-Dublator",
"Operator Batirator Fire",
"Operator Rasucitor Fire",
"Paslitor",
"Confectioner Polizoare Din Pasla",
"Operator Confectioner Industrial Imbracaminte Din Tesaturi, Tricotaje, Materiale Sintetice",
"Operator Confectii Imbracaminte Din Piele Si Inlocuitori",
"Operator La Confectionarea Industriala A Imbracamintei Din Blana",
"Operator La Confectionarea Industriala A Manusilor Din Piele",
"Incadrator Confectii",
"Rihtuitor Confectii",
"Pregatitor-Lansator Confectii",
"Spanuitor Confectii",
"Taietor Confectii",
"Operator Gazator Textile",
"Operator Descleietor Textile",
"Operator Degamator Textile",
"Operator Spalator Textile",
"Operator Albitor Textile",
"Operator Fierbator Textile",
"Operator Mercerizator Textile",
"Operator Pregatitor Chimicale In Industria Textila",
"Operator Vopsitor Textile",
"Operator Imprimeur Textile",
"Confectioner Sabloane Si Cilindri De Imprimat",
"Operator Apretor Textile",
"Operator Calandor-Govrator Textile",
"Operator Impregnator Textile",
"Operator Decator",
"Operator Presator Tesaturi Textile (Storcator Textile)",
"Operator Fixator Textile",
"Operator Piuar",
"Operator Scamosetor",
"Operator Tunsator Textile",
"Operator Curatitor Chimic",
"Operator Metrar-Volator-Dublator Textile",
"Operator Taietor Textile",
"Operator Uscator Textile",
"Operator Ingreunator Matase Naturala",
"Naphtolator La Imprimerie",
"Vaporizator La Imprimerie",
"Confectioner Sabloane La Imprimerie",
"Presator Axe La Cilindrul De Imprimat",
"Operator Universal ",
"Operator Cenuseritor",
"Operator Tabacitor Mineral Argasitor",
"Operator Tabacitor Vegetal",
"Operator Finisor Mineral",
"Operator Finisor Vegetal",
"Operator Argasitor",
"Operator Sortator In Industria Pielariei",
"Vopsitor Imbracaminte Din Blana",
"Operator La Prepararea Talpii De Incaltaminte Din Fibre",
"Operator La Confectionarea Industriala A Articolelor Din Cauciuc Si Textile Cauciucate",
"Croitor-Stantator Piese Incaltaminte",
"Pregatitor Piese Incaltaminte",
"Cusator Piese Din Piele Si Inlocuitori",
"Tragator Fete Pe Calapod",
"Talpuitor Industrial",
"Finisor Incaltaminte",
"Operator La Confectionarea Industriala A Palariilor",
"Croitor Confectii Industriale Din Blana",
"Operator La Confectionarea Industriala A Articolelor De Sport Si Protectie, Din Piele Si Inlocuitori",
"Cusator Confectii Industriale Din Blana",
"Pregatitor Confectii Industriale Din Blana",
"Finisor Confectii Industriale Din Blana",
"Operator Textile Netesute",
"Operator La Prepararea Conservelor Din Carne, Peste Si In Amestec Legume Si Peste",
"Operator La Valorificarea Subproduselor De Abator",
"Tripier",
"Preparator Peste, Raci, Broaste In Cherhanale Si Oficii",
"Preparator Faina Din Peste",
"Topitor Grasimi Comestibile Si De Uz Industrial",
"Operator La Fabricarea Mezelurilor",
"Colector Si Sortator Par",
"Preparator Ulei De Copite",
"Colector Si Preparator Faina, Sange, Carne, Oase",
"Curatitor Piei",
"Valorificator Glande",
"Preparator Pepsina, Cheag",
"Matar",
"Operator La Prepararea Branzeturilor",
"Operator La Prepararea Produselor Lactate",
"Operator Centru De Racire Lapte",
"Operator La Fabricarea Untului",
"Preparator Conserve Lapte Si Lactoza",
"Morar",
"Operator La Fabricarea Nutreturilor Combinate",
"Preparator Boia De Ardei",
"Preparator Mustar",
"Preparator Extracte, Arome Si Esente",
"Decorticator Crupe",
"Operator La Prepararea Produselor Zaharoase",
"Operator La Fabricarea Produselor Fainoase",
"Preparator Inghetata",
"Operator La Fabricarea Biscuitilor",
"Preparator Napolitane",
"Operator La Fabricarea Uleiurilor Vegetale",
"Operator La Fabricarea Conservelor Din Legume Sau Fructe",
"Operator La Fabricarea Zaharului",
"Conditioner Miere",
"Preparator De Produse Apicole",
"Preparator Surogate Cafea",
"Operator La Conditionarea Si Prelucrarea Plantelor Medicinale",
"Operator La Fermentarea Tutunului Si Fabricarea Produselor Din Tutun",
"Preparator Halva",
"Operator La Prepararea Bauturilor Alcoolice Si Racoritoare",
"Operator La Fabricarea Berii",
"Operator La Fabricarea Maltului",
"Operator La Fabricarea Spirtului Si Drojdiei De Panificatie",
"Vinificator-Pivnicer",
"Fermentator Otet",
"Operator La Fabricarea Glucozei",
"Preparator Bauturi Racoritoare",
"Preparator Rachiuri Industriale Si Lichioruri",
"Distilator Rachiuri Naturale",
"Operator La Fabricarea Amidonului Si Dextrinei",
"Preparator Coniac",
"Preparator Vermut",
"Preparator Sampanie",
"Pregatitor Lemn, Stuf, Paie",
"Preparator Pasta",
"Fierbator-Spalator Celuloza, Hartie",
"Albitor Pasta Hartie",
"Confectioner Tambur Filigranare",
"Masinist La Deshidratare Pasta Hartie",
"Finisor Hartie, Carton, Mucava",
"Confectioner Produse Igienico-Sanitare",
"Confectioner Rondele Din Pluta",
"Confectioner Bastoane Din Pluta",
"Confectioner Colaci Si Centuri De Salvare",
"Masinist La Masina De Taiat Sraifuri Si Dopuri Din Pluta",
"Masinist La Masina De Zdrobit Si Macinat Pluta",
"Confectioner Placi Izolatoare",
"Aburitor Pluta",
"Pregatitor Lemn Aschietor",
"Pregatitor Aschii",
"Incleietor Placi Aglomerate",
"Formator Presator Placi Brute",
"Formator Finisor Placi",
"Tocatorist-Defibratorist",
"Filtrator-Deshidratorist",
"Presator Pfl",
"Tratamentist Pfl",
"Formator Pfl",
"Impregnator La Innobilare Pfl",
"Fasonator-Sortator Filme",
"Presator La Innobilare Pfl",
"Formator La Innobilare Pfl",
"Finisor La Innobilare Pfl",
"Pregatitor Pfl Si Hartie Pentru Filme",
"Gaterist La Taiat Busteni",
"Taietor La Ferastrau Panglica",
"Fasonator Cherestea",
"Desenator-Insemnator Cherestea",
"Decupator Furnire",
"Derulatorist",
"Fasonator-Uscator Furnire",
"Frezor-Imbinator Furnire Tehnice",
"Presator Produse Stratificate",
"Formator Slefuitor Produse Stratificate",
"Preparator Ppf",
"Miezuitor Panele Si Placi Celulare",
"Operator La Recoltarea Si Toaletarea Arborilor Forestieri",
"Topitor Sticla",
"Prelucrator De Topituri La Semiautomate",
"Prelucrator De Topituri La Instalatii De Tras Tevi",
"Prelucrator De Tuburi Si Baghete",
"Cuptorar Recoacere Sticla",
"Confectioner Termosuri",
"Taietor Produse Din Sticla",
"Slefuitor Produse Din Sticla",
"Arzator Produse Din Sticla",
"Sablator Produse Din Sticla",
"Argintar Produse Din Sticla",
"Operator La Instalatii Automate Pentru Prepararea Amestecului",
"Operator La Instalatii Automate Pentru Prelucrarea Topiturii De Sticla",
"Operator La Prelucrarea Tuburilor Din Sticla",
"Operator La Masini De Inscriptionat",
"Prelucrator Fire Si Tesaturi Din Fire De Sticla",
"Operator La Instalatii De Tras Si Laminat Geam",
"Turnator Geam",
"Preparator Vata De Sticla",
"Taietor Geam",
"Slefuitor/ Sablator Geam",
"Securizator Geam",
"Pregatitor De Materii Prime Pentru Producerea Sticlei",
"Strungar Produse Ceramice",
"Cuptorar Ceramica Fina Si Decor",
"Arzator Produse Ceramice",
"Operator La Fabricarea Produselor Refractare",
"Operator La Fabricarea Produselor Abrazive",
"Operator La Fabricarea Produselor Din Carbune",
"Operator Abrazive Pe Suporti",
"Finisor Produse Abrazive",
"Granulator/ Sortator Abrazive",
"Cuptorar Produse Abrazive",
"Preparator-Presator Abrazive",
"Cuptorar Produse Refractare",
"Formator Produse Refractare",
"Preparator-Presator Produse Din Carbune",
"Cuptorar Produse Din Carbune",
"Finisor Produse Din Carbune",
"Prelucrator Produse Ceramice Prin Extrudare",
"Discuitor De Produse Ceramice La Masini",
"Prelucrator Produse Ceramice Prin Injectare",
"Armator De Izolatori Electrici (Prelucrator Produse Electrotehnice",
"Fochist Locomotiva Cu Abur",
"Fochist La Masini Cu Abur",
"Masinist La Instalatii Pentru Incalzit Tren",
"Fochist Pentru Cazane De Abur Si De Apa Fierbinte",
"Ajutor Fochist",
"Fochist Pentru Cazane Mici De Abur",
"Fochist La Cazane De Apa Calda Si Cazane De Abur De Joasa Presiune",
"Fochist Pentru Cazane Conduse De Calculator",
"Masinist La Masini De Ambalat",
"Operator La Masina De Etichetat",
"Operator La Roboti Industriali",
"Operator La Tratarea Si Epurarea Apelor Uzate",
"Operator Instalatie De Sortare Si Reciclare Deseuri Menajere Si Asimilabile",
"Operator Generatoare Terestre Sonice Si Cu Agent Activ De Insamantare",
"Operator Punct De Lansare",
"Pompagiu",
"Compresorist",
"Operator Montaj Linii Automate",
"Lacatus Montator Pentru Utilaje Industriale, De Constructii Si Agricole",
"Pregatitor Si Montator Utilaje Tehnologice",
"Montator Subansamble",
"Montator Aparate Aer Conditionat",
"Operator Calitate Flux",
"Lacatus-Montator Masini Electrice Rotative, Transformatoare Si Aparataj",
"Confectioner Protectie Dielectrica Pentru Acumulatori",
"Morar La Prepararea Materialelor Pentru Acumulatori",
"Confectioner Gratare Si Accesorii Pentru Acumulatori",
"Confectioner Placi Pentru Acumulatori",
"Preparator Lesie Pentru Acumulatori",
"Montator Acumulatori",
"Confectioner Cabluri Si Arbori De Cabluri",
"Montator-Reglor, Depanator Aparate Electronice, Telecomunicatii, Radio",
"Confectioner Tuburi Cinescop",
"Montator-Reglor, Depanator De Instalatii De Electronica Si Curenti",
"Montator-Reglor, Depanator De Aparate Radio Si Tv, Redresoare Si",
"Confectioner Piese Radio Si Semiconductori",
"Confectioner Circuite Integrate",
"Confectioner Scala Radio",
"Confectioner Circuite Imprimate",
"Montator, Reglor, Testor Tehnica De Calcul",
"Confectioner Lampi Fluorescente",
"Confectioner Lampi Cu Vapori De Mercur",
"Confectioner Lampi Cu Vapori De Sodiu",
"Confectioner Becuri",
"Montator Electromecanic",
"Asamblor Biciclete",
"Asamblor Jucarii",
"Asamblor Articole De Sport",
"Operator La Fabricarea Fermoarelor",
"Confectioner De Bete, Lansete, Manere Si Dopuri Pentru Unelte De Pescuit",
"Confectioner-Asamblor Articole Din Lemn",
"Confectioner-Asamblor Articole Din Carton",
"Confectioner-Asamblor Articole Din Textile",
"Montor Articole Din Piele",
"Mecanic Locomotiva Si Automotor",
"Mecanic Ajutor Locomotiva Si Automotor",
"Conducator Autodrezina",
"Mecanic Conducator Vagon Motor De Rectificare A Liniei Aeriene",
"Mecanic Locomotiva Si Rama Electrica Metrou",
"Mecanic Ajutor Locomotiva Si Rama Electrica Metrou",
"Franar",
"Manevrant Vagoane",
"Sef Manevra",
"Acar",
"Sef Tren",
"Paznic Bariera",
"Revizor Ace",
"Conducator De Motocicleta",
"Conducator De Motoscuter",
"Sofer De Autoturisme Si Camionete",
"Sofer Autosanitara",
"Sofer Autoambulanta",
"Pilot Incercare Auto",
"Sofer Autobuz",
"Conducator Troleibuz",
"Conducator Tramvai (Vatman",
"Sofer Autocamion/ Masina De Mare Tonaj",
"Sofer Transport Valori Bancare",
"Lucrator Operativ Pentru Autocontainere",
"Conducator Autospeciala",
"Camionagiu",
"Tractorist",
"Combiner Agricol",
"Motorist La Motoagregate Si Masini In Silvicultura",
"Mecanic De Exploatare In Cultura Mare",
"Mecanic De Exploatare In Zootehnie",
"Operator La Colectatul Si Manipulatul Lemnului",
"Masinist La Masini Pentru Terasamente (Ifronist",
"Masinist La Instalatiile De Preparat Si Turnat Beton Si Mixturi Asfaltice",
"Masinist La Masini Cale Mecanizare Usoara Si Grea",
"Operator La Utilaje De Forjat Dirijat",
"Operator La Utilaje De Reabilitari Conducte Subterane",
"Operator La Utilaje Pentru Subtraversari",
"Macaragiu",
"Masinist Pod Rulant",
"Funicularist",
"Macaragiu Macarale Plutitoare",
"Sofer Automacaragiu",
"Supraveghetor Statie Senal Navigabil",
"Funicularist, Funiculare Pasagere",
"Mecanizator (Muncitor Portuar",
"Liftier",
"Docher",
"Sef Echipa Docheri",
"Docher Instalatii De Incarcare/ Descarcare La Bordul Navei Si Cheu",
"Docher-Amarator",
"Docher-Mecanizator",
"Macaragiu Portuar",
"Masinist La Masini Mobile Pentru Transporturi Interioare",
"Masinist La Alte Masini Fixe De Transport Pe Orizontala Si Verticala",
"Stivuitorist",
"Tractorist Portuar",
"Conducator Autotrailer",
"Conducator Autoincarcator Portuar",
"Stivuitorist Portuar",
"Marinar, Pilot Naval, Barjist",
"Observator Far Maritim Si Statie Semnal De Ceata",
"Marinar Legator",
"Conducator Ambarcatiuni Agrement Pe Ape Interioare",
"Pontonier Feribot",
"Servator Far Maritim Si Statie Semnal De Ceata",
"Conducator De Salupa Maritima/ Fluviala",
"Timonier Maritim/ Fluvial",
"Motopompist",
"Menajera",
"Femeie De Serviciu",
"Ingrijitor Spatii Hoteliere",
"Lucrator Room-Service Hotel",
"Calcatoreasa Lenjerie",
"Curatatoreasa Lenjerie",
"Spalatoreasa Lenjerie",
"Spalator Covoare Innodate",
"Spalator Vehicule",
"Spalator Vitrine Si Geamuri",
"Ingrijitor Animale",
"Muncitor Manipulare Si Pregatire Furaje",
"Muncitor Necalificat In Agricultura",
"Vacar",
"Ingrijitor Pomi",
"Muncitor Necalificat In Silvicultura",
"Taietor Manual Lemn De Foc",
"Muncitor Necalificat In Pescuit Si Vanatoare",
"Muncitor Piscicol",
"Muncitor Necalificat In Mine Si Cariere",
"Impingator Vagoneti",
"Lucrator La Amenajarea Terenurilor Sportive (Amenajator Baza Sportiva)",
"Ingrijitor Spatii Verzi",
"Muncitor Necalificat La Intretinerea De Drumuri, Sosele, Poduri, Baraje",
"Sapator Manual",
"Muncitor Necalificat La Demolarea Cladirilor, Captuseli Zidarie, Placi",
"Muncitor Necalificat La Spargerea Si Taierea Materialelor De Constructii",
"Ambalator Manual",
"Imbuteliator Fluide Sub Presiune",
"Marcator Piese",
"Muncitor Necalificat La Ambalarea Produselor Sub Forma De Praf Si",
"Muncitor Necalificat La Ambalarea Produselor Solide Si Semisolide",
"Muncitor Necalificat In Industria Confectiilor",
"Muncitor Necalificat La Asamblarea, Montarea Pieselor",
"Lucrator Sortator Deseuri Reciclabile",
"Imbuteliator Gaz Petrol Lichefiat",
"Muncitor In Activitatea De Gospodarire A Spanului",
"Umplutor Sifoane",
"Muncitor Necalificat In Metalurgie",
"Caraus",
"Incarcator-Descarcator",
"Legator De Sarcina",
"Manipulant Marfuri",
"Operator Transport Si Distribuire Butelii De Gpl",
"Muncitor Spalare Si Curatare Cisterne",
"Muncitor In Serviciile De Trafic Aerian",
"Lucrator Bucatarie (Spalator Vase Mari",
"Lustragiu",
"Spalator Geamuri Si Parbrize",
"Vanzator Ambulant De Produse Nealimentare",
"Vanzator De Ziare",
"Lucrator Operativ Pentru Autocompactoare",
"Lucrator Pentru Salubrizare Cai Publice",
"Lucrator Pentru Salubrizare Spatii Verzi",
"Lucrator Pentru Salubrizare",
"Lucrator Utilaje Specializate Pentru Salubrizare",
"Curier",
"Hamal",
"Comisioner",
"Distribuitor Presa",
"Ingrijitor Caini In Adaposturi",
"Gropar",
"Incinerator",
"Prinzator Caini",
"Cantaragiu",
"Casier Incasator",
"Incasator Si Cititor Contoare De Energie Electrica, Gaze, Apa",
"Vidanjor-Curatitor Canale",
"Operator Deratizare, Dezinsectie, Dezinfectie",
"Model (Invatamant",
"Gonaci",
"Garderobier",
"Ucenic",
"Aprod",
"Controlor Poarta",
"Paznic",
"Plasator",
"Portar",
"Supraveghetor Muzeu",
"Supraveghetor Noapte (Invatamant)",
"Supraveghetor Sali Spectacole",
"Supraveghetor Hotel",
"Operator Masini Insacuire Pentru Fabrica De Ciment",
"Operator Masini Paletizate Si Infoliere Pentru Fabrica De Ciment",
"Operator Fabricatie Flux",
"Montator, Reglor Si Depanator De Aparate Si Echipamente Electronice",
"Operator La Platforme Pentru Lucru La Inaltime",
"Operator Umplere Recipiente Gpl",
"Ajutor Bucatar",
]
| Provider |
python | PrefectHQ__prefect | src/prefect/locking/filesystem.py | {
"start": 394,
"end": 716
} | class ____(TypedDict):
"""
A dictionary containing information about a lock.
Attributes:
holder: The holder of the lock.
expiration: Datetime when the lock expires.
path: Path to the lock file.
"""
holder: str
expiration: Optional[datetime.datetime]
path: Path
| _LockInfo |
python | tornadoweb__tornado | tornado/locks.py | {
"start": 1628,
"end": 4905
} | class ____(_TimeoutGarbageCollector):
"""A condition allows one or more coroutines to wait until notified.
Like a standard `threading.Condition`, but does not need an underlying lock
that is acquired and released.
With a `Condition`, coroutines can wait to be notified by other coroutines:
.. testcode::
import asyncio
from tornado import gen
from tornado.locks import Condition
condition = Condition()
async def waiter():
print("I'll wait right here")
await condition.wait()
print("I'm done waiting")
async def notifier():
print("About to notify")
condition.notify()
print("Done notifying")
async def runner():
# Wait for waiter() and notifier() in parallel
await gen.multi([waiter(), notifier()])
asyncio.run(runner())
.. testoutput::
I'll wait right here
About to notify
Done notifying
I'm done waiting
`wait` takes an optional ``timeout`` argument, which is either an absolute
timestamp::
io_loop = IOLoop.current()
# Wait up to 1 second for a notification.
await condition.wait(timeout=io_loop.time() + 1)
...or a `datetime.timedelta` for a timeout relative to the current time::
# Wait up to 1 second.
await condition.wait(timeout=datetime.timedelta(seconds=1))
The method returns False if there's no notification before the deadline.
.. versionchanged:: 5.0
Previously, waiters could be notified synchronously from within
`notify`. Now, the notification will always be received on the
next iteration of the `.IOLoop`.
"""
def __repr__(self) -> str:
result = f"<{self.__class__.__name__}"
if self._waiters:
result += " waiters[%s]" % len(self._waiters)
return result + ">"
def wait(
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[bool]:
"""Wait for `.notify`.
Returns a `.Future` that resolves ``True`` if the condition is notified,
or ``False`` after a timeout.
"""
waiter = Future() # type: Future[bool]
self._waiters.append(waiter)
if timeout:
def on_timeout() -> None:
if not waiter.done():
future_set_result_unless_cancelled(waiter, False)
self._garbage_collect()
io_loop = ioloop.IOLoop.current()
timeout_handle = io_loop.add_timeout(timeout, on_timeout)
waiter.add_done_callback(lambda _: io_loop.remove_timeout(timeout_handle))
return waiter
def notify(self, n: int = 1) -> None:
"""Wake ``n`` waiters."""
waiters = [] # Waiters we plan to run right now.
while n and self._waiters:
waiter = self._waiters.popleft()
if not waiter.done(): # Might have timed out.
n -= 1
waiters.append(waiter)
for waiter in waiters:
future_set_result_unless_cancelled(waiter, True)
def notify_all(self) -> None:
"""Wake all waiters."""
self.notify(len(self._waiters))
| Condition |
python | davidhalter__parso | parso/python/tree.py | {
"start": 3889,
"end": 4892
} | class ____(PythonMixin, Leaf):
__slots__ = ()
def _split_prefix(self):
return split_prefix(self, self.get_start_pos_of_prefix())
def get_start_pos_of_prefix(self):
"""
Basically calls :py:meth:`parso.tree.NodeOrLeaf.get_start_pos_of_prefix`.
"""
# TODO it is really ugly that we have to override it. Maybe change
# indent error leafs somehow? No idea how, though.
previous_leaf = self.get_previous_leaf()
if previous_leaf is not None and previous_leaf.type == 'error_leaf' \
and previous_leaf.token_type in ('INDENT', 'DEDENT', 'ERROR_DEDENT'):
previous_leaf = previous_leaf.get_previous_leaf()
if previous_leaf is None: # It's the first leaf.
lines = split_lines(self.prefix)
# + 1 is needed because split_lines always returns at least [''].
return self.line - len(lines) + 1, 0 # It's the first leaf.
return previous_leaf.end_pos
| PythonLeaf |
python | django__django | tests/model_formsets/models.py | {
"start": 3496,
"end": 3837
} | class ____(MexicanRestaurant):
the_restaurant = models.OneToOneField(
MexicanRestaurant, models.CASCADE, parent_link=True, primary_key=True
)
tacos_are_yummy = models.BooleanField(default=False)
# models for testing unique_together validation when a fk is involved and
# using inlineformset_factory.
| ClassyMexicanRestaurant |
python | pytorch__pytorch | torch/_export/error.py | {
"start": 1064,
"end": 1349
} | class ____(Exception):
"""
Raised when an internal invariance is violated in EXIR stack.
Should hint users to report a bug to dev and expose the original
error message.
"""
def __init__(self, message: str) -> None:
super().__init__(message)
| InternalError |
python | weaviate__weaviate-python-client | weaviate/collections/classes/config_vector_index.py | {
"start": 3167,
"end": 3693
} | class ____(_VectorIndexConfigCreate):
cleanupIntervalSeconds: Optional[int]
dynamicEfMin: Optional[int]
dynamicEfMax: Optional[int]
dynamicEfFactor: Optional[int]
efConstruction: Optional[int]
ef: Optional[int]
filterStrategy: Optional[VectorFilterStrategy]
flatSearchCutoff: Optional[int]
maxConnections: Optional[int]
vectorCacheMaxObjects: Optional[int]
@staticmethod
def vector_index_type() -> VectorIndexType:
return VectorIndexType.HNSW
| _VectorIndexConfigHNSWCreate |
python | pytorch__pytorch | test/dynamo/test_modules.py | {
"start": 19398,
"end": 19609
} | class ____(ParametersModule1):
def forward(self, x):
ones = torch.ones(10, dtype=next(self.parameters(recurse=False)).dtype)
return F.relu(self.linear1(x)) * self.scale + ones
| ParametersModule4 |
python | scipy__scipy | scipy/integrate/_quad_vec.py | {
"start": 203,
"end": 662
} | class ____(collections.OrderedDict):
def __init__(self, max_size):
self.__max_size = max_size
def __setitem__(self, key, value):
existing_key = (key in self)
super().__setitem__(key, value)
if existing_key:
self.move_to_end(key)
elif len(self) > self.__max_size:
self.popitem(last=False)
def update(self, other):
# Not needed below
raise NotImplementedError()
| LRUDict |
python | PrefectHQ__prefect | src/prefect/logging/filters.py | {
"start": 690,
"end": 1136
} | class ____(logging.Filter):
"""
A logging filter that obfuscates any string that matches the obfuscate_string function.
"""
def filter(self, record: logging.LogRecord) -> bool:
# Need to import here to avoid circular imports
from prefect.settings import PREFECT_API_KEY
if PREFECT_API_KEY:
record.msg = redact_substr(record.msg, PREFECT_API_KEY.value())
return True
| ObfuscateApiKeyFilter |
python | ray-project__ray | python/ray/util/collective/tests/util.py | {
"start": 4251,
"end": 12263
} | class ____:
def __init__(self):
self.buffer0 = None
self.buffer1 = None
self.list_buffer0 = None
self.list_buffer1 = None
def __del__(self):
self.buffer0 = None
self.buffer1 = None
self.list_buffer0 = None
self.list_buffer1 = None
def init_tensors(self):
with cp.cuda.Device(0):
self.buffer0 = cp.ones((10,), dtype=cp.float32)
self.list_buffer0 = [cp.ones((10,), dtype=cp.float32) for _ in range(4)]
with cp.cuda.Device(1):
self.buffer1 = cp.ones((10,), dtype=cp.float32)
self.list_buffer1 = [cp.ones((10,), dtype=cp.float32) for _ in range(4)]
cp.cuda.Stream.null.synchronize()
return True
def init_group(self, world_size, rank, backend=Backend.NCCL, group_name="default"):
col.init_collective_group(world_size, rank, backend, group_name)
return True
def set_buffer(
self,
size,
value0=1.0,
value1=1.0,
dtype=cp.float32,
tensor_type0="cupy",
tensor_type1="cupy",
):
if tensor_type0 == "cupy":
with cp.cuda.Device(0):
self.buffer0 = cp.ones(size, dtype=dtype) * value0
elif tensor_type0 == "torch":
self.buffer0 = torch.ones(size, dtype=torch.float32).cuda(0) * value0
else:
raise RuntimeError()
if tensor_type1 == "cupy":
with cp.cuda.Device(1):
self.buffer1 = cp.ones(size, dtype=dtype) * value1
elif tensor_type1 == "torch":
self.buffer1 = torch.ones(size, dtype=torch.float32).cuda(1) * value1
else:
raise RuntimeError()
cp.cuda.Device(0).synchronize()
cp.cuda.Device(1).synchronize()
# cp.cuda.Stream.null.synchronize()
return True
def set_list_buffer(
self,
size,
value0=1.0,
value1=1.0,
dtype=cp.float32,
tensor_type0="cupy",
tensor_type1="cupy",
):
if tensor_type0 == "cupy":
with cp.cuda.Device(0):
self.list_buffer0 = [
cp.ones(size, dtype=dtype) * value0 for _ in range(4)
]
elif tensor_type0 == "torch":
self.list_buffer0 = [
torch.ones(size, dtype=torch.float32).cuda(0) * value0 for _ in range(4)
]
else:
raise RuntimeError()
if tensor_type1 == "cupy":
with cp.cuda.Device(1):
self.list_buffer1 = [
cp.ones(size, dtype=dtype) * value1 for _ in range(4)
]
elif tensor_type1 == "torch":
self.list_buffer1 = [
torch.ones(size, dtype=torch.float32).cuda(1) * value1 for _ in range(4)
]
else:
raise RuntimeError()
cp.cuda.Device(0).synchronize()
cp.cuda.Device(1).synchronize()
return True
@ray.method(num_returns=2)
def get_buffer(self):
return self.buffer0, self.buffer1
def do_allreduce_multigpu(self, group_name="default", op=ReduceOp.SUM):
col.allreduce_multigpu([self.buffer0, self.buffer1], group_name, op)
cp.cuda.Device(0).synchronize()
cp.cuda.Device(1).synchronize()
return self.buffer0
def do_reduce_multigpu(
self, group_name="default", dst_rank=0, dst_gpu_index=0, op=ReduceOp.SUM
):
col.reduce_multigpu(
[self.buffer0, self.buffer1], dst_rank, dst_gpu_index, group_name, op
)
cp.cuda.Device(0).synchronize()
cp.cuda.Device(1).synchronize()
return self.buffer0, self.buffer1
def do_broadcast_multigpu(self, group_name="default", src_rank=0, src_gpu_index=0):
col.broadcast_multigpu(
[self.buffer0, self.buffer1], src_rank, src_gpu_index, group_name
)
return self.buffer0, self.buffer1
def do_allgather_multigpu(self, group_name="default"):
col.allgather_multigpu(
[self.list_buffer0, self.list_buffer1],
[self.buffer0, self.buffer1],
group_name,
)
cp.cuda.Device(0).synchronize()
cp.cuda.Device(1).synchronize()
return self.list_buffer0, self.list_buffer1
def do_reducescatter_multigpu(self, group_name="default", op=ReduceOp.SUM):
col.reducescatter_multigpu(
[self.buffer0, self.buffer1],
[self.list_buffer0, self.list_buffer1],
group_name,
op,
)
cp.cuda.Device(0).synchronize()
cp.cuda.Device(1).synchronize()
return self.buffer0, self.buffer1
def do_send_multigpu(
self, group_name="default", dst_rank=0, dst_gpu_index=0, src_gpu_index=0
):
if src_gpu_index == 0:
col.send_multigpu(self.buffer0, dst_rank, dst_gpu_index, group_name)
cp.cuda.Device(0).synchronize()
return self.buffer0
elif src_gpu_index == 1:
col.send_multigpu(self.buffer1, dst_rank, dst_gpu_index, group_name)
cp.cuda.Device(1).synchronize()
return self.buffer1
else:
raise RuntimeError()
def do_recv_multigpu(
self, group_name="default", src_rank=0, src_gpu_index=0, dst_gpu_index=0
):
if dst_gpu_index == 0:
col.recv_multigpu(self.buffer0, src_rank, src_gpu_index, group_name)
cp.cuda.Device(0).synchronize()
return self.buffer0
elif dst_gpu_index == 1:
col.recv_multigpu(self.buffer1, src_rank, src_gpu_index, group_name)
cp.cuda.Device(1).synchronize()
return self.buffer1
else:
raise RuntimeError()
def destroy_group(self, group_name="default"):
col.destroy_collective_group(group_name)
return True
def report_rank(self, group_name="default"):
rank = col.get_rank(group_name)
return rank
def report_world_size(self, group_name="default"):
ws = col.get_collective_group_size(group_name)
return ws
def report_nccl_availability(self):
avail = col.nccl_available()
return avail
def report_gloo_availability(self):
avail = col.gloo_available()
return avail
def report_is_group_initialized(self, group_name="default"):
is_init = col.is_group_initialized(group_name)
return is_init
def report_num_gpus(self):
n_gpus = get_num_gpus()
return n_gpus
def create_collective_multigpu_workers(
num_workers=2, group_name="default", backend="nccl"
):
actors = [None] * num_workers
for i in range(num_workers):
actor = MultiGPUWorker.remote()
ray.get([actor.set_buffer.remote([10])], timeout=10)
ray.get([actor.set_list_buffer.remote([10])], timeout=10)
actors[i] = actor
world_size = num_workers
init_results = ray.get(
[
actor.init_group.remote(world_size, i, backend, group_name)
for i, actor in enumerate(actors)
]
)
return actors, init_results
def init_tensors_for_gather_scatter_multigpu(
actors, array_size=10, tensor_backend="cupy"
):
for i, a in enumerate(actors):
if tensor_backend == "cupy":
ray.get([a.set_buffer.remote(array_size)])
ray.get([a.set_list_buffer.remote(array_size)])
elif tensor_backend == "torch":
ray.get(
[
a.set_buffer.remote(
array_size, tensor_type0="torch", tensor_type1="torch"
)
]
)
ray.get(
[
a.set_list_buffer.remote(
array_size, tensor_type0="torch", tensor_type1="torch"
)
]
)
else:
raise RuntimeError("Unsupported tensor backend.")
| MultiGPUWorker |
python | arrow-py__arrow | tests/test_arrow.py | {
"start": 11745,
"end": 13091
} | class ____:
def test_add_timedelta(self):
result = self.arrow.__add__(timedelta(days=1))
assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc())
def test_add_other(self):
with pytest.raises(TypeError):
self.arrow + 1
def test_radd(self):
result = self.arrow.__radd__(timedelta(days=1))
assert result._datetime == datetime(2013, 1, 2, tzinfo=tz.tzutc())
def test_sub_timedelta(self):
result = self.arrow.__sub__(timedelta(days=1))
assert result._datetime == datetime(2012, 12, 31, tzinfo=tz.tzutc())
def test_sub_datetime(self):
result = self.arrow.__sub__(datetime(2012, 12, 21, tzinfo=tz.tzutc()))
assert result == timedelta(days=11)
def test_sub_arrow(self):
result = self.arrow.__sub__(arrow.Arrow(2012, 12, 21, tzinfo=tz.tzutc()))
assert result == timedelta(days=11)
def test_sub_other(self):
with pytest.raises(TypeError):
self.arrow - object()
def test_rsub_datetime(self):
result = self.arrow.__rsub__(datetime(2012, 12, 21, tzinfo=tz.tzutc()))
assert result == timedelta(days=-11)
def test_rsub_other(self):
with pytest.raises(TypeError):
timedelta(days=1) - self.arrow
@pytest.mark.usefixtures("time_utcnow")
| TestArrowMath |
python | walkccc__LeetCode | solutions/1644. Lowest Common Ancestor of a Binary Tree II/1644.py | {
"start": 0,
"end": 753
} | class ____:
def lowestCommonAncestor(
self,
root: 'TreeNode',
p: 'TreeNode',
q: 'TreeNode',
) -> 'TreeNode':
seenP = False
seenQ = False
def getLCA(root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
nonlocal seenP
nonlocal seenQ
if not root:
return None
# Need to traverse the entire tree to update `seenP` and `seenQ`.
left = getLCA(root.left, p, q)
right = getLCA(root.right, p, q)
if root == p:
seenP = True
return root
if root == q:
seenQ = True
return root
if left and right:
return root
return left or right
lca = getLCA(root, p, q)
return lca if seenP and seenQ else None
| Solution |
python | google__jax | tests/unary_ops_accuracy_test.py | {
"start": 5019,
"end": 12083
} | class ____(jtu.JaxTestCase):
def setUp(self):
if not jtu.stablehlo_version_at_least("1.10.0"):
self.skipTest("Test requires StableHLO v1.10.0 or higher.")
if not jtu.is_device_tpu():
self.skipTest("Skipping test on non TPU devices.")
# TODO(b/412112097): Enable this test on TPU version 7 and above once
# accuracy analysis is done.
if jtu.get_tpu_version() >= 7:
self.skipTest("Accuracy analysis is not yet done on TPU version 7 and above.")
super().setUp()
def test_result_accuracy_mode_attr(self):
with ir.Context() as context:
hlo.register_dialect(context)
attr = hlo.ResultAccuracyModeAttr.get("DEFAULT")
assert attr is not None
assert attr.value == "DEFAULT"
def test_result_accuracy_attr(self):
with ir.Context() as context:
hlo.register_dialect(context)
attr = hlo.ResultAccuracyAttr.get(
atol=1e-5, rtol=0.0, ulps=1, mode="TOLERANCE"
)
assert attr is not None
assert attr.mode == "TOLERANCE"
assert attr.atol == 1e-5
assert attr.rtol == 0.0
assert attr.ulps == 1
@parameterized.named_parameters(
*generate_test_cases(["exp", "expm1", "exp2", "log", "log1p", "tanh"])
)
def test_unary_ops_choose_impl(self, op, x, tp, **kwargs):
@jax.jit
def f_default(x):
y = op(x, accuracy=tp.high)
return y
@jax.jit
def f_accurate(x):
y = op(x, accuracy=tp.low)
return y
# Input values that would cause large differences between the two
# implementations.
diff = abs(f_default(x) - f_accurate(x))
if jtu.get_tpu_version() >= 5 and op in [
lax.tanh,
jnp.tanh,
lax.log,
jnp.log,
]:
# From tpu version 5 and onwards, even with tighter tolerance, the high performant
# implementation for tanh is chosen because the chip implementation has improved accuracy.
self.assertTrue(jnp.all(diff == 0))
else:
self.assertTrue(jnp.any(diff > 0))
@parameterized.named_parameters(
*generate_test_cases(["exp", "expm1", "exp2", "log", "log1p", "tanh"])
)
def test_unary_vmap(self, op, x, tp, min_error_val):
@jax.jit
def f(x, y):
diff = lambda val: abs(
op(val, accuracy=tp.high) - op(val, accuracy=tp.low)
)
return diff(x), diff(y)
diff_x, diff_y = jax.vmap(f, in_axes=(None, 0), out_axes=0)(
min_error_val, x
)
# diff(min_error_val) should be 0
self.assertTrue(jnp.all(diff_x == 0))
# diff(x) should be > 0
if jtu.get_tpu_version() >= 5 and op in [
lax.tanh,
jnp.tanh,
lax.log,
jnp.log,
]:
# From tpu version 5 and onwards, even with tighter tolerance, the high performant
# implementation for tanh and log is chosen because the chip implementation has improved accuracy.
self.assertTrue(jnp.all(diff_y == 0))
else:
self.assertTrue(jnp.any(diff_y > 0))
@parameterized.named_parameters(
*generate_test_cases(["exp", "expm1", "exp2"])
)
def test_diff_grad(self, op, x, tp, **kwargs):
@jax.jit
def f_default(x):
default_op = op(x, accuracy=tp.low)
return jnp.sum(default_op)
f_default_grad = jax.grad(f_default)
@jax.jit
def f_accurate(x):
high_op = op(x, accuracy=tp.high)
return jnp.sum(high_op)
f_accurate_grad = jax.grad(f_accurate)
# Accuracy should be carried through to the gradient causing
# a large diff.
diff = abs(f_default_grad(x) - f_accurate_grad(x))
self.assertTrue(jnp.any(diff > 0))
@parameterized.named_parameters(
*generate_test_cases(["log", "log1p", "tanh"])
)
def test_grad_unchanged(self, op, x, tp, **kwargs):
@jax.jit
def f(x):
return jnp.sum(op(x))
f_grad = jax.grad(f)
@jax.jit
def f_default(x):
default_op = op(x, accuracy=tp.low)
return jnp.sum(default_op)
f_default_grad = jax.grad(f_default)
@jax.jit
def f_accurate(x):
high_op = op(x, accuracy=tp.high)
return jnp.sum(high_op)
f_accurate_grad = jax.grad(f_accurate)
# Accuracy should be carried through to the gradient causing a large diff.
# Diff between f_default and f_accurate should follow diff(f_grad,f_default_grad).
expected_diff = abs(f_grad(x) - f_default_grad(x))
if jnp.all(expected_diff > 0):
# Don't expect f_accurate_grad and f_default_grad to be equal.
self.assertFalse(
jnp.all(abs(f_default_grad(x) - f_accurate_grad(x)) == 0)
)
elif jnp.all(expected_diff == 0):
# f_accurate_grad and f_default_grad should be equal.
diff = abs(f_default_grad(x) - f_accurate_grad(x))
self.assertTrue(jnp.all(diff == 0))
else:
raise ValueError("Unexpected diff: ", expected_diff)
@parameterized.named_parameters(
*generate_test_cases(["cos", "sin", "tan", "sqrt", "rsqrt"])
)
def test_single_impl(self, op, x, tp, **kwargs):
@jax.jit
def f_tol(x):
return op(x, accuracy=tp.high)
@jax.jit
def f(x):
return op(x)
diff = abs(f_tol(x) - f(x))
self.assertTrue(jnp.all(diff == 0))
@parameterized.named_parameters(
*generate_test_cases(["cos", "sin", "tan", "sqrt", "rsqrt"])
)
def test_default_grad(self, op, x, tp, **kwargs):
@jax.jit
def f_tol(x):
return jnp.sum(op(x, accuracy=tp.high))
@jax.jit
def f(x):
return jnp.sum(op(x))
self.assertTrue(jnp.all(abs(jax.grad(f_tol)(x) - jax.grad(f)(x)) == 0))
def test_invalid_accuracy(self):
with self.assertRaisesRegex(
ValueError, "At least one of atol, rtol, or ulps must be set."
):
lax.exp(1.0, accuracy=lax.Tolerance(atol=0.0, rtol=0.0, ulps=0))
with self.assertRaisesRegex(ValueError, "Tolerances must be non-negative."):
lax.exp(1.0, accuracy=lax.Tolerance(atol=-4e-10, rtol=0.0, ulps=0))
@parameterized.named_parameters(
*generate_test_cases([
"exp",
"expm1",
"exp2",
"log",
"log1p",
"tanh",
"cos",
"sin",
"tan",
"sqrt",
"rsqrt",
])
)
def test_low_tol(self, op, x, **kwargs):
with self.assertRaisesRegex(
jax.errors.JaxRuntimeError, "impl_type.ok()"
):
op(x, accuracy=lax.Tolerance(atol=1e-60, rtol=1e-60, ulps=0))
def test_accuracy_jaxpr(self):
# Since accuracy is not set, the jaxpr should not contain "accuracy".
self.assertNotIn(
"accuracy",
str(
jax.make_jaxpr(lambda x: lax.exp(x, accuracy=None))(
np.arange(4.0, dtype=np.float32)
)
),
)
# Set accuracy.
self.assertIn(
"accuracy",
str(
jax.make_jaxpr(
lambda x: lax.exp(
x, accuracy=lax.Tolerance(atol=1e-60, rtol=1e-60, ulps=0)
)
)(np.arange(4.0, dtype=np.float32))
),
)
if __name__ == "__main__":
absltest.main(testLoader=jtu.JaxTestLoader())
| UnaryOpsAccuracyTest |
python | doocs__leetcode | lcci/10.10.Rank from Stream/Solution.py | {
"start": 0,
"end": 404
} | class ____:
__slots__ = "n", "c"
def __init__(self, n: int):
self.n = n
self.c = [0] * (n + 1)
def update(self, x: int, delta: int) -> None:
while x <= self.n:
self.c[x] += delta
x += x & -x
def query(self, x: int) -> int:
s = 0
while x:
s += self.c[x]
x -= x & -x
return s
| BinaryIndexedTree |
python | kubernetes-client__python | kubernetes/client/models/v1_resource_claim_template_list.py | {
"start": 383,
"end": 7180
} | class ____(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'items': 'list[V1ResourceClaimTemplate]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None): # noqa: E501
"""V1ResourceClaimTemplateList - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""Gets the api_version of this V1ResourceClaimTemplateList. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1ResourceClaimTemplateList. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1ResourceClaimTemplateList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1ResourceClaimTemplateList. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""Gets the items of this V1ResourceClaimTemplateList. # noqa: E501
Items is the list of resource claim templates. # noqa: E501
:return: The items of this V1ResourceClaimTemplateList. # noqa: E501
:rtype: list[V1ResourceClaimTemplate]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this V1ResourceClaimTemplateList.
Items is the list of resource claim templates. # noqa: E501
:param items: The items of this V1ResourceClaimTemplateList. # noqa: E501
:type: list[V1ResourceClaimTemplate]
"""
if self.local_vars_configuration.client_side_validation and items is None: # noqa: E501
raise ValueError("Invalid value for `items`, must not be `None`") # noqa: E501
self._items = items
@property
def kind(self):
"""Gets the kind of this V1ResourceClaimTemplateList. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1ResourceClaimTemplateList. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1ResourceClaimTemplateList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1ResourceClaimTemplateList. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1ResourceClaimTemplateList. # noqa: E501
:return: The metadata of this V1ResourceClaimTemplateList. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1ResourceClaimTemplateList.
:param metadata: The metadata of this V1ResourceClaimTemplateList. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1ResourceClaimTemplateList):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1ResourceClaimTemplateList):
return True
return self.to_dict() != other.to_dict()
| V1ResourceClaimTemplateList |
python | bokeh__bokeh | src/bokeh/events.py | {
"start": 16211,
"end": 16774
} | class ____(PointEvent):
''' Announce a mouse enter event onto a Bokeh plot.
Attributes:
sx (float) : x-coordinate of the event in *screen* space
sy (float) : y-coordinate of the event in *screen* space
x (float) : x-coordinate of the event in *data* space
y (float) : y-coordinate of the event in *data* space
.. note::
The enter event is generated when the mouse leaves the entire Plot
canvas, including any border padding and space for axes or legends.
'''
event_name = 'mouseenter'
| MouseEnter |
python | prompt-toolkit__python-prompt-toolkit | src/prompt_toolkit/styles/style_transformation.py | {
"start": 2947,
"end": 3201
} | class ____(StyleTransformation):
"""
Swap the 'reverse' attribute.
(This is still experimental.)
"""
def transform_attrs(self, attrs: Attrs) -> Attrs:
return attrs._replace(reverse=not attrs.reverse)
| ReverseStyleTransformation |
python | Textualize__textual | src/textual/widgets/_placeholder.py | {
"start": 1720,
"end": 6435
} | class ____(Widget):
"""A simple placeholder widget to use before you build your custom widgets.
This placeholder has a couple of variants that show different data.
Clicking the placeholder cycles through the available variants, but a placeholder
can also be initialised in a specific variant.
The variants available are:
| Variant | Placeholder shows |
|---------|------------------------------------------------|
| default | Identifier label or the ID of the placeholder. |
| size | Size of the placeholder. |
| text | Lorem Ipsum text. |
"""
DEFAULT_CSS = """
Placeholder {
content-align: center middle;
overflow: hidden;
color: $text;
&:disabled {
opacity: 0.7;
}
}
Placeholder.-text {
padding: 1;
}
"""
# Consecutive placeholders get assigned consecutive colors.
_COLORS: WeakKeyDictionary[App, int] = WeakKeyDictionary()
_SIZE_RENDER_TEMPLATE = "[b]{} x {}[/b]"
variant: Reactive[PlaceholderVariant] = reactive[PlaceholderVariant]("default")
_renderables: dict[PlaceholderVariant, str]
def __init__(
self,
label: str | None = None,
variant: PlaceholderVariant = "default",
*,
name: str | None = None,
id: str | None = None,
classes: str | None = None,
disabled: bool = False,
) -> None:
"""Create a Placeholder widget.
Args:
label: The label to identify the placeholder.
If no label is present, uses the placeholder ID instead.
variant: The variant of the placeholder.
name: The name of the placeholder.
id: The ID of the placeholder in the DOM.
classes: A space separated string with the CSS classes
of the placeholder, if any.
disabled: Whether the placeholder is disabled or not.
"""
# Create and cache renderables for all the variants.
self._renderables = {
"default": label if label else f"#{id}" if id else "Placeholder",
"size": "",
"text": "\n\n".join(_LOREM_IPSUM_PLACEHOLDER_TEXT for _ in range(5)),
}
super().__init__(name=name, id=id, classes=classes, disabled=disabled)
self.variant = self.validate_variant(variant)
"""The current variant of the placeholder."""
try:
self._COLORS[self.app] = self._COLORS.setdefault(self.app, -1) + 1
self._color_offset = self._COLORS[self.app]
except NoActiveAppError:
self._color_offset = 0
# Set a cycle through the variants with the correct starting point.
self._variants_cycle = cycle(_VALID_PLACEHOLDER_VARIANTS_ORDERED)
while next(self._variants_cycle) != self.variant:
pass
async def _on_compose(self, event: events.Compose) -> None:
"""Set the color for this placeholder."""
color_count = len(_PLACEHOLDER_BACKGROUND_COLORS)
color = _PLACEHOLDER_BACKGROUND_COLORS[self._color_offset % color_count]
self.styles.background = f"{color} 50%"
def render(self) -> RenderResult:
"""Render the placeholder.
Returns:
The value to render.
"""
return self._renderables[self.variant]
def cycle_variant(self) -> Self:
"""Get the next variant in the cycle.
Returns:
The `Placeholder` instance.
"""
self.variant = next(self._variants_cycle)
return self
def watch_variant(
self, old_variant: PlaceholderVariant, variant: PlaceholderVariant
) -> None:
self.remove_class(f"-{old_variant}")
self.add_class(f"-{variant}")
def validate_variant(self, variant: PlaceholderVariant) -> PlaceholderVariant:
"""Validate the variant to which the placeholder was set."""
if variant not in _VALID_PLACEHOLDER_VARIANTS:
raise InvalidPlaceholderVariant(
"Valid placeholder variants are "
+ f"{friendly_list(_VALID_PLACEHOLDER_VARIANTS)}"
)
return variant
async def _on_click(self, _: events.Click) -> None:
"""Click handler to cycle through the placeholder variants."""
self.cycle_variant()
def _on_resize(self, event: events.Resize) -> None:
"""Update the placeholder "size" variant with the new placeholder size."""
self._renderables["size"] = self._SIZE_RENDER_TEMPLATE.format(*event.size)
if self.variant == "size":
self.refresh()
| Placeholder |
python | django__django | tests/defer_regress/models.py | {
"start": 1950,
"end": 2435
} | class ____(models.Model):
profile = models.ForeignKey(Profile, models.SET_NULL, null=True, blank=True)
location = models.ForeignKey(Location, models.CASCADE)
items = models.ManyToManyField(Item)
request1 = models.CharField(default="request1", max_length=255)
request2 = models.CharField(default="request2", max_length=255)
request3 = models.CharField(default="request3", max_length=255)
request4 = models.CharField(default="request4", max_length=255)
| Request |
python | huggingface__transformers | src/transformers/models/unispeech/configuration_unispeech.py | {
"start": 844,
"end": 17510
} | class ____(PreTrainedConfig):
r"""
This is the configuration class to store the configuration of a [`UniSpeechModel`]. It is used to instantiate an
UniSpeech model according to the specified arguments, defining the model architecture. Instantiating a
configuration with the defaults will yield a similar configuration to that of the UniSpeech
[microsoft/unispeech-large-1500h-cv](https://huggingface.co/microsoft/unispeech-large-1500h-cv) architecture.
Configuration objects inherit from [`PreTrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PreTrainedConfig`] for more information.
Args:
vocab_size (`int`, *optional*, defaults to 32):
Vocabulary size of the UniSpeech model. Defines the number of different tokens that can be represented by
the `inputs_ids` passed when calling [`UniSpeechModel`]. Vocabulary size of the model. Defines the
different tokens that can be represented by the *inputs_ids* passed to the forward method of
[`UniSpeechModel`].
hidden_size (`int`, *optional*, defaults to 768):
Dimensionality of the encoder layers and the pooler layer.
num_hidden_layers (`int`, *optional*, defaults to 12):
Number of hidden layers in the Transformer encoder.
num_attention_heads (`int`, *optional*, defaults to 12):
Number of attention heads for each attention layer in the Transformer encoder.
intermediate_size (`int`, *optional*, defaults to 3072):
Dimensionality of the "intermediate" (i.e., feed-forward) layer in the Transformer encoder.
hidden_act (`str` or `function`, *optional*, defaults to `"gelu"`):
The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
`"relu"`, `"selu"` and `"gelu_new"` are supported.
hidden_dropout (`float`, *optional*, defaults to 0.1):
The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
activation_dropout (`float`, *optional*, defaults to 0.1):
The dropout ratio for activations inside the fully connected layer.
attention_dropout (`float`, *optional*, defaults to 0.1):
The dropout ratio for the attention probabilities.
feat_proj_dropout (`float`, *optional*, defaults to 0.0):
The dropout probability for output of the feature encoder.
feat_quantizer_dropout (`float`, *optional*, defaults to 0.0):
The dropout probability for the output of the feature encoder that's used by the quantizer.
final_dropout (`float`, *optional*, defaults to 0.1):
The dropout probability for the final projection layer of [`UniSpeechForCTC`].
layerdrop (`float`, *optional*, defaults to 0.1):
The LayerDrop probability. See the [LayerDrop paper](see https://huggingface.co/papers/1909.11556) for more
details.
initializer_range (`float`, *optional*, defaults to 0.02):
The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
layer_norm_eps (`float`, *optional*, defaults to 1e-05):
The epsilon used by the layer normalization layers.
feat_extract_norm (`str`, *optional*, defaults to `"group"`):
The norm to be applied to 1D convolutional layers in feature encoder. One of `"group"` for group
normalization of only the first 1D convolutional layer or `"layer"` for layer normalization of all 1D
convolutional layers.
feat_extract_activation (`str, *optional*, defaults to `"gelu"`):
The non-linear activation function (function or string) in the 1D convolutional layers of the feature
extractor. If string, `"gelu"`, `"relu"`, `"selu"` and `"gelu_new"` are supported.
conv_dim (`tuple[int]` or `list[int]`, *optional*, defaults to `(512, 512, 512, 512, 512, 512, 512)`):
A tuple of integers defining the number of input and output channels of each 1D convolutional layer in the
feature encoder. The length of *conv_dim* defines the number of 1D convolutional layers.
conv_stride (`tuple[int]` or `list[int]`, *optional*, defaults to `(5, 2, 2, 2, 2, 2, 2)`):
A tuple of integers defining the stride of each 1D convolutional layer in the feature encoder. The length
of *conv_stride* defines the number of convolutional layers and has to match the length of *conv_dim*.
conv_kernel (`tuple[int]` or `list[int]`, *optional*, defaults to `(10, 3, 3, 3, 3, 2, 2)`):
A tuple of integers defining the kernel size of each 1D convolutional layer in the feature encoder. The
length of *conv_kernel* defines the number of convolutional layers and has to match the length of
*conv_dim*.
conv_bias (`bool`, *optional*, defaults to `False`):
Whether the 1D convolutional layers have a bias.
num_conv_pos_embeddings (`int`, *optional*, defaults to 128):
Number of convolutional positional embeddings. Defines the kernel size of 1D convolutional positional
embeddings layer.
num_conv_pos_embedding_groups (`int`, *optional*, defaults to 16):
Number of groups of 1D convolutional positional embeddings layer.
do_stable_layer_norm (`bool`, *optional*, defaults to `False`):
Whether to apply *stable* layer norm architecture of the Transformer encoder. `do_stable_layer_norm is
True` corresponds to applying layer norm before the attention layer, whereas `do_stable_layer_norm is
False` corresponds to applying layer norm after the attention layer.
apply_spec_augment (`bool`, *optional*, defaults to `True`):
Whether to apply *SpecAugment* data augmentation to the outputs of the feature encoder. For reference see
[SpecAugment: A Simple Data Augmentation Method for Automatic Speech
Recognition](https://huggingface.co/papers/1904.08779).
mask_time_prob (`float`, *optional*, defaults to 0.05):
Percentage (between 0 and 1) of all feature vectors along the time axis which will be masked. The masking
procedure generates ''mask_time_prob*len(time_axis)/mask_time_length'' independent masks over the axis. If
reasoning from the probability of each feature vector to be chosen as the start of the vector span to be
masked, *mask_time_prob* should be `prob_vector_start*mask_time_length`. Note that overlap may decrease the
actual percentage of masked vectors. This is only relevant if `apply_spec_augment is True`.
mask_time_length (`int`, *optional*, defaults to 10):
Length of vector span along the time axis.
mask_time_min_masks (`int`, *optional*, defaults to 2):
The minimum number of masks of length `mask_feature_length` generated along the time axis, each time step,
irrespectively of `mask_feature_prob`. Only relevant if ''mask_time_prob*len(time_axis)/mask_time_length <
mask_time_min_masks''
mask_feature_prob (`float`, *optional*, defaults to 0.0):
Percentage (between 0 and 1) of all feature vectors along the feature axis which will be masked. The
masking procedure generates ''mask_feature_prob*len(feature_axis)/mask_time_length'' independent masks over
the axis. If reasoning from the probability of each feature vector to be chosen as the start of the vector
span to be masked, *mask_feature_prob* should be `prob_vector_start*mask_feature_length`. Note that overlap
may decrease the actual percentage of masked vectors. This is only relevant if `apply_spec_augment is
True`.
mask_feature_length (`int`, *optional*, defaults to 10):
Length of vector span along the feature axis.
mask_feature_min_masks (`int`, *optional*, defaults to 0):
The minimum number of masks of length `mask_feature_length` generated along the feature axis, each time
step, irrespectively of `mask_feature_prob`. Only relevant if
''mask_feature_prob*len(feature_axis)/mask_feature_length < mask_feature_min_masks''
num_codevectors_per_group (`int`, *optional*, defaults to 320):
Number of entries in each quantization codebook (group).
num_codevector_groups (`int`, *optional*, defaults to 2):
Number of codevector groups for product codevector quantization.
contrastive_logits_temperature (`float`, *optional*, defaults to 0.1):
The temperature *kappa* in the contrastive loss.
num_negatives (`int`, *optional*, defaults to 100):
Number of negative samples for the contrastive loss.
codevector_dim (`int`, *optional*, defaults to 256):
Dimensionality of the quantized feature vectors.
proj_codevector_dim (`int`, *optional*, defaults to 256):
Dimensionality of the final projection of both the quantized and the transformer features.
diversity_loss_weight (`int`, *optional*, defaults to 0.1):
The weight of the codebook diversity loss component.
ctc_loss_reduction (`str`, *optional*, defaults to `"mean"`):
Specifies the reduction to apply to the output of `torch.nn.CTCLoss`. Only relevant when training an
instance of [`UniSpeechForCTC`].
ctc_zero_infinity (`bool`, *optional*, defaults to `False`):
Whether to zero infinite losses and the associated gradients of `torch.nn.CTCLoss`. Infinite losses mainly
occur when the inputs are too short to be aligned to the targets. Only relevant when training an instance
of [`UniSpeechForCTC`].
use_weighted_layer_sum (`bool`, *optional*, defaults to `False`):
Whether to use a weighted average of layer outputs with learned weights. Only relevant when using an
instance of [`UniSpeechForSequenceClassification`].
classifier_proj_size (`int`, *optional*, defaults to 256):
Dimensionality of the projection before token mean-pooling for classification.
num_ctc_classes (`int`, *optional*, defaults to 80):
Specifies the number of classes (phoneme tokens and blank token) for phoneme-level CTC loss. Only relevant
when using an instance of [`UniSpeechForPreTraining`].
pad_token_id (`int`, *optional*, defaults to 0):
The id of the padding token.
bos_token_id (`int`, *optional*, defaults to 1):
The id of the "beginning-of-sequence" token.
eos_token_id (`int`, *optional*, defaults to 2):
The id of the "end-of-sequence" token.
replace_prob (`float`, *optional*, defaults to 0.5):
Probability that transformer feature is replaced by quantized feature for pretraining.
Example:
```python
>>> from transformers import UniSpeechConfig, UniSpeechModel
>>> # Initializing a UniSpeech facebook/unispeech-base-960h style configuration
>>> configuration = UniSpeechConfig()
>>> # Initializing a model (with random weights) from the facebook/unispeech-base-960h style configuration
>>> model = UniSpeechModel(configuration)
>>> # Accessing the model configuration
>>> configuration = model.config
```"""
model_type = "unispeech"  # key under which AutoConfig resolves this configuration class

def __init__(
    self,
    vocab_size=32,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
    intermediate_size=3072,
    hidden_act="gelu",
    hidden_dropout=0.1,
    activation_dropout=0.1,
    attention_dropout=0.1,
    feat_proj_dropout=0.0,
    feat_quantizer_dropout=0.0,
    final_dropout=0.1,
    layerdrop=0.1,
    initializer_range=0.02,
    layer_norm_eps=1e-5,
    feat_extract_norm="group",
    feat_extract_activation="gelu",
    conv_dim=(512, 512, 512, 512, 512, 512, 512),
    conv_stride=(5, 2, 2, 2, 2, 2, 2),
    conv_kernel=(10, 3, 3, 3, 3, 2, 2),
    conv_bias=False,
    num_conv_pos_embeddings=128,
    num_conv_pos_embedding_groups=16,
    do_stable_layer_norm=False,
    apply_spec_augment=True,
    mask_time_prob=0.05,
    mask_time_length=10,
    mask_time_min_masks=2,
    mask_feature_prob=0.0,
    mask_feature_length=10,
    mask_feature_min_masks=0,
    num_codevectors_per_group=320,
    num_codevector_groups=2,
    contrastive_logits_temperature=0.1,
    num_negatives=100,
    codevector_dim=256,
    proj_codevector_dim=256,
    diversity_loss_weight=0.1,
    ctc_loss_reduction="mean",
    ctc_zero_infinity=False,
    use_weighted_layer_sum=False,
    classifier_proj_size=256,
    num_ctc_classes=80,
    pad_token_id=0,
    bos_token_id=1,
    eos_token_id=2,
    replace_prob=0.5,
    **kwargs,
):
    """Build a UniSpeech configuration.

    Argument semantics are documented in the class docstring above; this
    constructor only stores them and validates the convolutional layout.
    """
    # Special token ids are forwarded explicitly so the base config registers them.
    super().__init__(**kwargs, pad_token_id=pad_token_id, bos_token_id=bos_token_id, eos_token_id=eos_token_id)
    self.hidden_size = hidden_size
    self.feat_extract_norm = feat_extract_norm
    self.feat_extract_activation = feat_extract_activation
    # Copy to lists so the stored config is mutable/JSON-serializable regardless
    # of whether tuples were passed in.
    self.conv_dim = list(conv_dim)
    self.conv_stride = list(conv_stride)
    self.conv_kernel = list(conv_kernel)
    self.conv_bias = conv_bias
    self.num_conv_pos_embeddings = num_conv_pos_embeddings
    self.num_conv_pos_embedding_groups = num_conv_pos_embedding_groups
    # Number of feature-extractor conv layers is derived from conv_dim.
    self.num_feat_extract_layers = len(self.conv_dim)
    self.num_hidden_layers = num_hidden_layers
    self.intermediate_size = intermediate_size
    self.hidden_act = hidden_act
    self.num_attention_heads = num_attention_heads
    self.hidden_dropout = hidden_dropout
    self.attention_dropout = attention_dropout
    self.activation_dropout = activation_dropout
    self.feat_proj_dropout = feat_proj_dropout
    self.final_dropout = final_dropout
    self.layerdrop = layerdrop
    self.layer_norm_eps = layer_norm_eps
    self.initializer_range = initializer_range
    self.num_ctc_classes = num_ctc_classes
    self.vocab_size = vocab_size
    self.do_stable_layer_norm = do_stable_layer_norm
    self.use_weighted_layer_sum = use_weighted_layer_sum
    self.classifier_proj_size = classifier_proj_size

    # The three conv layout lists must describe the same number of layers.
    if (
        (len(self.conv_stride) != self.num_feat_extract_layers)
        or (len(self.conv_kernel) != self.num_feat_extract_layers)
        or (len(self.conv_dim) != self.num_feat_extract_layers)
    ):
        raise ValueError(
            "Configuration for convolutional layers is incorrect. It is required that `len(config.conv_dim)` =="
            " `len(config.conv_stride)` == `len(config.conv_kernel)`, but is `len(config.conv_dim) ="
            f" {len(self.conv_dim)}`, `len(config.conv_stride) = {len(self.conv_stride)}`,"
            f" `len(config.conv_kernel) = {len(self.conv_kernel)}`."
        )

    # fine-tuning config parameters for SpecAugment: https://huggingface.co/papers/1904.08779
    self.apply_spec_augment = apply_spec_augment
    self.mask_time_prob = mask_time_prob
    self.mask_time_length = mask_time_length
    self.mask_time_min_masks = mask_time_min_masks
    self.mask_feature_prob = mask_feature_prob
    self.mask_feature_length = mask_feature_length
    self.mask_feature_min_masks = mask_feature_min_masks

    # parameters for pretraining with codevector quantized representations
    self.num_codevectors_per_group = num_codevectors_per_group
    self.num_codevector_groups = num_codevector_groups
    self.contrastive_logits_temperature = contrastive_logits_temperature
    self.feat_quantizer_dropout = feat_quantizer_dropout
    self.num_negatives = num_negatives
    self.codevector_dim = codevector_dim
    self.proj_codevector_dim = proj_codevector_dim
    self.diversity_loss_weight = diversity_loss_weight

    # ctc loss
    self.ctc_loss_reduction = ctc_loss_reduction
    self.ctc_zero_infinity = ctc_zero_infinity

    # pretraining loss
    self.replace_prob = replace_prob
@property
def inputs_to_logits_ratio(self):
    """Overall time-axis downsampling factor of the feature encoder.

    Equal to the product of all 1D convolutional strides.
    """
    ratio = 1
    for stride in self.conv_stride:
        ratio *= stride
    return ratio
# Explicit public API of this module.
__all__ = ["UniSpeechConfig"]
| UniSpeechConfig |
python | dask__dask | dask/dataframe/dask_expr/_cumulative.py | {
"start": 3628,
"end": 3788
} | class ____(CumulativeAggregations):
chunk_operation = M.cumsum
aggregate_operation = staticmethod(methods.cumsum_aggregate)
neutral_element = 0
| CumSum |
python | allegroai__clearml | clearml/backend_interface/model.py | {
"start": 1470,
"end": 22651
} | class ____(IdObjectBase, AsyncManagerMixin, _StorageUriMixin):
"""Manager for backend model objects"""
_EMPTY_MODEL_ID = "empty"
_local_model_to_id_uri = {}
@property
def model_id(self) -> str:
return self.id
def __init__(
self,
upload_storage_uri: str,
cache_dir: str,
model_id: str = None,
upload_storage_suffix: str = "models",
session: Session = None,
log: Any = None,
) -> None:
super(Model, self).__init__(id=model_id, session=session, log=log)
self._upload_storage_suffix = upload_storage_suffix
if model_id == self._EMPTY_MODEL_ID:
# Set an empty data object
self._data = models.Model()
else:
self._data = None
self._cache_dir = cache_dir
self.upload_storage_uri = upload_storage_uri
def publish(self) -> None:
    """Mark the model as ready (published) on the backend and refresh local state."""
    self.send(models.SetReadyRequest(model=self.id, publish_task=False))
    self.reload()

def archive(self) -> None:
    """Archive this model, using the batch API when the server supports it."""
    if Session.check_min_api_server_version("2.13", raise_error=True):
        self.send(models.ArchiveManyRequest(ids=[self.id]))
        self.reload()
    else:
        from ..model import BaseModel

        # Older servers: emulate archiving by adding the archived system tag.
        # edit will reload
        self._edit(
            system_tags=list(
                set((self.data.system_tags or []) if hasattr(self.data, "system_tags") else [])
                | {BaseModel._archived_tag}
            )
        )

def unarchive(self) -> None:
    """Un-archive this model, using the batch API when the server supports it."""
    if Session.check_min_api_server_version("2.13", raise_error=True):
        self.send(models.UnarchiveManyRequest(ids=[self.id]))
        self.reload()
    else:
        from ..model import BaseModel

        # Older servers: emulate un-archiving by removing the archived system tag.
        # edit will reload
        self._edit(
            system_tags=list(
                set((self.data.system_tags or []) if hasattr(self.data, "system_tags") else [])
                - {BaseModel._archived_tag}
            )
        )

def _reload(self) -> Optional["models.Model"]:
    """Reload the model object"""
    if self._offline_mode:
        # Offline: return a blank data object instead of hitting the backend.
        return models.Model()
    if self.id == self._EMPTY_MODEL_ID:
        # Nothing to fetch for the local-only sentinel model.
        return
    res = self.send(models.GetByIdRequest(model=self.id))
    # import here, avoid circular imports
    from clearml import Task

    current_task = Task.current_task()
    if current_task:
        # reload the task such that the model changes are also reflected in the task
        current_task.reload()
    return res.response.model
def _upload_model(
    self,
    model_file: str,
    async_enable: bool = False,
    target_filename: str = None,
    cb: Callable = None,
) -> str:
    """Upload a local weights file to the configured storage and return its URI.

    :param model_file: Local path of the weights file to upload.
    :param async_enable: If True, upload in the background and track the result.
    :param target_filename: Remote file name (defaults to the local file name).
    :param cb: Optional completion callback, invoked via _upload_callback.
    :return: The destination URI the file is (being) uploaded to.
    """
    if not self.upload_storage_uri:
        raise ValueError("Model has no storage URI defined (nowhere to upload to)")
    target_filename = target_filename or Path(model_file).name
    dest_path = "/".join(
        (
            self.upload_storage_uri,
            self._upload_storage_suffix or ".",
            target_filename,
        )
    )
    result = StorageHelper.get(dest_path).upload(
        src_path=model_file,
        dest_path=dest_path,
        async_enable=async_enable,
        cb=partial(self._upload_callback, cb=cb),
        return_canonized=False,
    )
    if async_enable:

        def msg(num_results: int) -> None:
            self.log.info("Waiting for previous model to upload (%d pending, %s)" % (num_results, dest_path))

        # Limit the number of concurrent pending uploads; log while waiting.
        self._add_async_result(result, wait_on_max_results=2, wait_cb=msg)
    return dest_path

def _upload_callback(self, res: Any, cb: Callable[[Any], None] = None) -> None:
    """Log upload progress (None=started, False=failed, else=done) and chain to cb."""
    if res is None:
        self.log.debug("Starting model upload")
    elif res is False:
        self.log.info("Failed model upload")
    else:
        self.log.info("Completed model upload to {}".format(res))
    if cb:
        cb(res)
@staticmethod
def _wrap_design(design: Union[Dict[str, Any], str, None]) -> Dict[str, str]:
    """
    Wrap design text with a dictionary.

    The backend stores a design as a dictionary holding a 'design' key,
    while clients pass plain text. This normalizes either form.

    :param design: A dictionary (which must already contain a 'design' key,
        returned unchanged), a string (wrapped as {'design': design}), or a
        falsy value (wrapped as {'design': ''}).
    :return: A proper design dictionary according to design parameter.
    """
    if not isinstance(design, dict):
        return {"design": design or ""}
    if "design" in design:
        return design
    raise ValueError("design dictionary must have 'design' key in it")
@staticmethod
def _unwrap_design(design: Union[Dict[str, Any], str, None]) -> str:
    """
    Unwrap design text from a dictionary.

    The backend stores a design as a dictionary holding a 'design' key,
    while clients use plain text. This extracts the text form.

    :param design: A falsy value yields ''; a string is returned as-is; a
        dictionary yields its 'design' entry, or its first value when no
        'design' key exists. Anything else raises ValueError.
    :return: The design string according to design parameter.
    """
    if not design:
        return ""
    if isinstance(design, six.string_types):
        return design
    if isinstance(design, dict):
        # Prefer the canonical 'design' key; otherwise fall back to the
        # first stored value (dict is non-empty here, checked above).
        return design.get("design", next(iter(design.values())))
    raise ValueError("design must be a string or a dictionary with at least one value")
def update(
    self,
    model_file: str = None,
    design: str = None,
    labels: dict = None,
    name: str = None,
    comment: str = None,
    tags: list = None,
    task_id: str = None,
    project_id: str = None,
    parent_id: str = None,
    uri: str = None,
    framework: str = None,
    upload_storage_uri: str = None,
    target_filename: str = None,
    iteration: int = None,
    system_tags: list = None,
) -> None:
    """Update model weights file and various model properties

    Creates the backend model object first if it does not exist yet; any
    argument left as None falls back to the currently stored value.
    """
    if self.id is None:
        # Model not created on the backend yet - create an empty placeholder.
        if upload_storage_uri:
            self.upload_storage_uri = upload_storage_uri
        self._create_empty_model(self.upload_storage_uri, project_id=project_id)
    elif upload_storage_uri:
        self.upload_storage_uri = upload_storage_uri

    if model_file and uri:
        # Remember the mapping so the local file can be reused as a cached copy.
        Model._local_model_to_id_uri[str(model_file)] = (self.model_id, uri)

    # upload model file if needed and get uri
    uri = uri or (self._upload_model(model_file, target_filename=target_filename) if model_file else self.data.uri)
    # update fields
    design = self._wrap_design(design) if design else self.data.design
    name = name or self.data.name
    comment = comment or self.data.comment
    labels = labels or self.data.labels
    task = task_id or self.data.task
    project = project_id or self.data.project
    parent = parent_id or self.data.parent
    tags = tags or self.data.tags
    if Session.check_min_api_version("2.3"):
        system_tags = system_tags or self.data.system_tags

    self._edit(
        uri=uri,
        name=name,
        comment=comment,
        labels=labels,
        design=design,
        framework=framework or self.data.framework,
        iteration=iteration,
        task=task,
        project=project,
        parent=parent,
        tags=tags,
        system_tags=system_tags,
    )

def edit(
    self,
    design: Optional[str] = None,
    labels: Optional[Dict[str, Any]] = None,
    name: Optional[str] = None,
    comment: Optional[str] = None,
    tags: Optional[List[str]] = None,
    uri: Optional[str] = None,
    framework: Optional[str] = None,
    iteration: Optional[int] = None,
    system_tags: Optional[List[str]] = None,
) -> None:
    """Public thin wrapper around _edit; updates only the provided fields."""
    return self._edit(
        design=design,
        labels=labels,
        name=name,
        comment=comment,
        tags=tags,
        uri=uri,
        framework=framework,
        iteration=iteration,
        system_tags=system_tags,
    )

def _edit(
    self,
    design: Optional[Union[str, Dict[str, str]]] = None,
    labels: Optional[Dict[str, Any]] = None,
    name: Optional[str] = None,
    comment: Optional[str] = None,
    tags: Optional[List[str]] = None,
    uri: Optional[str] = None,
    framework: Optional[str] = None,
    iteration: Optional[int] = None,
    system_tags: Optional[List[str]] = None,
    **extra: Any,
) -> None:
    """Send an EditRequest for the given fields, then reload.

    In offline mode the values are only stored on the local data object.
    Tags handling depends on server API version: >=2.3 keeps `tags` and
    `system_tags` separate; older servers get them merged into one list.
    """

    def offline_store(**kwargs: Any) -> None:
        # Offline mode: persist values locally; keep existing value when None.
        for k, v in kwargs.items():
            setattr(self.data, k, v or getattr(self.data, k, None))
        return

    if self._offline_mode:
        return offline_store(
            design=design,
            labels=labels,
            name=name,
            comment=comment,
            tags=tags,
            uri=uri,
            framework=framework,
            iteration=iteration,
            **extra,
        )

    if Session.check_min_api_version("2.3"):
        if tags is not None:
            extra.update({"tags": tags})
        if system_tags is not None:
            extra.update({"system_tags": system_tags})
    elif tags is not None or system_tags is not None:
        # Pre-2.3 servers have a single tag list; merge without duplicates.
        if tags and system_tags:
            system_tags = system_tags[:]
            system_tags += [t for t in tags if t not in system_tags]
        extra.update({"system_tags": system_tags or tags or self.data.system_tags})

    self.send(
        models.EditRequest(
            model=self.id,
            uri=uri,
            name=name,
            comment=comment,
            labels=labels,
            design=self._wrap_design(design) if design else None,
            framework=framework,
            iteration=iteration,
            **extra,
        )
    )
    self.reload()
def update_and_upload(
    self,
    model_file: str,
    design: str = None,
    labels: dict = None,
    name: str = None,
    comment: str = None,
    tags: list = None,
    task_id: str = None,
    project_id: str = None,
    parent_id: str = None,
    framework: str = None,
    async_enable: bool = False,
    target_filename: str = None,
    cb: callable = None,
    iteration: int = None,
) -> str:
    """Upload a local weights file and update the model for a given task ID.

    :param model_file: Local path of the weights file to upload.
    :param async_enable: If True, upload in the background; metadata is
        updated from the upload-completion callback.
    :param target_filename: Remote file name (defaults to the local name).
    :param cb: Optional callback invoked with ``model_file`` after the
        (async) metadata update completes.
    :param iteration: Iteration number to record on the model.
    :return: The destination URI of the uploaded weights.
    """
    if async_enable:

        def callback(uploaded_uri: Optional[str]) -> None:
            if uploaded_uri is None:
                # Upload started - nothing to record yet.
                return

            # If not successful, mark model as failed_uploading,
            # but dont override valid urls
            if uploaded_uri is False:
                uploaded_uri = (
                    self.data.uri
                    if self.data.uri != "{}/uploading_file".format(self._upload_storage_uri or "file://")
                    else "{}/failed_uploading".format(self._upload_storage_uri or "file://")
                )

            Model._local_model_to_id_uri[str(model_file)] = (
                self.model_id,
                uploaded_uri,
            )

            self.update(
                uri=uploaded_uri,
                task_id=task_id,
                name=name,
                comment=comment,
                tags=tags,
                design=design,
                labels=labels,
                project_id=project_id,
                parent_id=parent_id,
                framework=framework,
                iteration=iteration,
            )

            if cb:
                cb(model_file)

        uri = self._upload_model(
            model_file,
            async_enable=async_enable,
            target_filename=target_filename,
            cb=callback,
        )
        return uri
    else:
        uri = self._upload_model(model_file, async_enable=async_enable, target_filename=target_filename)
        Model._local_model_to_id_uri[str(model_file)] = (self.model_id, uri)
        self.update(
            uri=uri,
            task_id=task_id,
            name=name,
            comment=comment,
            tags=tags,
            design=design,
            labels=labels,
            project_id=project_id,
            parent_id=parent_id,
            framework=framework,
            # BUGFIX: the synchronous path previously dropped `iteration`,
            # while the async callback recorded it - keep both consistent.
            iteration=iteration,
        )
        return uri
def update_for_task(
    self,
    task_id: str,
    name: str = None,
    model_id: str = None,
    type_: str = "output",
    iteration: int = None,
) -> None:
    """Attach/refresh this model on a task as an input or output model.

    Servers >= 2.13 use the unified AddOrUpdateModel API; older servers only
    support updating the task's output model.
    """
    if Session.check_min_api_version("2.13"):
        req = tasks.AddOrUpdateModelRequest(
            task=task_id, name=name, type=type_, model=model_id, iteration=iteration
        )
    elif type_ == "output":
        # backwards compatibility
        req = models.UpdateForTaskRequest(task=task_id, override_model_id=model_id)
    elif type_ == "input":
        # backwards compatibility, None
        req = None
    else:
        raise ValueError("Type '{}' unsupported (use either 'input' or 'output')".format(type_))

    if req:
        self.send(req)
    self.reload()

@property
def model_design(self) -> Optional[str]:
    """Get the model design. For now, this is stored as a single key in the design dict."""
    try:
        return self._unwrap_design(self.data.design)
    except ValueError:
        # no design is yet specified
        return None

@property
def labels(self) -> Optional[List[str]]:
    # Label enumeration dict of the model, or None if not set yet.
    try:
        return self.data.labels
    except ValueError:
        # no labels is yet specified
        return None

@property
def name(self) -> Optional[str]:
    # Model name, or None if not set yet.
    try:
        return self.data.name
    except ValueError:
        # no name is yet specified
        return None

@property
def project(self) -> Optional[str]:
    # Project id the model belongs to, or None if not set yet.
    try:
        return self.data.project
    except ValueError:
        # no project is yet specified
        return None

@property
def comment(self) -> Optional[str]:
    # Free-text comment, or None if not set yet.
    try:
        return self.data.comment
    except ValueError:
        # no comment is yet specified
        return None

@property
def tags(self) -> List[str]:
    # Prefer system_tags when the data object exposes them (API >= 2.3).
    return self.data.system_tags if hasattr(self.data, "system_tags") else self.data.tags

@property
def task(self) -> Optional[str]:
    # Id of the task that created the model, or None if not set yet.
    try:
        return self.data.task
    except ValueError:
        # no task is yet specified
        return None

@property
def uri(self) -> Optional[str]:
    # Storage URI of the weights file, or None if not set yet.
    try:
        return self.data.uri
    except ValueError:
        # no uri is yet specified
        return None

@property
def locked(self) -> bool:
    # A published ("ready") model is considered locked for edits.
    if self.id is None:
        return False
    return bool(self.data.ready)
def download_model_weights(
    self,
    raise_on_error: bool = False,
    force_download: bool = False,
    extract_archive: bool = False,
) -> str:
    """
    Download the model weights into a local file in our cache

    :param bool raise_on_error: If True and the artifact could not be downloaded,
        raise ValueError, otherwise return None on failure and output log warning.
    :param bool force_download: If True, the base artifact will be downloaded,
        even if the artifact is already cached.
    :param bool extract_archive: If True, unzip the downloaded file if possible
    :return: a local path to a downloaded copy of the model
    """
    if not hasattr(self.data, "uri"):
        return None
    uri = self.data.uri
    if not uri or not uri.strip():
        return None

    # check if we already downloaded the file
    downloaded_models = [k for k, (i, u) in Model._local_model_to_id_uri.items() if i == self.id and u == uri]
    for dl_file in downloaded_models:
        if Path(dl_file).exists() and not force_download:
            return dl_file
        # remove non existing model file
        Model._local_model_to_id_uri.pop(dl_file, None)

    local_download = StorageManager.get_local_copy(
        uri, extract_archive=extract_archive, force_download=force_download
    )

    # save local model, so we can later query what was the original one
    if local_download is not None:
        Model._local_model_to_id_uri[str(local_download)] = (self.model_id, uri)
    elif raise_on_error:
        raise ValueError(
            "Could not retrieve a local copy of model weights {}, "
            "failed downloading {}".format(self.model_id, uri)
        )

    return local_download

@property
def cache_dir(self) -> str:
    # Local directory used to cache downloaded weights/design files.
    return self._cache_dir

def save_model_design_file(self) -> str:
    """Download model description file into a local file in our cache_dir"""
    design = self.model_design
    filename = self.data.name + ".txt"
    p = Path(self.cache_dir) / filename
    # we always write the original model design to file, to prevent any mishaps
    # if p.is_file():
    #     return str(p)
    p.parent.mkdir(parents=True, exist_ok=True)
    p.write_text(six.text_type(design))
    return str(p)

def get_model_package(self) -> ModelPackage:
    """Get a named tuple containing the model's weights and design"""
    return ModelPackage(weights=self.download_model_weights(), design=self.save_model_design_file())

def get_model_design(self) -> str:
    """Get model description (text)"""
    return self.model_design

@classmethod
def get_all(
    cls,
    session: Session,
    log: Optional[logging.Logger] = None,
    **kwargs: Any,
) -> Any:
    """Query the backend for all models matching the GetAllRequest kwargs."""
    req = models.GetAllRequest(**kwargs)
    res = cls._send(session=session, req=req, log=log)
    return res
def clone(
    self,
    name: str,
    comment: str = None,
    child: bool = True,
    tags: list = None,
    task: str = None,
    ready: bool = True,
) -> str:
    """
    Clone this model into a new model.

    :param name: Name for the new model
    :param comment: Optional comment for the new model
    :param child: Should the new model be a child of this model (default True)
    :param tags: Optional tags for the cloned model
    :param task: Creating Task of the Model
    :param ready: If True, set the true flag for the newly created model
    :return: The new model's ID
    """
    data = self.data
    assert isinstance(data, models.Model)
    parent = self.id if child else None
    # API >= 2.3 stores the tags as system_tags; older servers use tags.
    extra = (
        {"system_tags": tags or data.system_tags}
        if Session.check_min_api_version("2.3")
        else {"tags": tags or data.tags}
    )
    req = models.CreateRequest(
        uri=data.uri,
        name=name,
        labels=data.labels,
        comment=comment or data.comment,
        framework=data.framework,
        design=data.design,
        ready=ready,
        project=data.project,
        parent=parent,
        task=task,
        **extra,
    )
    res = self.send(req)
    return res.response.id

def _create_empty_model(self, upload_storage_uri: str = None, project_id: str = None) -> bool:
    """Create an anonymous placeholder model on the backend; returns success."""
    upload_storage_uri = upload_storage_uri or self.upload_storage_uri
    name = make_message("Anonymous model %(time)s")
    # Placeholder URI until the real weights file finishes uploading.
    uri = "{}/uploading_file".format(upload_storage_uri or "file://")
    req = models.CreateRequest(uri=uri, name=name, labels={}, project=project_id)
    res = self.send(req)
    if not res:
        return False
    self.id = res.response.id
    return True
| Model |
python | sympy__sympy | sympy/matrices/expressions/blockmatrix.py | {
"start": 1113,
"end": 18864
} | class ____(MatrixExpr):
"""A BlockMatrix is a Matrix comprised of other matrices.
The submatrices are stored in a SymPy Matrix object but accessed as part of
a Matrix Expression
>>> from sympy import (MatrixSymbol, BlockMatrix, symbols,
... Identity, ZeroMatrix, block_collapse)
>>> n,m,l = symbols('n m l')
>>> X = MatrixSymbol('X', n, n)
>>> Y = MatrixSymbol('Y', m, m)
>>> Z = MatrixSymbol('Z', n, m)
>>> B = BlockMatrix([[X, Z], [ZeroMatrix(m,n), Y]])
>>> print(B)
Matrix([
[X, Z],
[0, Y]])
>>> C = BlockMatrix([[Identity(n), Z]])
>>> print(C)
Matrix([[I, Z]])
>>> print(block_collapse(C*B))
Matrix([[X, Z + Z*Y]])
Some matrices might be comprised of rows of blocks with
the matrices in each row having the same height and the
rows all having the same total number of columns but
not having the same number of columns for each matrix
in each row. In this case, the matrix is not a block
matrix and should be instantiated by Matrix.
>>> from sympy import ones, Matrix
>>> dat = [
... [ones(3,2), ones(3,3)*2],
... [ones(2,3)*3, ones(2,2)*4]]
...
>>> BlockMatrix(dat)
Traceback (most recent call last):
...
ValueError:
Although this matrix is comprised of blocks, the blocks do not fill
the matrix in a size-symmetric fashion. To create a full matrix from
these arguments, pass them directly to Matrix.
>>> Matrix(dat)
Matrix([
[1, 1, 2, 2, 2],
[1, 1, 2, 2, 2],
[1, 1, 2, 2, 2],
[3, 3, 3, 4, 4],
[3, 3, 3, 4, 4]])
See Also
========
sympy.matrices.matrixbase.MatrixBase.irregular
"""
def __new__(cls, *args, **kwargs):
    """Validate the nested structure of blocks and build the BlockMatrix.

    Accepts a sequence of rows of matrices (or a single row). Raises
    ValueError when the blocks do not tile the matrix size-symmetrically.
    """
    from sympy.matrices.immutable import ImmutableDenseMatrix
    isMat = lambda i: getattr(i, 'is_Matrix', False)
    # Exactly one argument: a sequence whose elements are uniformly
    # matrices (one row) or uniformly non-matrices (rows of matrices).
    if len(args) != 1 or \
            not is_sequence(args[0]) or \
            len({isMat(r) for r in args[0]}) != 1:
        raise ValueError(filldedent('''
            expecting a sequence of 1 or more rows
            containing Matrices.'''))
    rows = args[0] if args else []
    if not isMat(rows):
        if rows and isMat(rows[0]):
            rows = [rows]  # rows is not list of lists or []
        # regularity check
        # same number of matrices in each row
        blocky = ok = len({len(r) for r in rows}) == 1
        if ok:
            # same number of rows for each matrix in a row
            for r in rows:
                ok = len({i.rows for i in r}) == 1
                if not ok:
                    break
            blocky = ok
            if ok:
                # same number of cols for each matrix in each col
                for c in range(len(rows[0])):
                    ok = len({rows[i][c].cols
                        for i in range(len(rows))}) == 1
                    if not ok:
                        break
        if not ok:
            # same total cols in each row
            ok = len({
                sum(i.cols for i in r) for r in rows}) == 1
            if blocky and ok:
                # Blocks fill the space but not size-symmetrically:
                # point the user at Matrix instead.
                raise ValueError(filldedent('''
                    Although this matrix is comprised of blocks,
                    the blocks do not fill the matrix in a
                    size-symmetric fashion. To create a full matrix
                    from these arguments, pass them directly to
                    Matrix.'''))
            raise ValueError(filldedent('''
                When there are not the same number of rows in each
                row's matrices or there are not the same number of
                total columns in each row, the matrix is not a
                block matrix. If this matrix is known to consist of
                blocks fully filling a 2-D space then see
                Matrix.irregular.'''))
    # evaluate=False keeps the blocks as given (no premature simplification).
    mat = ImmutableDenseMatrix(rows, evaluate=False)
    obj = Basic.__new__(cls, mat)
    return obj
@property
def shape(self):
    # Overall (rows, cols): heights summed down the first block column,
    # widths summed across the first block row (regularity is validated
    # at construction time).
    numrows = numcols = 0
    M = self.blocks
    for i in range(M.shape[0]):
        numrows += M[i, 0].shape[0]
    for i in range(M.shape[1]):
        numcols += M[0, i].shape[1]
    return (numrows, numcols)

@property
def blockshape(self):
    # Number of block rows and block columns.
    return self.blocks.shape

@property
def blocks(self):
    # The underlying ImmutableDenseMatrix holding the sub-matrices.
    return self.args[0]

@property
def rowblocksizes(self):
    # Height of each block row (read from the first block column).
    return [self.blocks[i, 0].rows for i in range(self.blockshape[0])]

@property
def colblocksizes(self):
    # Width of each block column (read from the first block row).
    return [self.blocks[0, i].cols for i in range(self.blockshape[1])]
def structurally_equal(self, other):
    """Return True iff ``other`` is a BlockMatrix with identical overall
    shape, block layout, and per-block row/column sizes."""
    if not isinstance(other, BlockMatrix):
        return False
    return (self.shape == other.shape
        and self.blockshape == other.blockshape
        and self.rowblocksizes == other.rowblocksizes
        and self.colblocksizes == other.colblocksizes)
def _blockmul(self, other):
    # Block-wise product when the column block sizes match the other's
    # row block sizes; otherwise fall back to a symbolic MatMul.
    if (isinstance(other, BlockMatrix) and
            self.colblocksizes == other.rowblocksizes):
        return BlockMatrix(self.blocks*other.blocks)

    return self * other

def _blockadd(self, other):
    # Block-wise sum only for structurally identical block matrices;
    # otherwise fall back to a symbolic MatAdd.
    if (isinstance(other, BlockMatrix)
            and self.structurally_equal(other)):
        return BlockMatrix(self.blocks + other.blocks)

    return self + other

def _eval_transpose(self):
    # Flip all the individual matrices
    matrices = [transpose(matrix) for matrix in self.blocks]
    # Make a copy
    M = Matrix(self.blockshape[0], self.blockshape[1], matrices)
    # Transpose the block structure
    M = M.transpose()
    return BlockMatrix(M)

def _eval_adjoint(self):
    # Conjugate transpose: adjoint of the block grid adjoints each
    # block and transposes the layout in one step.
    return BlockMatrix(
        Matrix(self.blockshape[0], self.blockshape[1], self.blocks).adjoint()
    )

def _eval_trace(self):
    # Trace is the sum of the diagonal blocks' traces; only defined when
    # the diagonal blocks are square (row sizes equal column sizes).
    if self.rowblocksizes == self.colblocksizes:
        blocks = [self.blocks[i, i] for i in range(self.blockshape[0])]
        return Add(*[trace(block) for block in blocks])

def _eval_determinant(self):
    # 1x1: determinant of the single block. 2x2: Schur-complement
    # factorization when A or D is known invertible; otherwise stay
    # unevaluated as Determinant(self).
    if self.blockshape == (1, 1):
        return det(self.blocks[0, 0])
    if self.blockshape == (2, 2):
        [[A, B],
         [C, D]] = self.blocks.tolist()
        if ask(Q.invertible(A)):
            return det(A)*det(D - C*A.I*B)
        elif ask(Q.invertible(D)):
            return det(D)*det(A - B*D.I*C)
    return Determinant(self)

def _eval_as_real_imag(self):
    # Split each block into real and imaginary parts, preserving layout.
    real_matrices = [re(matrix) for matrix in self.blocks]
    real_matrices = Matrix(self.blockshape[0], self.blockshape[1], real_matrices)

    im_matrices = [im(matrix) for matrix in self.blocks]
    im_matrices = Matrix(self.blockshape[0], self.blockshape[1], im_matrices)

    return (BlockMatrix(real_matrices), BlockMatrix(im_matrices))

def _eval_derivative(self, x):
    # Differentiate block-wise with respect to x.
    return BlockMatrix(self.blocks.diff(x))
def transpose(self):
    """Return transpose of matrix.

    Examples
    ========

    >>> from sympy import MatrixSymbol, BlockMatrix, ZeroMatrix
    >>> from sympy.abc import m, n
    >>> X = MatrixSymbol('X', n, n)
    >>> Y = MatrixSymbol('Y', m, m)
    >>> Z = MatrixSymbol('Z', n, m)
    >>> B = BlockMatrix([[X, Z], [ZeroMatrix(m,n), Y]])
    >>> B.transpose()
    Matrix([
    [X.T,  0],
    [Z.T, Y.T]])
    >>> _.transpose()
    Matrix([
    [X, Z],
    [0, Y]])
    """
    # Delegate to the eval helper (transposes blocks and block layout).
    return self._eval_transpose()
def schur(self, mat = 'A', generalized = False):
    """Return the Schur Complement of the 2x2 BlockMatrix

    Parameters
    ==========

    mat : String, optional
        The matrix with respect to which the
        Schur Complement is calculated. 'A' is
        used by default

    generalized : bool, optional
        If True, returns the generalized Schur
        Component which uses Moore-Penrose Inverse

    Examples
    ========

    >>> from sympy import symbols, MatrixSymbol, BlockMatrix
    >>> m, n = symbols('m n')
    >>> A = MatrixSymbol('A', n, n)
    >>> B = MatrixSymbol('B', n, m)
    >>> C = MatrixSymbol('C', m, n)
    >>> D = MatrixSymbol('D', m, m)
    >>> X = BlockMatrix([[A, B], [C, D]])

    The default Schur Complement is evaluated with "A"

    >>> X.schur()
    -C*A**(-1)*B + D
    >>> X.schur('D')
    A - B*D**(-1)*C

    Schur complement with non-invertible matrices is not
    defined. Instead, the generalized Schur complement can
    be calculated which uses the Moore-Penrose Inverse. To
    achieve this, `generalized` must be set to `True`

    >>> X.schur('B', generalized=True)
    C - D*(B.T*B)**(-1)*B.T*A
    >>> X.schur('C', generalized=True)
    -A*(C.T*C)**(-1)*C.T*D + B

    Returns
    =======

    M : Matrix
        The Schur Complement Matrix

    Raises
    ======

    ShapeError
        If the block matrix is not a 2x2 matrix

    NonInvertibleMatrixError
        If given matrix is non-invertible

    References
    ==========

    .. [1] Wikipedia Article on Schur Component : https://en.wikipedia.org/wiki/Schur_complement

    See Also
    ========

    sympy.matrices.matrixbase.MatrixBase.pinv
    """
    if self.blockshape == (2, 2):
        [[A, B],
         [C, D]] = self.blocks.tolist()
        # Map the selector string to the corresponding sub-matrix.
        d={'A' : A, 'B' : B, 'C' : C, 'D' : D}
        try:
            # Generalized: Moore-Penrose pseudo-inverse (X.T X)^-1 X.T;
            # otherwise the ordinary inverse of the selected block.
            inv = (d[mat].T*d[mat]).inv()*d[mat].T if generalized else d[mat].inv()
            if mat == 'A':
                return D - C * inv * B
            elif mat == 'B':
                return C - D * inv * A
            elif mat == 'C':
                return B - A * inv * D
            elif mat == 'D':
                return A - B * inv * C
            #For matrices where no sub-matrix is square
            return self
        except NonInvertibleMatrixError:
            raise NonInvertibleMatrixError('The given matrix is not invertible. Please set generalized=True \
to compute the generalized Schur Complement which uses Moore-Penrose Inverse')
    else:
        raise ShapeError('Schur Complement can only be calculated for 2x2 block matrices')
def LDUdecomposition(self):
    """Returns the Block LDU decomposition of
    a 2x2 Block Matrix

    Returns
    =======

    (L, D, U) : Matrices
        L : Lower Diagonal Matrix
        D : Diagonal Matrix
        U : Upper Diagonal Matrix

    Examples
    ========

    >>> from sympy import symbols, MatrixSymbol, BlockMatrix, block_collapse
    >>> m, n = symbols('m n')
    >>> A = MatrixSymbol('A', n, n)
    >>> B = MatrixSymbol('B', n, m)
    >>> C = MatrixSymbol('C', m, n)
    >>> D = MatrixSymbol('D', m, m)
    >>> X = BlockMatrix([[A, B], [C, D]])
    >>> L, D, U = X.LDUdecomposition()
    >>> block_collapse(L*D*U)
    Matrix([
    [A, B],
    [C, D]])

    Raises
    ======

    ShapeError
        If the block matrix is not a 2x2 matrix

    NonInvertibleMatrixError
        If the matrix "A" is non-invertible

    See Also
    ========

    sympy.matrices.expressions.blockmatrix.BlockMatrix.UDLdecomposition
    sympy.matrices.expressions.blockmatrix.BlockMatrix.LUdecomposition
    """
    # Guard clause: the factorization is only defined for 2x2 block layouts.
    if self.blockshape != (2, 2):
        raise ShapeError("Block LDU decomposition is supported only for 2x2 block matrices")
    [[A, B],
     [C, D]] = self.blocks.tolist()
    try:
        AI = A.I
    except NonInvertibleMatrixError:
        raise NonInvertibleMatrixError('Block LDU decomposition cannot be calculated when\
        "A" is singular')
    Ip = Identity(B.shape[0])
    Iq = Identity(B.shape[1])
    Z = ZeroMatrix(*B.shape)
    # X = L * D * U with D holding A and its Schur complement.
    lower = BlockMatrix([[Ip, Z], [C*AI, Iq]])
    diag = BlockDiagMatrix(A, self.schur())
    upper = BlockMatrix([[Ip, AI*B], [Z.T, Iq]])
    return lower, diag, upper
def UDLdecomposition(self):
    """Returns the Block UDL decomposition of
    a 2x2 Block Matrix

    Returns
    =======

    (U, D, L) : Matrices
        U : Upper Diagonal Matrix
        D : Diagonal Matrix
        L : Lower Diagonal Matrix

    Examples
    ========

    >>> from sympy import symbols, MatrixSymbol, BlockMatrix, block_collapse
    >>> m, n = symbols('m n')
    >>> A = MatrixSymbol('A', n, n)
    >>> B = MatrixSymbol('B', n, m)
    >>> C = MatrixSymbol('C', m, n)
    >>> D = MatrixSymbol('D', m, m)
    >>> X = BlockMatrix([[A, B], [C, D]])
    >>> U, D, L = X.UDLdecomposition()
    >>> block_collapse(U*D*L)
    Matrix([
    [A, B],
    [C, D]])

    Raises
    ======

    ShapeError
        If the block matrix is not a 2x2 matrix

    NonInvertibleMatrixError
        If the matrix "D" is non-invertible

    See Also
    ========

    sympy.matrices.expressions.blockmatrix.BlockMatrix.LDUdecomposition
    sympy.matrices.expressions.blockmatrix.BlockMatrix.LUdecomposition
    """
    # Guard clause: the factorization is only defined for 2x2 block layouts.
    if self.blockshape != (2, 2):
        raise ShapeError("Block UDL decomposition is supported only for 2x2 block matrices")
    [[A, B],
     [C, D]] = self.blocks.tolist()
    try:
        DI = D.I
    except NonInvertibleMatrixError:
        raise NonInvertibleMatrixError('Block UDL decomposition cannot be calculated when\
        "D" is singular')
    Ip = Identity(A.shape[0])
    Iq = Identity(B.shape[1])
    Z = ZeroMatrix(*B.shape)
    # X = U * D * L with D holding the Schur complement w.r.t. D, then D.
    upper = BlockMatrix([[Ip, B*DI], [Z.T, Iq]])
    diag = BlockDiagMatrix(self.schur('D'), D)
    lower = BlockMatrix([[Ip, Z], [DI*C, Iq]])
    return upper, diag, lower
def LUdecomposition(self):
    """Returns the Block LU decomposition of
    a 2x2 Block Matrix

    Returns
    =======

    (L, U) : Matrices
        L : Lower Diagonal Matrix
        U : Upper Diagonal Matrix

    Examples
    ========

    >>> from sympy import symbols, MatrixSymbol, BlockMatrix, block_collapse
    >>> m, n = symbols('m n')
    >>> A = MatrixSymbol('A', n, n)
    >>> B = MatrixSymbol('B', n, m)
    >>> C = MatrixSymbol('C', m, n)
    >>> D = MatrixSymbol('D', m, m)
    >>> X = BlockMatrix([[A, B], [C, D]])
    >>> L, U = X.LUdecomposition()
    >>> block_collapse(L*U)
    Matrix([
    [A, B],
    [C, D]])

    Raises
    ======

    ShapeError
        If the block matrix is not a 2x2 matrix

    NonInvertibleMatrixError
        If the matrix "A" is non-invertible

    See Also
    ========

    sympy.matrices.expressions.blockmatrix.BlockMatrix.UDLdecomposition
    sympy.matrices.expressions.blockmatrix.BlockMatrix.LDUdecomposition
    """
    # Guard clause: the factorization is only defined for 2x2 block layouts.
    if self.blockshape != (2, 2):
        raise ShapeError("Block LU decomposition is supported only for 2x2 block matrices")
    [[A, B],
     [C, D]] = self.blocks.tolist()
    try:
        # Use the square root of A on both factors so that L*U
        # reconstructs the original matrix.
        A = A**S.Half
        AI = A.I
    except NonInvertibleMatrixError:
        raise NonInvertibleMatrixError('Block LU decomposition cannot be calculated when\
        "A" is singular')
    Z = ZeroMatrix(*B.shape)
    schur_sqrt = self.schur()**S.Half
    lower = BlockMatrix([[A, Z], [C*AI, schur_sqrt]])
    upper = BlockMatrix([[A, AI*B], [Z.T, schur_sqrt]])
    return lower, upper
def _entry(self, i, j, **kwargs):
    """Return the (i, j) scalar entry, locating the block that contains it.

    The indices may be symbolic, so each comparison ``i < numrows`` can be
    True, False, or an unevaluated relational.  The explicit ``== True`` /
    ``== False`` checks below distinguish these three outcomes; plain
    truthiness would mis-handle the symbolic case.
    """
    # Find row entry
    orig_i, orig_j = i, j
    for row_block, numrows in enumerate(self.rowblocksizes):
        cmp = i < numrows
        if cmp == True:
            # i falls inside this block row; stop with i as a local index.
            break
        elif cmp == False:
            # i lies past this block row; shift it into the next one.
            i -= numrows
        elif row_block < self.blockshape[0] - 1:
            # Can't tell which block and it's not the last one, return unevaluated
            return MatrixElement(self, orig_i, orig_j)
    # Same scan over the column partition.
    for col_block, numcols in enumerate(self.colblocksizes):
        cmp = j < numcols
        if cmp == True:
            break
        elif cmp == False:
            j -= numcols
        elif col_block < self.blockshape[1] - 1:
            return MatrixElement(self, orig_i, orig_j)
    # i and j are now local indices into the located block.
    return self.blocks[row_block, col_block][i, j]
@property
def is_Identity(self):
    # An identity block matrix needs a square block layout ...
    if self.blockshape[0] != self.blockshape[1]:
        return False
    # ... identity blocks on the diagonal, and zero blocks elsewhere.
    for r in range(self.blockshape[0]):
        for c in range(self.blockshape[1]):
            blk = self.blocks[r, c]
            if r == c:
                if not blk.is_Identity:
                    return False
            elif not blk.is_ZeroMatrix:
                return False
    return True
@property
def is_structurally_symmetric(self):
    # Symmetric block layout: the row partition equals the column partition.
    return self.rowblocksizes == self.colblocksizes
def equals(self, other):
    # Cheap structural checks first: expression equality, then
    # block-by-block equality against another BlockMatrix.
    if self == other:
        return True
    if isinstance(other, BlockMatrix) and self.blocks == other.blocks:
        return True
    # Fall back to the generic (possibly symbolic) equality test.
    return super().equals(other)
| BlockMatrix |
python | pandas-dev__pandas | asv_bench/benchmarks/rolling.py | {
"start": 4299,
"end": 5215
} | class ____:
params = (
["DataFrame", "Series"],
[
({"halflife": 10}, "mean"),
({"halflife": 10}, "std"),
({"halflife": 1000}, "mean"),
({"halflife": 1000}, "std"),
(
{
"halflife": "1 Day",
"times": pd.date_range("1900", periods=10**5, freq="23s"),
},
"mean",
),
],
["int", "float"],
)
param_names = ["constructor", "kwargs_method", "dtype"]
def setup(self, constructor, kwargs_method, dtype):
N = 10**5
kwargs, method = kwargs_method
arr = (100 * np.random.random(N)).astype(dtype)
self.method = method
self.ewm = getattr(pd, constructor)(arr).ewm(**kwargs)
def time_ewm(self, constructor, kwargs_method, dtype):
getattr(self.ewm, self.method)()
| EWMMethods |
python | getsentry__sentry | src/sentry/integrations/source_code_management/commit_context.py | {
"start": 14903,
"end": 19220
} | class ____(ABC):
def __init__(self, integration: CommitContextIntegration):
self.integration = integration
@property
@abstractmethod
def organization_option_key(self) -> str:
raise NotImplementedError
@property
@abstractmethod
def referrer(self) -> Referrer:
raise NotImplementedError
@property
@abstractmethod
def referrer_id(self) -> str:
raise NotImplementedError
def queue_task(self, pr: PullRequest, project_id: int) -> None:
from sentry.integrations.source_code_management.tasks import pr_comment_workflow
pr_comment_workflow.delay(pr_id=pr.id, project_id=project_id)
@abstractmethod
def get_comment_body(self, issue_ids: list[int]) -> str:
raise NotImplementedError
@abstractmethod
def get_comment_data(
self,
organization: Organization,
repo: Repository,
pr: PullRequest,
comment_body: str,
issue_ids: list[int],
) -> dict[str, Any]:
raise NotImplementedError
def get_issue_ids_from_pr(self, pr: PullRequest, limit: int = MAX_SUSPECT_COMMITS) -> list[int]:
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT go.group_id issue_id
FROM sentry_groupowner go
JOIN sentry_pullrequest_commit c ON c.commit_id = (go.context::jsonb->>'commitId')::bigint
JOIN sentry_pull_request pr ON c.pull_request_id = pr.id
WHERE go.type=0
AND pr.id=%s
ORDER BY go.date_added
LIMIT %s
""",
params=[pr.id, limit],
)
return [issue_id for (issue_id,) in cursor.fetchall()]
def get_top_5_issues_by_count(
self, issue_ids: list[int], project: Project
) -> list[dict[str, Any]]:
"""Given a list of issue group ids, return a sublist of the top 5 ordered by event count"""
request = SnubaRequest(
dataset=Dataset.Events.value,
app_id="default",
tenant_ids={"organization_id": project.organization_id},
query=(
Query(Entity("events"))
.set_select([Column("group_id"), Function("count", [], "event_count")])
.set_groupby([Column("group_id")])
.set_where(
[
Condition(Column("project_id"), Op.EQ, project.id),
Condition(Column("group_id"), Op.IN, issue_ids),
Condition(Column("timestamp"), Op.GTE, datetime.now() - timedelta(days=30)),
Condition(Column("timestamp"), Op.LT, datetime.now()),
Condition(Column("level"), Op.NEQ, "info"),
]
)
.set_orderby([OrderBy(Column("event_count"), Direction.DESC)])
.set_limit(5)
),
)
return raw_snql_query(request, referrer=self.referrer.value)["data"]
@staticmethod
def _truncate_title(title: str, max_length: int = ISSUE_TITLE_MAX_LENGTH) -> str:
"""Truncate title if it's too long and add ellipsis."""
if len(title) <= max_length:
return title
return title[:max_length].rstrip() + "..."
def get_environment_info(self, issue: Group) -> str:
try:
recommended_event = issue.get_recommended_event()
if recommended_event:
environment = recommended_event.get_environment()
if environment and environment.name:
return f" in `{environment.name}`"
except Exception as e:
# If anything goes wrong, just continue without environment info
logger.info(
"get_environment_info.no-environment",
extra={"issue_id": issue.id, "error": e},
)
return ""
@staticmethod
def get_merged_pr_single_issue_template(title: str, url: str, environment: str) -> str:
truncated_title = PRCommentWorkflow._truncate_title(title)
return MERGED_PR_SINGLE_ISSUE_TEMPLATE.format(
title=truncated_title,
url=url,
environment=environment,
)
| PRCommentWorkflow |
python | apache__airflow | airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_backfills.py | {
"start": 26317,
"end": 29725
} | class ____(TestBackfillEndpoint):
def test_cancel_backfill(self, session, test_client):
(dag,) = self._create_dag_models()
from_date = timezone.utcnow()
to_date = timezone.utcnow()
backfill = Backfill(dag_id=dag.dag_id, from_date=from_date, to_date=to_date)
session.add(backfill)
session.commit()
response = test_client.put(
f"/backfills/{backfill.id}/cancel",
)
assert response.status_code == 200
assert response.json() == {
"completed_at": mock.ANY,
"created_at": mock.ANY,
"dag_display_name": "TEST_DAG_1",
"dag_id": "TEST_DAG_1",
"dag_run_conf": {},
"from_date": to_iso(from_date),
"id": backfill.id,
"is_paused": True,
"reprocess_behavior": "none",
"max_active_runs": 10,
"to_date": to_iso(to_date),
"updated_at": mock.ANY,
}
assert pendulum.parse(response.json()["completed_at"])
# now it is marked as completed
assert pendulum.parse(response.json()["completed_at"])
# get conflict when canceling already-canceled backfill
response = test_client.put(f"/backfills/{backfill.id}/cancel")
assert response.status_code == 409
check_last_log(session, dag_id=None, event="cancel_backfill", logical_date=None)
def test_cancel_backfill_end_states(self, dag_maker, session, test_client):
"""
Queued runs should be marked *failed*.
Every other dag run should be left alone.
"""
with dag_maker(schedule="@daily") as dag:
PythonOperator(task_id="hi", python_callable=print)
b = _create_backfill(
dag_id=dag.dag_id,
from_date=timezone.datetime(2021, 1, 1),
to_date=timezone.datetime(2021, 1, 5),
max_active_runs=2,
reverse=False,
dag_run_conf={},
triggering_user_name="test_user",
)
query = (
select(DagRun)
.join(BackfillDagRun.dag_run)
.where(BackfillDagRun.backfill_id == b.id)
.order_by(BackfillDagRun.sort_ordinal)
)
dag_runs = session.scalars(query).all()
dates = [str(x.logical_date.date()) for x in dag_runs]
expected_dates = ["2021-01-01", "2021-01-02", "2021-01-03", "2021-01-04", "2021-01-05"]
assert dates == expected_dates
assert all(x.state == DagRunState.QUEUED for x in dag_runs)
dag_runs[0].state = "running"
session.commit()
response = test_client.put(f"/backfills/{b.id}/cancel")
assert response.status_code == 200
session.expunge_all()
dag_runs = session.scalars(query).all()
states = [x.state for x in dag_runs]
assert states == ["running", "failed", "failed", "failed", "failed"]
def test_invalid_id(self, test_client):
response = test_client.put("/backfills/invalid_id/cancel")
assert response.status_code == 422
response_detail = response.json()["detail"][0]
assert response_detail["input"] == "invalid_id"
assert response_detail["loc"] == ["path", "backfill_id"]
assert (
response_detail["msg"] == "Input should be a valid integer, unable to parse string as an integer"
)
| TestCancelBackfill |
python | PrefectHQ__prefect | tests/blocks/test_core.py | {
"start": 1104,
"end": 42312
} | class ____:
class MyBlock(Block):
x: str
y: int = 1
@register_type
class MyRegisteredBlock(Block):
x: str
y: int = 1
@register_type
class MyOtherRegisteredBlock(Block):
x: str
y: int = 1
z: int = 2
def test_registration(self):
assert (
lookup_type(Block, self.MyRegisteredBlock.__dispatch_key__())
is self.MyRegisteredBlock
)
assert (
lookup_type(Block, self.MyOtherRegisteredBlock.__dispatch_key__())
is self.MyOtherRegisteredBlock
)
assert lookup_type(Block, self.MyBlock.__dispatch_key__()) is self.MyBlock
def test_create_api_block_schema(self, block_type_x):
block_schema = self.MyRegisteredBlock._to_block_schema(
block_type_id=block_type_x.id
)
assert (
block_schema.checksum
== "sha256:876ee010b459f79fe6a31f00442a2ba47ee36202968830efda4378544051da64"
)
assert block_schema.fields == {
"title": "MyRegisteredBlock",
"type": "object",
"properties": {
"x": {"title": "X", "type": "string"},
"y": {"title": "Y", "default": 1, "type": "integer"},
},
"block_schema_references": {},
"block_type_slug": "myregisteredblock",
"required": ["x"],
"secret_fields": [],
}
def test_create_api_block_with_secret_fields_reflected_in_schema(self):
class SecretBlockE(Block):
w: SecretDict
x: SecretStr
y: SecretBytes
z: str
assert SecretBlockE.model_json_schema()["secret_fields"] == ["w.*", "x", "y"]
schema = SecretBlockE._to_block_schema(block_type_id=uuid4())
assert schema.fields["secret_fields"] == ["w.*", "x", "y"]
assert schema.fields == {
"block_schema_references": {},
"block_type_slug": "secretblocke",
"properties": {
"w": {"title": "W", "type": "object"},
"x": {
"format": "password",
"title": "X",
"type": "string",
"writeOnly": True,
},
"y": {
"format": "password",
"title": "Y",
"type": "string",
"writeOnly": True,
},
"z": {"title": "Z", "type": "string"},
},
"required": ["w", "x", "y", "z"],
"secret_fields": ["w.*", "x", "y"],
"title": "SecretBlockE",
"type": "object",
}
def test_create_api_block_with_nested_secret_fields_reflected_in_schema(self):
class Child(Block):
a: SecretStr
b: str
c: SecretDict
class Parent(Block):
a: SecretStr
b: str
child: Child
assert Child.model_json_schema()["secret_fields"] == ["a", "c.*"]
assert Parent.model_json_schema()["secret_fields"] == [
"a",
"child.a",
"child.c.*",
]
schema = Parent._to_block_schema(block_type_id=uuid4())
assert schema.fields["secret_fields"] == ["a", "child.a", "child.c.*"]
assert schema.fields == {
"block_schema_references": {
"child": {
"block_schema_checksum": "sha256:3e50c75591f4071c7df082d8a7969c57ae97f6a62c2345017e6b64bc13c39cd0",
"block_type_slug": "child",
}
},
"block_type_slug": "parent",
"definitions": {
"Child": {
"block_schema_references": {},
"block_type_slug": "child",
"properties": {
"a": {
"format": "password",
"title": "A",
"type": "string",
"writeOnly": True,
},
"b": {"title": "B", "type": "string"},
"c": {"title": "C", "type": "object"},
},
"required": ["a", "b", "c"],
"secret_fields": ["a", "c.*"],
"title": "Child",
"type": "object",
}
},
"properties": {
"a": {
"format": "password",
"title": "A",
"type": "string",
"writeOnly": True,
},
"b": {"title": "B", "type": "string"},
"child": {"$ref": "#/definitions/Child"},
},
"required": ["a", "b", "child"],
"secret_fields": ["a", "child.a", "child.c.*"],
"title": "Parent",
"type": "object",
}
def test_create_api_block_with_nested_secret_fields_in_base_model_reflected_in_schema(
self,
):
class Child(BaseModel):
a: SecretStr
b: str
c: SecretDict
class Parent(Block):
a: SecretStr
b: str
child: Child
assert Parent.model_json_schema()["secret_fields"] == [
"a",
"child.a",
"child.c.*",
]
schema = Parent._to_block_schema(block_type_id=uuid4())
assert schema.fields["secret_fields"] == ["a", "child.a", "child.c.*"]
assert schema.fields == {
"title": "Parent",
"type": "object",
"properties": {
"a": {
"title": "A",
"type": "string",
"writeOnly": True,
"format": "password",
},
"b": {"title": "B", "type": "string"},
"child": {"$ref": "#/definitions/Child"},
},
"required": ["a", "b", "child"],
"block_type_slug": "parent",
"secret_fields": ["a", "child.a", "child.c.*"],
"block_schema_references": {},
"definitions": {
"Child": {
"title": "Child",
"type": "object",
"properties": {
"a": {
"title": "A",
"type": "string",
"writeOnly": True,
"format": "password",
},
"b": {"title": "B", "type": "string"},
"c": {"title": "C", "type": "object"},
},
"required": ["a", "b", "c"],
}
},
}
def test_create_api_block_with_nested_union_secret_fields_in_base_model_reflected_in_schema(
self,
):
class Child(BaseModel):
a: SecretStr
b: str
c: SecretDict
class Parent(Block):
a: SecretStr
b: str
child: Union[Child, str]
assert Parent.model_json_schema()["secret_fields"] == [
"a",
"child.a",
"child.c.*",
]
schema = Parent._to_block_schema(block_type_id=uuid4())
assert schema.fields["secret_fields"] == ["a", "child.a", "child.c.*"]
assert schema.fields == {
"title": "Parent",
"type": "object",
"properties": {
"a": {
"title": "A",
"type": "string",
"writeOnly": True,
"format": "password",
},
"b": {"title": "B", "type": "string"},
"child": {
"title": "Child",
"anyOf": [{"$ref": "#/definitions/Child"}, {"type": "string"}],
},
},
"required": ["a", "b", "child"],
"block_type_slug": "parent",
"secret_fields": ["a", "child.a", "child.c.*"],
"block_schema_references": {},
"definitions": {
"Child": {
"title": "Child",
"type": "object",
"properties": {
"a": {
"title": "A",
"type": "string",
"writeOnly": True,
"format": "password",
},
"b": {"title": "B", "type": "string"},
"c": {"title": "C", "type": "object"},
},
"required": ["a", "b", "c"],
}
},
}
def test_create_api_block_with_deeply_nested_secret_fields_in_base_model_reflected_in_schema(
self,
):
class SubChild(BaseModel):
a: str
b: SecretDict
c: SecretBytes
class Child(BaseModel):
a: SecretStr
b: str
sub_child: SubChild
class Parent(Block):
a: SecretStr
b: str
child: Child
assert Parent.model_json_schema()["secret_fields"] == [
"a",
"child.a",
"child.sub_child.b.*",
"child.sub_child.c",
]
schema = Parent._to_block_schema(block_type_id=uuid4())
assert schema.fields["secret_fields"] == [
"a",
"child.a",
"child.sub_child.b.*",
"child.sub_child.c",
]
assert schema.fields == {
"title": "Parent",
"type": "object",
"properties": {
"a": {
"title": "A",
"type": "string",
"writeOnly": True,
"format": "password",
},
"b": {"title": "B", "type": "string"},
"child": {"$ref": "#/definitions/Child"},
},
"required": ["a", "b", "child"],
"block_type_slug": "parent",
"secret_fields": [
"a",
"child.a",
"child.sub_child.b.*",
"child.sub_child.c",
],
"block_schema_references": {},
"definitions": {
"SubChild": {
"title": "SubChild",
"type": "object",
"properties": {
"a": {"title": "A", "type": "string"},
"b": {"title": "B", "type": "object"},
"c": {
"title": "C",
"type": "string",
"writeOnly": True,
"format": "password",
},
},
"required": ["a", "b", "c"],
},
"Child": {
"title": "Child",
"type": "object",
"properties": {
"a": {
"title": "A",
"type": "string",
"writeOnly": True,
"format": "password",
},
"b": {"title": "B", "type": "string"},
"sub_child": {"$ref": "#/definitions/SubChild"},
},
"required": ["a", "b", "sub_child"],
},
},
}
def test_create_api_block_with_secret_values_are_obfuscated_by_default(self):
class SecretBlockA(Block):
w: SecretDict
x: SecretStr
y: SecretBytes
z: str
block = SecretBlockA(
w={
"Here's my shallow secret": "I don't like olives",
"deeper secrets": {"Here's my deeper secret": "I've never seen Lost"},
},
x="x",
y=b"y",
z="z",
)
block_type_id = uuid4()
block_schema_id = uuid4()
blockdoc = block._to_block_document(
name="name", block_type_id=block_type_id, block_schema_id=block_schema_id
)
assert isinstance(blockdoc.data["w"], SecretDict)
assert isinstance(blockdoc.data["x"], SecretStr)
assert isinstance(blockdoc.data["y"], SecretBytes)
json_blockdoc = blockdoc.model_dump(mode="json")
assert json_blockdoc["data"] == {
"w": "********",
"x": "********",
"y": "********",
"z": "z",
}
blockdoc_with_secrets = block._to_block_document(
name="name",
block_type_id=block_type_id,
block_schema_id=block_schema_id,
include_secrets=True,
)
json_blockdoc_with_secrets = blockdoc_with_secrets.model_dump(
context={"include_secrets": True}, mode="json"
)
assert json_blockdoc_with_secrets["data"] == {
"w": {
"Here's my shallow secret": "I don't like olives",
"deeper secrets": {"Here's my deeper secret": "I've never seen Lost"},
},
"x": "x",
"y": "y",
"z": "z",
}
def test_create_nested_api_block_with_secret_values_are_obfuscated_by_default(self):
class Child(Block):
a: SecretStr
b: str
c: SecretDict
class Parent(Block):
a: SecretStr
b: str
child: Child
block = Parent(a="a", b="b", child=dict(a="a", b="b", c=dict(secret="value")))
block_type_id = uuid4()
block_schema_id = uuid4()
blockdoc = block._to_block_document(
name="name", block_type_id=block_type_id, block_schema_id=block_schema_id
)
assert isinstance(blockdoc.data["a"], SecretStr)
assert isinstance(blockdoc.data["child"]["a"], SecretStr)
json_blockdoc = json.loads(blockdoc.model_dump_json())
assert json_blockdoc["data"] == {
"a": "********",
"b": "b",
# The child includes the type slug because it is not a block document
"child": {
"a": "********",
"b": "b",
"c": "********",
"block_type_slug": "child",
},
}
blockdoc_with_secrets = block._to_block_document(
name="name",
block_type_id=block_type_id,
block_schema_id=block_schema_id,
include_secrets=True,
)
json_blockdoc_with_secrets = blockdoc_with_secrets.model_dump(
mode="json", context={"include_secrets": True}
)
assert json_blockdoc_with_secrets["data"] == {
"a": "a",
"b": "b",
# The child includes the type slug because it is not a block document
"child": {
"a": "a",
"b": "b",
"c": {"secret": "value"},
"block_type_slug": "child",
},
}
def test_registering_blocks_with_capabilities(self):
@register_type
class IncapableBlock(Block):
# could use a little confidence
_block_type_id = uuid4()
class CanBluff(Block):
_block_schema_capabilities = ["bluffing"]
def bluff(self):
pass
@register_type
class CapableBlock(CanBluff, Block):
# kind of rude to the other Blocks
_block_type_id = uuid4()
all_the_answers: str = "42 or something"
capable_schema = CapableBlock._to_block_schema()
assert capable_schema.capabilities == ["bluffing"]
incapable_schema = IncapableBlock._to_block_schema()
assert incapable_schema.capabilities == []
def test_create_api_block_schema_only_includes_pydantic_fields(self, block_type_x):
@register_type
class MakesALottaAttributes(Block):
real_field: str
authentic_field: str
def block_initialization(self):
self.evil_fake_field = "evil fake data"
my_block = MakesALottaAttributes(real_field="hello", authentic_field="marvin")
block_schema = my_block._to_block_schema(block_type_id=block_type_x.id)
assert "real_field" in block_schema.fields["properties"].keys()
assert "authentic_field" in block_schema.fields["properties"].keys()
assert "evil_fake_field" not in block_schema.fields["properties"].keys()
def test_create_api_block_schema_with_different_registered_slug(self, block_type_x):
block_schema = self.MyOtherRegisteredBlock._to_block_schema(
block_type_id=block_type_x.id
)
assert (
block_schema.checksum
== "sha256:5f8577df3c90cfe24ebcb553323d54736cd90b9a155f8e724653fe39de9ada6a"
)
assert block_schema.fields == {
"title": "MyOtherRegisteredBlock",
"type": "object",
"properties": {
"x": {"title": "X", "type": "string"},
"y": {"title": "Y", "default": 1, "type": "integer"},
"z": {"default": 2, "title": "Z", "type": "integer"},
},
"block_type_slug": "myotherregisteredblock",
"block_schema_references": {},
"required": ["x"],
"secret_fields": [],
}
def test_create_api_block_with_arguments(self, block_type_x):
with pytest.raises(ValueError, match="(No name provided)"):
self.MyRegisteredBlock(x="x")._to_block_document()
with pytest.raises(ValueError, match="(No block schema ID provided)"):
self.MyRegisteredBlock(x="x")._to_block_document(name="block")
assert self.MyRegisteredBlock(x="x")._to_block_document(
name="block", block_schema_id=uuid4(), block_type_id=block_type_x.id
)
def test_to_block_document_anonymous_no_name(self, block_type_x):
anon_block = self.MyRegisteredBlock(x="x")._to_block_document(
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=True,
)
assert anon_block.is_anonymous is True
assert anon_block.name is None
def test_to_block_document_anonymous(self, block_type_x):
"""Test passing different values to the `is_anonymous` argument, in
combination with different values of the _is_anonymous class fallback"""
# explicit true
anon_block = self.MyRegisteredBlock(x="x")._to_block_document(
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=True,
)
assert anon_block.is_anonymous is True
# explicit false
anon_block_2 = self.MyRegisteredBlock(x="x")._to_block_document(
name="block",
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=False,
)
assert anon_block_2.is_anonymous is False
# none with no fallback
anon_block_3 = self.MyRegisteredBlock(x="x")._to_block_document(
name="block",
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=None,
)
assert anon_block_3.is_anonymous is False
# none with True fallback
anon_block_4 = self.MyRegisteredBlock(x="x")
anon_block_4._is_anonymous = True
doc_4 = anon_block_4._to_block_document(
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=None,
)
assert doc_4.is_anonymous is True
# False with True fallback
anon_block_5 = self.MyRegisteredBlock(x="x")
anon_block_5._is_anonymous = True
doc_5 = anon_block_5._to_block_document(
name="block",
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=False,
)
assert doc_5.is_anonymous is False
def test_to_block_document_anonymous_errors(self, block_type_x):
"""Test passing different values to the `is_anonymous` argument, in
combination with different values of the _is_anonymous class fallback"""
# explicit false
with pytest.raises(
ValueError,
match="(No name provided, either as an argument or on the block)",
):
self.MyRegisteredBlock(x="x")._to_block_document(
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=False,
)
# none with no fallback
with pytest.raises(
ValueError,
match="(No name provided, either as an argument or on the block)",
):
self.MyRegisteredBlock(x="x")._to_block_document(
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=None,
)
# none with False fallback
anon_block_4 = self.MyRegisteredBlock(x="x")
anon_block_4._is_anonymous = False
with pytest.raises(
ValueError,
match="(No name provided, either as an argument or on the block)",
):
anon_block_4._to_block_document(
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
is_anonymous=None,
)
def test_from_block_document(self, block_type_x):
block_schema_id = uuid4()
api_block = self.MyRegisteredBlock(x="x")._to_block_document(
name="block", block_schema_id=block_schema_id, block_type_id=block_type_x.id
)
block = Block._from_block_document(api_block)
assert type(block) is self.MyRegisteredBlock
assert block.x == "x"
assert block._block_schema_id == block_schema_id
assert block._block_document_id == api_block.id
assert block._block_type_id == block_type_x.id
assert block._is_anonymous is False
assert block._block_document_name == "block"
def test_from_block_document_anonymous(self, block_type_x):
block_schema_id = uuid4()
api_block = self.MyRegisteredBlock(x="x")._to_block_document(
block_schema_id=block_schema_id,
block_type_id=block_type_x.id,
is_anonymous=True,
)
block = Block._from_block_document(api_block)
assert type(block) is self.MyRegisteredBlock
assert block.x == "x"
assert block._block_schema_id == block_schema_id
assert block._block_document_id == api_block.id
assert block._block_type_id == block_type_x.id
assert block._is_anonymous is True
assert block._block_document_name is None
def test_from_block_document_with_unregistered_block(self):
class BlockyMcBlock(Block):
fizz: str
block_schema_id = uuid4()
block_type_id = uuid4()
api_block = BlockyMcBlock(fizz="buzz")._to_block_document(
name="super-important-config",
block_schema_id=block_schema_id,
block_type_id=block_type_id,
)
block = BlockyMcBlock._from_block_document(api_block)
assert type(block) is BlockyMcBlock
assert block.fizz == "buzz"
assert block._block_schema_id == block_schema_id
assert block._block_document_id == api_block.id
assert block._block_type_id == block_type_id
def test_create_block_document_from_block(self, block_type_x):
@register_type
class MakesALottaAttributes(Block):
real_field: str
authentic_field: str
def block_initialization(self):
self.evil_fake_field = "evil fake data"
my_block = MakesALottaAttributes(real_field="hello", authentic_field="marvin")
api_block = my_block._to_block_document(
name="a-corrupted-api-block",
block_schema_id=uuid4(),
block_type_id=block_type_x.id,
)
assert "real_field" in api_block.data
assert "authentic_field" in api_block.data
assert "evil_fake_field" not in api_block.data
@pytest.mark.parametrize("block_name", ["a_block", "a.block"])
async def test_create_block_document_create_invalid_characters(self, block_name):
"""This gets raised on instantiation of BlockDocumentCreate"""
@register_type
class ABlock(Block):
a_field: str
a_block = ABlock(a_field="my_field")
with pytest.raises(ValidationError, match="name must only contain"):
await a_block.save(block_name)
@pytest.mark.parametrize("block_name", ["a/block", "a\\block"])
async def test_create_block_document_invalid_characters(self, block_name):
"""
This gets raised on instantiation of BlockDocument which shares
INVALID_CHARACTERS with Flow, Deployment, etc.
"""
@register_type
class ABlock(Block):
a_field: str
a_block = ABlock(a_field="my_field")
with pytest.raises(ValidationError, match="name"):
await a_block.save(block_name)
def test_create_block_schema_from_block_without_capabilities(
self, test_block: Type[Block], block_type_x
):
block_schema = test_block._to_block_schema(block_type_id=block_type_x.id)
assert block_schema.checksum == test_block._calculate_schema_checksum()
assert block_schema.fields == test_block.model_json_schema()
assert block_schema.capabilities == [], (
"No capabilities should be defined for this Block and defaults to []"
)
assert block_schema.version == DEFAULT_BLOCK_SCHEMA_VERSION
def test_create_block_schema_from_block_with_capabilities(
self, test_block: Type[Block], block_type_x
):
block_schema = test_block._to_block_schema(block_type_id=block_type_x.id)
assert block_schema.checksum == test_block._calculate_schema_checksum()
assert block_schema.fields == test_block.model_json_schema()
assert block_schema.capabilities == [], (
"No capabilities should be defined for this Block and defaults to []"
)
assert block_schema.version == DEFAULT_BLOCK_SCHEMA_VERSION
def test_create_block_schema_with_no_version_specified(
self, test_block: Type[Block], block_type_x
):
block_schema = test_block._to_block_schema(block_type_id=block_type_x.id)
assert block_schema.version == DEFAULT_BLOCK_SCHEMA_VERSION
def test_create_block_schema_with_version_specified(
self, test_block: Type[Block], block_type_x
):
test_block._block_schema_version = "1.0.0"
block_schema = test_block._to_block_schema(block_type_id=block_type_x.id)
assert block_schema.version == "1.0.0"
async def test_create_block_schema_uses_prefect_version_for_built_in_blocks(self):
try:
await Secret.register_type_and_schema()
except PrefectHTTPStatusError as exc:
if exc.response.status_code == 403:
pass
else:
raise exc
block_schema = Secret._to_block_schema()
assert block_schema.version == Version(prefect.__version__).base_version
def test_collecting_capabilities(self):
class CanRun(Block):
_block_schema_capabilities = ["run"]
class CanFly(Block):
_block_schema_capabilities = ["fly"]
class CanSwim(Block):
_block_schema_capabilities = ["swim"]
class Duck(CanSwim, CanFly):
pass
class Bird(CanFly):
pass
class Crow(Bird, CanRun):
pass
class Cat(CanRun):
pass
class FlyingCat(Cat, Bird):
pass
assert Duck.get_block_capabilities() == {"swim", "fly"}
assert Bird.get_block_capabilities() == {"fly"}
assert Cat.get_block_capabilities() == {"run"}
assert Crow.get_block_capabilities() == {"fly", "run"}
assert FlyingCat.get_block_capabilities() == {"fly", "run"}
def test_create_block_schema_from_nested_blocks(self):
block_schema_id = uuid4()
block_type_id = uuid4()
class NestedBlock(Block):
_block_type_name = "Nested Block"
_block_schema_id = block_schema_id
_block_type_id = block_type_id
x: str
class ParentBlock(Block):
y: str
z: NestedBlock
block_schema = ParentBlock._to_block_schema(block_type_id=block_type_id)
assert block_schema.fields == {
"title": "ParentBlock",
"type": "object",
"properties": {
"y": {"title": "Y", "type": "string"},
"z": {"$ref": "#/definitions/NestedBlock"},
},
"required": ["y", "z"],
"block_type_slug": "parentblock",
"block_schema_references": {
"z": {
"block_schema_checksum": "sha256:85dbfce0d5cfb3b77266422b96c5560f4b9de4ad2ecd74946512e954fb54d650",
"block_type_slug": "nested-block",
}
},
"secret_fields": [],
"definitions": {
"NestedBlock": {
"block_schema_references": {},
"block_type_slug": "nested-block",
"properties": {"x": {"title": "X", "type": "string"}},
"required": ["x"],
"secret_fields": [],
"title": "NestedBlock",
"type": "object",
},
},
}
async def test_block_load(
self, test_block, block_document, in_memory_prefect_client
):
my_block = test_block.load(
block_document.name, client=in_memory_prefect_client, _sync=True
)
assert my_block._block_document_name == block_document.name
assert my_block._block_document_id == block_document.id
assert my_block._block_type_id == block_document.block_type_id
assert my_block._block_schema_id == block_document.block_schema_id
assert my_block.foo == "bar"
my_aloaded_block = await test_block.aload(
block_document.name, client=in_memory_prefect_client
)
assert my_aloaded_block._block_document_name == block_document.name
assert my_aloaded_block._block_document_id == block_document.id
assert my_aloaded_block._block_type_id == block_document.block_type_id
assert my_aloaded_block._block_schema_id == block_document.block_schema_id
assert my_aloaded_block.foo == "bar"
@patch("prefect.blocks.core.load_prefect_collections")
async def test_block_load_loads_collections(
self,
mock_load_prefect_collections,
test_block,
block_document: BlockDocument,
in_memory_prefect_client,
):
Block.load(
block_document.block_type.slug + "/" + block_document.name,
client=in_memory_prefect_client,
_sync=True,
)
mock_load_prefect_collections.assert_called_once()
mock_load_prefect_collections.reset_mock()
await Block.aload(
block_document.block_type.slug + "/" + block_document.name,
client=in_memory_prefect_client,
)
mock_load_prefect_collections.assert_called_once()
async def test_load_from_block_base_class(self):
class Custom(Block):
message: str
my_custom_block = Custom(message="hello")
await my_custom_block.save("my-custom-block")
loaded_block = Block.load("custom/my-custom-block", _sync=True)
assert loaded_block.message == "hello"
aloaded_block = await Block.aload("custom/my-custom-block")
assert aloaded_block.message == "hello"
async def test_load_nested_block(self, session, in_memory_prefect_client):
class B(Block):
_block_schema_type = "abc"
x: int
block_type_b = await models.block_types.create_block_type(
session=session, block_type=B._to_block_type()
)
block_schema_b = await models.block_schemas.create_block_schema(
session=session,
block_schema=B._to_block_schema(block_type_id=block_type_b.id),
)
class C(Block):
y: int
block_type_c = await models.block_types.create_block_type(
session=session, block_type=C._to_block_type()
)
block_schema_c = await models.block_schemas.create_block_schema(
session=session,
block_schema=C._to_block_schema(block_type_id=block_type_c.id),
)
class D(Block):
b: B
z: str
block_type_d = await models.block_types.create_block_type(
session=session, block_type=D._to_block_type()
)
block_schema_d = await models.block_schemas.create_block_schema(
session=session,
block_schema=D._to_block_schema(block_type_id=block_type_d.id),
)
class E(Block):
c: C
d: D
block_type_e = await models.block_types.create_block_type(
session=session, block_type=E._to_block_type()
)
block_schema_e = await models.block_schemas.create_block_schema(
session=session,
block_schema=E._to_block_schema(block_type_id=block_type_e.id),
)
await session.commit()
inner_block_document = await models.block_documents.create_block_document(
session=session,
block_document=BlockDocumentCreate(
name="inner-block-document",
data=dict(x=1),
block_schema_id=block_schema_b.id,
block_type_id=block_schema_b.block_type_id,
),
)
middle_block_document_1 = await models.block_documents.create_block_document(
session=session,
block_document=BlockDocumentCreate(
name="middle-block-document-1",
data=dict(y=2),
block_schema_id=block_schema_c.id,
block_type_id=block_schema_c.block_type_id,
),
)
middle_block_document_2 = await models.block_documents.create_block_document(
session=session,
block_document=BlockDocumentCreate(
name="middle-block-document-2",
data={
"b": {"$ref": {"block_document_id": inner_block_document.id}},
"z": "ztop",
},
block_schema_id=block_schema_d.id,
block_type_id=block_schema_d.block_type_id,
),
)
outer_block_document = await models.block_documents.create_block_document(
session=session,
block_document=BlockDocumentCreate(
name="outer-block-document",
data={
"c": {"$ref": {"block_document_id": middle_block_document_1.id}},
"d": {"$ref": {"block_document_id": middle_block_document_2.id}},
},
block_schema_id=block_schema_e.id,
block_type_id=block_schema_e.block_type_id,
),
)
await session.commit()
block_instance = E.load(
"outer-block-document", client=in_memory_prefect_client, _sync=True
)
assert isinstance(block_instance, E)
assert isinstance(block_instance.c, C)
assert isinstance(block_instance.d, D)
assert block_instance._block_document_name == outer_block_document.name
assert block_instance._block_document_id == outer_block_document.id
assert block_instance._block_type_id == outer_block_document.block_type_id
assert block_instance._block_schema_id == outer_block_document.block_schema_id
assert block_instance.c.model_dump() == {
"y": 2,
"_block_document_id": middle_block_document_1.id,
"_block_document_name": "middle-block-document-1",
"_is_anonymous": False,
"block_type_slug": "c",
}
assert block_instance.d.model_dump() == {
"b": {
"x": 1,
"_block_document_id": inner_block_document.id,
"_block_document_name": "inner-block-document",
"_is_anonymous": False,
"block_type_slug": "b",
},
"z": "ztop",
"_block_document_id": middle_block_document_2.id,
"_block_document_name": "middle-block-document-2",
"_is_anonymous": False,
"block_type_slug": "d",
}
aloaded_block_instance = await E.aload(
"outer-block-document", client=in_memory_prefect_client
)
assert isinstance(aloaded_block_instance, E)
assert isinstance(aloaded_block_instance.c, C)
assert isinstance(aloaded_block_instance.d, D)
assert aloaded_block_instance._block_document_name == outer_block_document.name
assert aloaded_block_instance._block_document_id == outer_block_document.id
assert (
aloaded_block_instance._block_type_id == outer_block_document.block_type_id
)
assert (
aloaded_block_instance._block_schema_id
== outer_block_document.block_schema_id
)
assert aloaded_block_instance.c.model_dump() == {
"y": 2,
"_block_document_id": middle_block_document_1.id,
"_block_document_name": "middle-block-document-1",
"_is_anonymous": False,
"block_type_slug": "c",
}
assert aloaded_block_instance.d.model_dump() == {
"b": {
"x": 1,
"_block_document_id": inner_block_document.id,
"_block_document_name": "inner-block-document",
"_is_anonymous": False,
"block_type_slug": "b",
},
"z": "ztop",
"_block_document_id": middle_block_document_2.id,
"_block_document_name": "middle-block-document-2",
"_is_anonymous": False,
"block_type_slug": "d",
}
async def test_create_block_from_nonexistent_name(self, test_block):
with pytest.raises(
ValueError,
match="Unable to find block document named blocky for block type x",
):
test_block.load("blocky", _sync=True)
with pytest.raises(
ValueError,
match="Unable to find block document named blocky for block type x",
):
await test_block.aload("blocky")
async def test_save_block_from_flow(self):
class Test(Block):
a: str
@prefect.flow
def save_block_flow():
Test(a="foo").save("test")
save_block_flow()
block = await Test.load("test")
assert block.a == "foo"
async def test_save_protected_block_with_new_block_schema_version(
self, session, prefect_client: PrefectClient
):
"""
This testcase would fail when block protection was enabled for block type
updates and block schema creation.
"""
await models.block_registration.run_block_auto_registration(session=session)
await session.commit()
mock_version = (
uuid4().hex
) # represents a version that does not exist on the server
Secret._block_schema_version = mock_version
block_document_id = await Secret(value="secret").save("test")
block_document = await prefect_client.read_block_document(block_document_id)
assert block_document.block_schema.version == mock_version
@pytest.mark.skipif(
sys.version_info < (3, 10), reason="requires python3.10 or higher for `| None`"
)
def test_maintain_secrets_after_load_for_union_type(self):
"""
Regression test for https://github.com/PrefectHQ/prefect/issues/18486
"""
block_name = f"test-conf-{uuid4()}"
class NestedModel(BaseModel):
password: SecretStr | None = None
class Conf(Block):
nested: NestedModel
nested = NestedModel(password="1234")
Conf(nested=nested).save(
block_name,
overwrite=True,
)
conf: Conf = Conf.load(block_name)
assert conf.nested.password.get_secret_value() == "1234"
| TestAPICompatibility |
python | pandas-dev__pandas | pandas/tests/indexes/timedeltas/methods/test_factorize.py | {
"start": 130,
"end": 1292
} | class ____:
def test_factorize(self):
idx1 = TimedeltaIndex(["1 day", "1 day", "2 day", "2 day", "3 day", "3 day"])
exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
exp_idx = TimedeltaIndex(["1 day", "2 day", "3 day"])
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
assert idx.freq == exp_idx.freq
arr, idx = idx1.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
assert idx.freq == exp_idx.freq
def test_factorize_preserves_freq(self):
# GH#38120 freq should be preserved
idx3 = timedelta_range("1 day", periods=4, freq="s")
exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
arr, idx = idx3.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
assert idx.freq == idx3.freq
arr, idx = factorize(idx3)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
assert idx.freq == idx3.freq
| TestTimedeltaIndexFactorize |
python | tensorflow__tensorflow | tensorflow/tools/compatibility/ast_edits.py | {
"start": 7054,
"end": 28047
} | class ____(ast.NodeVisitor):
"""AST Visitor that processes function calls.
Updates function calls from old API version to new API version using a given
change spec.
"""
def __init__(self, api_change_spec):
self._api_change_spec = api_change_spec
self._log = [] # Holds 4-tuples: severity, line, col, msg.
self._stack = [] # Allow easy access to parents.
# Overridden to maintain a stack of nodes to allow for parent access
def visit(self, node):
self._stack.append(node)
super(_PastaEditVisitor, self).visit(node)
self._stack.pop()
@property
def errors(self):
return [log for log in self._log if log[0] == ERROR]
@property
def warnings(self):
return [log for log in self._log if log[0] == WARNING]
@property
def warnings_and_errors(self):
return [log for log in self._log if log[0] in (WARNING, ERROR)]
@property
def info(self):
return [log for log in self._log if log[0] == INFO]
@property
def log(self):
return self._log
def add_log(self, severity, lineno, col, msg):
self._log.append((severity, lineno, col, msg))
print("%s line %d:%d: %s" % (severity, lineno, col, msg))
def add_logs(self, logs):
"""Record a log and print it.
The log should be a tuple `(severity, lineno, col_offset, msg)`, which will
be printed and recorded. It is part of the log available in the `self.log`
property.
Args:
logs: The logs to add. Must be a list of tuples
`(severity, lineno, col_offset, msg)`.
"""
self._log.extend(logs)
for log in logs:
print("%s line %d:%d: %s" % log)
def _get_applicable_entries(self, transformer_field, full_name, name):
"""Get all list entries indexed by name that apply to full_name or name."""
# Transformers are indexed to full name, name, or no name
# as a performance optimization.
function_transformers = getattr(self._api_change_spec,
transformer_field, {})
glob_name = "*." + name if name else None
transformers = []
if full_name in function_transformers:
transformers.append(function_transformers[full_name])
if glob_name in function_transformers:
transformers.append(function_transformers[glob_name])
if "*" in function_transformers:
transformers.append(function_transformers["*"])
return transformers
def _get_applicable_dict(self, transformer_field, full_name, name):
"""Get all dict entries indexed by name that apply to full_name or name."""
# Transformers are indexed to full name, name, or no name
# as a performance optimization.
function_transformers = getattr(self._api_change_spec,
transformer_field, {})
glob_name = "*." + name if name else None
transformers = function_transformers.get("*", {}).copy()
transformers.update(function_transformers.get(glob_name, {}))
transformers.update(function_transformers.get(full_name, {}))
return transformers
def _get_full_name(self, node):
"""Traverse an Attribute node to generate a full name, e.g., "tf.foo.bar".
This is the inverse of `full_name_node`.
Args:
node: A Node of type Attribute.
Returns:
a '.'-delimited full-name or None if node was not Attribute or Name.
i.e. `foo()+b).bar` returns None, while `a.b.c` would return "a.b.c".
"""
curr = node
items = []
while not isinstance(curr, ast.Name):
if not isinstance(curr, ast.Attribute):
return None
items.append(curr.attr)
curr = curr.value
items.append(curr.id)
return ".".join(reversed(items))
def _maybe_add_warning(self, node, full_name):
"""Adds an error to be printed about full_name at node."""
function_warnings = self._api_change_spec.function_warnings
if full_name in function_warnings:
level, message = function_warnings[full_name]
message = message.replace("<function name>", full_name)
self.add_log(level, node.lineno, node.col_offset,
"%s requires manual check. %s" % (full_name, message))
return True
else:
return False
def _maybe_add_module_deprecation_warning(self, node, full_name, whole_name):
"""Adds a warning if full_name is a deprecated module."""
warnings = self._api_change_spec.module_deprecations
if full_name in warnings:
level, message = warnings[full_name]
message = message.replace("<function name>", whole_name)
self.add_log(level, node.lineno, node.col_offset,
"Using member %s in deprecated module %s. %s" % (whole_name,
full_name,
message))
return True
else:
return False
def _maybe_add_call_warning(self, node, full_name, name):
"""Print a warning when specific functions are called with selected args.
The function _print_warning_for_function matches the full name of the called
function, e.g., tf.foo.bar(). This function matches the function name that
is called, as long as the function is an attribute. For example,
`tf.foo.bar()` and `foo.bar()` are matched, but not `bar()`.
Args:
node: ast.Call object
full_name: The precomputed full name of the callable, if one exists, None
otherwise.
name: The precomputed name of the callable, if one exists, None otherwise.
Returns:
Whether an error was recorded.
"""
# Only look for *.-warnings here, the other will be handled by the Attribute
# visitor. Also, do not warn for bare functions, only if the call func is
# an attribute.
warned = False
if isinstance(node.func, ast.Attribute):
warned = self._maybe_add_warning(node, "*." + name)
# All arg warnings are handled here, since only we have the args
arg_warnings = self._get_applicable_dict("function_arg_warnings",
full_name, name)
variadic_args = uses_star_args_or_kwargs_in_call(node)
for (kwarg, arg), (level, warning) in sorted(arg_warnings.items()):
present, _ = get_arg_value(node, kwarg, arg) or variadic_args
if present:
warned = True
warning_message = warning.replace("<function name>", full_name or name)
template = "%s called with %s argument, requires manual check: %s"
if variadic_args:
template = ("%s called with *args or **kwargs that may include %s, "
"requires manual check: %s")
self.add_log(level, node.lineno, node.col_offset,
template % (full_name or name, kwarg, warning_message))
return warned
def _maybe_rename(self, parent, node, full_name):
"""Replace node (Attribute or Name) with a node representing full_name."""
new_name = self._api_change_spec.symbol_renames.get(full_name, None)
if new_name:
self.add_log(INFO, node.lineno, node.col_offset,
"Renamed %r to %r" % (full_name, new_name))
new_node = full_name_node(new_name, node.ctx)
ast.copy_location(new_node, node)
pasta.ast_utils.replace_child(parent, node, new_node)
return True
else:
return False
def _maybe_change_to_function_call(self, parent, node, full_name):
"""Wraps node (typically, an Attribute or Expr) in a Call."""
if full_name in self._api_change_spec.change_to_function:
if not isinstance(parent, ast.Call):
# ast.Call's constructor is really picky about how many arguments it
# wants, and also, it changed between Py2 and Py3.
new_node = ast.Call(node, [], [])
pasta.ast_utils.replace_child(parent, node, new_node)
ast.copy_location(new_node, node)
self.add_log(INFO, node.lineno, node.col_offset,
"Changed %r to a function call" % full_name)
return True
return False
def _maybe_add_arg_names(self, node, full_name):
"""Make args into keyword args if function called full_name requires it."""
function_reorders = self._api_change_spec.function_reorders
if full_name in function_reorders:
if uses_star_args_in_call(node):
self.add_log(WARNING, node.lineno, node.col_offset,
"(Manual check required) upgrading %s may require "
"re-ordering the call arguments, but it was passed "
"variable-length positional *args. The upgrade "
"script cannot handle these automatically." % full_name)
reordered = function_reorders[full_name]
new_args = []
new_keywords = []
idx = 0
for arg in node.args:
if sys.version_info[:2] >= (3, 5) and isinstance(arg, ast.Starred):
continue # Can't move Starred to keywords
keyword_arg = reordered[idx]
if keyword_arg:
new_keywords.append(ast.keyword(arg=keyword_arg, value=arg))
else:
new_args.append(arg)
idx += 1
if new_keywords:
self.add_log(INFO, node.lineno, node.col_offset,
"Added keywords to args of function %r" % full_name)
node.args = new_args
node.keywords = new_keywords + (node.keywords or [])
return True
return False
def _maybe_modify_args(self, node, full_name, name):
"""Rename keyword args if the function called full_name requires it."""
renamed_keywords = self._get_applicable_dict("function_keyword_renames",
full_name, name)
if not renamed_keywords:
return False
if uses_star_kwargs_in_call(node):
self.add_log(WARNING, node.lineno, node.col_offset,
"(Manual check required) upgrading %s may require "
"renaming or removing call arguments, but it was passed "
"variable-length *args or **kwargs. The upgrade "
"script cannot handle these automatically." %
(full_name or name))
modified = False
new_keywords = []
for keyword in node.keywords:
argkey = keyword.arg
if argkey in renamed_keywords:
modified = True
if renamed_keywords[argkey] is None:
lineno = getattr(keyword, "lineno", node.lineno)
col_offset = getattr(keyword, "col_offset", node.col_offset)
self.add_log(INFO, lineno, col_offset,
"Removed argument %s for function %s" % (
argkey, full_name or name))
else:
keyword.arg = renamed_keywords[argkey]
lineno = getattr(keyword, "lineno", node.lineno)
col_offset = getattr(keyword, "col_offset", node.col_offset)
self.add_log(INFO, lineno, col_offset,
"Renamed keyword argument for %s from %s to %s" % (
full_name, argkey, renamed_keywords[argkey]))
new_keywords.append(keyword)
else:
new_keywords.append(keyword)
if modified:
node.keywords = new_keywords
return modified
def visit_Call(self, node): # pylint: disable=invalid-name
"""Handle visiting a call node in the AST.
Args:
node: Current Node
"""
assert self._stack[-1] is node
# Get the name for this call, so we can index stuff with it.
full_name = self._get_full_name(node.func)
if full_name:
name = full_name.split(".")[-1]
elif isinstance(node.func, ast.Name):
name = node.func.id
elif isinstance(node.func, ast.Attribute):
name = node.func.attr
else:
name = None
# Call standard transformers for this node.
# Make sure warnings come first, since args or names triggering warnings
# may be removed by the other transformations.
self._maybe_add_call_warning(node, full_name, name)
# Make all args into kwargs
self._maybe_add_arg_names(node, full_name)
# Argument name changes or deletions
self._maybe_modify_args(node, full_name, name)
# Call transformers. These have the ability to modify the node, and if they
# do, will return the new node they created (or the same node if they just
# changed it). The are given the parent, but we will take care of
# integrating their changes into the parent if they return a new node.
#
# These are matched on the old name, since renaming is performed by the
# Attribute visitor, which happens later.
transformers = self._get_applicable_entries("function_transformers",
full_name, name)
parent = self._stack[-2]
if transformers:
if uses_star_args_or_kwargs_in_call(node):
self.add_log(WARNING, node.lineno, node.col_offset,
"(Manual check required) upgrading %s may require "
"modifying call arguments, but it was passed "
"variable-length *args or **kwargs. The upgrade "
"script cannot handle these automatically." %
(full_name or name))
for transformer in transformers:
logs = []
new_node = transformer(parent, node, full_name, name, logs)
self.add_logs(logs)
if new_node and new_node is not node:
pasta.ast_utils.replace_child(parent, node, new_node)
node = new_node
self._stack[-1] = node
self.generic_visit(node)
def visit_Attribute(self, node): # pylint: disable=invalid-name
"""Handle bare Attributes i.e. [tf.foo, tf.bar]."""
assert self._stack[-1] is node
full_name = self._get_full_name(node)
if full_name:
parent = self._stack[-2]
# Make sure the warning comes first, otherwise the name may have changed
self._maybe_add_warning(node, full_name)
# Once we did a modification, node is invalid and not worth inspecting
# further. Also, we only perform modifications for simple nodes, so
# There'd be no point in descending further.
if self._maybe_rename(parent, node, full_name):
return
if self._maybe_change_to_function_call(parent, node, full_name):
return
# The isinstance check is enough -- a bare Attribute is never root.
i = 2
while isinstance(self._stack[-i], ast.Attribute):
i += 1
whole_name = pasta.dump(self._stack[-(i-1)])
self._maybe_add_module_deprecation_warning(node, full_name, whole_name)
self.generic_visit(node)
def visit_Import(self, node): # pylint: disable=invalid-name
"""Handle visiting an import node in the AST.
Args:
node: Current Node
"""
new_aliases = []
import_updated = False
import_renames = getattr(self._api_change_spec, "import_renames", {})
max_submodule_depth = getattr(self._api_change_spec, "max_submodule_depth",
1)
inserts_after_imports = getattr(self._api_change_spec,
"inserts_after_imports", {})
# This loop processes imports in the format
# import foo as f, bar as b
for import_alias in node.names:
all_import_components = import_alias.name.split(".")
# Look for rename, starting with longest import levels.
found_update = False
for i in reversed(list(range(1, max_submodule_depth + 1))):
import_component = all_import_components[0]
for j in range(1, min(i, len(all_import_components))):
import_component += "." + all_import_components[j]
import_rename_spec = import_renames.get(import_component, None)
if not import_rename_spec or excluded_from_module_rename(
import_alias.name, import_rename_spec):
continue
new_name = (
import_rename_spec.new_name +
import_alias.name[len(import_component):])
# If current import is
# import foo
# then new import should preserve imported name:
# import new_foo as foo
# This happens when module has just one component.
new_asname = import_alias.asname
if not new_asname and "." not in import_alias.name:
new_asname = import_alias.name
new_alias = ast.alias(name=new_name, asname=new_asname)
new_aliases.append(new_alias)
import_updated = True
found_update = True
# Insert any followup lines that should happen after this import.
full_import = (import_alias.name, import_alias.asname)
insert_offset = 1
for line_to_insert in inserts_after_imports.get(full_import, []):
assert self._stack[-1] is node
parent = self._stack[-2]
new_line_node = pasta.parse(line_to_insert)
ast.copy_location(new_line_node, node)
parent.body.insert(
parent.body.index(node) + insert_offset, new_line_node)
insert_offset += 1
# Insert a newline after the import if necessary
old_suffix = pasta.base.formatting.get(node, "suffix")
if old_suffix is None:
old_suffix = os.linesep
if os.linesep not in old_suffix:
pasta.base.formatting.set(node, "suffix", old_suffix + os.linesep)
# Apply indentation to new node.
pasta.base.formatting.set(new_line_node, "prefix",
pasta.base.formatting.get(node, "prefix"))
pasta.base.formatting.set(new_line_node, "suffix", os.linesep)
self.add_log(
INFO, node.lineno, node.col_offset,
"Adding `%s` after import of %s" %
(new_line_node, import_alias.name))
# Find one match, break
if found_update:
break
# No rename is found for all levels
if not found_update:
new_aliases.append(import_alias) # no change needed
# Replace the node if at least one import needs to be updated.
if import_updated:
assert self._stack[-1] is node
parent = self._stack[-2]
new_node = ast.Import(new_aliases)
ast.copy_location(new_node, node)
pasta.ast_utils.replace_child(parent, node, new_node)
self.add_log(
INFO, node.lineno, node.col_offset,
"Changed import from %r to %r." %
(pasta.dump(node), pasta.dump(new_node)))
self.generic_visit(node)
def visit_ImportFrom(self, node): # pylint: disable=invalid-name
"""Handle visiting an import-from node in the AST.
Args:
node: Current Node
"""
if not node.module:
self.generic_visit(node)
return
from_import = node.module
# Look for rename based on first component of from-import.
# i.e. based on foo in foo.bar.
from_import_first_component = from_import.split(".")[0]
import_renames = getattr(self._api_change_spec, "import_renames", {})
import_rename_spec = import_renames.get(from_import_first_component, None)
if not import_rename_spec:
self.generic_visit(node)
return
# Split module aliases into the ones that require import update
# and those that don't. For e.g. if we want to rename "a" to "b"
# unless we import "a.c" in the following:
# from a import c, d
# we want to update import for "d" but not for "c".
updated_aliases = []
same_aliases = []
for import_alias in node.names:
full_module_name = "%s.%s" % (from_import, import_alias.name)
if excluded_from_module_rename(full_module_name, import_rename_spec):
same_aliases.append(import_alias)
else:
updated_aliases.append(import_alias)
if not updated_aliases:
self.generic_visit(node)
return
assert self._stack[-1] is node
parent = self._stack[-2]
# Replace first component of from-import with new name.
new_from_import = (
import_rename_spec.new_name +
from_import[len(from_import_first_component):])
updated_node = ast.ImportFrom(new_from_import, updated_aliases, node.level)
ast.copy_location(updated_node, node)
pasta.ast_utils.replace_child(parent, node, updated_node)
# If some imports had to stay the same, add another import for them.
additional_import_log = ""
if same_aliases:
same_node = ast.ImportFrom(from_import, same_aliases, node.level,
col_offset=node.col_offset, lineno=node.lineno)
ast.copy_location(same_node, node)
parent.body.insert(parent.body.index(updated_node), same_node)
# Apply indentation to new node.
pasta.base.formatting.set(
same_node, "prefix",
pasta.base.formatting.get(updated_node, "prefix"))
additional_import_log = " and %r" % pasta.dump(same_node)
self.add_log(
INFO, node.lineno, node.col_offset,
"Changed import from %r to %r%s." %
(pasta.dump(node),
pasta.dump(updated_node),
additional_import_log))
self.generic_visit(node)
| _PastaEditVisitor |
python | py-pdf__pypdf | pypdf/constants.py | {
"start": 434,
"end": 571
} | class ____:
SIZE = "/Size"
PREV = "/Prev"
ROOT = "/Root"
ENCRYPT = "/Encrypt"
INFO = "/Info"
ID = "/ID"
| TrailerKeys |
python | anthropics__anthropic-sdk-python | src/anthropic/lib/bedrock/_client.py | {
"start": 10172,
"end": 15915
} | class ____(BaseBedrockClient[httpx.AsyncClient, AsyncStream[Any]], AsyncAPIClient):
messages: AsyncMessages
completions: AsyncCompletions
beta: AsyncBeta
def __init__(
self,
aws_secret_key: str | None = None,
aws_access_key: str | None = None,
aws_region: str | None = None,
aws_profile: str | None = None,
aws_session_token: str | None = None,
base_url: str | httpx.URL | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
max_retries: int = DEFAULT_MAX_RETRIES,
default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
# Configure a custom httpx client. See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details.
http_client: httpx.AsyncClient | None = None,
# Enable or disable schema validation for data returned by the API.
# When enabled an error APIResponseValidationError is raised
# if the API responds with invalid data for the expected schema.
#
# This parameter may be removed or changed in the future.
# If you rely on this feature, please open a GitHub issue
# outlining your use-case to help us decide if it should be
# part of our public interface in the future.
_strict_response_validation: bool = False,
) -> None:
self.aws_secret_key = aws_secret_key
self.aws_access_key = aws_access_key
self.aws_region = _infer_region() if aws_region is None else aws_region
self.aws_profile = aws_profile
self.aws_session_token = aws_session_token
if base_url is None:
base_url = os.environ.get("ANTHROPIC_BEDROCK_BASE_URL")
if base_url is None:
base_url = f"https://bedrock-runtime.{self.aws_region}.amazonaws.com"
super().__init__(
version=__version__,
base_url=base_url,
timeout=timeout,
max_retries=max_retries,
custom_headers=default_headers,
custom_query=default_query,
http_client=http_client,
_strict_response_validation=_strict_response_validation,
)
self.messages = AsyncMessages(self)
self.completions = AsyncCompletions(self)
self.beta = AsyncBeta(self)
@override
def _make_sse_decoder(self) -> AWSEventStreamDecoder:
return AWSEventStreamDecoder()
@override
async def _prepare_options(self, options: FinalRequestOptions) -> FinalRequestOptions:
return _prepare_options(options)
@override
async def _prepare_request(self, request: httpx.Request) -> None:
from ._auth import get_auth_headers
data = request.read().decode()
headers = get_auth_headers(
method=request.method,
url=str(request.url),
headers=request.headers,
aws_access_key=self.aws_access_key,
aws_secret_key=self.aws_secret_key,
aws_session_token=self.aws_session_token,
region=self.aws_region or "us-east-1",
profile=self.aws_profile,
data=data,
)
request.headers.update(headers)
def copy(
self,
*,
aws_secret_key: str | None = None,
aws_access_key: str | None = None,
aws_region: str | None = None,
aws_session_token: str | None = None,
base_url: str | httpx.URL | None = None,
timeout: float | Timeout | None | NotGiven = NOT_GIVEN,
http_client: httpx.AsyncClient | None = None,
max_retries: int | NotGiven = NOT_GIVEN,
default_headers: Mapping[str, str] | None = None,
set_default_headers: Mapping[str, str] | None = None,
default_query: Mapping[str, object] | None = None,
set_default_query: Mapping[str, object] | None = None,
_extra_kwargs: Mapping[str, Any] = {},
) -> Self:
"""
Create a new client instance re-using the same options given to the current client with optional overriding.
"""
if default_headers is not None and set_default_headers is not None:
raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
if default_query is not None and set_default_query is not None:
raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
headers = self._custom_headers
if default_headers is not None:
headers = {**headers, **default_headers}
elif set_default_headers is not None:
headers = set_default_headers
params = self._custom_query
if default_query is not None:
params = {**params, **default_query}
elif set_default_query is not None:
params = set_default_query
return self.__class__(
aws_secret_key=aws_secret_key or self.aws_secret_key,
aws_access_key=aws_access_key or self.aws_access_key,
aws_region=aws_region or self.aws_region,
aws_session_token=aws_session_token or self.aws_session_token,
base_url=base_url or self.base_url,
timeout=self.timeout if isinstance(timeout, NotGiven) else timeout,
http_client=http_client,
max_retries=max_retries if is_given(max_retries) else self.max_retries,
default_headers=headers,
default_query=params,
**_extra_kwargs,
)
# Alias for `copy` for nicer inline usage, e.g.
# client.with_options(timeout=10).foo.create(...)
with_options = copy
| AsyncAnthropicBedrock |
python | mlflow__mlflow | mlflow/genai/judges/tools/list_spans.py | {
"start": 666,
"end": 1552
} | class ____:
"""Result from listing spans with optional pagination."""
spans: list[SpanInfo]
next_page_token: str | None = None
def _create_span_info(span) -> SpanInfo:
"""Create SpanInfo from a span object."""
start_time_ms = span.start_time_ns / 1_000_000
end_time_ms = span.end_time_ns / 1_000_000
duration_ms = end_time_ms - start_time_ms
# Get attribute names
attribute_names = list(span.attributes.keys()) if span.attributes else []
return SpanInfo(
span_id=span.span_id,
name=span.name,
span_type=span.span_type,
start_time_ms=start_time_ms,
end_time_ms=end_time_ms,
duration_ms=duration_ms,
parent_id=span.parent_id,
status=span.status,
is_root=(span.parent_id is None),
attribute_names=attribute_names,
)
@experimental(version="3.4.0")
| ListSpansResult |
python | facebookresearch__faiss | contrib/big_batch_search.py | {
"start": 452,
"end": 5556
} | class ____:
"""
Object that manages all the data related to the computation
except the actual within-bucket matching and the organization of the
computation (parallel or not)
"""
def __init__(
self,
index, xq, k,
verbose=0,
use_float16=False):
# verbosity
self.verbose = verbose
self.tictoc = []
self.xq = xq
self.index = index
self.use_float16 = use_float16
keep_max = faiss.is_similarity_metric(index.metric_type)
self.rh = faiss.ResultHeap(len(xq), k, keep_max=keep_max)
self.t_accu = [0] * 6
self.t_display = self.t0 = time.time()
def start_t_accu(self):
self.t_accu_t0 = time.time()
def stop_t_accu(self, n):
self.t_accu[n] += time.time() - self.t_accu_t0
def tic(self, name):
self.tictoc = (name, time.time())
if self.verbose > 0:
print(name, end="\r", flush=True)
def toc(self):
name, t0 = self.tictoc
dt = time.time() - t0
if self.verbose > 0:
print(f"{name}: {dt:.3f} s")
return dt
def report(self, l):
if self.verbose == 1 or (
self.verbose == 2 and (
l > 1000 and time.time() < self.t_display + 1.0
)
):
return
t = time.time() - self.t0
print(
f"[{t:.1f} s] list {l}/{self.index.nlist} "
f"times prep q {self.t_accu[0]:.3f} prep b {self.t_accu[1]:.3f} "
f"comp {self.t_accu[2]:.3f} res {self.t_accu[3]:.3f} "
f"wait in {self.t_accu[4]:.3f} "
f"wait out {self.t_accu[5]:.3f} "
f"eta {datetime.timedelta(seconds=t*self.index.nlist/(l+1)-t)} "
f"mem {faiss.get_mem_usage_kb()}",
end="\r" if self.verbose <= 2 else "\n",
flush=True,
)
self.t_display = time.time()
def coarse_quantization(self):
self.tic("coarse quantization")
bs = 65536
nq = len(self.xq)
q_assign = np.empty((nq, self.index.nprobe), dtype='int32')
for i0 in range(0, nq, bs):
i1 = min(nq, i0 + bs)
q_dis_i, q_assign_i = self.index.quantizer.search(
self.xq[i0:i1], self.index.nprobe)
# q_dis[i0:i1] = q_dis_i
q_assign[i0:i1] = q_assign_i
self.toc()
self.q_assign = q_assign
def reorder_assign(self):
self.tic("bucket sort")
q_assign = self.q_assign
q_assign += 1 # move -1 -> 0
self.bucket_lims = faiss.matrix_bucket_sort_inplace(
self.q_assign, nbucket=self.index.nlist + 1, nt=16)
self.query_ids = self.q_assign.ravel()
if self.verbose > 0:
print(' number of -1s:', self.bucket_lims[1])
self.bucket_lims = self.bucket_lims[1:] # shift back to ignore -1s
del self.q_assign # inplace so let's forget about the old version...
self.toc()
def prepare_bucket(self, l):
""" prepare the queries and database items for bucket l"""
t0 = time.time()
index = self.index
# prepare queries
i0, i1 = self.bucket_lims[l], self.bucket_lims[l + 1]
q_subset = self.query_ids[i0:i1]
xq_l = self.xq[q_subset]
if self.by_residual:
xq_l = xq_l - index.quantizer.reconstruct(l)
t1 = time.time()
# prepare database side
list_ids, xb_l = get_invlist(index.invlists, l)
if self.decode_func is None:
xb_l = xb_l.ravel()
else:
xb_l = self.decode_func(xb_l)
if self.use_float16:
xb_l = xb_l.astype('float16')
xq_l = xq_l.astype('float16')
t2 = time.time()
self.t_accu[0] += t1 - t0
self.t_accu[1] += t2 - t1
return q_subset, xq_l, list_ids, xb_l
def add_results_to_heap(self, q_subset, D, list_ids, I):
"""add the bucket results to the heap structure"""
if D is None:
return
t0 = time.time()
if I is None:
I = list_ids
else:
I = list_ids[I]
self.rh.add_result_subset(q_subset, D, I)
self.t_accu[3] += time.time() - t0
def sizes_in_checkpoint(self):
return (self.xq.shape, self.index.nprobe, self.index.nlist)
def write_checkpoint(self, fname, completed):
# write to temp file then move to final file
tmpname = fname + ".tmp"
with open(tmpname, "wb") as f:
pickle.dump(
{
"sizes": self.sizes_in_checkpoint(),
"completed": completed,
"rh": (self.rh.D, self.rh.I),
}, f, -1)
os.replace(tmpname, fname)
def read_checkpoint(self, fname):
with open(fname, "rb") as f:
ckp = pickle.load(f)
assert ckp["sizes"] == self.sizes_in_checkpoint()
self.rh.D[:] = ckp["rh"][0]
self.rh.I[:] = ckp["rh"][1]
return ckp["completed"]
| BigBatchSearcher |
python | openai__openai-python | src/openai/types/evals/create_eval_completions_run_data_source_param.py | {
"start": 5475,
"end": 7587
} | class ____(TypedDict, total=False):
max_completion_tokens: int
"""The maximum number of tokens in the generated output."""
reasoning_effort: Optional[ReasoningEffort]
"""
Constrains effort on reasoning for
[reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
supported values are `none`, `minimal`, `low`, `medium`, and `high`. Reducing
reasoning effort can result in faster responses and fewer tokens used on
reasoning in a response.
- `gpt-5.1` defaults to `none`, which does not perform reasoning. The supported
reasoning values for `gpt-5.1` are `none`, `low`, `medium`, and `high`. Tool
calls are supported for all reasoning values in gpt-5.1.
- All models before `gpt-5.1` default to `medium` reasoning effort, and do not
support `none`.
- The `gpt-5-pro` model defaults to (and only supports) `high` reasoning effort.
"""
response_format: SamplingParamsResponseFormat
"""An object specifying the format that the model must output.
Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
Outputs which ensures the model will match your supplied JSON schema. Learn more
in the
[Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
Setting to `{ "type": "json_object" }` enables the older JSON mode, which
ensures the message the model generates is valid JSON. Using `json_schema` is
preferred for models that support it.
"""
seed: int
"""A seed value to initialize the randomness, during sampling."""
temperature: float
"""A higher temperature increases randomness in the outputs."""
tools: Iterable[ChatCompletionFunctionToolParam]
"""A list of tools the model may call.
Currently, only functions are supported as a tool. Use this to provide a list of
functions the model may generate JSON inputs for. A max of 128 functions are
supported.
"""
top_p: float
"""An alternative to temperature for nucleus sampling; 1.0 includes all tokens."""
| SamplingParams |
python | microsoft__pyright | packages/pyright-internal/src/tests/samples/typedDictClosed4.py | {
"start": 854,
"end": 1043
} | class ____(TypedDict, extra_items=int):
name: str
year: NotRequired[int]
details3: MovieDetails3 = {"name": "Kill Bill Vol. 2", "year": 2004}
movie3: Movie3 = details3
| MovieDetails3 |
python | fsspec__filesystem_spec | fsspec/implementations/git.py | {
"start": 103,
"end": 3731
} | class ____(AbstractFileSystem):
"""Browse the files of a local git repo at any hash/tag/branch
(experimental backend)
"""
root_marker = ""
cachable = True
def __init__(self, path=None, fo=None, ref=None, **kwargs):
"""
Parameters
----------
path: str (optional)
Local location of the repo (uses current directory if not given).
May be deprecated in favour of ``fo``. When used with a higher
level function such as fsspec.open(), may be of the form
"git://[path-to-repo[:]][ref@]path/to/file" (but the actual
file path should not contain "@" or ":").
fo: str (optional)
Same as ``path``, but passed as part of a chained URL. This one
takes precedence if both are given.
ref: str (optional)
Reference to work with, could be a hash, tag or branch name. Defaults
to current working tree. Note that ``ls`` and ``open`` also take hash,
so this becomes the default for those operations
kwargs
"""
super().__init__(**kwargs)
self.repo = pygit2.Repository(fo or path or os.getcwd())
self.ref = ref or "master"
@classmethod
def _strip_protocol(cls, path):
path = super()._strip_protocol(path).lstrip("/")
if ":" in path:
path = path.split(":", 1)[1]
if "@" in path:
path = path.split("@", 1)[1]
return path.lstrip("/")
def _path_to_object(self, path, ref):
comm, ref = self.repo.resolve_refish(ref or self.ref)
parts = path.split("/")
tree = comm.tree
for part in parts:
if part and isinstance(tree, pygit2.Tree):
if part not in tree:
raise FileNotFoundError(path)
tree = tree[part]
return tree
@staticmethod
def _get_kwargs_from_urls(path):
path = path.removeprefix("git://")
out = {}
if ":" in path:
out["path"], path = path.split(":", 1)
if "@" in path:
out["ref"], path = path.split("@", 1)
return out
@staticmethod
def _object_to_info(obj, path=None):
# obj.name and obj.filemode are None for the root tree!
is_dir = isinstance(obj, pygit2.Tree)
return {
"type": "directory" if is_dir else "file",
"name": (
"/".join([path, obj.name or ""]).lstrip("/") if path else obj.name
),
"hex": str(obj.id),
"mode": "100644" if obj.filemode is None else f"{obj.filemode:o}",
"size": 0 if is_dir else obj.size,
}
def ls(self, path, detail=True, ref=None, **kwargs):
tree = self._path_to_object(self._strip_protocol(path), ref)
return [
GitFileSystem._object_to_info(obj, path)
if detail
else GitFileSystem._object_to_info(obj, path)["name"]
for obj in (tree if isinstance(tree, pygit2.Tree) else [tree])
]
def info(self, path, ref=None, **kwargs):
tree = self._path_to_object(self._strip_protocol(path), ref)
return GitFileSystem._object_to_info(tree, path)
def ukey(self, path, ref=None):
return self.info(path, ref=ref)["hex"]
def _open(
self,
path,
mode="rb",
block_size=None,
autocommit=True,
cache_options=None,
ref=None,
**kwargs,
):
obj = self._path_to_object(path, ref or self.ref)
return MemoryFile(data=obj.data)
| GitFileSystem |
python | charliermarsh__ruff | crates/ruff_linter/resources/test/fixtures/pylint/invalid_return_type_bytes.py | {
"start": 786,
"end": 831
} | class ____:
def __bytes__(self): ...
| Bytes3 |
python | bokeh__bokeh | tests/unit/bokeh/plotting/test_figure.py | {
"start": 12726,
"end": 13527
} | class ____:
def test_returns_renderers(self) -> None:
fruits = ['Apples', 'Pears', 'Nectarines', 'Plums', 'Grapes', 'Strawberries']
years = ["2015", "2016", "2017"]
colors = ["#c9d9d3", "#718dbf", "#e84d60"]
data = {'fruits' : fruits,
'2015' : [2, 1, 4, 3, 2, 4],
'2016' : [5, 3, 4, 2, 4, 6],
'2017' : [3, 2, 4, 4, 5, 3]}
source = ColumnDataSource(data=data)
p = bpf.figure()
renderers = p.hbar_stack(years, y='fruits', height=0.9, color=colors, source=source,
legend_label=years, name=years)
assert len(renderers) == 3
assert renderers[0].name == "2015"
assert renderers[1].name == "2016"
assert renderers[2].name == "2017"
| Test_hbar_stack |
python | pydantic__pydantic | tests/mypy/outputs/mypy-plugin_ini/root_models.py | {
"start": 468,
"end": 645
} | class ____(RootModel[list[str]]):
pets: list[str]
# MYPY: error: Only `root` is allowed as a field of a `RootModel` [pydantic-field]
T = TypeVar('T')
V = TypeVar('V')
| Pets4 |
python | django-guardian__django-guardian | example_project/articles/views.py | {
"start": 416,
"end": 543
} | class ____(PermissionRequiredMixin, DetailView):
model = Article
permission_required = ["view_article"]
| ArticleDetailView |
python | redis__redis-py | redis/commands/search/field.py | {
"start": 3214,
"end": 3593
} | class ____(Field):
"""
GeoShapeField is used to enable within/contain indexing/searching
"""
SPHERICAL = "SPHERICAL"
FLAT = "FLAT"
def __init__(self, name: str, coord_system=None, **kwargs):
args = [Field.GEOSHAPE]
if coord_system:
args.append(coord_system)
Field.__init__(self, name, args=args, **kwargs)
| GeoShapeField |
python | getsentry__sentry | src/sentry/hybridcloud/services/organization_mapping/model.py | {
"start": 883,
"end": 1623
} | class ____(RpcModel):
name: str = ""
status: int = 0
slug: str = ""
region_name: str = ""
# When not set, no change to customer id performed,
# when set with a CustomerId, the customer_id set to either None or string
customer_id: CustomerId | None = None
requires_2fa: bool = False
early_adopter: bool = False
codecov_access: bool = False
disable_shared_issues: bool = False
allow_joinleave: bool = False
disable_new_visibility_features: bool = False
enhanced_privacy: bool = False
require_email_verification: bool = False
disable_member_project_creation: bool = False
prevent_superuser_access: bool = False
disable_member_invite: bool = False
| RpcOrganizationMappingUpdate |
python | getsentry__sentry | src/sentry_plugins/pushover/plugin.py | {
"start": 572,
"end": 5174
} | class ____(CorePluginMixin, NotificationPlugin):
description = DESCRIPTION
slug = "pushover"
title = "Pushover"
conf_title = "Pushover"
conf_key = "pushover"
required_field = "apikey"
feature_descriptions = [
FeatureDescription(
"""
Have Pushover notifications get sent to your mobile device with the Pushover app.
""",
IntegrationFeatures.MOBILE,
),
FeatureDescription(
"""
Configure Sentry rules to trigger notifications based on conditions you set.
""",
IntegrationFeatures.ALERT_RULE,
),
]
def is_configured(self, project) -> bool:
return all(self.get_option(key, project) for key in ("userkey", "apikey"))
def get_config(self, project, user=None, initial=None, add_additional_fields: bool = False):
userkey = self.get_option("userkey", project)
apikey = self.get_option("apikey", project)
userkey_field = get_secret_field_config(
userkey, "Your user key. See https://pushover.net/", include_prefix=True
)
userkey_field.update({"name": "userkey", "label": "User Key"})
apikey_field = get_secret_field_config(
apikey, "Application API token. See https://pushover.net/apps/", include_prefix=True
)
apikey_field.update({"name": "apikey", "label": "API Key"})
return [
userkey_field,
apikey_field,
{
"name": "priority",
"label": "Message Priority",
"type": "choice",
"required": True,
"choices": [
("-2", "Lowest"),
("-1", "Low"),
("0", "Normal"),
("1", "High"),
("2", "Emergency"),
],
"default": "0",
},
{
"name": "retry",
"label": "Retry",
"type": "number",
"required": False,
"placeholder": "e.g. 30",
"help": 'How often (in seconds) you will receive the same notification. Minimum of 30 seconds. Only required for "Emergency" level priority.',
},
{
"name": "expire",
"label": "Expire",
"type": "number",
"required": False,
"placeholder": "e.g. 9000",
"help": 'How many seconds your notification will continue to be retried for. Maximum of 10800 seconds. Only required for "Emergency" level priority.',
},
]
def validate_config(self, project, config, actor=None):
if int(config["priority"]) == 2 and config["retry"] < 30:
retry = str(config["retry"])
self.logger.exception(str(f"Retry not 30 or higher. It is {retry}."))
raise PluginError(f"Retry must be 30 or higher. It is {retry}.")
return config
def get_client(self, project):
return PushoverClient(
apikey=self.get_option("apikey", project), userkey=self.get_option("userkey", project)
)
def error_message_from_json(self, data):
errors = data.get("errors")
if errors:
return " ".join(errors)
return "unknown error"
def notify(self, notification: Notification, raise_exception: bool = False) -> None:
event = notification.event
group = event.group
project = group.project
priority = int(self.get_option("priority", project) or 0)
retry = int(self.get_option("retry", project) or 30)
expire = int(self.get_option("expire", project) or 90)
title = f"{project.name}: {group.title}"
link = group.get_absolute_url(params={"referrer": "pushover_plugin"})
message = event.title[:256]
tags = event.tags
if tags:
message += "\n\nTags: %s" % (", ".join(f"{k}={v}" for (k, v) in tags))
client = self.get_client(project)
try:
response = client.send_message(
{
"message": message[:1024],
"title": title[:250],
"url": link,
"url_title": "Issue Details",
"priority": priority,
"retry": retry,
"expire": expire,
}
)
except Exception as e:
self.raise_error(e)
assert response["status"]
| PushoverPlugin |
python | Lightning-AI__lightning | src/lightning/pytorch/loggers/logger.py | {
"start": 1786,
"end": 5120
} | class ____(Logger):
"""Dummy logger for internal use.
It is useful if we want to disable user's logger for a feature, but still ensure that user code can run
"""
def __init__(self) -> None:
super().__init__()
self._experiment = DummyExperiment()
@property
def experiment(self) -> DummyExperiment:
"""Return the experiment object associated with this logger."""
return self._experiment
@override
def log_metrics(self, *args: Any, **kwargs: Any) -> None:
pass
@override
def log_hyperparams(self, *args: Any, **kwargs: Any) -> None:
pass
@property
@override
def name(self) -> str:
"""Return the experiment name."""
return ""
@property
@override
def version(self) -> str:
"""Return the experiment version."""
return ""
def __getitem__(self, idx: int) -> "DummyLogger":
# enables self.logger[0].experiment.add_image(...)
return self
def __getattr__(self, name: str) -> Callable:
"""Allows the DummyLogger to be called with arbitrary methods, to avoid AttributeErrors."""
def method(*args: Any, **kwargs: Any) -> None:
return None
return method
# TODO: this should have been deprecated
def merge_dicts( # pragma: no cover
dicts: Sequence[Mapping],
agg_key_funcs: Optional[Mapping] = None,
default_func: Callable[[Sequence[float]], float] = statistics.mean,
) -> dict:
"""Merge a sequence with dictionaries into one dictionary by aggregating the same keys with some given function.
Args:
dicts:
Sequence of dictionaries to be merged.
agg_key_funcs:
Mapping from key name to function. This function will aggregate a
list of values, obtained from the same key of all dictionaries.
If some key has no specified aggregation function, the default one
will be used. Default is: ``None`` (all keys will be aggregated by the
default function).
default_func:
Default function to aggregate keys, which are not presented in the
`agg_key_funcs` map.
Returns:
Dictionary with merged values.
Examples:
>>> import pprint
>>> d1 = {'a': 1.7, 'b': 2.0, 'c': 1, 'd': {'d1': 1, 'd3': 3}}
>>> d2 = {'a': 1.1, 'b': 2.2, 'v': 1, 'd': {'d1': 2, 'd2': 3}}
>>> d3 = {'a': 1.1, 'v': 2.3, 'd': {'d3': 3, 'd4': {'d5': 1}}}
>>> dflt_func = min
>>> agg_funcs = {'a': statistics.mean, 'v': max, 'd': {'d1': sum}}
>>> pprint.pprint(merge_dicts([d1, d2, d3], agg_funcs, dflt_func))
{'a': 1.3,
'b': 2.0,
'c': 1,
'd': {'d1': 3, 'd2': 3, 'd3': 3, 'd4': {'d5': 1}},
'v': 2.3}
"""
agg_key_funcs = agg_key_funcs or {}
keys = list(functools.reduce(operator.or_, [set(d.keys()) for d in dicts]))
d_out: dict = defaultdict(dict)
for k in keys:
fn = agg_key_funcs.get(k)
values_to_agg = [v for v in [d_in.get(k) for d_in in dicts] if v is not None]
if isinstance(values_to_agg[0], dict):
d_out[k] = merge_dicts(values_to_agg, fn, default_func)
else:
d_out[k] = (fn or default_func)(values_to_agg)
return dict(d_out)
| DummyLogger |
python | keras-team__keras | keras/src/ops/numpy_test.py | {
"start": 216484,
"end": 334232
} | class ____(testing.TestCase):
"""Test the dtype to verify that the behavior matches JAX."""
ALL_DTYPES = [
x
for x in dtypes.ALLOWED_DTYPES
if x
not in (
"string",
"complex64",
"complex128",
# Remove 64-bit dtypes.
"float64",
"uint64",
"int64",
)
+ dtypes.FLOAT8_TYPES # Remove float8 dtypes for the following tests
] + [None]
INT_DTYPES = [x for x in dtypes.INT_TYPES if x not in ("uint64", "int64")]
FLOAT_DTYPES = [x for x in dtypes.FLOAT_TYPES if x not in ("float64",)]
if backend.backend() == "torch":
ALL_DTYPES = [x for x in ALL_DTYPES if x not in ("uint16", "uint32")]
INT_DTYPES = [x for x in INT_DTYPES if x not in ("uint16", "uint32")]
elif backend.backend() == "tensorflow":
# TODO(hongyu): Re-enable uint32 tests once we determine how to handle
# dtypes.result_type(uint32, int*) -> int64 promotion.
# Since TF variables require int64 to be placed on the GPU, we
# exclusively enable the int64 dtype for TF. However, JAX does not
# natively support int64, which prevents us from comparing the dtypes.
ALL_DTYPES = [x for x in ALL_DTYPES if x not in ("uint32",)]
INT_DTYPES = [x for x in INT_DTYPES if x not in ("uint32",)]
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_add(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.add(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.add(x1, x2).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Add().symbolic_call(x1, x2).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_array_split(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 2), dtype=dtype)
x_jax = jnp.ones((1, 2), dtype=dtype)
expected_dtype = standardize_dtype(jnp.split(x_jax, 2, -1)[0].dtype)
self.assertEqual(
standardize_dtype(knp.split(x, 2, -1)[0].dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_add_python_types(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
# python int
expected_dtype = standardize_dtype(jnp.add(x_jax, 1).dtype)
self.assertDType(knp.add(x, 1), expected_dtype)
self.assertDType(knp.Add().symbolic_call(x, 1), expected_dtype)
# python float
expected_dtype = standardize_dtype(jnp.add(x_jax, 1.0).dtype)
self.assertDType(knp.add(x, 1.0), expected_dtype)
self.assertDType(knp.Add().symbolic_call(x, 1.0), expected_dtype)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_bartlett(self, dtype):
x = knp.ones((), dtype=dtype)
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.bartlett(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Bartlett().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_blackman(self, dtype):
x = knp.ones((), dtype=dtype)
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.blackman(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Blackman().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_hamming(self, dtype):
x = knp.ones((), dtype=dtype)
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.hamming(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Hamming().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_hanning(self, dtype):
x = knp.ones((), dtype=dtype)
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.hanning(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Hanning().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_kaiser(self, dtype):
x = knp.ones((), dtype=dtype)
beta = knp.ones((), dtype=dtype)
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.kaiser(x, beta).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Kaiser(beta).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=INT_DTYPES))
def test_bincount(self, dtype):
import jax.numpy as jnp
if backend.backend() == "tensorflow":
import tensorflow as tf
if tf.test.is_gpu_available():
self.skipTest("bincount does not work in tensorflow gpu")
x = np.array([1, 1, 2, 3, 2, 4, 4, 5], dtype=dtype)
weights = np.array([0, 0, 3, 2, 1, 1, 4, 2], dtype=dtype)
minlength = 3
self.assertEqual(
standardize_dtype(
knp.bincount(x, weights=weights, minlength=minlength).dtype
),
standardize_dtype(
jnp.bincount(x, weights=weights, minlength=minlength).dtype
),
)
self.assertEqual(
knp.Bincount(weights=weights, minlength=minlength)
.symbolic_call(x)
.dtype,
standardize_dtype(
jnp.bincount(x, weights=weights, minlength=minlength).dtype
),
)
# test float32 weights
weights = np.array([0, 0, 3, 2, 1, 1, 4, 2], dtype="float32")
self.assertEqual(
standardize_dtype(knp.bincount(x, weights=weights).dtype),
standardize_dtype(jnp.bincount(x, weights=weights).dtype),
)
self.assertEqual(
knp.Bincount(weights=weights).symbolic_call(x).dtype,
standardize_dtype(jnp.bincount(x, weights=weights).dtype),
)
# test float16 weights
weights = np.array([0, 0, 3, 2, 1, 1, 4, 2], dtype="float16")
self.assertEqual(
standardize_dtype(knp.bincount(x, weights=weights).dtype),
standardize_dtype(jnp.bincount(x, weights=weights).dtype),
)
self.assertEqual(
knp.Bincount(weights=weights).symbolic_call(x).dtype,
standardize_dtype(jnp.bincount(x, weights=weights).dtype),
)
# test weights=None
self.assertEqual(
standardize_dtype(knp.bincount(x).dtype),
standardize_dtype(jnp.bincount(x).dtype),
)
self.assertEqual(
knp.Bincount().symbolic_call(x).dtype,
standardize_dtype(jnp.bincount(x).dtype),
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_subtract(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
if dtype1 == "bool" and dtype2 == "bool":
self.skipTest("subtract does not support bool")
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.subtract(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.subtract(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Subtract().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_subtract_python_types(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
# python int
expected_dtype = standardize_dtype(jnp.subtract(x_jax, 1).dtype)
self.assertDType(knp.subtract(x, 1), expected_dtype)
self.assertDType(knp.Subtract().symbolic_call(x, 1), expected_dtype)
# python float
expected_dtype = standardize_dtype(jnp.subtract(x_jax, 1.0).dtype)
self.assertDType(knp.subtract(x, 1.0), expected_dtype)
self.assertDType(knp.Subtract().symbolic_call(x, 1.0), expected_dtype)
@parameterized.named_parameters(
named_product(
dtypes=list(itertools.combinations(ALL_DTYPES, 2))
+ [("int8", "int8")]
)
)
def test_matmul(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
# The shape of the matrix needs to meet the requirements of
# torch._int_mm to test hardware-accelerated matmul
x1 = knp.ones((17, 16), dtype=dtype1)
x2 = knp.ones((16, 8), dtype=dtype2)
x1_jax = jnp.ones((17, 16), dtype=dtype1)
x2_jax = jnp.ones((16, 8), dtype=dtype2)
if dtype1 == "int8" and dtype2 == "int8":
preferred_element_type = "int32"
else:
preferred_element_type = None
expected_dtype = standardize_dtype(
jnp.matmul(
x1_jax, x2_jax, preferred_element_type=preferred_element_type
).dtype
)
self.assertEqual(
standardize_dtype(knp.matmul(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Matmul().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_multiply(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.multiply(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.multiply(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Multiply().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_multiply_python_types(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
# python int
expected_dtype = standardize_dtype(jnp.multiply(x_jax, 1).dtype)
self.assertDType(knp.multiply(x, 1), expected_dtype)
self.assertDType(knp.Multiply().symbolic_call(x, 1), expected_dtype)
# python float
expected_dtype = standardize_dtype(jnp.multiply(x_jax, 1.0).dtype)
self.assertDType(knp.multiply(x, 1.0), expected_dtype)
self.assertDType(knp.Multiply().symbolic_call(x, 1.0), expected_dtype)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_mean(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.mean(x_jax).dtype)
if dtype == "int64":
expected_dtype = "float32"
self.assertEqual(standardize_dtype(knp.mean(x).dtype), expected_dtype)
self.assertEqual(knp.Mean().symbolic_call(x).dtype, expected_dtype)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_max(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.max(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.max(x).dtype), expected_dtype)
self.assertEqual(knp.Max().symbolic_call(x).dtype, expected_dtype)
# Test with initial
initial = 1
expected_dtype = standardize_dtype(
jnp.max(x_jax, initial=initial).dtype
)
self.assertEqual(
standardize_dtype(knp.max(x, initial=initial).dtype), expected_dtype
)
self.assertEqual(
knp.Max(initial=initial).symbolic_call(x).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_ones(self, dtype):
import jax.numpy as jnp
expected_dtype = standardize_dtype(jnp.ones([2, 3], dtype=dtype).dtype)
self.assertEqual(
standardize_dtype(knp.ones([2, 3], dtype=dtype).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_zeros(self, dtype):
import jax.numpy as jnp
expected_dtype = standardize_dtype(jnp.zeros([2, 3], dtype=dtype).dtype)
self.assertEqual(
standardize_dtype(knp.zeros([2, 3], dtype=dtype).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_absolute(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.absolute(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.absolute(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Absolute().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_all(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.all(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.all(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.All().symbolic_call(x).dtype), expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_amax(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.amax(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.amax(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Amax().symbolic_call(x).dtype), expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_amin(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.amin(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.amin(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Amin().symbolic_call(x).dtype), expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_any(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.any(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.any(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Any().symbolic_call(x).dtype), expected_dtype
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_append(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.append(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.append(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Append().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_argmax(self, dtype):
import jax.numpy as jnp
if dtype == "bool":
value = [[True, False, True], [False, True, False]]
else:
value = [[1, 2, 3], [3, 2, 1]]
x = knp.array(value, dtype=dtype)
x_jax = jnp.array(value, dtype=dtype)
expected_dtype = standardize_dtype(jnp.argmax(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.argmax(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Argmax().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_argmin(self, dtype):
import jax.numpy as jnp
if dtype == "bool":
value = [[True, False, True], [False, True, False]]
else:
value = [[1, 2, 3], [3, 2, 1]]
x = knp.array(value, dtype=dtype)
x_jax = jnp.array(value, dtype=dtype)
expected_dtype = standardize_dtype(jnp.argmin(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.argmin(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Argmin().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_argpartition(self, dtype):
import jax.numpy as jnp
if dtype == "bool":
self.skipTest("argpartition doesn't support bool dtype")
x = knp.array([1, 2, 3], dtype=dtype)
x_jax = jnp.array([1, 2, 3], dtype=dtype)
expected_dtype = standardize_dtype(jnp.argpartition(x_jax, 1).dtype)
self.assertEqual(
standardize_dtype(knp.argpartition(x, 1).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Argpartition(1).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_argsort(self, dtype):
import jax.numpy as jnp
if dtype == "bool":
value = [[True, False, True], [False, True, False]]
else:
value = [[1, 2, 3], [4, 5, 6]]
x = knp.array(value, dtype=dtype)
x_jax = jnp.array(value, dtype=dtype)
expected_dtype = standardize_dtype(jnp.argsort(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.argsort(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Argsort().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.parameters(
(10, None, None, None), # stop
(2, 10, None, None), # start, stop
(10, None, 2, None), # stop, step
(0, 10, 2, None), # start, stop, step
(0, 10, 0.5, None),
(10.0, None, 1, None),
(0, 10.0, 1, None),
(0.0, 10, 1, None),
(10, None, 1, "float32"),
(10, None, 1, "int32"),
(10, None, 1, "int16"),
(10, None, 1, "float16"),
)
def test_arange(self, start, stop, step, dtype):
import jax.numpy as jnp
expected_dtype = standardize_dtype(
jnp.arange(start, stop, step, dtype).dtype
)
self.assertEqual(
standardize_dtype(knp.arange(start, stop, step, dtype).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(
knp.Arange(dtype).symbolic_call(start, stop, step).dtype
),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_arccos(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.arccos(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.arccos(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Arccos().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_arccosh(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.arccosh(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.arccosh(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Arccosh().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_arcsin(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.arcsin(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.arcsin(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Arcsin().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_arcsinh(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.arcsinh(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.arcsinh(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Arcsinh().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_arctan(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.arctan(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.arctan(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Arctan().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_arctan2(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.arctan2(x1_jax, x2_jax).dtype)
if dtype1 is not None and "float" not in dtype1:
if dtype2 is not None and "float" not in dtype2:
if "int64" in (dtype1, dtype2) or "uint32" in (dtype1, dtype2):
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.arctan2(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Arctan2().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_arctanh(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.arctanh(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.arctanh(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Arctanh().symbolic_call(x).dtype),
expected_dtype,
)
    @parameterized.parameters(
        (bool(0), "bool"),
        (int(0), "int32"),
        (float(0), backend.floatx()),
        ([False, True, False], "bool"),
        ([1, 2, 3], "int32"),
        ([1.0, 2.0, 3.0], backend.floatx()),
        ([1, 2.0, 3], backend.floatx()),
        ([[False], [True], [False]], "bool"),
        ([[1], [2], [3]], "int32"),
        ([[1], [2.0], [3]], backend.floatx()),
        *[
            (np.array(0, dtype=dtype), dtype)
            for dtype in ALL_DTYPES
            if dtype is not None
        ],
    )
    def test_array(self, x, expected_dtype):
        """`array` infers the expected dtype from python scalars, (nested)
        lists — mixed int/float lists promote to floatx — and numpy arrays.
        """
        self.assertDType(knp.array(x), expected_dtype)
        # TODO: support the assertion of knp.Array
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_average(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.average(x1_jax, weights=x2_jax).dtype
)
if dtype1 is not None and "float" not in dtype1:
if dtype2 is not None and "float" not in dtype2:
if "int64" in (dtype1, dtype2) or "uint32" in (dtype1, dtype2):
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.average(x1, weights=x2).dtype), expected_dtype
)
self.assertEqual(
knp.Average().symbolic_call(x1, weights=x2).dtype, expected_dtype
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(INT_DTYPES, 2))
)
def test_bitwise_and(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.bitwise_and(x1_jax, x2_jax).dtype
)
self.assertDType(knp.bitwise_and(x1, x2), expected_dtype)
self.assertDType(knp.BitwiseAnd().symbolic_call(x1, x2), expected_dtype)
@parameterized.named_parameters(named_product(dtype=INT_DTYPES))
def test_bitwise_invert(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.invert(x_jax).dtype)
self.assertDType(knp.bitwise_invert(x), expected_dtype)
self.assertDType(knp.BitwiseInvert().symbolic_call(x), expected_dtype)
# bitwise_not is same as bitwise_invert
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(INT_DTYPES, 2))
)
def test_bitwise_or(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.bitwise_or(x1_jax, x2_jax).dtype)
self.assertDType(knp.bitwise_or(x1, x2), expected_dtype)
self.assertDType(knp.BitwiseOr().symbolic_call(x1, x2), expected_dtype)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(INT_DTYPES, 2))
)
def test_bitwise_xor(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.bitwise_xor(x1_jax, x2_jax).dtype
)
self.assertDType(knp.bitwise_xor(x1, x2), expected_dtype)
self.assertDType(knp.BitwiseXor().symbolic_call(x1, x2), expected_dtype)
@parameterized.named_parameters(
named_product(dtypes=itertools.product(INT_DTYPES, INT_DTYPES + [None]))
)
def test_bitwise_left_shift(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2) if dtype2 else 1
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2) if dtype2 else 1
expected_dtype = standardize_dtype(jnp.left_shift(x1_jax, x2_jax).dtype)
self.assertDType(knp.bitwise_left_shift(x1, x2), expected_dtype)
self.assertDType(
knp.BitwiseLeftShift().symbolic_call(x1, x2), expected_dtype
)
# left_shift is same as bitwise_left_shift
@parameterized.named_parameters(
named_product(dtypes=itertools.product(INT_DTYPES, INT_DTYPES + [None]))
)
def test_bitwise_right_shift(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2) if dtype2 else 1
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2) if dtype2 else 1
expected_dtype = standardize_dtype(
jnp.right_shift(x1_jax, x2_jax).dtype
)
self.assertDType(knp.bitwise_right_shift(x1, x2), expected_dtype)
self.assertDType(
knp.BitwiseRightShift().symbolic_call(x1, x2), expected_dtype
)
# right_shift is same as bitwise_right_shift
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_broadcast_to(self, dtype):
import jax.numpy as jnp
x = knp.ones((3,), dtype=dtype)
x_jax = jnp.ones((3,), dtype=dtype)
expected_dtype = standardize_dtype(
jnp.broadcast_to(x_jax, (3, 3)).dtype
)
self.assertEqual(
standardize_dtype(knp.broadcast_to(x, (3, 3)).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.BroadcastTo((3, 3)).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_cbrt(self, dtype):
import jax.numpy as jnp
x1 = knp.ones((1,), dtype=dtype)
x1_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.cbrt(x1_jax).dtype)
self.assertEqual(standardize_dtype(knp.cbrt(x1).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Cbrt().symbolic_call(x1).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_ceil(self, dtype):
import jax.numpy as jnp
if dtype is None:
dtype = backend.floatx()
if dtype == "bool":
value = [[True, False, True], [True, False, True]]
elif "int" in dtype:
value = [[1, 2, 2], [2, 11, 5]]
else:
value = [[1.2, 2.1, 2.5], [2.4, 11.9, 5.5]]
x = knp.array(value, dtype=dtype)
x_jax = jnp.array(value, dtype=dtype)
expected_dtype = standardize_dtype(jnp.ceil(x_jax).dtype)
# Here, we follow Numpy's rule, not JAX's; ints are promoted to floats.
if dtype == "bool" or is_int_dtype(dtype):
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.ceil(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Ceil().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_clip(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.clip(x_jax, 1, 2).dtype)
if dtype == "bool":
expected_dtype = "int32"
self.assertEqual(
standardize_dtype(knp.clip(x, 1, 2).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Clip(1, 2).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_concatenate(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.concatenate([x1_jax, x2_jax]).dtype
)
self.assertEqual(
standardize_dtype(knp.concatenate([x1, x2]).dtype), expected_dtype
)
self.assertEqual(
knp.Concatenate().symbolic_call([x1, x2]).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_cos(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.cos(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.cos(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Cos().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_cosh(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.cosh(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.cosh(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Cosh().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_copy(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.copy(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.copy(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Copy().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_corrcoef(self, dtype):
import jax.numpy as jnp
x = knp.ones((2, 4), dtype=dtype)
x_jax = jnp.ones((2, 4), dtype=dtype)
expected_dtype = standardize_dtype(jnp.corrcoef(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.corrcoef(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Corrcoef().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_correlate(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((3,), dtype=dtype1)
x2 = knp.ones((3,), dtype=dtype2)
x1_jax = jnp.ones((3,), dtype=dtype1)
x2_jax = jnp.ones((3,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.correlate(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.correlate(x1, x2).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Correlate().symbolic_call(x1, x2).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_count_nonzero(self, dtype):
x = knp.ones((1,), dtype=dtype)
expected_dtype = "int32"
self.assertEqual(
standardize_dtype(knp.count_nonzero(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.CountNonzero().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_cross(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1, 1, 3), dtype=dtype1)
x2 = knp.ones((1, 1, 3), dtype=dtype2)
x1_jax = jnp.ones((1, 1, 3), dtype=dtype1)
x2_jax = jnp.ones((1, 1, 3), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.cross(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.cross(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Cross().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_cumprod(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.cumprod(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.cumprod(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Cumprod().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_cumsum(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.cumsum(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.cumsum(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Cumsum().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_deg2rad(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.deg2rad(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.deg2rad(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Deg2rad().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_diag(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.diag(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.diag(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Diag().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_diagflat(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.diagflat(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.diagflat(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Diagflat().symbolic_call(x).dtype),
expected_dtype,
)
x_2d = knp.ones((1, 1), dtype=dtype)
x_jax_2d = jnp.ones((1, 1), dtype=dtype)
expected_dtype_2d = standardize_dtype(jnp.diagflat(x_jax_2d).dtype)
self.assertEqual(
standardize_dtype(knp.diagflat(x_2d).dtype), expected_dtype_2d
)
self.assertEqual(
standardize_dtype(knp.Diagflat().symbolic_call(x_2d).dtype),
expected_dtype_2d,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_diagonal(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.diagonal(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.diagonal(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Diagonal().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_diff(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.diff(x_jax).dtype)
self.assertEqual(standardize_dtype(knp.diff(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Diff().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_digitize(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
bins = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
x_bins = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.digitize(x_jax, x_bins).dtype)
self.assertEqual(
standardize_dtype(knp.digitize(x, bins).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Digitize().symbolic_call(x, bins).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_divide(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.divide(x1_jax, x2_jax).dtype)
self.assertDType(knp.divide(x1, x2), expected_dtype)
self.assertDType(knp.Divide().symbolic_call(x1, x2), expected_dtype)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_divide_python_types(self, dtype):
import jax.numpy as jnp
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
# python int
expected_dtype = standardize_dtype(jnp.divide(x_jax, 1).dtype)
self.assertDType(knp.divide(x, 1), expected_dtype)
self.assertDType(knp.Divide().symbolic_call(x, 1), expected_dtype)
# python float
expected_dtype = standardize_dtype(jnp.divide(x_jax, 1.0).dtype)
self.assertDType(knp.divide(x, 1.0), expected_dtype)
self.assertDType(knp.Divide().symbolic_call(x, 1.0), expected_dtype)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_dot(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((2, 3, 4), dtype=dtype1)
x2 = knp.ones((4, 3), dtype=dtype2)
x1_jax = jnp.ones((2, 3, 4), dtype=dtype1)
x2_jax = jnp.ones((4, 3), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.dot(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.dot(x1, x2).dtype), expected_dtype
)
self.assertEqual(knp.Dot().symbolic_call(x1, x2).dtype, expected_dtype)
@parameterized.named_parameters(
named_product(
dtypes=list(itertools.combinations(ALL_DTYPES, 2))
+ [("int8", "int8")]
)
)
def test_einsum(self, dtypes):
import jax.numpy as jnp
def get_input_shapes(subscripts):
x1_labels = subscripts.split(",")[0]
x2_labels = subscripts.split("->")[0][len(x1_labels) + 1 :]
x1_shape = [1] * len(x1_labels)
x2_shape = [1] * len(x2_labels)
return x1_shape, x2_shape
dtype1, dtype2 = dtypes
subscripts = "ijk,lkj->il"
x1_shape, x2_shape = get_input_shapes(subscripts)
x1 = knp.ones(x1_shape, dtype=dtype1)
x2 = knp.ones(x2_shape, dtype=dtype2)
x1_jax = jnp.ones(x1_shape, dtype=dtype1)
x2_jax = jnp.ones(x2_shape, dtype=dtype2)
if dtype1 == "int8" and dtype2 == "int8":
preferred_element_type = "int32"
else:
preferred_element_type = None
expected_dtype = standardize_dtype(
jnp.einsum(
subscripts,
x1_jax,
x2_jax,
preferred_element_type=preferred_element_type,
).dtype
)
self.assertEqual(
standardize_dtype(knp.einsum(subscripts, x1, x2).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(
knp.Einsum(subscripts).symbolic_call(x1, x2).dtype
),
expected_dtype,
)
@parameterized.named_parameters(
named_product(
dtypes=list(itertools.combinations(ALL_DTYPES, 2))
+ [("int8", "int8")]
)
)
@pytest.mark.skipif(
backend.backend() != "tensorflow",
reason=f"{backend.backend()} doesn't implement custom ops for einsum.",
)
def test_einsum_custom_ops_for_tensorflow(self, dtypes):
import jax.numpy as jnp
def get_input_shapes(subscripts):
x1_labels = subscripts.split(",")[0]
x2_labels = subscripts.split("->")[0][len(x1_labels) + 1 :]
x1_shape = [1] * len(x1_labels)
x2_shape = [1] * len(x2_labels)
return x1_shape, x2_shape
dtype1, dtype2 = dtypes
for subscripts in [
"a,b->ab",
"ab,b->a",
"ab,bc->ac",
"ab,cb->ac",
"abc,cd->abd",
"abc,cde->abde",
"abc,dc->abd",
"abc,dce->abde",
"abc,dec->abde",
"abcd,abde->abce",
"abcd,abed->abce",
"abcd,acbe->adbe",
"abcd,adbe->acbe",
"abcd,aecd->acbe",
"abcd,aecd->aceb",
"abcd,cde->abe",
"abcd,ced->abe",
"abcd,ecd->abe",
"abcde,aebf->adbcf",
"abcde,afce->acdbf",
]:
x1_shape, x2_shape = get_input_shapes(subscripts)
x1 = knp.ones(x1_shape, dtype=dtype1)
x2 = knp.ones(x2_shape, dtype=dtype2)
x1_jax = jnp.ones(x1_shape, dtype=dtype1)
x2_jax = jnp.ones(x2_shape, dtype=dtype2)
if dtype1 == "int8" and dtype2 == "int8":
preferred_element_type = "int32"
else:
preferred_element_type = None
expected_dtype = standardize_dtype(
jnp.einsum(
subscripts,
x1_jax,
x2_jax,
preferred_element_type=preferred_element_type,
).dtype
)
self.assertEqual(
standardize_dtype(knp.einsum(subscripts, x1, x2).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(
knp.Einsum(subscripts).symbolic_call(x1, x2).dtype
),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_empty(self, dtype):
import jax.numpy as jnp
expected_dtype = standardize_dtype(jnp.empty([2, 3], dtype=dtype).dtype)
self.assertEqual(
standardize_dtype(knp.empty([2, 3], dtype=dtype).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_empty_like(self, dtype):
import jax.numpy as jnp
x = jnp.empty([2, 3, 4], dtype=dtype)
expected_dtype = standardize_dtype(jnp.empty_like(x, dtype=dtype).dtype)
self.assertEqual(
standardize_dtype(knp.empty_like(x, dtype=dtype).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.EmptyLike().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_equal(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((), dtype=dtype1)
x2 = knp.ones((), dtype=dtype2)
x1_jax = jnp.ones((), dtype=dtype1)
x2_jax = jnp.ones((), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.equal(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.equal(x1, x2).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Equal().symbolic_call(x1, x2).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_exp(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.exp(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.exp(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Exp().symbolic_call(x).dtype), expected_dtype
)
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_exp2(self, dtype):
        """knp.exp2 must match jnp.exp2's dtype, except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.exp2(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.exp2(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Exp2().symbolic_call(x).dtype), expected_dtype
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_expand_dims(self, dtype):
        """knp.expand_dims must preserve the input dtype like jnp.expand_dims."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.expand_dims(x_jax, -1).dtype)
        self.assertEqual(
            standardize_dtype(knp.expand_dims(x, -1).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.ExpandDims(-1).symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_expm1(self, dtype):
        """knp.expm1 must match jnp.expm1's dtype, except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.expm1(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.expm1(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Expm1().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_eye(self, dtype):
        """knp.eye must match jnp.eye's dtype; dtype=None defaults to floatx."""
        import jax.numpy as jnp
        expected_dtype = standardize_dtype(jnp.eye(3, dtype=dtype).dtype)
        # With no explicit dtype, Keras defaults to the backend floatx.
        if dtype is None:
            expected_dtype = backend.floatx()
        self.assertEqual(
            standardize_dtype(knp.eye(3, dtype=dtype).dtype),
            expected_dtype,
        )
        # Also cover the rectangular / off-diagonal (k=1) form.
        expected_dtype = standardize_dtype(jnp.eye(3, 4, 1, dtype=dtype).dtype)
        if dtype is None:
            expected_dtype = backend.floatx()
        self.assertEqual(
            standardize_dtype(knp.eye(3, 4, k=1, dtype=dtype).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_flip(self, dtype):
        """knp.flip must preserve the input dtype like jnp.flip."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.flip(x_jax, -1).dtype)
        self.assertEqual(
            standardize_dtype(knp.flip(x, -1).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Flip(-1).symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_floor(self, dtype):
        """knp.floor must match jnp.floor, with bool/int promoted to floatx."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.floor(x_jax).dtype)
        # Here, we follow Numpy's rule, not JAX's; ints are promoted to floats.
        if dtype == "bool" or is_int_dtype(dtype):
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.floor(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Floor().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_floor_divide(self, dtypes):
        """knp.floor_divide must match jnp.floor_divide for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(
            jnp.floor_divide(x1_jax, x2_jax).dtype
        )
        self.assertEqual(
            standardize_dtype(knp.floor_divide(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.FloorDivide().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_floor_divide_python_types(self, dtype):
        """floor_divide with Python int/float scalars must match JAX promotion."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        # python int
        expected_dtype = standardize_dtype(jnp.floor_divide(x_jax, 1).dtype)
        self.assertDType(knp.floor_divide(x, 1), expected_dtype)
        self.assertDType(knp.FloorDivide().symbolic_call(x, 1), expected_dtype)
        # python float
        expected_dtype = standardize_dtype(jnp.floor_divide(x_jax, 1.0).dtype)
        self.assertDType(knp.floor_divide(x, 1.0), expected_dtype)
        self.assertDType(
            knp.FloorDivide().symbolic_call(x, 1.0), expected_dtype
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_full(self, dtype):
        """knp.full must match jnp.full's dtype; dtype=None defaults to floatx."""
        import jax.numpy as jnp
        expected_dtype = standardize_dtype(jnp.full((), 0, dtype=dtype).dtype)
        # With no explicit dtype, Keras defaults to the backend floatx.
        if dtype is None:
            expected_dtype = backend.floatx()
        self.assertEqual(
            standardize_dtype(knp.full((), 0, dtype=dtype).dtype),
            expected_dtype,
        )
        self.assertEqual(
            standardize_dtype(knp.Full((), dtype=dtype).symbolic_call(0).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_full_like(self, dtype):
        """knp.full_like must inherit the reference tensor's dtype like JAX."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.full_like(x_jax, 0).dtype)
        self.assertEqual(
            standardize_dtype(knp.full_like(x, 0).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.FullLike().symbolic_call(x, 0).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(INT_DTYPES, 2))
    )
    def test_gcd(self, dtypes):
        """knp.gcd must match jnp.gcd's promotion over integer dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.gcd(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.gcd(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Gcd().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_greater(self, dtypes):
        """knp.greater must produce jnp.greater's dtype for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.greater(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.greater(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Greater().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_greater_equal(self, dtypes):
        """knp.greater_equal must match jnp.greater_equal for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(
            jnp.greater_equal(x1_jax, x2_jax).dtype
        )
        self.assertEqual(
            standardize_dtype(knp.greater_equal(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.GreaterEqual().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_heaviside(self, dtypes):
        """knp.heaviside must match jnp.heaviside's promotion for dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((1, 1), dtype=dtype1)
        x2 = knp.ones((1, 1), dtype=dtype2)
        x1_jax = jnp.ones((1, 1), dtype=dtype1)
        x2_jax = jnp.ones((1, 1), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.heaviside(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.heaviside(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Heaviside().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_hstack(self, dtypes):
        """knp.hstack must match jnp.hstack's promotion for dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((1, 1), dtype=dtype1)
        x2 = knp.ones((1, 1), dtype=dtype2)
        x1_jax = jnp.ones((1, 1), dtype=dtype1)
        x2_jax = jnp.ones((1, 1), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.hstack([x1_jax, x2_jax]).dtype)
        self.assertEqual(
            standardize_dtype(knp.hstack([x1, x2]).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Hstack().symbolic_call([x1, x2]).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_hypot(self, dtypes):
        """knp.hypot must match jnp.hypot's promotion for dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((1, 1), dtype=dtype1)
        x2 = knp.ones((1, 1), dtype=dtype2)
        x1_jax = jnp.ones((1, 1), dtype=dtype1)
        x2_jax = jnp.ones((1, 1), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.hypot(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.hypot(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Hypot().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_identity(self, dtype):
        """knp.identity must match jnp.identity; dtype=None defaults to floatx."""
        import jax.numpy as jnp
        expected_dtype = standardize_dtype(jnp.identity(3, dtype=dtype).dtype)
        # With no explicit dtype, Keras defaults to the backend floatx.
        if dtype is None:
            expected_dtype = backend.floatx()
        self.assertEqual(
            standardize_dtype(knp.identity(3, dtype=dtype).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_isclose(self, dtypes):
        """knp.isclose must produce jnp.isclose's dtype for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.isclose(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.isclose(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Isclose().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_isfinite(self, dtype):
        """knp.isfinite must produce jnp.isfinite's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.isfinite(x_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.isfinite(x).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Isfinite().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_isin(self, dtypes):
        """knp.isin must produce jnp.isin's dtype for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.isin(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.isin(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.IsIn().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_isinf(self, dtype):
        """knp.isinf must produce jnp.isinf's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.isinf(x_jax).dtype)
        self.assertEqual(standardize_dtype(knp.isinf(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Isinf().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_isnan(self, dtype):
        """knp.isnan must produce jnp.isnan's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.isnan(x_jax).dtype)
        self.assertEqual(standardize_dtype(knp.isnan(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Isnan().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_isneginf(self, dtype):
        """knp.isneginf must produce jnp.isneginf's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.isneginf(x_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.isneginf(x).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Isneginf().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_isposinf(self, dtype):
        """knp.isposinf must produce jnp.isposinf's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.isposinf(x_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.isposinf(x).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Isposinf().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_isreal(self, dtype):
        """knp.isreal must produce jnp.isreal's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.isreal(x_jax).dtype)
        self.assertEqual(standardize_dtype(knp.isreal(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Isreal().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(INT_DTYPES, 2))
    )
    def test_kron(self, dtypes):
        """knp.kron must match jnp.kron's promotion over integer dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.kron(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.kron(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Kron().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(INT_DTYPES, 2))
    )
    def test_lcm(self, dtypes):
        """knp.lcm must match jnp.lcm's promotion over integer dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.lcm(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.lcm(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Lcm().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_less(self, dtypes):
        """knp.less must produce jnp.less's dtype for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.less(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.less(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Less().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_less_equal(self, dtypes):
        """knp.less_equal must match jnp.less_equal for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.less_equal(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.less_equal(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.LessEqual().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(
            start_and_stop=[
                [0, 10],
                [0.5, 10.5],
                [np.array([0, 1], "int32"), np.array([10, 20], "int32")],
                [np.array([0, 1], "float32"), np.array([10, 20], "float32")],
            ],
            num=[0, 1, 5],
            dtype=FLOAT_DTYPES + [None],
        )
    )
    def test_linspace(self, start_and_stop, num, dtype):
        """knp.linspace must match jnp.linspace's dtype across endpoint types,
        sample counts, and explicit/implicit dtypes."""
        import jax.numpy as jnp
        start, stop = start_and_stop
        expected_dtype = standardize_dtype(
            jnp.linspace(start, stop, num, dtype=dtype).dtype
        )
        self.assertEqual(
            standardize_dtype(
                knp.linspace(start, stop, num, dtype=dtype).dtype
            ),
            expected_dtype,
        )
        self.assertEqual(
            standardize_dtype(
                knp.Linspace(num, dtype=dtype).symbolic_call(start, stop).dtype
            ),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_log(self, dtype):
        """knp.log must match jnp.log's dtype, except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((3, 3), dtype=dtype)
        x_jax = jnp.ones((3, 3), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.log(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.log(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Log().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_log10(self, dtype):
        """knp.log10 must match jnp.log10's dtype, except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((3, 3), dtype=dtype)
        x_jax = jnp.ones((3, 3), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.log10(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.log10(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Log10().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_log1p(self, dtype):
        """knp.log1p must match jnp.log1p's dtype, except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((3, 3), dtype=dtype)
        x_jax = jnp.ones((3, 3), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.log1p(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.log1p(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Log1p().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_log2(self, dtype):
        """knp.log2 must match jnp.log2's dtype, except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((3, 3), dtype=dtype)
        x_jax = jnp.ones((3, 3), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.log2(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.log2(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Log2().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_logaddexp(self, dtypes):
        """knp.logaddexp must match jnp.logaddexp, except that non-float pairs
        involving int64/uint32 promote to floatx instead of float64."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((3, 3), dtype=dtype1)
        x2 = knp.ones((3, 3), dtype=dtype2)
        x1_jax = jnp.ones((3, 3), dtype=dtype1)
        x2_jax = jnp.ones((3, 3), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.logaddexp(x1_jax, x2_jax).dtype)
        # jnp.logaddexp will promote "int64" and "uint32" to "float64"
        # force the promotion to `backend.floatx()`
        if dtype1 is not None and "float" not in dtype1:
            if dtype2 is not None and "float" not in dtype2:
                if "int64" in (dtype1, dtype2) or "uint32" in (dtype1, dtype2):
                    expected_dtype = backend.floatx()
        self.assertEqual(
            standardize_dtype(knp.logaddexp(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Logaddexp().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_logaddexp2(self, dtypes):
        """knp.logaddexp2 must match jnp.logaddexp2 for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((3, 3), dtype=dtype1)
        x2 = knp.ones((3, 3), dtype=dtype2)
        x1_jax = jnp.ones((3, 3), dtype=dtype1)
        x2_jax = jnp.ones((3, 3), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.logaddexp2(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.logaddexp2(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Logaddexp2().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(
            start_and_stop=[
                [0, 10],
                [0.5, 10.5],
                [np.array([0, 1], "int32"), np.array([10, 20], "int32")],
                [np.array([0, 1], "float32"), np.array([10, 20], "float32")],
            ],
            num=[0, 1, 5],
            dtype=FLOAT_DTYPES + [None],
        )
    )
    def test_logspace(self, start_and_stop, num, dtype):
        """knp.logspace must match jnp.logspace's dtype across endpoint types,
        sample counts, and explicit/implicit dtypes."""
        import jax.numpy as jnp
        start, stop = start_and_stop
        expected_dtype = standardize_dtype(
            jnp.logspace(start, stop, num, dtype=dtype).dtype
        )
        self.assertEqual(
            standardize_dtype(
                knp.logspace(start, stop, num, dtype=dtype).dtype
            ),
            expected_dtype,
        )
        self.assertEqual(
            standardize_dtype(
                knp.Logspace(num, dtype=dtype).symbolic_call(start, stop).dtype
            ),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_logical_and(self, dtypes):
        """knp.logical_and must match jnp.logical_and for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(
            jnp.logical_and(x1_jax, x2_jax).dtype
        )
        self.assertEqual(
            standardize_dtype(knp.logical_and(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.LogicalAnd().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_logical_not(self, dtype):
        """knp.logical_not must produce jnp.logical_not's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.logical_not(x_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.logical_not(x).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.LogicalNot().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_logical_or(self, dtypes):
        """knp.logical_or must match jnp.logical_or for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.logical_or(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.logical_or(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.LogicalOr().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_logical_xor(self, dtypes):
        """knp.logical_xor must match jnp.logical_xor for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(
            jnp.logical_xor(x1_jax, x2_jax).dtype
        )
        self.assertEqual(
            standardize_dtype(knp.logical_xor(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.LogicalXor().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_maximum(self, dtypes):
        """knp.maximum must match jnp.maximum's promotion for dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.maximum(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.maximum(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Maximum().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_maximum_python_types(self, dtype):
        """maximum with Python int/float scalars must match JAX promotion."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        # python int
        expected_dtype = standardize_dtype(jnp.maximum(x_jax, 1).dtype)
        self.assertDType(knp.maximum(x, 1), expected_dtype)
        self.assertDType(knp.Maximum().symbolic_call(x, 1), expected_dtype)
        # python float
        expected_dtype = standardize_dtype(jnp.maximum(x_jax, 1.0).dtype)
        self.assertDType(knp.maximum(x, 1.0), expected_dtype)
        self.assertDType(knp.Maximum().symbolic_call(x, 1.0), expected_dtype)
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_median(self, dtype):
        """knp.median (global and per-axis) must match jnp.median's dtype,
        except int64 maps to floatx."""
        import jax.numpy as jnp
        x = knp.ones((3, 3), dtype=dtype)
        x_jax = jnp.ones((3, 3), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.median(x_jax).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(standardize_dtype(knp.median(x).dtype), expected_dtype)
        self.assertEqual(
            standardize_dtype(knp.Median().symbolic_call(x).dtype),
            expected_dtype,
        )
        self.assertEqual(
            standardize_dtype(knp.median(x, axis=1).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Median(axis=1).symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_meshgrid(self, dtype):
        """knp.meshgrid must preserve the input dtype like jnp.meshgrid."""
        import jax.numpy as jnp
        if dtype == "bool":
            self.skipTest("meshgrid doesn't support bool dtype")
        elif dtype is None:
            # Resolve the implicit dtype so both libraries get the same one.
            dtype = backend.floatx()
        x = knp.array([1, 2, 3], dtype=dtype)
        y = knp.array([4, 5, 6], dtype=dtype)
        x_jax = jnp.array([1, 2, 3], dtype=dtype)
        y_jax = jnp.array([4, 5, 6], dtype=dtype)
        expected_dtype = standardize_dtype(jnp.meshgrid(x_jax, y_jax)[0].dtype)
        self.assertEqual(
            standardize_dtype(knp.meshgrid(x, y)[0].dtype), expected_dtype
        )
        self.assertEqual(
            knp.Meshgrid().symbolic_call(x, y)[0].dtype, expected_dtype
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_min(self, dtype):
        """knp.min must match jnp.min's dtype, with and without `initial`."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.min(x_jax).dtype)
        self.assertEqual(standardize_dtype(knp.min(x).dtype), expected_dtype)
        self.assertEqual(knp.Min().symbolic_call(x).dtype, expected_dtype)
        # Test with initial
        initial = 0
        expected_dtype = standardize_dtype(
            jnp.min(x_jax, initial=initial).dtype
        )
        self.assertEqual(
            standardize_dtype(knp.min(x, initial=initial).dtype), expected_dtype
        )
        self.assertEqual(
            knp.Min(initial=initial).symbolic_call(x).dtype, expected_dtype
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_minimum(self, dtypes):
        """knp.minimum must match jnp.minimum's promotion for dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.minimum(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.minimum(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Minimum().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_minimum_python_types(self, dtype):
        """minimum with Python int/float scalars must match JAX promotion."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        # python int
        expected_dtype = standardize_dtype(jnp.minimum(x_jax, 1).dtype)
        self.assertDType(knp.minimum(x, 1), expected_dtype)
        self.assertDType(knp.Minimum().symbolic_call(x, 1), expected_dtype)
        # python float
        expected_dtype = standardize_dtype(jnp.minimum(x_jax, 1.0).dtype)
        self.assertDType(knp.minimum(x, 1.0), expected_dtype)
        self.assertDType(knp.Minimum().symbolic_call(x, 1.0), expected_dtype)
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_mod(self, dtypes):
        """knp.mod must match jnp.mod's promotion for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.mod(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.mod(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Mod().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_moveaxis(self, dtype):
        """knp.moveaxis must preserve the input dtype like jnp.moveaxis."""
        import jax.numpy as jnp
        x = knp.ones((1, 1, 1), dtype=dtype)
        x_jax = jnp.ones((1, 1, 1), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.moveaxis(x_jax, -2, -1).dtype)
        self.assertEqual(
            standardize_dtype(knp.moveaxis(x, -2, -1).dtype), expected_dtype
        )
        self.assertEqual(
            knp.Moveaxis(-2, -1).symbolic_call(x).dtype, expected_dtype
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_nan_to_num(self, dtype):
        """knp.nan_to_num must produce jnp.nan_to_num's dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.nan_to_num(x_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.nan_to_num(x).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.NanToNum().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_nonzero(self, dtype):
        """knp.nonzero must produce jnp.nonzero's index dtype for every dtype."""
        import jax.numpy as jnp
        x = knp.zeros((1,), dtype=dtype)
        x_jax = jnp.zeros((1,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.nonzero(x_jax)[0].dtype)
        self.assertEqual(
            standardize_dtype(knp.nonzero(x)[0].dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Nonzero().symbolic_call(x)[0].dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_not_equal(self, dtypes):
        """knp.not_equal must match jnp.not_equal for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((), dtype=dtype1)
        x2 = knp.ones((), dtype=dtype2)
        x1_jax = jnp.ones((), dtype=dtype1)
        x2_jax = jnp.ones((), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.not_equal(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.not_equal(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.NotEqual().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_ones_like(self, dtype):
        """knp.ones_like must inherit the reference tensor's dtype like JAX."""
        import jax.numpy as jnp
        x = knp.ones((), dtype=dtype)
        x_jax = jnp.ones((), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.ones_like(x_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.ones_like(x).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.OnesLike().symbolic_call(x).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(
        named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
    )
    def test_outer(self, dtypes):
        """knp.outer must match jnp.outer's promotion for all dtype pairs."""
        import jax.numpy as jnp
        dtype1, dtype2 = dtypes
        x1 = knp.ones((1, 2), dtype=dtype1)
        x2 = knp.ones((3, 4), dtype=dtype2)
        x1_jax = jnp.ones((1, 2), dtype=dtype1)
        x2_jax = jnp.ones((3, 4), dtype=dtype2)
        expected_dtype = standardize_dtype(jnp.outer(x1_jax, x2_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.outer(x1, x2).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.Outer().symbolic_call(x1, x2).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_pad(self, dtype):
        """knp.pad must preserve the input dtype like jnp.pad for the
        constant/symmetric/reflect modes."""
        import jax.numpy as jnp
        x = knp.ones((2, 2, 2, 2), dtype=dtype)
        x_jax = jnp.ones((2, 2, 2, 2), dtype=dtype)
        pad_width = ((0, 0), (1, 1), (1, 1), (1, 1))
        for mode in ("constant", "symmetric", "reflect"):
            expected_dtype = standardize_dtype(
                jnp.pad(x_jax, pad_width, mode).dtype
            )
            self.assertEqual(
                standardize_dtype(knp.pad(x, pad_width, mode).dtype),
                expected_dtype,
            )
            self.assertEqual(
                standardize_dtype(
                    knp.Pad(pad_width, mode).symbolic_call(x).dtype
                ),
                expected_dtype,
            )
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_power(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x = knp.ones((1,), dtype=dtype1)
power = knp.ones((1,), dtype2)
x_jax = jnp.ones((1,), dtype=dtype1)
power_jax = jnp.ones((1,), dtype2)
expected_dtype = standardize_dtype(jnp.power(x_jax, power_jax).dtype)
self.assertEqual(
standardize_dtype(knp.power(x, power).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Power().symbolic_call(x, power).dtype),
expected_dtype,
)
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_power_python_types(self, dtype):
        """power with Python int/float scalars must match JAX promotion."""
        import jax.numpy as jnp
        x = knp.ones((1,), dtype=dtype)
        x_jax = jnp.ones((1,), dtype=dtype)
        # python int
        expected_dtype = standardize_dtype(jnp.power(x_jax, 1).dtype)
        self.assertDType(knp.power(x, 1), expected_dtype)
        self.assertDType(knp.Power().symbolic_call(x, 1), expected_dtype)
        # python float
        expected_dtype = standardize_dtype(jnp.power(x_jax, 1.0).dtype)
        self.assertDType(knp.power(x, 1.0), expected_dtype)
        self.assertDType(knp.Power().symbolic_call(x, 1.0), expected_dtype)
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_prod(self, dtype):
        """knp.prod must match jnp.prod's accumulator dtype, with a torch
        exception mapping uint32 to int32."""
        import jax.numpy as jnp
        x = knp.ones((1, 1, 1), dtype=dtype)
        x_jax = jnp.ones((1, 1, 1), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.prod(x_jax).dtype)
        # TODO: torch doesn't support uint32
        if backend.backend() == "torch" and expected_dtype == "uint32":
            expected_dtype = "int32"
        self.assertEqual(
            standardize_dtype(knp.prod(x).dtype),
            expected_dtype,
        )
        self.assertEqual(
            standardize_dtype(knp.Prod().symbolic_call(x).dtype), expected_dtype
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_quantile(self, dtype):
        """knp.quantile must match jnp.quantile's dtype, except int64 maps to
        floatx."""
        import jax.numpy as jnp
        x = knp.ones((3,), dtype=dtype)
        x_jax = jnp.ones((3,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.quantile(x_jax, 0.5).dtype)
        # Keras promotes int64 to the backend floatx rather than float64.
        if dtype == "int64":
            expected_dtype = backend.floatx()
        self.assertEqual(
            standardize_dtype(knp.quantile(x, 0.5).dtype),
            expected_dtype,
        )
        self.assertEqual(
            standardize_dtype(knp.Quantile().symbolic_call(x, 0.5).dtype),
            expected_dtype,
        )
    @parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
    def test_searchsorted(self, dtype):
        """knp.searchsorted must produce jnp.searchsorted's index dtype."""
        import jax.numpy as jnp
        if dtype == "bool":
            self.skipTest("searchsorted doesn't support bool dtype")
        a = knp.ones((3,), dtype=dtype)
        v = knp.ones((3,), dtype=dtype)
        a_jax = jnp.ones((3,), dtype=dtype)
        v_jax = jnp.ones((3,), dtype=dtype)
        expected_dtype = standardize_dtype(jnp.searchsorted(a_jax, v_jax).dtype)
        self.assertEqual(
            standardize_dtype(knp.searchsorted(a, v).dtype), expected_dtype
        )
        self.assertEqual(
            standardize_dtype(knp.SearchSorted().symbolic_call(a, v).dtype),
            expected_dtype,
        )
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_ravel(self, dtype):
import jax.numpy as jnp
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.ravel(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.ravel(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Ravel().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=INT_DTYPES))
def test_unravel_index(self, dtype):
import jax.numpy as jnp
x = knp.ones((3,), dtype=dtype)
x_jax = jnp.ones((3,), dtype=dtype)
indices = knp.array([2, 0], dtype=dtype)
indices_jax = jnp.array([2, 0], dtype=dtype)
unravel_result_knp = knp.unravel_index(indices, x.shape)
unravel_result_jax = jnp.unravel_index(indices_jax, x_jax.shape)
expected_dtype_knp = standardize_dtype(unravel_result_knp[0].dtype)
expected_dtype_jax = standardize_dtype(unravel_result_jax[0].dtype)
self.assertEqual(expected_dtype_knp, expected_dtype_jax)
unravel_result_knp_symbolic = knp.UnravelIndex(x.shape).symbolic_call(
indices
)
expected_dtype_symbolic = standardize_dtype(
unravel_result_knp_symbolic[0].dtype
)
self.assertEqual(expected_dtype_symbolic, expected_dtype_jax)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_repeat(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.repeat(x_jax, 2).dtype)
self.assertEqual(
standardize_dtype(knp.repeat(x, 2).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Repeat(2).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_reshape(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.reshape(x_jax, [1]).dtype)
self.assertEqual(
standardize_dtype(knp.reshape(x, [1]).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Reshape([1]).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_roll(self, dtype):
import jax.numpy as jnp
x = knp.ones((5,), dtype=dtype)
x_jax = jnp.ones((5,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.roll(x_jax, 2).dtype)
self.assertEqual(
standardize_dtype(knp.roll(x, 2).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Roll(2).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_round(self, dtype):
import jax.numpy as jnp
if dtype == "bool":
self.skipTest("round doesn't support bool dtype")
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.round(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.round(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Round().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_sign(self, dtype):
import jax.numpy as jnp
if dtype == "bool":
self.skipTest("sign doesn't support bool dtype")
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.sign(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.sign(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Sign().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_signbit(self, dtype):
import jax.numpy as jnp
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.signbit(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.signbit(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Signbit().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_sin(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.sin(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.sin(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Sin().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_sinh(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.sinh(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.sinh(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Sinh().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_sort(self, dtype):
import jax.numpy as jnp
x = knp.ones((2,), dtype=dtype)
x_jax = jnp.ones((2,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.sort(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.sort(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Sort().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_split(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 2), dtype=dtype)
x_jax = jnp.ones((1, 2), dtype=dtype)
expected_dtype = standardize_dtype(jnp.split(x_jax, 2, -1)[0].dtype)
self.assertEqual(
standardize_dtype(knp.split(x, 2, -1)[0].dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Split(2, -1).symbolic_call(x)[0].dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_sqrt(self, dtype):
import jax.numpy as jnp
x1 = knp.ones((1,), dtype=dtype)
x1_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.sqrt(x1_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.sqrt(x1).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Sqrt().symbolic_call(x1).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_square(self, dtype):
import jax.numpy as jnp
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.square(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.square(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Square().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_squeeze(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.squeeze(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.squeeze(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Squeeze().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_stack(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.stack([x1_jax, x2_jax]).dtype)
self.assertEqual(
standardize_dtype(knp.stack([x1, x2]).dtype), expected_dtype
)
self.assertEqual(
knp.Stack().symbolic_call([x1, x2]).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_std(self, dtype):
import jax.numpy as jnp
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.std(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(
standardize_dtype(knp.std(x).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Std().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_sum(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.sum(x_jax).dtype)
# TODO: torch doesn't support uint32
if backend.backend() == "torch" and expected_dtype == "uint32":
expected_dtype = "int32"
self.assertEqual(standardize_dtype(knp.sum(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Sum().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_swapaxes(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.swapaxes(x_jax, -1, -2).dtype)
self.assertEqual(
standardize_dtype(knp.swapaxes(x, -1, -2).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Swapaxes(-1, -2).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_take(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.take(x_jax, 0).dtype)
self.assertEqual(
standardize_dtype(knp.take(x, 0).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(knp.Take().symbolic_call(x, 0).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtype=ALL_DTYPES, indices_dtype=INT_DTYPES)
)
def test_take_along_axis(self, dtype, indices_dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
indices = knp.zeros((1,), dtype=indices_dtype)
x_jax = jnp.ones((1,), dtype=dtype)
indices_jax = jnp.zeros((1,), dtype=indices_dtype)
expected_dtype = standardize_dtype(
jnp.take_along_axis(x_jax, indices_jax, 0).dtype
)
self.assertEqual(
standardize_dtype(knp.take_along_axis(x, indices, 0).dtype),
expected_dtype,
)
self.assertEqual(
standardize_dtype(
knp.TakeAlongAxis(0).symbolic_call(x, indices).dtype
),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_tan(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.tan(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.tan(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Tan().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_tanh(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.tanh(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.tanh(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Tanh().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_tensordot(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1, 1), dtype=dtype1)
x2 = knp.ones((1, 1), dtype=dtype2)
x1_jax = jnp.ones((1, 1), dtype=dtype1)
x2_jax = jnp.ones((1, 1), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.tensordot(x1_jax, x2_jax, 2).dtype
)
self.assertEqual(
standardize_dtype(knp.tensordot(x1, x2, 2).dtype), expected_dtype
)
self.assertEqual(
knp.Tensordot(2).symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_tile(self, dtype):
import jax.numpy as jnp
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.tile(x_jax, [1]).dtype)
self.assertEqual(
standardize_dtype(knp.tile(x, [1]).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Tile([1]).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_trace(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.trace(x_jax).dtype)
# jnp.trace is buggy with bool. We set the expected_dtype to int32
# for bool inputs
if dtype == "bool":
expected_dtype = "int32"
if dtype == "uint8" and backend.backend() == "torch":
# Torch backend doesn't support uint32 dtype.
expected_dtype = "int32"
self.assertDType(knp.trace(x), expected_dtype)
self.assertDType(knp.Trace().symbolic_call(x), expected_dtype)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_transpose(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.transpose(x_jax, [1, 0]).dtype)
self.assertEqual(
standardize_dtype(knp.transpose(x, [1, 0]).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Transpose([1, 0]).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_tri(self, dtype):
import jax.numpy as jnp
expected_dtype = standardize_dtype(jnp.tri(3, dtype=dtype).dtype)
self.assertEqual(
standardize_dtype(knp.tri(3, dtype=dtype).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_tril(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.tril(x_jax, 0).dtype)
self.assertEqual(
standardize_dtype(knp.tril(x, 0).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Tril(0).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_triu(self, dtype):
import jax.numpy as jnp
x = knp.ones((1, 1), dtype=dtype)
x_jax = jnp.ones((1, 1), dtype=dtype)
expected_dtype = standardize_dtype(jnp.triu(x_jax, 0).dtype)
self.assertEqual(
standardize_dtype(knp.triu(x, 0).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Triu(0).symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_true_divide(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.true_divide(x1_jax, x2_jax).dtype
)
self.assertDType(knp.true_divide(x1, x2), expected_dtype)
self.assertDType(knp.TrueDivide().symbolic_call(x1, x2), expected_dtype)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_trunc(self, dtype):
x = knp.ones((1, 1), dtype=dtype)
# TODO: jax <= 0.30.0 doesn't preserve the original dtype.
expected_dtype = dtype or backend.floatx()
self.assertEqual(standardize_dtype(knp.trunc(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Trunc().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_trapezoid(self, dtype):
import jax.numpy as jnp
x = knp.ones((2,), dtype=dtype)
x_jax = jnp.ones((2,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.trapezoid(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.trapezoid(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.Trapezoid().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_var(self, dtype):
import jax.numpy as jnp
x = knp.ones((2,), dtype=dtype)
x_jax = jnp.ones((2,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.var(x_jax).dtype)
if dtype == "int64":
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.var(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Var().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_vdot(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.vdot(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.vdot(x1, x2).dtype), expected_dtype
)
self.assertEqual(knp.Vdot().symbolic_call(x1, x2).dtype, expected_dtype)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_inner(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.inner(x1_jax, x2_jax).dtype)
self.assertEqual(
standardize_dtype(knp.inner(x1, x2).dtype), expected_dtype
)
self.assertEqual(
knp.Inner().symbolic_call(x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_vstack(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
x1 = knp.ones((1,), dtype=dtype1)
x2 = knp.ones((1,), dtype=dtype2)
x1_jax = jnp.ones((1,), dtype=dtype1)
x2_jax = jnp.ones((1,), dtype=dtype2)
expected_dtype = standardize_dtype(jnp.vstack([x1_jax, x2_jax]).dtype)
self.assertEqual(
standardize_dtype(knp.vstack([x1, x2]).dtype), expected_dtype
)
self.assertEqual(
knp.Vstack().symbolic_call([x1, x2]).dtype, expected_dtype
)
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(ALL_DTYPES, 2))
)
def test_where(self, dtypes):
import jax.numpy as jnp
dtype1, dtype2 = dtypes
condition = knp.ones((10,), dtype="bool")
x1 = knp.ones((10,), dtype=dtype1)
x2 = knp.ones((10,), dtype=dtype2)
condition_jax = jnp.ones((10,), dtype="bool")
x1_jax = jnp.ones((10,), dtype=dtype1)
x2_jax = jnp.ones((10,), dtype=dtype2)
expected_dtype = standardize_dtype(
jnp.where(condition_jax, x1_jax, x2_jax).dtype
)
self.assertEqual(
standardize_dtype(knp.where(condition, x1, x2).dtype),
expected_dtype,
)
self.assertEqual(
knp.Where().symbolic_call(condition, x1, x2).dtype, expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_where_python_types(self, dtype):
import jax.numpy as jnp
condition = knp.ones((10,), dtype="bool")
x = knp.ones((10,), dtype=dtype)
condition_jax = jnp.ones((10,), dtype="bool")
x_jax = jnp.ones((10,), dtype=dtype)
# python int
expected_dtype = standardize_dtype(
jnp.where(condition_jax, x_jax, 1).dtype
)
self.assertDType(knp.where(condition, x, 1), expected_dtype)
self.assertDType(
knp.Where().symbolic_call(condition, x, 1), expected_dtype
)
# python float
expected_dtype = standardize_dtype(
jnp.where(condition_jax, x_jax, 1.0).dtype
)
self.assertDType(knp.where(condition, x, 1.0), expected_dtype)
self.assertDType(
knp.Where().symbolic_call(condition, x, 1.0), expected_dtype
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_zeros_like(self, dtype):
import jax.numpy as jnp
x = knp.ones((), dtype=dtype)
x_jax = jnp.ones((), dtype=dtype)
expected_dtype = standardize_dtype(jnp.ones_like(x_jax).dtype)
self.assertEqual(
standardize_dtype(knp.zeros_like(x).dtype), expected_dtype
)
self.assertEqual(
standardize_dtype(knp.ZerosLike().symbolic_call(x).dtype),
expected_dtype,
)
@parameterized.named_parameters(named_product(dtype=ALL_DTYPES))
def test_angle(self, dtype):
import jax.numpy as jnp
if dtype == "bfloat16":
self.skipTest("Weirdness with numpy")
x = knp.ones((1,), dtype=dtype)
x_jax = jnp.ones((1,), dtype=dtype)
expected_dtype = standardize_dtype(jnp.angle(x_jax).dtype)
if dtype == "bool" or is_int_dtype(dtype):
expected_dtype = backend.floatx()
self.assertEqual(standardize_dtype(knp.angle(x).dtype), expected_dtype)
self.assertEqual(
standardize_dtype(knp.Angle().symbolic_call(x).dtype),
expected_dtype,
)
VIEW_DTYPES = [x for x in ALL_DTYPES if x != "bool" and x is not None]
@parameterized.named_parameters(
named_product(dtypes=itertools.combinations(VIEW_DTYPES, 2))
)
def test_view(self, dtypes):
import jax.numpy as jnp
input_dtype, output_dtype = dtypes
x = knp.ones((2, 8), dtype=input_dtype)
x_jax = jnp.ones((2, 8), dtype=input_dtype)
keras_output = knp.view(x, output_dtype)
symbolic_output = knp.View(output_dtype).symbolic_call(x)
expected_output = x_jax.view(output_dtype)
self.assertEqual(
standardize_dtype(keras_output.dtype),
standardize_dtype(expected_output.dtype),
)
self.assertEqual(
keras_output.shape,
expected_output.shape,
)
self.assertEqual(
standardize_dtype(symbolic_output.dtype),
standardize_dtype(expected_output.dtype),
)
@pytest.mark.skipif(
testing.torch_uses_gpu(),
reason="histogram op not implemented for torch on gpu",
)
| NumpyDtypeTest |
python | imageio__imageio | imageio/plugins/_swf.py | {
"start": 11283,
"end": 12990
} | class ____(DefinitionTag):
def __init__(self, im):
DefinitionTag.__init__(self)
self.tagtype = 36 # DefineBitsLossless2
# convert image (note that format is ARGB)
# even a grayscale image is stored in ARGB, nevertheless,
# the fabilous deflate compression will make it that not much
# more data is required for storing (25% or so, and less than 10%
# when storing RGB as ARGB).
if len(im.shape) == 3:
if im.shape[2] in [3, 4]:
tmp = np.ones((im.shape[0], im.shape[1], 4), dtype=np.uint8) * 255
for i in range(3):
tmp[:, :, i + 1] = im[:, :, i]
if im.shape[2] == 4:
tmp[:, :, 0] = im[:, :, 3] # swap channel where alpha is
else: # pragma: no cover
raise ValueError("Invalid shape to be an image.")
elif len(im.shape) == 2:
tmp = np.ones((im.shape[0], im.shape[1], 4), dtype=np.uint8) * 255
for i in range(3):
tmp[:, :, i + 1] = im[:, :]
else: # pragma: no cover
raise ValueError("Invalid shape to be an image.")
# we changed the image to uint8 4 channels.
# now compress!
self._data = zlib.compress(tmp.tobytes(), zlib.DEFLATED)
self.imshape = im.shape
def process_tag(self):
# build tag
bb = bytes()
bb += int2uint16(self.id) # CharacterID
bb += int2uint8(5) # BitmapFormat
bb += int2uint16(self.imshape[1]) # BitmapWidth
bb += int2uint16(self.imshape[0]) # BitmapHeight
bb += self._data # ZlibBitmapData
self.bytes = bb
| BitmapTag |
python | dagster-io__dagster | python_modules/dagster/dagster/_core/execution/plan/step.py | {
"start": 1130,
"end": 1690
} | class ____(Enum):
COMPUTE = "COMPUTE"
UNRESOLVED_MAPPED = "UNRESOLVED_MAPPED"
UNRESOLVED_COLLECT = "UNRESOLVED_COLLECT"
def is_executable_step(
step: Union["ExecutionStep", "UnresolvedMappedExecutionStep"],
) -> TypeGuard["ExecutionStep"]:
# This function is set up defensively to ensure new step types handled properly
if isinstance(step, ExecutionStep):
return True
elif isinstance(step, UnresolvedMappedExecutionStep):
return False
else:
check.failed(f"Unexpected execution step type {step}")
| StepKind |
python | kamyu104__LeetCode-Solutions | Python/domino-and-tromino-tiling.py | {
"start": 51,
"end": 1034
} | class ____(object):
def numTilings(self, N):
"""
:type N: int
:rtype: int
"""
M = int(1e9+7)
def matrix_expo(A, K):
result = [[int(i==j) for j in xrange(len(A))] \
for i in xrange(len(A))]
while K:
if K % 2:
result = matrix_mult(result, A)
A = matrix_mult(A, A)
K /= 2
return result
def matrix_mult(A, B):
ZB = zip(*B)
return [[sum(a*b for a, b in itertools.izip(row, col)) % M \
for col in ZB] for row in A]
T = [[1, 0, 0, 1], # #(|) = #(|) + #(=)
[1, 0, 1, 0], # #(「) = #(|) + #(L)
[1, 1, 0, 0], # #(L) = #(|) + #(「)
[1, 1, 1, 0]] # #(=) = #(|) + #(「) + #(L)
return matrix_mult([[1, 0, 0, 0]], matrix_expo(T, N))[0][0] # [a0, a(-1), a(-2), a(-3)] * T^N
# Time: O(n)
# Space: O(1)
| Solution |
python | sqlalchemy__sqlalchemy | test/dialect/postgresql/test_types.py | {
"start": 224692,
"end": 224820
} | class ____(suite.JSONLegacyStringCastIndexTest):
__requires__ = ("postgresql_jsonb",)
datatype = JSONB
| JSONBCastSuiteTest |
python | doocs__leetcode | solution/1900-1999/1969.Minimum Non-Zero Product of the Array Elements/Solution.py | {
"start": 0,
"end": 159
} | class ____:
def minNonZeroProduct(self, p: int) -> int:
mod = 10**9 + 7
return (2**p - 1) * pow(2**p - 2, 2 ** (p - 1) - 1, mod) % mod
| Solution |
python | realpython__materials | python-parallel-processing/07_image_processing/image_processing_bonus.py | {
"start": 199,
"end": 316
} | class ____(enum.StrEnum):
PYTHON = "Python"
NUMPY = "NumPy"
PARALLEL = "Parallel (GIL-Free)"
| ProcessingMode |
python | tensorflow__tensorflow | tensorflow/python/util/tf_export.py | {
"start": 11397,
"end": 11783
} | class ____(Protocol):
def __call__(
self,
*v2: str,
v1: Optional[Sequence[str]] = None,
allow_multiple_exports: bool = True, # Deprecated, no-op
) -> api_export:
...
tf_export: ExportType = functools.partial(
api_export, api_name=TENSORFLOW_API_NAME
)
keras_export: ExportType = functools.partial(
api_export, api_name=KERAS_API_NAME
)
| ExportType |
python | django__django | tests/admin_views/models.py | {
"start": 26362,
"end": 26526
} | class ____(models.Model):
iname = models.CharField(max_length=20, unique=True)
recipes = models.ManyToManyField(Recipe, through="RecipeIngredient")
| Ingredient |
python | pytorch__pytorch | torch/_prims_common/__init__.py | {
"start": 71053,
"end": 71953
} | class ____:
@staticmethod
def get_torch_state_as_tuple(
fake_mode: AbstractContextManager[Any] = nullcontext(),
):
if not torch.cuda.is_available():
raise RuntimeError("CUDA not available")
with fake_mode:
seed = torch.tensor(torch.cuda.initial_seed())
offset = torch.tensor(torch.cuda._get_rng_state_offset())
return seed, offset
@staticmethod
def set_torch_state_tensor(seed, offset):
# Rng state is [64-bit seed, 64-bit offset]
seed_portion = seed.reshape([1]).view(torch.uint8)
offset_portion = offset.reshape([1]).view(torch.uint8)
new_state = torch.cat([seed_portion, offset_portion])
torch.cuda.set_rng_state(new_state)
@staticmethod
def set_new_offset(relative_offset):
torch.cuda._set_rng_state_offset(relative_offset.item())
| CUDARngStateHelper |
python | apache__airflow | providers/fab/src/airflow/providers/fab/auth_manager/schemas/role_and_permission_schema.py | {
"start": 2144,
"end": 2251
} | class ____(NamedTuple):
"""List of roles."""
roles: list[Role]
total_entries: int
| RoleCollection |
python | doocs__leetcode | solution/0600-0699/0616.Add Bold Tag in String/Solution.py | {
"start": 352,
"end": 1524
} | class ____:
def addBoldTag(self, s: str, words: List[str]) -> str:
trie = Trie()
for w in words:
trie.insert(w)
n = len(s)
pairs = []
for i in range(n):
node = trie
for j in range(i, n):
idx = ord(s[j])
if node.children[idx] is None:
break
node = node.children[idx]
if node.is_end:
pairs.append([i, j])
if not pairs:
return s
st, ed = pairs[0]
t = []
for a, b in pairs[1:]:
if ed + 1 < a:
t.append([st, ed])
st, ed = a, b
else:
ed = max(ed, b)
t.append([st, ed])
ans = []
i = j = 0
while i < n:
if j == len(t):
ans.append(s[i:])
break
st, ed = t[j]
if i < st:
ans.append(s[i:st])
ans.append('<b>')
ans.append(s[st : ed + 1])
ans.append('</b>')
j += 1
i = ed + 1
return ''.join(ans)
| Solution |
python | pytorch__pytorch | test/test_mps.py | {
"start": 3141,
"end": 6608
} | class ____:
def __init__(self, testcase, name=None):
self.name = testcase.id() if name is None else name
self.testcase = testcase
def __enter__(self):
# Performs a gc if required (required if any memory is held)
caching_allocator_mem_allocated = torch.mps.current_allocated_memory()
if caching_allocator_mem_allocated > 0:
gc.collect()
torch.mps.empty_cache()
# Acquires caching allocator and driver statistics before the test is run
self.caching_allocator_before = torch.mps.current_allocated_memory()
self.driver_before = torch.mps.driver_allocated_memory()
def __exit__(self, exc_type, exc_value, traceback):
# Don't check for leaks if an exception was thrown
if exc_type is not None:
return
# Compares caching allocator before/after statistics
# An increase in allocated memory is a discrepancy indicating a possible memory leak
discrepancy_detected = False
caching_allocator_mem_allocated = torch.mps.current_allocated_memory()
if caching_allocator_mem_allocated > self.caching_allocator_before:
discrepancy_detected = True
# Short-circuits if no discrepancy detected
if not discrepancy_detected:
return
# Validates the discrepancy persists after garbage collection and
# is confirmed by the driver API
gc.collect()
torch.mps.empty_cache()
discrepancy_detected = True
# Query memory multiple items to ensure leak was not transient
for _ in range(3):
caching_allocator_mem_allocated = torch.mps.current_allocated_memory()
driver_mem_allocated = torch.mps.driver_allocated_memory()
caching_allocator_discrepancy = False
driver_discrepancy = False
if caching_allocator_mem_allocated > self.caching_allocator_before:
caching_allocator_discrepancy = True
if driver_mem_allocated > self.driver_before:
driver_discrepancy = True
if not (caching_allocator_discrepancy or driver_discrepancy):
# Leak was false positive, exit loop
discrepancy_detected = False
break
if caching_allocator_discrepancy and not driver_discrepancy:
# Just raises a warning if the leak is not validated by the driver API
msg = ("MPS caching allocator reports a memory leak not "
f"verified by the driver API in {self.name}! "
f"Caching allocator allocated memory was {self.caching_allocator_before} "
f"and is now reported as {caching_allocator_mem_allocated}. "
f"MPS driver allocated memory was {self.driver_before} and is now {driver_mem_allocated}.")
warnings.warn(msg)
elif caching_allocator_discrepancy and driver_discrepancy:
# A caching allocator discrepancy validated by the driver API is a failure
msg = (f"MPS driver API confirmed a leak in {self.name}! "
f"Caching allocator allocated memory was {self.caching_allocator_before} "
f"and is now reported as {caching_allocator_mem_allocated}. "
f"MPS driver allocated memory was {self.driver_before} and is now {driver_mem_allocated}.")
raise RuntimeError(msg)
| MpsMemoryLeakCheck |
python | pytorch__pytorch | torch/distributed/checkpoint/planner.py | {
"start": 564,
"end": 653
} | class ____(Enum):
TENSOR = auto()
SHARD = auto()
BYTE_IO = auto()
| WriteItemType |
python | pypa__setuptools | setuptools/_distutils/command/build_clib.py | {
"start": 1023,
"end": 7777
} | class ____(Command):
description = "build C/C++ libraries used by Python extensions"
user_options: ClassVar[list[tuple[str, str, str]]] = [
('build-clib=', 'b', "directory to build C/C++ libraries to"),
('build-temp=', 't', "directory to put temporary build by-products"),
('debug', 'g', "compile with debugging information"),
('force', 'f', "forcibly build everything (ignore file timestamps)"),
('compiler=', 'c', "specify the compiler type"),
]
boolean_options: ClassVar[list[str]] = ['debug', 'force']
help_options: ClassVar[list[tuple[str, str | None, str, Callable[[], object]]]] = [
('help-compiler', None, "list available compilers", show_compilers),
]
def initialize_options(self):
self.build_clib = None
self.build_temp = None
# List of libraries to build
self.libraries = None
# Compilation options for all libraries
self.include_dirs = None
self.define = None
self.undef = None
self.debug = None
self.force = False
self.compiler = None
def finalize_options(self) -> None:
# This might be confusing: both build-clib and build-temp default
# to build-temp as defined by the "build" command. This is because
# I think that C libraries are really just temporary build
# by-products, at least from the point of view of building Python
# extensions -- but I want to keep my options open.
self.set_undefined_options(
'build',
('build_temp', 'build_clib'),
('build_temp', 'build_temp'),
('compiler', 'compiler'),
('debug', 'debug'),
('force', 'force'),
)
self.libraries = self.distribution.libraries
if self.libraries:
self.check_library_list(self.libraries)
if self.include_dirs is None:
self.include_dirs = self.distribution.include_dirs or []
if isinstance(self.include_dirs, str):
self.include_dirs = self.include_dirs.split(os.pathsep)
# XXX same as for build_ext -- what about 'self.define' and
# 'self.undef' ?
def run(self) -> None:
if not self.libraries:
return
self.compiler = new_compiler(
compiler=self.compiler, dry_run=self.dry_run, force=self.force
)
customize_compiler(self.compiler)
if self.include_dirs is not None:
self.compiler.set_include_dirs(self.include_dirs)
if self.define is not None:
# 'define' option is a list of (name,value) tuples
for name, value in self.define:
self.compiler.define_macro(name, value)
if self.undef is not None:
for macro in self.undef:
self.compiler.undefine_macro(macro)
self.build_libraries(self.libraries)
def check_library_list(self, libraries) -> None:
"""Ensure that the list of libraries is valid.
`library` is presumably provided as a command option 'libraries'.
This method checks that it is a list of 2-tuples, where the tuples
are (library_name, build_info_dict).
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.
"""
if not isinstance(libraries, list):
raise DistutilsSetupError("'libraries' option must be a list of tuples")
for lib in libraries:
if not isinstance(lib, tuple) and len(lib) != 2:
raise DistutilsSetupError("each element of 'libraries' must a 2-tuple")
name, build_info = lib
if not isinstance(name, str):
raise DistutilsSetupError(
"first element of each tuple in 'libraries' "
"must be a string (the library name)"
)
if '/' in name or (os.sep != '/' and os.sep in name):
raise DistutilsSetupError(
f"bad library name '{lib[0]}': may not contain directory separators"
)
if not isinstance(build_info, dict):
raise DistutilsSetupError(
"second element of each tuple in 'libraries' "
"must be a dictionary (build info)"
)
def get_library_names(self):
# Assume the library list is valid -- 'check_library_list()' is
# called from 'finalize_options()', so it should be!
if not self.libraries:
return None
lib_names = []
for lib_name, _build_info in self.libraries:
lib_names.append(lib_name)
return lib_names
def get_source_files(self):
self.check_library_list(self.libraries)
filenames = []
for lib_name, build_info in self.libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
f"in 'libraries' option (library '{lib_name}'), "
"'sources' must be present and must be "
"a list of source filenames"
)
filenames.extend(sources)
return filenames
def build_libraries(self, libraries) -> None:
for lib_name, build_info in libraries:
sources = build_info.get('sources')
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
f"in 'libraries' option (library '{lib_name}'), "
"'sources' must be present and must be "
"a list of source filenames"
)
sources = list(sources)
log.info("building '%s' library", lib_name)
# First, compile the source code to object files in the library
# directory. (This should probably change to putting object
# files in a temporary build directory.)
macros = build_info.get('macros')
include_dirs = build_info.get('include_dirs')
objects = self.compiler.compile(
sources,
output_dir=self.build_temp,
macros=macros,
include_dirs=include_dirs,
debug=self.debug,
)
# Now "link" the object files together into a static library.
# (On Unix at least, this isn't really linking -- it just
# builds an archive. Whatever.)
self.compiler.create_static_lib(
objects, lib_name, output_dir=self.build_clib, debug=self.debug
)
| build_clib |
python | allegroai__clearml | clearml/backend_api/services/v2_20/queues.py | {
"start": 8953,
"end": 16329
} | class ____(NonStrictDataModel):
"""
:param id: Queue id
:type id: str
:param name: Queue name
:type name: str
:param user: Associated user id
:type user: str
:param company: Company id
:type company: str
:param created: Queue creation time
:type created: datetime.datetime
:param tags: User-defined tags
:type tags: Sequence[str]
:param system_tags: System tags. This field is reserved for system use, please don't use it.
:type system_tags: Sequence[str]
:param entries: List of ordered queue entries
:type entries: Sequence[Entry]
:param metadata: Queue metadata
:type metadata: dict
"""
_schema = {
"definitions": {
"metadata_item": {
"properties": {
"key": {
"description": "The key uniquely identifying the metadata item inside the given entity",
"type": "string",
},
"type": {
"description": "The type of the metadata item",
"type": "string",
},
"value": {
"description": "The value stored in the metadata item",
"type": "string",
},
},
"type": "object",
}
},
"properties": {
"company": {"description": "Company id", "type": ["string", "null"]},
"created": {
"description": "Queue creation time",
"format": "date-time",
"type": ["string", "null"],
},
"entries": {
"description": "List of ordered queue entries",
"items": {"$ref": "#/definitions/entry"},
"type": ["array", "null"],
},
"id": {"description": "Queue id", "type": ["string", "null"]},
"metadata": {
"type": ["object", "null"],
"items": {"$ref": "#/definitions/metadata_item"},
"description": "Queue metadata",
},
"name": {"description": "Queue name", "type": ["string", "null"]},
"system_tags": {
"description": "System tags. This field is reserved for system use, please don't use it.",
"items": {"type": "string"},
"type": ["array", "null"],
},
"tags": {
"description": "User-defined tags",
"items": {"type": "string"},
"type": ["array", "null"],
},
"user": {"description": "Associated user id", "type": ["string", "null"]},
},
"type": "object",
}
def __init__(
self,
id: Optional[str] = None,
name: Optional[str] = None,
user: Optional[str] = None,
company: Optional[str] = None,
created: Optional[str] = None,
tags: Optional[List[str]] = None,
system_tags: Optional[List[str]] = None,
entries: Optional[List[Any]] = None,
metadata: Optional[dict] = None,
**kwargs: Any
) -> None:
super(Queue, self).__init__(**kwargs)
self.id = id
self.name = name
self.user = user
self.company = company
self.created = created
self.tags = tags
self.system_tags = system_tags
self.entries = entries
self.metadata = metadata
@schema_property("id")
def id(self) -> Optional[str]:
return self._property_id
@id.setter
def id(self, value: Optional[str]) -> None:
if value is None:
self._property_id = None
return
self.assert_isinstance(value, "id", six.string_types)
self._property_id = value
@schema_property("name")
def name(self) -> Optional[str]:
return self._property_name
@name.setter
def name(self, value: Optional[str]) -> None:
if value is None:
self._property_name = None
return
self.assert_isinstance(value, "name", six.string_types)
self._property_name = value
@schema_property("user")
def user(self) -> Optional[str]:
return self._property_user
@user.setter
def user(self, value: Optional[str]) -> None:
if value is None:
self._property_user = None
return
self.assert_isinstance(value, "user", six.string_types)
self._property_user = value
@schema_property("company")
def company(self) -> Optional[str]:
return self._property_company
@company.setter
def company(self, value: Optional[str]) -> None:
if value is None:
self._property_company = None
return
self.assert_isinstance(value, "company", six.string_types)
self._property_company = value
@schema_property("created")
def created(self) -> Optional[str]:
return self._property_created
@created.setter
def created(self, value: Optional[str]) -> None:
if value is None:
self._property_created = None
return
self.assert_isinstance(value, "created", six.string_types + (datetime,))
if not isinstance(value, datetime):
value = parse_datetime(value)
self._property_created = value
@schema_property("tags")
def tags(self) -> Optional[List[str]]:
return self._property_tags
@tags.setter
def tags(self, value: Optional[List[str]]) -> None:
if value is None:
self._property_tags = None
return
self.assert_isinstance(value, "tags", (list, tuple))
self.assert_isinstance(value, "tags", six.string_types, is_array=True)
self._property_tags = value
@schema_property("system_tags")
def system_tags(self) -> Optional[List[str]]:
return self._property_system_tags
@system_tags.setter
def system_tags(self, value: Optional[List[str]]) -> None:
if value is None:
self._property_system_tags = None
return
self.assert_isinstance(value, "system_tags", (list, tuple))
self.assert_isinstance(value, "system_tags", six.string_types, is_array=True)
self._property_system_tags = value
@schema_property("entries")
def entries(self) -> Optional[List[Any]]:
return self._property_entries
@entries.setter
def entries(self, value: Optional[List[Any]]) -> None:
if value is None:
self._property_entries = None
return
self.assert_isinstance(value, "entries", (list, tuple))
if any((isinstance(v, dict) for v in value)):
value = [Entry.from_dict(v) if isinstance(v, dict) else v for v in value]
else:
self.assert_isinstance(value, "entries", Entry, is_array=True)
self._property_entries = value
@schema_property("metadata")
def metadata(self) -> Optional[dict]:
return self._property_metadata
@metadata.setter
def metadata(self, value: Optional[dict]) -> None:
if value is None:
self._property_metadata = None
return
self.assert_isinstance(value, "metadata", (dict,))
self._property_metadata = value
| Queue |
python | django__django | django/utils/translation/__init__.py | {
"start": 723,
"end": 1242
} | class ____(SyntaxWarning):
pass
# Here be dragons, so a short explanation of the logic won't hurt:
# We are trying to solve two problems: (1) access settings, in particular
# settings.USE_I18N, as late as possible, so that modules can be imported
# without having to first configure Django, and (2) if some other code creates
# a reference to one of these functions, don't break that reference when we
# replace the functions with their real counterparts (once we do access the
# settings).
| TranslatorCommentWarning |
python | huggingface__transformers | tests/models/data2vec/test_modeling_data2vec_text.py | {
"start": 13981,
"end": 26182
} | class ____(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (
(
Data2VecTextForCausalLM,
Data2VecTextForMaskedLM,
Data2VecTextModel,
Data2VecTextForSequenceClassification,
Data2VecTextForTokenClassification,
Data2VecTextForMultipleChoice,
Data2VecTextForQuestionAnswering,
)
if is_torch_available()
else ()
)
pipeline_model_mapping = (
{
"feature-extraction": Data2VecTextModel,
"fill-mask": Data2VecTextForMaskedLM,
"question-answering": Data2VecTextForQuestionAnswering,
"text-classification": Data2VecTextForSequenceClassification,
"text-generation": Data2VecTextForCausalLM,
"token-classification": Data2VecTextForTokenClassification,
"zero-shot": Data2VecTextForSequenceClassification,
}
if is_torch_available()
else {}
)
model_split_percents = [0.5, 0.9]
# Overwriting to add `is_decoder` flag
def prepare_config_and_inputs_for_generate(self, batch_size=2):
config, inputs = super().prepare_config_and_inputs_for_generate(batch_size)
config.is_decoder = True
return config, inputs
def setUp(self):
self.model_tester = Data2VecTextModelTester(self)
self.config_tester = ConfigTester(self, config_class=Data2VecTextConfig, hidden_size=37)
def test_config(self):
self.config_tester.run_common_tests()
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
def test_model_as_decoder(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs_for_decoder()
self.model_tester.create_and_check_model_as_decoder(*config_and_inputs)
def test_model_as_decoder_with_default_input_mask(self):
(
config,
input_ids,
token_type_ids,
input_mask,
sequence_labels,
token_labels,
choice_labels,
encoder_hidden_states,
encoder_attention_mask,
) = self.model_tester.prepare_config_and_inputs_for_decoder()
input_mask = None
self.model_tester.create_and_check_model_as_decoder(
config,
input_ids,
token_type_ids,
input_mask,
sequence_labels,
token_labels,
choice_labels,
encoder_hidden_states,
encoder_attention_mask,
)
def test_for_causal_lm(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs_for_decoder()
self.model_tester.create_and_check_for_causal_lm(*config_and_inputs)
def test_decoder_model_past_with_large_inputs(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs_for_decoder()
self.model_tester.create_and_check_decoder_model_past_large_inputs(*config_and_inputs)
def test_for_masked_lm(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_for_masked_lm(*config_and_inputs)
def test_for_token_classification(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_for_token_classification(*config_and_inputs)
def test_for_multiple_choice(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_for_multiple_choice(*config_and_inputs)
def test_for_question_answering(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_for_question_answering(*config_and_inputs)
@slow
def test_model_from_pretrained(self):
model_name = "facebook/data2vec-text-base"
model = Data2VecTextModel.from_pretrained(model_name)
self.assertIsNotNone(model)
def test_create_position_ids_respects_padding_index(self):
"""This is a regression test for https://github.com/huggingface/transformers/issues/1761
The position ids should be masked with the embedding object's padding index. Therefore, the
first available non-padding position index is Data2VecTextForTextEmbeddings.padding_idx + 1
"""
config = self.model_tester.prepare_config_and_inputs()[0]
model = Data2VecTextEmbeddings(config=config)
input_ids = torch.as_tensor([[12, 31, 13, model.padding_idx]])
expected_positions = torch.as_tensor(
[[0 + model.padding_idx + 1, 1 + model.padding_idx + 1, 2 + model.padding_idx + 1, model.padding_idx]]
)
position_ids = Data2VecTextEmbeddings.create_position_ids_from_input_ids(input_ids, model.padding_idx)
self.assertEqual(position_ids.shape, expected_positions.shape)
self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))
def test_create_position_ids_from_inputs_embeds(self):
"""This is a regression test for https://github.com/huggingface/transformers/issues/1761
The position ids should be masked with the embedding object's padding index. Therefore, the
first available non-padding position index is Data2VecTextForTextEmbeddings.padding_idx + 1
"""
config = self.model_tester.prepare_config_and_inputs()[0]
embeddings = Data2VecTextEmbeddings(config=config)
inputs_embeds = torch.empty(2, 4, 30)
expected_single_positions = [
0 + embeddings.padding_idx + 1,
1 + embeddings.padding_idx + 1,
2 + embeddings.padding_idx + 1,
3 + embeddings.padding_idx + 1,
]
expected_positions = torch.as_tensor([expected_single_positions, expected_single_positions])
position_ids = embeddings.create_position_ids_from_inputs_embeds(inputs_embeds, embeddings.padding_idx)
self.assertEqual(position_ids.shape, expected_positions.shape)
self.assertTrue(torch.all(torch.eq(position_ids, expected_positions)))
def attention_mask_padding_matches_padding_free_with_position_ids(
self, attn_implementation: str, fa_kwargs: bool = False
):
"""
Overwritten to account for the embeddings that rely on position ids.
"""
if not self.has_attentions:
self.skipTest(reason="Model architecture does not support attentions")
max_new_tokens = 30
support_flag = {
"sdpa": "_supports_sdpa",
"flash_attention_2": "_supports_flash_attn",
"flash_attention_3": "_supports_flash_attn",
}
for model_class in self.all_generative_model_classes:
if attn_implementation != "eager" and not getattr(model_class, support_flag[attn_implementation]):
self.skipTest(f"{model_class.__name__} does not support {attn_implementation}")
# can't infer if new attn mask API is supported by assume that only model with attention backend support it
if not model_class._supports_attention_backend:
self.skipTest(f"{model_class.__name__} does not support new attention mask API")
if model_class._is_stateful: # non-transformer models most probably have no packing support
self.skipTest(f"{model_class.__name__} doesn't support packing!")
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
if config.is_encoder_decoder:
self.skipTest("Model is an encoder-decoder")
if 0 not in inputs_dict.get("attention_mask", []) or "attention_mask" not in inputs_dict:
self.skipTest("Model dummy inputs should contain padding in their attention mask")
if "input_ids" not in inputs_dict or inputs_dict["input_ids"].ndim != 2:
self.skipTest("Model dummy inputs should contain text input ids")
# make sure that all models have enough positions for generation
dummy_input_ids = inputs_dict["input_ids"]
if hasattr(config, "max_position_embeddings"):
config.max_position_embeddings = max_new_tokens + dummy_input_ids.shape[1] + 1
model = model_class(config)
if "position_ids" not in inspect.signature(model.forward).parameters:
self.skipTest("Model does not support position_ids")
if (not fa_kwargs) and "position_ids" not in inspect.signature(model.forward).parameters:
continue # this model doesn't accept position ids as input
with tempfile.TemporaryDirectory() as tmpdirname:
model.save_pretrained(tmpdirname)
# Drop all keys except for the minimal set. Hard to manipulate with multimodals etc
inputs_dict = {k: v for k, v in inputs_dict.items() if k in ["input_ids", "attention_mask"]}
# Ensure left padding, to adapt for some models
if 0 in inputs_dict["attention_mask"][:, -1]:
inputs_dict["attention_mask"] = inputs_dict["attention_mask"].flip(1)
dummy_attention_mask = inputs_dict["attention_mask"]
dummy_input_ids[~dummy_attention_mask.bool()] = config.get_text_config().pad_token_id
# Main difference to other models, we need to prepare position ids according to the attention mask
# as we use it to extract embeddings that rely on the correct position - naively increasing sequences do
# not suffice anymore atp. The solution here calculates an increasing sequences for all 1s and puts 0s else.
inputs_dict["position_ids"] = ((inputs_dict["attention_mask"] == 1).long().cumsum(dim=1) - 1) * (
inputs_dict["attention_mask"] == 1
).long()
model = (
model_class.from_pretrained(
tmpdirname,
dtype=torch.bfloat16,
attn_implementation=attn_implementation,
)
.to(torch_device)
.eval()
)
if fa_kwargs:
# flatten
features = [
{"input_ids": i[a.bool()].tolist()} for i, a in zip(dummy_input_ids, dummy_attention_mask)
]
# add position_ids + fa_kwargs
data_collator = DataCollatorWithFlattening(return_tensors="pt", return_flash_attn_kwargs=True)
batch = data_collator(features)
padfree_inputs_dict = {
k: t.to(torch_device) if torch.is_tensor(t) else t for k, t in batch.items()
}
else:
# create packed position_ids
position_ids = (
torch.cat([torch.arange(length) for length in dummy_attention_mask.sum(1).tolist()])
.long()
.unsqueeze(0)
.to(torch_device)
)
padfree_inputs_dict = {
"input_ids": dummy_input_ids[dummy_attention_mask.bool()].unsqueeze(0),
"position_ids": position_ids,
}
# We need to do simple forward without cache in order to trigger packed SDPA/flex/eager attention path
res_padded = model(**inputs_dict, use_cache=False)
res_padfree = model(**padfree_inputs_dict, use_cache=False)
logits_padded = res_padded.logits[dummy_attention_mask.bool()]
logits_padfree = res_padfree.logits[0]
# acceptable numerical instability
tol = torch.finfo(torch.bfloat16).eps
torch.testing.assert_close(logits_padded, logits_padfree, rtol=tol, atol=tol)
@require_torch
| Data2VecTextModelTest |
python | chardet__chardet | chardet/jpcntx.py | {
"start": 22465,
"end": 25312
} | class ____:
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
def __init__(self) -> None:
self._total_rel = 0
self._rel_sample: List[int] = []
self._need_to_skip_char_num = 0
self._last_char_order = -1
self._done = False
self.reset()
def reset(self) -> None:
self._total_rel = 0 # total sequence received
# category counters, each integer counts sequence in its category
self._rel_sample = [0] * self.NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._need_to_skip_char_num = 0
self._last_char_order = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._done = False
def feed(self, byte_str: Union[bytes, bytearray], num_bytes: int) -> None:
if self._done:
return
# The buffer we got is byte oriented, and a character may span in more than one
# buffers. In case the last one or two byte in last buffer is not
# complete, we record how many byte needed to complete that character
# and skip these bytes here. We can choose to record those bytes as
# well and analyse the character once it is complete, but since a
# character will not make much difference, by simply skipping
# this character will simply our logic and improve performance.
i = self._need_to_skip_char_num
while i < num_bytes:
order, char_len = self.get_order(byte_str[i : i + 2])
i += char_len
if i > num_bytes:
self._need_to_skip_char_num = i - num_bytes
self._last_char_order = -1
else:
if (order != -1) and (self._last_char_order != -1):
self._total_rel += 1
if self._total_rel > self.MAX_REL_THRESHOLD:
self._done = True
break
self._rel_sample[
jp2_char_context[self._last_char_order][order]
] += 1
self._last_char_order = order
def got_enough_data(self) -> bool:
return self._total_rel > self.ENOUGH_REL_THRESHOLD
def get_confidence(self) -> float:
# This is just one way to calculate confidence. It works well for me.
if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
return (self._total_rel - self._rel_sample[0]) / self._total_rel
return self.DONT_KNOW
def get_order(self, _: Union[bytes, bytearray]) -> Tuple[int, int]:
return -1, 1
| JapaneseContextAnalysis |
python | spyder-ide__spyder | spyder/plugins/remoteclient/widgets/container.py | {
"start": 853,
"end": 6818
} | class ____(PluginMainContainer):
sig_start_server_requested = Signal(str)
"""
This signal is used to request starting a remote server.
Parameters
----------
id: str
Id of the server that will be started.
"""
sig_stop_server_requested = Signal(str)
"""
This signal is used to request stopping a remote server.
Parameters
----------
id: str
Id of the server that will be stopped.
"""
sig_server_renamed = Signal(str)
"""
This signal is used to inform that a remote server was renamed.
Parameters
----------
id: str
Id of the server that was renamed.
"""
sig_connection_status_changed = Signal(dict)
"""
This signal is used to update the status of a given connection.
Parameters
----------
info: ConnectionInfo
Dictionary with the necessary info to update the status of a
connection.
"""
sig_server_changed = Signal()
"""
Signal that a remote server was deleted or added
"""
sig_server_updated = Signal(str)
"""
This signal is used to inform that a remote server was updated.
Parameters
----------
id: str
Id of the server that was updated.
"""
sig_client_message_logged = Signal(dict)
"""
This signal is used to inform that a client has logged a connection
message.
Parameters
----------
log: RemoteClientLog
Dictionary that contains the log message and its metadata.
"""
# ---- PluginMainContainer API
# -------------------------------------------------------------------------
def setup(self):
# Attributes
self.client_logs: dict[str, deque] = {}
self._connection_dialog = None
# Widgets
self.create_action(
RemoteClientActions.ManageConnections,
_("Manage remote connections"),
icon=self._plugin.get_icon(),
triggered=self._show_connection_dialog,
)
# Signals
self.sig_connection_status_changed.connect(
self._on_connection_status_changed
)
self.sig_client_message_logged.connect(self._on_client_message_logged)
def update_actions(self):
pass
# ---- Public API
# -------------------------------------------------------------------------
def on_server_version_mismatch(self, config_id, version: str):
"""
Actions to take when there's a mismatch between the
spyder-remote-services version installed in the server and the one
supported by Spyder.
"""
server_name = self._plugin.get_server_name(config_id)
QMessageBox.critical(
self,
_("Remote server error"),
_(
"The version of <tt>spyder-remote-services</tt> on the "
"remote host <b>{server}</b> (<b>{srs_version}</b>) is newer "
"than the latest Spyder supports (<b>{max_version}</b>)."
"<br><br>"
"Please update Spyder to be able to connect to this host."
).format(
server=server_name,
srs_version=version,
max_version=SPYDER_REMOTE_MAX_VERSION,
),
QMessageBox.Ok,
)
# ---- Private API
# -------------------------------------------------------------------------
def _show_connection_dialog(self):
def _dialog_finished(result_code):
"""Restore dialog instance variable."""
if PYSIDE2 or PYSIDE6:
self._connection_dialog.disconnect(None, None, None)
else:
self._connection_dialog.disconnect()
self._connection_dialog = None
if self._connection_dialog is None:
# Create dialog
self._connection_dialog = dlg = ConnectionDialog(self)
# Connect signals
dlg.sig_start_server_requested.connect(
self.sig_start_server_requested
)
dlg.sig_stop_server_requested.connect(
self.sig_stop_server_requested
)
dlg.sig_abort_connection_requested.connect(
self._plugin._abort_connection
)
dlg.sig_connections_changed.connect(self.sig_server_changed)
dlg.sig_server_renamed.connect(self.sig_server_renamed)
dlg.sig_server_updated.connect(self.sig_server_updated)
dlg.sig_create_env_requested.connect(
self._plugin.sig_create_env_requested
)
dlg.sig_import_env_requested.connect(
self._plugin.sig_import_env_requested
)
# Destroy dialog after it's closed
dlg.finished.connect(_dialog_finished)
# Show dialog
dlg.show()
else:
self._connection_dialog.show()
self._connection_dialog.activateWindow()
self._connection_dialog.raise_()
self._connection_dialog.setFocus()
def _on_connection_status_changed(self, info: ConnectionInfo):
"""Handle changes in connection status."""
host_id = info["id"]
status = info["status"]
message = info["message"]
# We need to save this info so that we can show the current status in
# the connection dialog when it's closed and opened again.
self.set_conf(f"{host_id}/status", status)
self.set_conf(f"{host_id}/status_message", message)
def _on_client_message_logged(self, message: dict):
"""Actions to take when a client message is logged."""
msg_id = message["id"]
# Create deque if not available
if not self.client_logs.get(msg_id):
self.client_logs[msg_id] = deque([], MAX_CLIENT_MESSAGES)
# Add message to deque
self.client_logs[msg_id].append(message)
| RemoteClientContainer |
python | getsentry__sentry | src/sentry/management/commands/create_sample_event.py | {
"start": 68,
"end": 1436
} | class ____(BaseCommand):
help = "Creates a sample event in Sentry (if applicable)"
def add_arguments(self, parser):
parser.add_argument(
"--project", dest="project", help="project ID or team-slug/project-slug"
),
parser.add_argument("--platform", dest="platform"),
def handle(self, **options):
from django.conf import settings
from sentry.models.project import Project
from sentry.utils.samples import create_sample_event
if not options["project"]:
project = Project.objects.get(id=settings.SENTRY_PROJECT)
else:
if options["project"].isdigit():
project = Project.objects.get(id=options["project"])
elif "/" in options["project"]:
t_slug, p_slug = options["project"].split("/", 1)
project = Project.objects.get(slug=p_slug, teams__slug=t_slug)
else:
raise CommandError(
"Project must be specified as team-slug/project-slug or a project id"
)
platform = options["platform"]
event = create_sample_event(project, platform)
if not event:
raise CommandError(f"Unable to create an event for platform {platform!r}")
self.stdout.write(f"Event created: {event.group.get_absolute_url()}")
| Command |
python | pikepdf__pikepdf | tests/test_object.py | {
"start": 14058,
"end": 17783
} | class ____:
@pytest.fixture(scope="function")
def abcxyz_stream(self):
with pikepdf.new() as pdf:
data = b'abcxyz'
stream = Stream(pdf, data)
yield stream
def test_stream_isinstance(self):
pdf = pikepdf.new()
stream = Stream(pdf, b'xyz')
assert isinstance(stream, Stream)
assert isinstance(stream, Object)
def test_stream_as_dict(self, abcxyz_stream):
stream = abcxyz_stream
assert Name.Length in stream
stream.TestAttrAccess = True
stream['/TestKeyAccess'] = True
stream[Name.TestKeyNameAccess] = True
assert len(stream.keys()) == 4 # Streams always have a /Length
assert all(
(v == len(stream.read_bytes()) or v == True) # noqa: E712
for k, v in stream.items()
)
assert stream.stream_dict.TestAttrAccess
assert stream.get(Name.MissingName, 3.14) == 3.14
assert {k for k in stream} == {
'/TestKeyAccess',
'/TestAttrAccess',
'/Length',
'/TestKeyNameAccess',
}
def test_stream_length_modify(self, abcxyz_stream):
stream = abcxyz_stream
with pytest.raises(KeyError):
stream.Length = 42
with pytest.raises(KeyError):
del stream.Length
def test_len_stream(self, abcxyz_stream):
with pytest.raises(TypeError):
len(abcxyz_stream) # pylint: disable=pointless-statement
assert len(abcxyz_stream.stream_dict) == 1
def test_stream_dict_oneshot(self):
pdf = pikepdf.new()
stream1 = Stream(pdf, b'12345', One=1, Two=2)
stream2 = Stream(pdf, b'67890', {'/Three': 3, '/Four': 4})
stream3 = pdf.make_stream(b'abcdef', One=1, Two=2)
assert stream1.One == 1
assert stream1.read_bytes() == b'12345'
assert stream2.Three == 3
assert stream3.One == 1
def test_stream_bad_params(self):
p = pikepdf.new()
with pytest.raises(TypeError, match='data'):
Stream(p)
def test_stream_no_dangling_stream_on_failure(self):
p = pikepdf.new()
num_objects = len(p.objects)
with pytest.raises(AttributeError):
Stream(p, b'3.14159', ['Not a mapping object'])
assert len(p.objects) == num_objects, "A dangling object was created"
def test_identical_streams_equal(self):
pdf = pikepdf.new()
stream1 = Stream(pdf, b'12345', One=1, Two=2)
stream2 = Stream(pdf, b'67890', {'/Three': 3, '/Four': 4})
assert stream1 == stream1
assert stream1 != stream2
def test_stream_data_equal(self):
pdf1 = pikepdf.new()
stream1 = Stream(pdf1, b'abc')
pdf2 = pikepdf.new()
stream2 = Stream(pdf2, b'abc')
stream21 = Stream(pdf2, b'abcdef')
assert stream1 == stream2
assert stream21 != stream2
stream2.stream_dict.SomeData = 1
assert stream2 != stream1
def test_stream_refcount(self, refcount, outpdf):
pdf = pikepdf.new()
stream = Stream(pdf, b'blahblah')
count = refcount(stream)
pdf.Root.SomeStream = stream
assert refcount(stream) == count
del stream
pdf.save(outpdf)
with pikepdf.open(outpdf) as pdf2:
assert pdf2.Root.SomeStream.read_bytes() == b'blahblah'
def test_stream_bool(self):
pdf = pikepdf.new()
assert bool(Stream(pdf, b'')) is False
stream = Stream(pdf, b'blahblah')
assert bool(stream) is True
@pytest.fixture
def sandwich(resources):
with Pdf.open(resources / 'sandwich.pdf') as pdf:
yield pdf
| TestStream |
python | wntrblm__nox | nox/_option_set.py | {
"start": 1701,
"end": 2930
} | class ____:
default_venv_backend: None | str = attrs.field(validator=av_opt_str)
download_python: None | Literal["auto", "never", "always"] = attrs.field(
default=None, validator=av.optional(av.in_(["auto", "never", "always"]))
)
envdir: None | str | os.PathLike[str] = attrs.field(validator=av_opt_path)
error_on_external_run: bool = attrs.field(validator=av_bool)
error_on_missing_interpreters: bool = attrs.field(validator=av_bool)
force_venv_backend: None | str = attrs.field(validator=av_opt_str)
keywords: None | str = attrs.field(validator=av_opt_str)
pythons: None | Sequence[str] = attrs.field(validator=av_opt_list_str)
report: None | str = attrs.field(validator=av_opt_str)
reuse_existing_virtualenvs: bool = attrs.field(validator=av_bool)
reuse_venv: None | Literal["no", "yes", "never", "always"] = attrs.field(
validator=av.optional(av.in_(["no", "yes", "never", "always"]))
)
sessions: None | Sequence[str] = attrs.field(validator=av_opt_list_str)
stop_on_first_error: bool = attrs.field(validator=av_bool)
tags: None | Sequence[str] = attrs.field(validator=av_opt_list_str)
verbose: bool = attrs.field(validator=av_bool)
| NoxOptions |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.