language
stringclasses
1 value
repo
stringclasses
346 values
path
stringlengths
6
201
class_span
dict
source
stringlengths
21
2.38M
target
stringlengths
1
96
python
PrefectHQ__prefect
tests/runner/test_runner.py
{ "start": 5869, "end": 7097 }
class ____: """ A mock storage class that simulates pulling code from a remote location. """ def __init__(self, base_path: Path, pull_code_spy: Union[MagicMock, None] = None): self._base_path = base_path self._pull_code_spy = pull_code_spy def set_base_path(self, path: Path): self._base_path = path code = dedent( """\ from prefect import flow @flow def test_flow(): return 1 """ ) @property def destination(self): return self._base_path @property def pull_interval(self): return 60 async def pull_code(self): if self._pull_code_spy: self._pull_code_spy() if self._base_path: with open(self._base_path / "flows.py", "w") as f: f.write(self.code) def to_pull_step(self): return {"prefect.fake.module": {}} @pytest.fixture def temp_storage() -> Generator[MockStorage, Any, None]: with tempfile.TemporaryDirectory() as temp_dir: yield MockStorage(base_path=Path(temp_dir)) @pytest.fixture def in_temporary_runner_directory(tmp_path: Path): with tmpchdir(str(tmp_path)): yield
MockStorage
python
pandas-dev__pandas
pandas/tests/series/test_arithmetic.py
{ "start": 27909, "end": 30347 }
class ____: def test_series_add_tz_mismatch_converts_to_utc(self): rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") perm = np.random.default_rng(2).permutation(100)[:90] ser1 = Series( np.random.default_rng(2).standard_normal(90), index=rng.take(perm).tz_convert("US/Eastern"), ) perm = np.random.default_rng(2).permutation(100)[:90] ser2 = Series( np.random.default_rng(2).standard_normal(90), index=rng.take(perm).tz_convert("Europe/Berlin"), ) result = ser1 + ser2 uts1 = ser1.tz_convert("utc") uts2 = ser2.tz_convert("utc") expected = uts1 + uts2 # sort since input indexes are not equal expected = expected.sort_index() assert result.index.tz is timezone.utc tm.assert_series_equal(result, expected) def test_series_add_aware_naive_raises(self): rng = date_range("1/1/2011", periods=10, freq="h") ser = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng) ser_utc = ser.tz_localize("utc") msg = "Cannot join tz-naive with tz-aware DatetimeIndex" with pytest.raises(Exception, match=msg): ser + ser_utc with pytest.raises(Exception, match=msg): ser_utc + ser # TODO: belongs in tests/arithmetic? def test_datetime_understood(self, unit): # Ensures it doesn't fail to create the right series # reported in issue#16726 series = Series(date_range("2012-01-01", periods=3, unit=unit)) offset = pd.offsets.DateOffset(days=6) result = series - offset exp_dti = pd.to_datetime(["2011-12-26", "2011-12-27", "2011-12-28"]).as_unit( unit ) expected = Series(exp_dti) tm.assert_series_equal(result, expected) def test_align_date_objects_with_datetimeindex(self): rng = date_range("1/1/2000", periods=20) ts = Series(np.random.default_rng(2).standard_normal(20), index=rng) ts_slice = ts[5:] ts2 = ts_slice.copy() ts2.index = [x.date() for x in ts2.index] result = ts + ts2 result2 = ts2 + ts expected = ts + ts[5:] expected.index = expected.index._with_freq(None) tm.assert_series_equal(result, expected) tm.assert_series_equal(result2, expected)
TestTimeSeriesArithmetic
python
keras-team__keras
keras/src/callbacks/backup_and_restore.py
{ "start": 200, "end": 9361 }
class ____(Callback): """Callback to back up and restore the training state. `BackupAndRestore` callback is intended to recover training from an interruption that has happened in the middle of a `Model.fit` execution, by backing up the training states in a temporary checkpoint file, at the end of each epoch. Each backup overwrites the previously written checkpoint file, so at any given time there is at most one such checkpoint file for backup/restoring purpose. If training restarts before completion, the training state (which includes the `Model` weights and epoch number) is restored to the most recently saved state at the beginning of a new `Model.fit` run. At the completion of a `Model.fit` run, the temporary checkpoint file is deleted. Note that the user is responsible to bring jobs back after the interruption. This callback is important for the backup and restore mechanism for fault tolerance purpose, and the model to be restored from a previous checkpoint is expected to be the same as the one used to back up. If user changes arguments passed to compile or fit, the checkpoint saved for fault tolerance can become invalid. Example: >>> class InterruptingCallback(keras.callbacks.Callback): ... def on_epoch_begin(self, epoch, logs=None): ... if epoch == 4: ... raise RuntimeError('Interrupting!') >>> callback = keras.callbacks.BackupAndRestore(backup_dir="/tmp/backup") >>> model = keras.models.Sequential([keras.layers.Dense(10)]) >>> model.compile(keras.optimizers.SGD(), loss='mse') >>> model.build(input_shape=(None, 20)) >>> try: ... model.fit(np.arange(100).reshape(5, 20), np.zeros(5), epochs=10, ... batch_size=1, callbacks=[callback, InterruptingCallback()], ... verbose=0) ... except: ... pass >>> history = model.fit(np.arange(100).reshape(5, 20), np.zeros(5), ... epochs=10, batch_size=1, callbacks=[callback], ... 
verbose=0) >>> # Only 6 more epochs are run, since first training got interrupted at >>> # zero-indexed epoch 4, second training will continue from 4 to 9. >>> len(history.history['loss']) >>> 6 Args: backup_dir: String, path of directory where to store the data needed to restore the model. The directory cannot be reused elsewhere to store other files, e.g. by the `BackupAndRestore` callback of another training run, or by another callback (e.g. `ModelCheckpoint`) of the same training run. save_freq: `"epoch"`, integer, or `False`. When set to `"epoch"` the callback saves the checkpoint at the end of each epoch. When set to an integer, the callback saves the checkpoint every `save_freq` batches. Set `save_freq=False` only if using preemption checkpointing (i.e. with `save_before_preemption=True`). double_checkpoint: Boolean. If enabled, `BackupAndRestore` callback will save 2 last training states (current and previous). After interruption if current state can't be loaded due to IO error (e.g. file corrupted) it will try to restore previous one. Such behaviour will consume twice more space on disk, but increase fault tolerance. Defaults to `False`. delete_checkpoint: Boolean. This `BackupAndRestore` callback works by saving a checkpoint to back up the training state. If `delete_checkpoint=True`, the checkpoint will be deleted after training is finished. Use `False` if you'd like to keep the checkpoint for future usage. Defaults to `True`. 
""" def __init__( self, backup_dir, save_freq="epoch", double_checkpoint=False, delete_checkpoint=True, ): super().__init__() self.save_freq = save_freq self.double_checkpoint = double_checkpoint self.delete_checkpoint = delete_checkpoint self._batches_seen_since_last_saving = 0 self._last_batch_seen = 0 self._current_epoch = 0 if not backup_dir: raise ValueError("Empty `backup_dir` argument passed") self.backup_dir = backup_dir self._weights_path = file_utils.join(backup_dir, "latest.weights.h5") self._training_metadata_path = file_utils.join( backup_dir, "training_metadata.json" ) self._prev_weights_path = f"{self._weights_path}.bkp" self._prev_training_metadata_path = ( f"{self._training_metadata_path}.bkp" ) if save_freq != "epoch" and not isinstance(save_freq, int): raise ValueError( "Invalid value for argument `save_freq`. " f"Received: save_freq={save_freq}. " "Expected either 'epoch' or an integer value." ) def on_train_begin(self, logs=None): try: self._load_model() except OSError as e: # Weights may be corrupted. Trying to load previous one. if not file_utils.exists(self._prev_weights_path): raise e file_utils.copy(self._prev_weights_path, self._weights_path) if file_utils.exists(self._prev_training_metadata_path): file_utils.copy( self._prev_training_metadata_path, self._training_metadata_path, ) elif file_utils.exists(self._training_metadata_path): file_utils.remove(self._training_metadata_path) self._load_model() def _load_model(self): """Get training state from temporary file and restore it.""" if not self.model.built: raise ValueError( "To use the BackupAndRestore callback, " "you model must be built before you call `fit()`. " f"Model {self.model} is unbuilt. You can build it " "beforehand by calling it on a batch of data." ) if file_utils.exists(self._weights_path): if ( self.model.optimizer is not None and not self.model.optimizer.built ): # Make sure optimizer weights exist before loading. 
self.model.optimizer.build(self.model.trainable_variables) self.model.load_weights(self._weights_path) if file_utils.exists(self._training_metadata_path): with file_utils.File(self._training_metadata_path, "r") as f: training_metadata = json.loads(f.read()) epoch = training_metadata["epoch"] self.model._initial_epoch = epoch def on_epoch_end(self, epoch, logs=None): self._current_epoch = epoch + 1 self._last_batch_seen = 0 if self.save_freq == "epoch": self._save_model() def on_train_batch_end(self, batch, logs=None): if self._should_save_on_batch(batch): self._save_model() def _save_model(self): """Saves the model. Args: epoch: the epoch this iteration is in. batch: the batch this iteration is in. `None` if the `save_freq` is set to `"epoch"`. logs: the `logs` dict passed in to `on_batch_end` or `on_epoch_end`. """ # Create host directory if it doesn't exist. if not file_utils.exists(self.backup_dir): file_utils.makedirs(self.backup_dir) if self.double_checkpoint and file_utils.exists(self._weights_path): file_utils.copy(self._weights_path, self._prev_weights_path) if self.double_checkpoint and file_utils.exists( self._training_metadata_path ): file_utils.copy( self._training_metadata_path, self._prev_training_metadata_path ) self.model.save_weights(filepath=self._weights_path, overwrite=True) with file_utils.File(self._training_metadata_path, "w") as f: training_metadata = { "epoch": self._current_epoch, "batch": self._last_batch_seen, } f.write(json.dumps(training_metadata)) def _should_save_on_batch(self, batch): """Handles batch-level saving logic, supports steps_per_execution.""" if self.save_freq == "epoch": return False if batch <= self._last_batch_seen: # New epoch. add_batches = batch + 1 # batches are zero-indexed. 
else: add_batches = batch - self._last_batch_seen self._batches_seen_since_last_saving += add_batches self._last_batch_seen = batch if self._batches_seen_since_last_saving >= self.save_freq: self._batches_seen_since_last_saving = 0 return True return False def on_train_end(self, logs=None): if self.delete_checkpoint and file_utils.exists(self.backup_dir): file_utils.rmtree(self.backup_dir)
BackupAndRestore
python
walkccc__LeetCode
solutions/373. Find K Pairs with Smallest Sums/373.py
{ "start": 0, "end": 508 }
class ____: def kSmallestPairs(self, nums1: list[int], nums2: list[int], k: int) -> list[list[int]]: minHeap = [] for i in range(min(k, len(nums1))): heapq.heappush(minHeap, (nums1[i] + nums2[0], i, 0)) ans = [] while minHeap and len(ans) < k: _, i, j = heapq.heappop(minHeap) ans.append([nums1[i], nums2[j]]) if j + 1 < len(nums2): heapq.heappush(minHeap, (nums1[i] + nums2[j + 1], i, j + 1)) return ans
Solution
python
numba__numba
numba/core/untyped_passes.py
{ "start": 9279, "end": 15151 }
class ____(FunctionPass): """ This pass will inline a function wrapped by the numba.jit decorator directly into the site of its call depending on the value set in the 'inline' kwarg to the decorator. This is an untyped pass. CFG simplification is performed at the end of the pass but no block level clean up is performed on the mutated IR (typing information is not available to do so). """ _name = "inline_inlinables" _DEBUG = False def __init__(self): FunctionPass.__init__(self) def run_pass(self, state): """Run inlining of inlinables """ if self._DEBUG: print('before inline'.center(80, '-')) print(state.func_ir.dump()) print(''.center(80, '-')) from numba.core.inline_closurecall import (InlineWorker, callee_ir_validator) inline_worker = InlineWorker(state.typingctx, state.targetctx, state.locals, state.pipeline, state.flags, validator=callee_ir_validator) modified = False # use a work list, look for call sites via `ir.Expr.op == call` and # then pass these to `self._do_work` to make decisions about inlining. 
work_list = list(state.func_ir.blocks.items()) while work_list: label, block = work_list.pop() for i, instr in enumerate(block.body): if isinstance(instr, ir.Assign): expr = instr.value if isinstance(expr, ir.Expr) and expr.op == 'call': if guard(self._do_work, state, work_list, block, i, expr, inline_worker): modified = True break # because block structure changed if modified: # clean up unconditional branches that appear due to inlined # functions introducing blocks cfg = compute_cfg_from_blocks(state.func_ir.blocks) for dead in cfg.dead_nodes(): del state.func_ir.blocks[dead] post_proc = postproc.PostProcessor(state.func_ir) post_proc.run() state.func_ir.blocks = simplify_CFG(state.func_ir.blocks) if self._DEBUG: print('after inline'.center(80, '-')) print(state.func_ir.dump()) print(''.center(80, '-')) return True def _do_work(self, state, work_list, block, i, expr, inline_worker): from numba.core.compiler import run_frontend from numba.core.cpu import InlineOptions # try and get a definition for the call, this isn't always possible as # it might be a eval(str)/part generated awaiting update etc. (parfors) to_inline = None try: to_inline = state.func_ir.get_definition(expr.func) except Exception: if self._DEBUG: print("Cannot find definition for %s" % expr.func) return False # do not handle closure inlining here, another pass deals with that. if getattr(to_inline, 'op', False) == 'make_function': return False # see if the definition is a "getattr", in which case walk the IR to # try and find the python function via the module from which it's # imported, this should all be encoded in the IR. if getattr(to_inline, 'op', False) == 'getattr': val = resolve_func_from_module(state.func_ir, to_inline) else: # This is likely a freevar or global # # NOTE: getattr 'value' on a call may fail if it's an ir.Expr as # getattr is overloaded to look in _kws. try: val = getattr(to_inline, 'value', False) except Exception: raise GuardException # if something was found... 
if val: # check it's dispatcher-like, the targetoptions attr holds the # kwargs supplied in the jit decorator and is where 'inline' will # be if it is present. topt = getattr(val, 'targetoptions', False) if topt: inline_type = topt.get('inline', None) # has 'inline' been specified? if inline_type is not None: inline_opt = InlineOptions(inline_type) # Could this be inlinable? if not inline_opt.is_never_inline: # yes, it could be inlinable do_inline = True pyfunc = val.py_func # Has it got an associated cost model? if inline_opt.has_cost_model: # yes, it has a cost model, use it to determine # whether to do the inline py_func_ir = run_frontend(pyfunc) do_inline = inline_type(expr, state.func_ir, py_func_ir) # if do_inline is True then inline! if do_inline: _, _, _, new_blocks = \ inline_worker.inline_function(state.func_ir, block, i, pyfunc,) if work_list is not None: for blk in new_blocks: work_list.append(blk) return True return False @register_pass(mutates_CFG=False, analysis_only=False)
InlineInlinables
python
coleifer__peewee
tests/mysql_ext.py
{ "start": 929, "end": 1112 }
class ____(TestModel): key = CharField(primary_key=True, max_length=100) data = JSONField() @requires_mysql @skip_if(mysql_connector is None, 'mysql-connector not installed')
KJ
python
apache__airflow
providers/amazon/src/airflow/providers/amazon/aws/triggers/eks.py
{ "start": 3551, "end": 9486 }
class ____(AwsBaseWaiterTrigger): """ Trigger for EksDeleteClusterOperator. The trigger will asynchronously wait for the cluster to be deleted. If there are any nodegroups or fargate profiles associated with the cluster, they will be deleted before the cluster is deleted. :param cluster_name: The name of the EKS cluster :param waiter_delay: The amount of time in seconds to wait between attempts. :param waiter_max_attempts: The maximum number of attempts to be made. :param aws_conn_id: The Airflow connection used for AWS credentials. :param region_name: Which AWS region the connection should use. If this is None or empty then the default boto3 behaviour is used. :param force_delete_compute: If True, any nodegroups or fargate profiles associated with the cluster will be deleted before the cluster is deleted. """ def __init__( self, cluster_name, waiter_delay: int, waiter_max_attempts: int, aws_conn_id: str | None, region_name: str | None, force_delete_compute: bool, ): self.cluster_name = cluster_name self.waiter_delay = waiter_delay self.waiter_max_attempts = waiter_max_attempts self.aws_conn_id = aws_conn_id self.region_name = region_name self.force_delete_compute = force_delete_compute def serialize(self) -> tuple[str, dict[str, Any]]: return ( self.__class__.__module__ + "." 
+ self.__class__.__qualname__, { "cluster_name": self.cluster_name, "waiter_delay": str(self.waiter_delay), "waiter_max_attempts": str(self.waiter_max_attempts), "aws_conn_id": self.aws_conn_id, "region_name": self.region_name, "force_delete_compute": self.force_delete_compute, }, ) def hook(self) -> AwsGenericHook: return EksHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name) async def run(self): async with await self.hook().get_async_conn() as client: waiter = client.get_waiter("cluster_deleted") if self.force_delete_compute: await self.delete_any_nodegroups(client=client) await self.delete_any_fargate_profiles(client=client) try: await client.delete_cluster(name=self.cluster_name) except ClientError as ex: if ex.response.get("Error").get("Code") == "ResourceNotFoundException": pass else: raise await async_wait( waiter=waiter, waiter_delay=int(self.waiter_delay), waiter_max_attempts=int(self.waiter_max_attempts), args={"name": self.cluster_name}, failure_message="Error deleting cluster", status_message="Status of cluster is", status_args=["cluster.status"], ) yield TriggerEvent({"status": "deleted"}) async def delete_any_nodegroups(self, client) -> None: """ Delete all EKS Nodegroups for a provided Amazon EKS Cluster. All the EKS Nodegroups are deleted simultaneously. We wait for all Nodegroups to be deleted before returning. 
""" nodegroups = await client.list_nodegroups(clusterName=self.cluster_name) if nodegroups.get("nodegroups", None): self.log.info("Deleting nodegroups") waiter = self.hook().get_waiter("all_nodegroups_deleted", deferrable=True, client=client) for group in nodegroups["nodegroups"]: await client.delete_nodegroup(clusterName=self.cluster_name, nodegroupName=group) await async_wait( waiter=waiter, waiter_delay=int(self.waiter_delay), waiter_max_attempts=int(self.waiter_max_attempts), args={"clusterName": self.cluster_name}, failure_message=f"Error deleting nodegroup for cluster {self.cluster_name}", status_message="Deleting nodegroups associated with the cluster", status_args=["nodegroups"], ) self.log.info("All nodegroups deleted") else: self.log.info("No nodegroups associated with cluster %s", self.cluster_name) async def delete_any_fargate_profiles(self, client) -> None: """ Delete all EKS Fargate profiles for a provided Amazon EKS Cluster. EKS Fargate profiles must be deleted one at a time, so we must wait for one to be deleted before sending the next delete command. """ fargate_profiles = await client.list_fargate_profiles(clusterName=self.cluster_name) if fargate_profiles.get("fargateProfileNames"): self.log.info("Waiting for Fargate profiles to delete. This will take some time.") for profile in fargate_profiles["fargateProfileNames"]: await client.delete_fargate_profile(clusterName=self.cluster_name, fargateProfileName=profile) await async_wait( waiter=client.get_waiter("fargate_profile_deleted"), waiter_delay=int(self.waiter_delay), waiter_max_attempts=int(self.waiter_max_attempts), args={"clusterName": self.cluster_name, "fargateProfileName": profile}, failure_message=f"Error deleting fargate profile for cluster {self.cluster_name}", status_message="Status of fargate profile is", status_args=["fargateProfile.status"], ) self.log.info("All Fargate profiles deleted") else: self.log.info("No Fargate profiles associated with cluster %s", self.cluster_name)
EksDeleteClusterTrigger
python
instagram__MonkeyType
tests/test_util.py
{ "start": 568, "end": 1270 }
class ____: def test_get_existing_name(self): obj = get_name_in_module( a_module_func.__module__, a_module_func.__qualname__) assert obj == a_module_func # Make sure we handle nested classes obj = get_name_in_module(Outer.__module__, Outer.Inner.f.__qualname__) assert obj == Outer.Inner.f def test_get_nonexistent_module(self): with pytest.raises(NameLookupError): get_name_in_module('xxx.dontexist', 'foo') def test_get_nonexistent_qualname(self): with pytest.raises(NameLookupError): get_name_in_module( a_module_func.__module__, 'Outer.xxx_i_dont_exist_xxx')
TestGetNameInModule
python
django__django
tests/model_inheritance_regress/models.py
{ "start": 1897, "end": 2076 }
class ____(models.Model): headline = models.CharField(max_length=100) pub_date = models.DateTimeField() class Meta: ordering = ("-pub_date", "headline")
Article
python
apache__airflow
airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_auth.py
{ "start": 1157, "end": 1520 }
class ____: @pytest.fixture(autouse=True) def setup(self, test_client) -> None: auth_manager_mock = MagicMock() auth_manager_mock.get_url_login.return_value = AUTH_MANAGER_LOGIN_URL auth_manager_mock.get_url_logout.return_value = AUTH_MANAGER_LOGOUT_URL test_client.app.state.auth_manager = auth_manager_mock
TestAuthEndpoint
python
ansible__ansible
lib/ansible/galaxy/api.py
{ "start": 9295, "end": 10532 }
class ____: def __init__(self, namespace, name, version, download_url, artifact_sha256, dependencies, signatures_url, signatures): """ Contains common information about a collection on a Galaxy server to smooth through API differences for Collection and define a standard meta info for a collection. :param namespace: The namespace name. :param name: The collection name. :param version: The version that the metadata refers to. :param download_url: The URL to download the collection. :param artifact_sha256: The SHA256 of the collection artifact for later verification. :param dependencies: A dict of dependencies of the collection. :param signatures_url: The URL to the specific version of the collection. :param signatures: The list of signatures found at the signatures_url. """ self.namespace = namespace self.name = name self.version = version self.download_url = download_url self.artifact_sha256 = artifact_sha256 self.dependencies = dependencies self.signatures_url = signatures_url self.signatures = signatures @functools.total_ordering
CollectionVersionMetadata
python
jmcnamara__XlsxWriter
xlsxwriter/test/comparison/test_chart_layout06.py
{ "start": 315, "end": 1632 }
class ____(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.set_filename("chart_layout06.xlsx") def test_create_file(self): """Test the creation of an XlsxWriter file with user defined layout.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({"type": "column"}) chart.axis_ids = [43496576, 45486080] data = [ [1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15], ] worksheet.write_column("A1", data[0]) worksheet.write_column("B1", data[1]) worksheet.write_column("C1", data[2]) chart.add_series({"values": "=Sheet1!$A$1:$A$5"}) chart.add_series({"values": "=Sheet1!$B$1:$B$5"}) chart.add_series({"values": "=Sheet1!$C$1:$C$5"}) chart.set_title( { "name": "Title", "overlay": 1, "layout": { "x": 0.42354155730533688, "y": 0.16203703703703703, }, } ) worksheet.insert_chart("E9", chart) workbook.close() self.assertExcelEqual()
TestCompareXLSXFiles
python
microsoft__pyright
packages/pyright-internal/src/tests/samples/typedDict2.py
{ "start": 103, "end": 170 }
class ____(TypedDict, total=False): name: str year: int
Movie
python
python-excel__xlwt
xlwt/BIFFRecords.py
{ "start": 5654, "end": 6703 }
class ____(object): _rec_data = b'' # class attribute; child classes need to set this. def get_rec_header(self): return pack('<2H', self._REC_ID, len(self._rec_data)) # Not over-ridden by any child classes, never called (except by "get"; see below). # def get_rec_data(self): # return self._rec_data def get(self): # data = self.get_rec_data() data = self._rec_data if len(data) > 0x2020: # limit for BIFF7/8 chunks = [] pos = 0 while pos < len(data): chunk_pos = pos + 0x2020 chunk = data[pos:chunk_pos] chunks.append(chunk) pos = chunk_pos continues = pack('<2H', self._REC_ID, len(chunks[0])) + chunks[0] for chunk in chunks[1:]: continues += pack('<2H%ds'%len(chunk), 0x003C, len(chunk), chunk) # 0x003C -- CONTINUE record id return continues else: return self.get_rec_header() + data
BiffRecord
python
huggingface__transformers
src/transformers/models/rt_detr/modular_rt_detr.py
{ "start": 3122, "end": 13783 }
class ____(DetrImageProcessorFast): resample = PILImageResampling.BILINEAR image_mean = IMAGENET_DEFAULT_MEAN image_std = IMAGENET_DEFAULT_STD format = AnnotationFormat.COCO_DETECTION do_convert_annotations = True do_resize = True do_rescale = True do_normalize = False do_pad = False size = {"height": 640, "width": 640} default_to_square = False model_input_names = ["pixel_values", "pixel_mask"] valid_kwargs = RTDetrImageProcessorKwargs def __init__(self, **kwargs: Unpack[RTDetrImageProcessorKwargs]) -> None: # Backwards compatibility do_convert_annotations = kwargs.get("do_convert_annotations") do_normalize = kwargs.get("do_normalize") if do_convert_annotations is None and getattr(self, "do_convert_annotations", None) is None: self.do_convert_annotations = do_normalize if do_normalize is not None else self.do_normalize BaseImageProcessorFast.__init__(self, **kwargs) def prepare_annotation( self, image: torch.Tensor, target: dict, format: Optional[AnnotationFormat] = None, return_segmentation_masks: Optional[bool] = None, masks_path: Optional[Union[str, pathlib.Path]] = None, input_data_format: Optional[Union[str, ChannelDimension]] = None, ) -> dict: format = format if format is not None else self.format if format == AnnotationFormat.COCO_DETECTION: return_segmentation_masks = False if return_segmentation_masks is None else return_segmentation_masks target = prepare_coco_detection_annotation( image, target, return_segmentation_masks, input_data_format=input_data_format ) else: raise ValueError(f"Format {format} is not supported.") return target def _preprocess( self, images: list["torch.Tensor"], annotations: Optional[Union[AnnotationType, list[AnnotationType]]], masks_path: Optional[Union[str, pathlib.Path]], return_segmentation_masks: bool, do_resize: bool, size: SizeDict, interpolation: Optional["F.InterpolationMode"], do_rescale: bool, rescale_factor: float, do_normalize: bool, do_convert_annotations: bool, image_mean: Optional[Union[float, list[float]]], 
image_std: Optional[Union[float, list[float]]], do_pad: bool, pad_size: Optional[SizeDict], format: Optional[Union[str, AnnotationFormat]], return_tensors: Optional[Union[str, TensorType]], **kwargs, ) -> BatchFeature: """ Preprocess an image or a batch of images so that it can be used by the model. """ if annotations is not None and isinstance(annotations, dict): annotations = [annotations] if annotations is not None and len(images) != len(annotations): raise ValueError( f"The number of images ({len(images)}) and annotations ({len(annotations)}) do not match." ) format = AnnotationFormat(format) if annotations is not None: validate_annotations(format, SUPPORTED_ANNOTATION_FORMATS, annotations) data = {} processed_images = [] processed_annotations = [] pixel_masks = [] # Initialize pixel_masks here for image, annotation in zip(images, annotations if annotations is not None else [None] * len(images)): # prepare (COCO annotations as a list of Dict -> DETR target as a single Dict per image) if annotations is not None: annotation = self.prepare_annotation( image, annotation, format, return_segmentation_masks=return_segmentation_masks, masks_path=masks_path, input_data_format=ChannelDimension.FIRST, ) if do_resize: resized_image = self.resize(image, size=size, interpolation=interpolation) if annotations is not None: annotation = self.resize_annotation( annotation, orig_size=image.size()[-2:], target_size=resized_image.size()[-2:], ) image = resized_image # Fused rescale and normalize image = self.rescale_and_normalize(image, do_rescale, rescale_factor, do_normalize, image_mean, image_std) if do_convert_annotations and annotations is not None: annotation = self.normalize_annotation(annotation, get_image_size(image, ChannelDimension.FIRST)) processed_images.append(image) processed_annotations.append(annotation) images = processed_images annotations = processed_annotations if annotations is not None else None if do_pad: # depends on all resized image shapes so we need 
another loop if pad_size is not None: padded_size = (pad_size.height, pad_size.width) else: padded_size = get_max_height_width(images) padded_images = [] padded_annotations = [] for image, annotation in zip(images, annotations if annotations is not None else [None] * len(images)): # Pads images and returns their mask: {'pixel_values': ..., 'pixel_mask': ...} if padded_size == image.size()[-2:]: padded_images.append(image) pixel_masks.append(torch.ones(padded_size, dtype=torch.int64, device=image.device)) padded_annotations.append(annotation) continue image, pixel_mask, annotation = self.pad( image, padded_size, annotation=annotation, update_bboxes=do_convert_annotations ) padded_images.append(image) padded_annotations.append(annotation) pixel_masks.append(pixel_mask) images = padded_images annotations = padded_annotations if annotations is not None else None data.update({"pixel_mask": torch.stack(pixel_masks, dim=0)}) data.update({"pixel_values": torch.stack(images, dim=0)}) encoded_inputs = BatchFeature(data, tensor_type=return_tensors) if annotations is not None: encoded_inputs["labels"] = [ BatchFeature(annotation, tensor_type=return_tensors) for annotation in annotations ] return encoded_inputs def post_process_object_detection( self, outputs, threshold: float = 0.5, target_sizes: Union[TensorType, list[tuple]] = None, use_focal_loss: bool = True, ): """ Converts the raw output of [`DetrForObjectDetection`] into final bounding boxes in (top_left_x, top_left_y, bottom_right_x, bottom_right_y) format. Only supports PyTorch. Args: outputs ([`DetrObjectDetectionOutput`]): Raw outputs of the model. threshold (`float`, *optional*, defaults to 0.5): Score threshold to keep object detection predictions. target_sizes (`torch.Tensor` or `list[tuple[int, int]]`, *optional*): Tensor of shape `(batch_size, 2)` or list of tuples (`tuple[int, int]`) containing the target size `(height, width)` of each image in the batch. If unset, predictions will not be resized. 
use_focal_loss (`bool` defaults to `True`): Variable informing if the focal loss was used to predict the outputs. If `True`, a sigmoid is applied to compute the scores of each detection, otherwise, a softmax function is used. Returns: `list[Dict]`: A list of dictionaries, each dictionary containing the scores, labels and boxes for an image in the batch as predicted by the model. """ requires_backends(self, ["torch"]) out_logits, out_bbox = outputs.logits, outputs.pred_boxes # convert from relative cxcywh to absolute xyxy boxes = center_to_corners_format(out_bbox) if target_sizes is not None: if len(out_logits) != len(target_sizes): raise ValueError( "Make sure that you pass in as many target sizes as the batch dimension of the logits" ) if isinstance(target_sizes, list): img_h, img_w = torch.as_tensor(target_sizes).unbind(1) else: img_h, img_w = target_sizes.unbind(1) scale_fct = torch.stack([img_w, img_h, img_w, img_h], dim=1).to(boxes.device) boxes = boxes * scale_fct[:, None, :] num_top_queries = out_logits.shape[1] num_classes = out_logits.shape[2] if use_focal_loss: scores = torch.nn.functional.sigmoid(out_logits) scores, index = torch.topk(scores.flatten(1), num_top_queries, axis=-1) labels = index % num_classes index = index // num_classes boxes = boxes.gather(dim=1, index=index.unsqueeze(-1).repeat(1, 1, boxes.shape[-1])) else: scores = torch.nn.functional.softmax(out_logits)[:, :, :-1] scores, labels = scores.max(dim=-1) if scores.shape[1] > num_top_queries: scores, index = torch.topk(scores, num_top_queries, dim=-1) labels = torch.gather(labels, dim=1, index=index) boxes = torch.gather(boxes, dim=1, index=index.unsqueeze(-1).tile(1, 1, boxes.shape[-1])) results = [] for score, label, box in zip(scores, labels, boxes): results.append( { "scores": score[score > threshold], "labels": label[score > threshold], "boxes": box[score > threshold], } ) return results def post_process_instance_segmentation(self): raise NotImplementedError("Segmentation 
post-processing is not implemented for RT-DETR yet.") def post_process_semantic_segmentation(self): raise NotImplementedError("Semantic segmentation post-processing is not implemented for RT-DETR yet.") def post_process_panoptic_segmentation(self): raise NotImplementedError("Panoptic segmentation post-processing is not implemented for RT-DETR yet.") __all__ = ["RTDetrImageProcessorFast"]
RTDetrImageProcessorFast
python
django__django
tests/utils_tests/test_lazyobject.py
{ "start": 221, "end": 378 }
class ____: """ A simple class with just one attribute. """ foo = "bar" def __eq__(self, other): return self.foo == other.foo
Foo
python
getsentry__sentry
tests/sentry/workflow_engine/processors/test_detector.py
{ "start": 15575, "end": 19044 }
class ____(BaseDetectorHandlerTest): def test_new(self) -> None: handler = self.build_handler() key = "test_key" assert handler.state_manager.get_state_data([key]) == { key: DetectorStateData( group_key=key, is_triggered=False, status=DetectorPriorityLevel.OK, dedupe_value=0, counter_updates={level: None for level in handler._thresholds}, ) } def test_existing(self) -> None: handler = self.build_handler() key = "test_key" state_data = DetectorStateData( group_key=key, is_triggered=True, status=DetectorPriorityLevel.OK, dedupe_value=10, counter_updates={ **{level: None for level in handler._thresholds}, DetectorPriorityLevel.HIGH: 1, }, ) handler.state_manager.enqueue_dedupe_update(state_data.group_key, state_data.dedupe_value) handler.state_manager.enqueue_counter_update( state_data.group_key, state_data.counter_updates ) handler.state_manager.enqueue_state_update( state_data.group_key, state_data.is_triggered, state_data.status ) handler.state_manager.commit_state_updates() assert handler.state_manager.get_state_data([key]) == {key: state_data} def test_multi(self) -> None: handler = self.build_handler() key_1 = "test_key_1" state_data_1 = DetectorStateData( group_key=key_1, is_triggered=True, status=DetectorPriorityLevel.OK, dedupe_value=100, counter_updates={ **{level: None for level in handler._thresholds}, DetectorPriorityLevel.OK: 5, }, ) handler.state_manager.enqueue_dedupe_update(key_1, state_data_1.dedupe_value) handler.state_manager.enqueue_counter_update(key_1, state_data_1.counter_updates) handler.state_manager.enqueue_state_update( key_1, state_data_1.is_triggered, state_data_1.status ) key_2 = "test_key_2" state_data_2 = DetectorStateData( group_key=key_2, is_triggered=True, status=DetectorPriorityLevel.OK, dedupe_value=10, counter_updates={ **{level: None for level in handler._thresholds}, DetectorPriorityLevel.HIGH: 5, }, ) handler.state_manager.enqueue_dedupe_update(key_2, state_data_2.dedupe_value) 
handler.state_manager.enqueue_counter_update(key_2, state_data_2.counter_updates) handler.state_manager.enqueue_state_update( key_2, state_data_2.is_triggered, state_data_2.status ) key_uncommitted = "test_key_uncommitted" state_data_uncommitted = DetectorStateData( group_key=key_uncommitted, is_triggered=False, status=DetectorPriorityLevel.OK, dedupe_value=0, counter_updates={level: None for level in handler._thresholds}, ) handler.state_manager.commit_state_updates() assert handler.state_manager.get_state_data([key_1, key_2, key_uncommitted]) == { key_1: state_data_1, key_2: state_data_2, key_uncommitted: state_data_uncommitted, }
TestGetStateData
python
getsentry__sentry-python
tests/test_monitor.py
{ "start": 122, "end": 258 }
class ____(Transport): def capture_envelope(self, _): pass def is_healthy(self): return True
HealthyTestTransport
python
getsentry__sentry
src/sentry/replays/migrations/0001_squashed_0005_drop_replay_index.py
{ "start": 223, "end": 2972 }
class ____(CheckedMigration): # This flag is used to mark that a migration shouldn't be automatically run in production. # This should only be used for operations where it's safe to run the migration after your # code has deployed. So this should not be used for most operations that alter the schema # of a table. # Here are some things that make sense to mark as post deployment: # - Large data migrations. Typically we want these to be run manually so that they can be # monitored and not block the deploy for a long period of time while they run. # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to # run this outside deployments so that we don't block them. Note that while adding an index # is a schema change, it's completely safe to run the operation after the code has deployed. # Once deployed, run these manually via: https://develop.sentry.dev/database-migrations/#migration-deployment is_post_deployment = True replaces = [ ("replays", "0001_init_replays"), ("replays", "0002_rename_to_segment_id"), ("replays", "0003_add_size_to_recording_segment"), ("replays", "0004_index_together"), ("replays", "0005_drop_replay_index"), ] initial = True checked = False # This is an initial migration and can take locks dependencies = [] operations = [ migrations.CreateModel( name="ReplayRecordingSegment", fields=[ ( "id", sentry.db.models.fields.bounded.BoundedBigAutoField( primary_key=True, serialize=False ), ), ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()), ("replay_id", models.CharField(db_index=True, max_length=32)), ("file_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)), ( "segment_id", sentry.db.models.fields.bounded.BoundedIntegerField(db_column="sequence_id"), ), ( "date_added", models.DateTimeField(db_index=True, default=django.utils.timezone.now), ), ("size", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)), ], options={ "db_table": 
"replays_replayrecordingsegment", "unique_together": { ("project_id", "replay_id", "file_id"), ("project_id", "replay_id", "segment_id"), }, }, ), ]
Migration
python
django__django
tests/model_forms/models.py
{ "start": 855, "end": 1164 }
class ____(models.Model): name = models.CharField(max_length=50, help_text="Use both first and last names.") archived = models.BooleanField(default=False, editable=False) objects = WriterManager() class Meta: ordering = ("name",) def __str__(self): return self.name
Writer
python
streamlit__streamlit
lib/streamlit/elements/widgets/slider.py
{ "start": 5298, "end": 5442 }
class ____(TypedDict): min_value: SliderScalar max_value: SliderScalar step: SliderStep format: str @dataclass
SliderDefaultValues
python
pytorch__pytorch
test/inductor/test_quantization.py
{ "start": 1200, "end": 1567 }
class ____(torch.nn.Module): def __init__(self): super().__init__() def forward(self, input, normalized_shape, weight, bias): x = torch.nn.functional.layer_norm( input=input, normalized_shape=normalized_shape, weight=weight, bias=bias, eps=1e-5, ) return x
LayernormNN
python
apache__airflow
providers/google/src/airflow/providers/google/cloud/links/dataform.py
{ "start": 1994, "end": 2209 }
class ____(BaseGoogleLink): """Helper class for constructing Dataform workspace link.""" name = "Dataform Workspace" key = "dataform_workspace" format_str = DATAFORM_WORKSPACE_LINK
DataformWorkspaceLink
python
getsentry__sentry
tests/sentry/api/endpoints/test_project_tagkey_details.py
{ "start": 336, "end": 1261 }
class ____(APITestCase, SnubaTestCase): def test_simple(self) -> None: project = self.create_project() def make_event(i: int) -> None: self.store_event( data={ "tags": {"foo": f"val{i}"}, "timestamp": before_now(seconds=1).isoformat(), }, project_id=project.id, ) for i in range(0, 16): make_event(i) self.login_as(user=self.user) url = reverse( "sentry-api-0-project-tagkey-details", kwargs={ "organization_id_or_slug": project.organization.slug, "project_id_or_slug": project.slug, "key": "foo", }, ) response = self.client.get(url) assert response.status_code == 200 assert response.data["uniqueValues"] == 16
ProjectTagKeyDetailsTest
python
tiangolo__fastapi
tests/test_custom_route_class.py
{ "start": 279, "end": 3131 }
class ____(APIRoute): x_type = "C" router_a = APIRouter(route_class=APIRouteA) router_b = APIRouter(route_class=APIRouteB) router_c = APIRouter(route_class=APIRouteC) @router_a.get("/") def get_a(): return {"msg": "A"} @router_b.get("/") def get_b(): return {"msg": "B"} @router_c.get("/") def get_c(): return {"msg": "C"} router_b.include_router(router=router_c, prefix="/c") router_a.include_router(router=router_b, prefix="/b") app.include_router(router=router_a, prefix="/a") client = TestClient(app) @pytest.mark.parametrize( "path,expected_status,expected_response", [ ("/a", 200, {"msg": "A"}), ("/a/b", 200, {"msg": "B"}), ("/a/b/c", 200, {"msg": "C"}), ], ) def test_get_path(path, expected_status, expected_response): response = client.get(path) assert response.status_code == expected_status assert response.json() == expected_response def test_route_classes(): routes = {} for r in app.router.routes: assert isinstance(r, Route) routes[r.path] = r assert getattr(routes["/a/"], "x_type") == "A" # noqa: B009 assert getattr(routes["/a/b/"], "x_type") == "B" # noqa: B009 assert getattr(routes["/a/b/c/"], "x_type") == "C" # noqa: B009 def test_openapi_schema(): response = client.get("/openapi.json") assert response.status_code == 200, response.text assert response.json() == { "openapi": "3.1.0", "info": {"title": "FastAPI", "version": "0.1.0"}, "paths": { "/a/": { "get": { "responses": { "200": { "description": "Successful Response", "content": {"application/json": {"schema": {}}}, } }, "summary": "Get A", "operationId": "get_a_a__get", } }, "/a/b/": { "get": { "responses": { "200": { "description": "Successful Response", "content": {"application/json": {"schema": {}}}, } }, "summary": "Get B", "operationId": "get_b_a_b__get", } }, "/a/b/c/": { "get": { "responses": { "200": { "description": "Successful Response", "content": {"application/json": {"schema": {}}}, } }, "summary": "Get C", "operationId": "get_c_a_b_c__get", } }, }, }
APIRouteC
python
getsentry__sentry
src/sentry/search/events/datasets/spans_metrics.py
{ "start": 842, "end": 898 }
class ____(TypedDict): scope: str column: str
Args
python
PrefectHQ__prefect
src/integrations/prefect-github/prefect_github/schemas/graphql_schema.py
{ "start": 143600, "end": 145290 }
class ____(sgqlc.types.Input): """ See source code for more info. """ __schema__ = graphql_schema __field_names__ = ( "source_id", "owner_id", "source_repository_url", "repository_name", "continue_on_error", "git_archive_url", "metadata_archive_url", "access_token", "github_pat", "skip_releases", "target_repo_visibility", "lock_source", "client_mutation_id", ) source_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="sourceId") owner_id = sgqlc.types.Field(sgqlc.types.non_null(ID), graphql_name="ownerId") source_repository_url = sgqlc.types.Field( sgqlc.types.non_null(URI), graphql_name="sourceRepositoryUrl" ) repository_name = sgqlc.types.Field( sgqlc.types.non_null(String), graphql_name="repositoryName" ) continue_on_error = sgqlc.types.Field(Boolean, graphql_name="continueOnError") git_archive_url = sgqlc.types.Field(String, graphql_name="gitArchiveUrl") metadata_archive_url = sgqlc.types.Field(String, graphql_name="metadataArchiveUrl") access_token = sgqlc.types.Field( sgqlc.types.non_null(String), graphql_name="accessToken" ) github_pat = sgqlc.types.Field(String, graphql_name="githubPat") skip_releases = sgqlc.types.Field(Boolean, graphql_name="skipReleases") target_repo_visibility = sgqlc.types.Field( String, graphql_name="targetRepoVisibility" ) lock_source = sgqlc.types.Field(Boolean, graphql_name="lockSource") client_mutation_id = sgqlc.types.Field(String, graphql_name="clientMutationId")
StartRepositoryMigrationInput
python
neetcode-gh__leetcode
python/0026-remove-duplicates-from-sorted-array.py
{ "start": 0, "end": 246 }
class ____: def removeDuplicates(self, nums: List[int]) -> int: L = 1 for R in range(1, len(nums)): if nums[R] != nums[R - 1]: nums[L] = nums[R] L += 1 return L
Solution
python
pytorch__pytorch
torch/_dynamo/variables/lists.py
{ "start": 49000, "end": 58652 }
class ____(TupleVariable): _nonvar_fields = { "tuple_cls", "dynamic_attributes", *TupleVariable._nonvar_fields, } def __init__( self, items: list[VariableTracker], tuple_cls: type, dynamic_attributes: Optional[dict[str, VariableTracker]] = None, **kwargs: Any, ) -> None: super().__init__(items, **kwargs) self.tuple_cls = tuple_cls self.dynamic_attributes = dynamic_attributes if dynamic_attributes else {} def is_namedtuple(self) -> bool: return isinstance(getattr(self.tuple_cls, "_fields", None), tuple) and callable( getattr(self.tuple_cls, "_make", None) ) def is_structseq(self) -> bool: return not self.is_namedtuple() def fields(self) -> tuple[str, ...]: return namedtuple_fields(self.tuple_cls) def debug_repr(self) -> str: if self.is_structseq(): # StructSequenceType(iterable) return repr(self.tuple_cls([Lit(x.debug_repr()) for x in self.items])) # NamedTupleType(*iterable) return repr(self.tuple_cls(*(Lit(x.debug_repr()) for x in self.items))) def python_type(self) -> type: return self.tuple_cls def as_python_constant(self) -> Any: if self.is_structseq(): # StructSequenceType(iterable) result = self.python_type()([x.as_python_constant() for x in self.items]) else: # NamedTupleType(*iterable) result = self.python_type()(*[x.as_python_constant() for x in self.items]) # Apply dynamic attributes if any were set if self.dynamic_attributes: for attr_name, attr_value in self.dynamic_attributes.items(): # Convert VariableTracker to Python constant if needed if hasattr(attr_value, "as_python_constant"): python_value = attr_value.as_python_constant() else: raise NotImplementedError( "Can not convert dynamic attribute without python constant value to python constant." 
) setattr(result, attr_name, python_value) return result def as_proxy(self) -> Any: assert self.python_type() is not SizeVariable if self.is_structseq(): # StructSequenceType(iterable) return self.python_type()(self._as_proxy()) # NamedTupleType(*iterable) return self.python_type()(*self._as_proxy()) def reconstruct(self, codegen: "PyCodegen") -> None: # Always reconstruct the NamedTuple normally first # Constructors: # StructSequenceType(iterable) # NamedTupleType(*iterable) # NamedTupleType._make(iterable) if self.is_structseq(): create_fn = self.tuple_cls else: create_fn = self.tuple_cls._make # type: ignore[attr-defined] codegen.add_push_null( lambda: codegen.append_output( codegen.create_load_const_unchecked(create_fn) ) ) codegen.foreach(self.items) codegen.extend_output( [ create_build_tuple(len(self.items)), ] + create_call_function(1, False) ) for name, value in self.dynamic_attributes.items(): codegen.dup_top() codegen(value) codegen.extend_output(create_rot_n(2)) codegen.store_attr(name) def _is_method_overridden(self, method_name: str) -> bool: """Checks if a method is overridden in the NamedTuple subclass. Args: method_name (str): The name of the method to check. Returns: bool: True if the method is overridden in the subclass, False otherwise. Raises: ValueError: If the NamedTuple class does not inherit from both Tuple and Object. 
""" if len(self.tuple_cls.__mro__) < 3: raise ValueError("NamedTuple should inherit from Tuple and Object.") if getattr(self.tuple_cls, method_name, None) == getattr( self.tuple_cls.__mro__[-3], method_name, None ): return False return True def call_method( self, tx: "InstructionTranslator", name: str, args: list[VariableTracker], kwargs: dict[str, VariableTracker], ) -> VariableTracker: if name == "__setattr__": if kwargs or len(args) != 2: raise_args_mismatch( tx, name, "2 args and 0 kwargs", f"{len(args)} args and {len(kwargs)} kwargs", ) attr, value = args attr = attr.as_python_constant() if ( # structseq is immutable self.is_structseq() # namedtuple directly created by `collections.namedtuple` is immutable or self.tuple_cls.__bases__ == (tuple,) # fields are immutable or attr in self.fields() ): raise_observed_exception(AttributeError, tx) # Subclass of namedtuple type can have dynamic attributes tx.output.side_effects.mutation(self) if self.source: tx.output.side_effects.store_attr(self, attr, value) self.dynamic_attributes[attr] = value return ConstantVariable.create(None) elif name == "_replace": # NamedTuple._replace should create a new instance with replaced fields if args: raise_args_mismatch(tx, name, "0 args", f"{len(args)} args") # Get the field names for validation fields = self.fields() # Start with current items (copy them) new_items = list(self.items) # Replace fields specified in kwargs for field_name, new_value in kwargs.items(): if field_name not in fields: raise_observed_exception( ValueError, tx, args=[ ConstantVariable.create( f"Got unexpected field name: '{field_name}'" ) ], ) # Replace the item at the field's index field_index = fields.index(field_name) new_items[field_index] = new_value return NamedTupleVariable(new_items, self.tuple_cls) return super().call_method(tx, name, args, kwargs) def getitem_const( self, tx: "InstructionTranslator", arg: VariableTracker ) -> VariableTracker: if isinstance(arg, SliceVariable): # slicing a 
namedtuple produces a tuple return TupleVariable( self.items[arg.as_python_constant()], source=None, ) return super().getitem_const(tx, arg) def var_getattr(self, tx: "InstructionTranslator", name: str) -> VariableTracker: def check_and_create_method() -> Optional[VariableTracker]: method = inspect.getattr_static(self.tuple_cls, name, None) if isinstance(method, classmethod): # We need the unbounded cls method to avoid the inline __self__ return UserMethodVariable( method.__func__, variables.UserDefinedClassVariable(self.tuple_cls), ) elif isinstance(method, staticmethod): # pyrefly: ignore[bad-argument-type] return UserFunctionVariable(method.__func__) elif inspect.isfunction(method): return UserMethodVariable(method, self) else: return None # Avoid UserMethodVariable fallback precisely when methods NamedTuple methods have not been overwritten. if ( name == "_replace" and not self._is_method_overridden("_replace") and not self._is_method_overridden("__getattr__") ): # Return a BuiltinVariable for the _replace method # Get the actual _replace method from the tuple class actual_replace_method = getattr(self.tuple_cls, "_replace", None) if actual_replace_method: from ..source import AttrSource source = AttrSource(self.source, name) if self.source else None return variables.GetAttrVariable(self, name, source=source) # Fallback if _replace doesn't exist (shouldn't happen for proper NamedTuples) return super().var_getattr(tx, name) if name == "_fields": result_source = NamedTupleFieldsSource(self.source) if self.source else None return VariableTracker.build(tx, self.fields(), source=result_source) if name in self.dynamic_attributes: return self.dynamic_attributes[name] fields = self.fields() if name not in fields: method = check_and_create_method() if not method: return super().var_getattr(tx, name) return method return self.items[fields.index(name)] def call_obj_hasattr( self, tx: "InstructionTranslator", name: str ) -> ConstantVariable: return 
variables.ConstantVariable.create( name in self.dynamic_attributes or hasattr(self.tuple_cls, name) )
NamedTupleVariable
python
ray-project__ray
python/ray/llm/_internal/batch/processor/base.py
{ "start": 749, "end": 5080 }
class ____(BaseModelExtended): """The processor configuration.""" batch_size: int = Field( default=32, description="Large batch sizes are likely to saturate the compute resources " "and could achieve higher throughput. On the other hand, small batch sizes " "are more fault-tolerant and could reduce bubbles in the data pipeline. " "You can tune the batch size to balance the throughput and fault-tolerance " "based on your use case. Defaults to 32.", ) resources_per_bundle: Optional[Dict[str, float]] = Field( default=None, description="[DEPRECATED] This parameter is deprecated and will be removed in a future version. ", deprecated=True, ) accelerator_type: Optional[str] = Field( default=None, description="The accelerator type used by the LLM stage in a processor. " "Default to None, meaning that only the CPU will be used.", ) concurrency: Union[int, Tuple[int, int]] = Field( default=1, description="The number of workers for data parallelism. Default to 1. " "If ``concurrency`` is a ``tuple`` ``(m, n)``, Ray creates an autoscaling " "actor pool that scales between ``m`` and ``n`` workers (``1 <= m <= n``). " "If ``concurrency`` is an ``int`` ``n``, Ray uses either a fixed pool of ``n`` " "workers or an autoscaling pool from ``1`` to ``n`` workers, depending on " "the processor and stage.", ) experimental: Dict[str, Any] = Field( default_factory=dict, description="[Experimental] Experimental configurations." "Supported keys:\n" "`max_tasks_in_flight_per_actor`: The maximum number of tasks in flight per actor. Default to 4.", ) @field_validator("concurrency") def validate_concurrency( cls, concurrency: Union[int, Tuple[int, int]] ) -> Union[int, Tuple[int, int]]: """Validate that `concurrency` is either: - a positive int, or - a 2-tuple `(min, max)` of positive ints with `min <= max`. 
""" def require(condition: bool, message: str) -> None: if not condition: raise ValueError(message) if isinstance(concurrency, int): require( concurrency > 0, f"A positive integer for `concurrency` is expected! Got: `{concurrency}`.", ) elif isinstance(concurrency, tuple): require( all(c > 0 for c in concurrency), f"`concurrency` tuple items must be positive integers! Got: `{concurrency}`.", ) min_concurrency, max_concurrency = concurrency require( min_concurrency <= max_concurrency, f"min > max in the concurrency tuple `{concurrency}`!", ) return concurrency def get_concurrency(self, autoscaling_enabled: bool = True) -> Tuple[int, int]: """Return a normalized `(min, max)` worker range from `self.concurrency`. Behavior: - If `concurrency` is an int `n`: - `autoscaling_enabled` is True -> return `(1, n)` (autoscaling). - `autoscaling_enabled` is False -> return `(n, n)` (fixed-size pool). - If `concurrency` is a 2-tuple `(m, n)`, return it unchanged (the `autoscaling_enabled` flag is ignored). Args: autoscaling_enabled: When False, treat an integer `concurrency` as fixed `(n, n)`; otherwise treat it as a range `(1, n)`. Defaults to True. Returns: tuple[int, int]: The allowed worker range `(min, max)`. Examples: >>> self.concurrency = (2, 4) >>> self.get_concurrency() (2, 4) >>> self.concurrency = 4 >>> self.get_concurrency() (1, 4) >>> self.get_concurrency(autoscaling_enabled=False) (4, 4) """ if isinstance(self.concurrency, int): if autoscaling_enabled: return 1, self.concurrency else: return self.concurrency, self.concurrency return self.concurrency class Config: validate_assignment = True arbitrary_types_allowed = True
ProcessorConfig
python
walkccc__LeetCode
solutions/883. Projection Area of 3D Shapes/883.py
{ "start": 0, "end": 222 }
class ____: def projectionArea(self, grid: list[list[int]]) -> int: return sum( a > 0 for row in grid for a in row) + sum( max(row) for row in grid) + sum( max(col) for col in zip(*grid))
Solution
python
milvus-io__pymilvus
pymilvus/grpc_gen/milvus_pb2_grpc.py
{ "start": 35707, "end": 104436 }
class ____(object): """Missing associated documentation comment in .proto file.""" def CreateCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def HasCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def LoadCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ReleaseCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DescribeCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def BatchDescribeCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetCollectionStatistics(self, request, context): """Missing associated documentation comment in .proto 
file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ShowCollections(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AlterCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AlterCollectionField(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddCollectionFunction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AlterCollectionFunction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropCollectionFunction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreatePartition(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropPartition(self, 
request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def HasPartition(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def LoadPartitions(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ReleasePartitions(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetPartitionStatistics(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ShowPartitions(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetLoadingProgress(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetLoadState(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise 
NotImplementedError('Method not implemented!') def CreateAlias(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropAlias(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AlterAlias(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DescribeAlias(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListAliases(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateIndex(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AlterIndex(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DescribeIndex(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not 
implemented!') raise NotImplementedError('Method not implemented!') def GetIndexStatistics(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetIndexState(self, request, context): """Deprecated: use DescribeIndex instead """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetIndexBuildProgress(self, request, context): """Deprecated: use DescribeIndex instead """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropIndex(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Insert(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Delete(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Upsert(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Search(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise 
NotImplementedError('Method not implemented!') def HybridSearch(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Flush(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Query(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CalcDistance(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def FlushAll(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddCollectionField(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetFlushState(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetFlushAllState(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not 
implemented!') raise NotImplementedError('Method not implemented!') def GetPersistentSegmentInfo(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetQuerySegmentInfo(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetReplicas(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Dummy(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RegisterLink(self, request, context): """TODO: remove """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetMetrics(self, request, context): """https://wiki.lfaidata.foundation/display/MIL/MEP+8+--+Add+metrics+for+proxy """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetComponentStates(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def LoadBalance(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) 
context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetCompactionState(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ManualCompaction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetCompactionStateWithPlans(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Import(self, request, context): """https://wiki.lfaidata.foundation/display/MIL/MEP+24+--+Support+bulk+load """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetImportState(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListImportTasks(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateCredential(self, request, context): """https://wiki.lfaidata.foundation/display/MIL/MEP+27+--+Support+Basic+Authentication """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def UpdateCredential(self, request, context): 
"""Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteCredential(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListCredUsers(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateRole(self, request, context): """https://wiki.lfaidata.foundation/display/MIL/MEP+29+--+Support+Role-Based+Access+Control """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropRole(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def OperateUserRole(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SelectRole(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SelectUser(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not 
implemented!') def OperatePrivilege(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def OperatePrivilegeV2(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SelectGrant(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetVersion(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CheckHealth(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateResourceGroup(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropResourceGroup(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def UpdateResourceGroups(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not 
implemented!') raise NotImplementedError('Method not implemented!') def TransferNode(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def TransferReplica(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListResourceGroups(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DescribeResourceGroup(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RenameCollection(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListIndexedSegment(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DescribeSegmentIndexData(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Connect(self, request, context): """Missing associated documentation comment in .proto file.""" 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AllocTimestamp(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateDatabase(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropDatabase(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListDatabases(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AlterDatabase(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DescribeDatabase(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ReplicateMessage(self, request, context): """Deprecated CDC API """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def BackupRBAC(self, request, context): """Missing associated documentation comment in .proto file.""" 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RestoreRBAC(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreatePrivilegeGroup(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropPrivilegeGroup(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListPrivilegeGroups(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def OperatePrivilegeGroup(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RunAnalyzer(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddFileResource(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RemoveFileResource(self, request, context): 
"""Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListFileResources(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddUserTags(self, request, context): """Row Level Security (RLS) APIs """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteUserTags(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetUserTags(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListUsersWithTag(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateRowPolicy(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DropRowPolicy(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListRowPolicies(self, 
request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def UpdateReplicateConfiguration(self, request, context): """CDC v2 APIs UpdateReplicateConfiguration applies a full replacement of the current replication configuration across Milvus clusters. Semantics: - The provided ReplicateConfiguration completely replaces any existing configuration persisted in the metadata store. - Passing an empty ReplicateConfiguration is treated as a "clear" operation, effectively removing all replication configuration. - The RPC is expected to be idempotent: submitting the same configuration multiple times must not cause side effects. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetReplicateInfo(self, request, context): """ GetReplicateInfo retrieves replication-related metadata of specified channel from a target Milvus cluster. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateReplicateStream(self, request_iterator, context): """ CreateReplicateStream establishes a replication stream on the target Milvus cluster. Semantics: - Sets up a continuous data stream that receives replicated messages (DDL, insert, delete, etc.) from the source cluster via CDC. - Once established, the target cluster persists incoming messages into its WAL (Write-Ahead Log) ensuring durability and consistency. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_MilvusServiceServicer_to_server(servicer, server): rpc_method_handlers = { 'CreateCollection': grpc.unary_unary_rpc_method_handler( servicer.CreateCollection, request_deserializer=milvus__pb2.CreateCollectionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropCollection': grpc.unary_unary_rpc_method_handler( servicer.DropCollection, request_deserializer=milvus__pb2.DropCollectionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'HasCollection': grpc.unary_unary_rpc_method_handler( servicer.HasCollection, request_deserializer=milvus__pb2.HasCollectionRequest.FromString, response_serializer=milvus__pb2.BoolResponse.SerializeToString, ), 'LoadCollection': grpc.unary_unary_rpc_method_handler( servicer.LoadCollection, request_deserializer=milvus__pb2.LoadCollectionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ReleaseCollection': grpc.unary_unary_rpc_method_handler( servicer.ReleaseCollection, request_deserializer=milvus__pb2.ReleaseCollectionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DescribeCollection': grpc.unary_unary_rpc_method_handler( servicer.DescribeCollection, request_deserializer=milvus__pb2.DescribeCollectionRequest.FromString, response_serializer=milvus__pb2.DescribeCollectionResponse.SerializeToString, ), 'BatchDescribeCollection': grpc.unary_unary_rpc_method_handler( servicer.BatchDescribeCollection, request_deserializer=milvus__pb2.BatchDescribeCollectionRequest.FromString, response_serializer=milvus__pb2.BatchDescribeCollectionResponse.SerializeToString, ), 'GetCollectionStatistics': grpc.unary_unary_rpc_method_handler( servicer.GetCollectionStatistics, request_deserializer=milvus__pb2.GetCollectionStatisticsRequest.FromString, 
response_serializer=milvus__pb2.GetCollectionStatisticsResponse.SerializeToString, ), 'ShowCollections': grpc.unary_unary_rpc_method_handler( servicer.ShowCollections, request_deserializer=milvus__pb2.ShowCollectionsRequest.FromString, response_serializer=milvus__pb2.ShowCollectionsResponse.SerializeToString, ), 'AlterCollection': grpc.unary_unary_rpc_method_handler( servicer.AlterCollection, request_deserializer=milvus__pb2.AlterCollectionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'AlterCollectionField': grpc.unary_unary_rpc_method_handler( servicer.AlterCollectionField, request_deserializer=milvus__pb2.AlterCollectionFieldRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'AddCollectionFunction': grpc.unary_unary_rpc_method_handler( servicer.AddCollectionFunction, request_deserializer=milvus__pb2.AddCollectionFunctionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'AlterCollectionFunction': grpc.unary_unary_rpc_method_handler( servicer.AlterCollectionFunction, request_deserializer=milvus__pb2.AlterCollectionFunctionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropCollectionFunction': grpc.unary_unary_rpc_method_handler( servicer.DropCollectionFunction, request_deserializer=milvus__pb2.DropCollectionFunctionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'CreatePartition': grpc.unary_unary_rpc_method_handler( servicer.CreatePartition, request_deserializer=milvus__pb2.CreatePartitionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropPartition': grpc.unary_unary_rpc_method_handler( servicer.DropPartition, request_deserializer=milvus__pb2.DropPartitionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'HasPartition': grpc.unary_unary_rpc_method_handler( servicer.HasPartition, 
request_deserializer=milvus__pb2.HasPartitionRequest.FromString, response_serializer=milvus__pb2.BoolResponse.SerializeToString, ), 'LoadPartitions': grpc.unary_unary_rpc_method_handler( servicer.LoadPartitions, request_deserializer=milvus__pb2.LoadPartitionsRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ReleasePartitions': grpc.unary_unary_rpc_method_handler( servicer.ReleasePartitions, request_deserializer=milvus__pb2.ReleasePartitionsRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'GetPartitionStatistics': grpc.unary_unary_rpc_method_handler( servicer.GetPartitionStatistics, request_deserializer=milvus__pb2.GetPartitionStatisticsRequest.FromString, response_serializer=milvus__pb2.GetPartitionStatisticsResponse.SerializeToString, ), 'ShowPartitions': grpc.unary_unary_rpc_method_handler( servicer.ShowPartitions, request_deserializer=milvus__pb2.ShowPartitionsRequest.FromString, response_serializer=milvus__pb2.ShowPartitionsResponse.SerializeToString, ), 'GetLoadingProgress': grpc.unary_unary_rpc_method_handler( servicer.GetLoadingProgress, request_deserializer=milvus__pb2.GetLoadingProgressRequest.FromString, response_serializer=milvus__pb2.GetLoadingProgressResponse.SerializeToString, ), 'GetLoadState': grpc.unary_unary_rpc_method_handler( servicer.GetLoadState, request_deserializer=milvus__pb2.GetLoadStateRequest.FromString, response_serializer=milvus__pb2.GetLoadStateResponse.SerializeToString, ), 'CreateAlias': grpc.unary_unary_rpc_method_handler( servicer.CreateAlias, request_deserializer=milvus__pb2.CreateAliasRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropAlias': grpc.unary_unary_rpc_method_handler( servicer.DropAlias, request_deserializer=milvus__pb2.DropAliasRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'AlterAlias': grpc.unary_unary_rpc_method_handler( servicer.AlterAlias, 
request_deserializer=milvus__pb2.AlterAliasRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DescribeAlias': grpc.unary_unary_rpc_method_handler( servicer.DescribeAlias, request_deserializer=milvus__pb2.DescribeAliasRequest.FromString, response_serializer=milvus__pb2.DescribeAliasResponse.SerializeToString, ), 'ListAliases': grpc.unary_unary_rpc_method_handler( servicer.ListAliases, request_deserializer=milvus__pb2.ListAliasesRequest.FromString, response_serializer=milvus__pb2.ListAliasesResponse.SerializeToString, ), 'CreateIndex': grpc.unary_unary_rpc_method_handler( servicer.CreateIndex, request_deserializer=milvus__pb2.CreateIndexRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'AlterIndex': grpc.unary_unary_rpc_method_handler( servicer.AlterIndex, request_deserializer=milvus__pb2.AlterIndexRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DescribeIndex': grpc.unary_unary_rpc_method_handler( servicer.DescribeIndex, request_deserializer=milvus__pb2.DescribeIndexRequest.FromString, response_serializer=milvus__pb2.DescribeIndexResponse.SerializeToString, ), 'GetIndexStatistics': grpc.unary_unary_rpc_method_handler( servicer.GetIndexStatistics, request_deserializer=milvus__pb2.GetIndexStatisticsRequest.FromString, response_serializer=milvus__pb2.GetIndexStatisticsResponse.SerializeToString, ), 'GetIndexState': grpc.unary_unary_rpc_method_handler( servicer.GetIndexState, request_deserializer=milvus__pb2.GetIndexStateRequest.FromString, response_serializer=milvus__pb2.GetIndexStateResponse.SerializeToString, ), 'GetIndexBuildProgress': grpc.unary_unary_rpc_method_handler( servicer.GetIndexBuildProgress, request_deserializer=milvus__pb2.GetIndexBuildProgressRequest.FromString, response_serializer=milvus__pb2.GetIndexBuildProgressResponse.SerializeToString, ), 'DropIndex': grpc.unary_unary_rpc_method_handler( servicer.DropIndex, 
request_deserializer=milvus__pb2.DropIndexRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'Insert': grpc.unary_unary_rpc_method_handler( servicer.Insert, request_deserializer=milvus__pb2.InsertRequest.FromString, response_serializer=milvus__pb2.MutationResult.SerializeToString, ), 'Delete': grpc.unary_unary_rpc_method_handler( servicer.Delete, request_deserializer=milvus__pb2.DeleteRequest.FromString, response_serializer=milvus__pb2.MutationResult.SerializeToString, ), 'Upsert': grpc.unary_unary_rpc_method_handler( servicer.Upsert, request_deserializer=milvus__pb2.UpsertRequest.FromString, response_serializer=milvus__pb2.MutationResult.SerializeToString, ), 'Search': grpc.unary_unary_rpc_method_handler( servicer.Search, request_deserializer=milvus__pb2.SearchRequest.FromString, response_serializer=milvus__pb2.SearchResults.SerializeToString, ), 'HybridSearch': grpc.unary_unary_rpc_method_handler( servicer.HybridSearch, request_deserializer=milvus__pb2.HybridSearchRequest.FromString, response_serializer=milvus__pb2.SearchResults.SerializeToString, ), 'Flush': grpc.unary_unary_rpc_method_handler( servicer.Flush, request_deserializer=milvus__pb2.FlushRequest.FromString, response_serializer=milvus__pb2.FlushResponse.SerializeToString, ), 'Query': grpc.unary_unary_rpc_method_handler( servicer.Query, request_deserializer=milvus__pb2.QueryRequest.FromString, response_serializer=milvus__pb2.QueryResults.SerializeToString, ), 'CalcDistance': grpc.unary_unary_rpc_method_handler( servicer.CalcDistance, request_deserializer=milvus__pb2.CalcDistanceRequest.FromString, response_serializer=milvus__pb2.CalcDistanceResults.SerializeToString, ), 'FlushAll': grpc.unary_unary_rpc_method_handler( servicer.FlushAll, request_deserializer=milvus__pb2.FlushAllRequest.FromString, response_serializer=milvus__pb2.FlushAllResponse.SerializeToString, ), 'AddCollectionField': grpc.unary_unary_rpc_method_handler( servicer.AddCollectionField, 
request_deserializer=milvus__pb2.AddCollectionFieldRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'GetFlushState': grpc.unary_unary_rpc_method_handler( servicer.GetFlushState, request_deserializer=milvus__pb2.GetFlushStateRequest.FromString, response_serializer=milvus__pb2.GetFlushStateResponse.SerializeToString, ), 'GetFlushAllState': grpc.unary_unary_rpc_method_handler( servicer.GetFlushAllState, request_deserializer=milvus__pb2.GetFlushAllStateRequest.FromString, response_serializer=milvus__pb2.GetFlushAllStateResponse.SerializeToString, ), 'GetPersistentSegmentInfo': grpc.unary_unary_rpc_method_handler( servicer.GetPersistentSegmentInfo, request_deserializer=milvus__pb2.GetPersistentSegmentInfoRequest.FromString, response_serializer=milvus__pb2.GetPersistentSegmentInfoResponse.SerializeToString, ), 'GetQuerySegmentInfo': grpc.unary_unary_rpc_method_handler( servicer.GetQuerySegmentInfo, request_deserializer=milvus__pb2.GetQuerySegmentInfoRequest.FromString, response_serializer=milvus__pb2.GetQuerySegmentInfoResponse.SerializeToString, ), 'GetReplicas': grpc.unary_unary_rpc_method_handler( servicer.GetReplicas, request_deserializer=milvus__pb2.GetReplicasRequest.FromString, response_serializer=milvus__pb2.GetReplicasResponse.SerializeToString, ), 'Dummy': grpc.unary_unary_rpc_method_handler( servicer.Dummy, request_deserializer=milvus__pb2.DummyRequest.FromString, response_serializer=milvus__pb2.DummyResponse.SerializeToString, ), 'RegisterLink': grpc.unary_unary_rpc_method_handler( servicer.RegisterLink, request_deserializer=milvus__pb2.RegisterLinkRequest.FromString, response_serializer=milvus__pb2.RegisterLinkResponse.SerializeToString, ), 'GetMetrics': grpc.unary_unary_rpc_method_handler( servicer.GetMetrics, request_deserializer=milvus__pb2.GetMetricsRequest.FromString, response_serializer=milvus__pb2.GetMetricsResponse.SerializeToString, ), 'GetComponentStates': grpc.unary_unary_rpc_method_handler( 
servicer.GetComponentStates, request_deserializer=milvus__pb2.GetComponentStatesRequest.FromString, response_serializer=milvus__pb2.ComponentStates.SerializeToString, ), 'LoadBalance': grpc.unary_unary_rpc_method_handler( servicer.LoadBalance, request_deserializer=milvus__pb2.LoadBalanceRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'GetCompactionState': grpc.unary_unary_rpc_method_handler( servicer.GetCompactionState, request_deserializer=milvus__pb2.GetCompactionStateRequest.FromString, response_serializer=milvus__pb2.GetCompactionStateResponse.SerializeToString, ), 'ManualCompaction': grpc.unary_unary_rpc_method_handler( servicer.ManualCompaction, request_deserializer=milvus__pb2.ManualCompactionRequest.FromString, response_serializer=milvus__pb2.ManualCompactionResponse.SerializeToString, ), 'GetCompactionStateWithPlans': grpc.unary_unary_rpc_method_handler( servicer.GetCompactionStateWithPlans, request_deserializer=milvus__pb2.GetCompactionPlansRequest.FromString, response_serializer=milvus__pb2.GetCompactionPlansResponse.SerializeToString, ), 'Import': grpc.unary_unary_rpc_method_handler( servicer.Import, request_deserializer=milvus__pb2.ImportRequest.FromString, response_serializer=milvus__pb2.ImportResponse.SerializeToString, ), 'GetImportState': grpc.unary_unary_rpc_method_handler( servicer.GetImportState, request_deserializer=milvus__pb2.GetImportStateRequest.FromString, response_serializer=milvus__pb2.GetImportStateResponse.SerializeToString, ), 'ListImportTasks': grpc.unary_unary_rpc_method_handler( servicer.ListImportTasks, request_deserializer=milvus__pb2.ListImportTasksRequest.FromString, response_serializer=milvus__pb2.ListImportTasksResponse.SerializeToString, ), 'CreateCredential': grpc.unary_unary_rpc_method_handler( servicer.CreateCredential, request_deserializer=milvus__pb2.CreateCredentialRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'UpdateCredential': 
grpc.unary_unary_rpc_method_handler( servicer.UpdateCredential, request_deserializer=milvus__pb2.UpdateCredentialRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DeleteCredential': grpc.unary_unary_rpc_method_handler( servicer.DeleteCredential, request_deserializer=milvus__pb2.DeleteCredentialRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListCredUsers': grpc.unary_unary_rpc_method_handler( servicer.ListCredUsers, request_deserializer=milvus__pb2.ListCredUsersRequest.FromString, response_serializer=milvus__pb2.ListCredUsersResponse.SerializeToString, ), 'CreateRole': grpc.unary_unary_rpc_method_handler( servicer.CreateRole, request_deserializer=milvus__pb2.CreateRoleRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropRole': grpc.unary_unary_rpc_method_handler( servicer.DropRole, request_deserializer=milvus__pb2.DropRoleRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'OperateUserRole': grpc.unary_unary_rpc_method_handler( servicer.OperateUserRole, request_deserializer=milvus__pb2.OperateUserRoleRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'SelectRole': grpc.unary_unary_rpc_method_handler( servicer.SelectRole, request_deserializer=milvus__pb2.SelectRoleRequest.FromString, response_serializer=milvus__pb2.SelectRoleResponse.SerializeToString, ), 'SelectUser': grpc.unary_unary_rpc_method_handler( servicer.SelectUser, request_deserializer=milvus__pb2.SelectUserRequest.FromString, response_serializer=milvus__pb2.SelectUserResponse.SerializeToString, ), 'OperatePrivilege': grpc.unary_unary_rpc_method_handler( servicer.OperatePrivilege, request_deserializer=milvus__pb2.OperatePrivilegeRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'OperatePrivilegeV2': grpc.unary_unary_rpc_method_handler( servicer.OperatePrivilegeV2, 
request_deserializer=milvus__pb2.OperatePrivilegeV2Request.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'SelectGrant': grpc.unary_unary_rpc_method_handler( servicer.SelectGrant, request_deserializer=milvus__pb2.SelectGrantRequest.FromString, response_serializer=milvus__pb2.SelectGrantResponse.SerializeToString, ), 'GetVersion': grpc.unary_unary_rpc_method_handler( servicer.GetVersion, request_deserializer=milvus__pb2.GetVersionRequest.FromString, response_serializer=milvus__pb2.GetVersionResponse.SerializeToString, ), 'CheckHealth': grpc.unary_unary_rpc_method_handler( servicer.CheckHealth, request_deserializer=milvus__pb2.CheckHealthRequest.FromString, response_serializer=milvus__pb2.CheckHealthResponse.SerializeToString, ), 'CreateResourceGroup': grpc.unary_unary_rpc_method_handler( servicer.CreateResourceGroup, request_deserializer=milvus__pb2.CreateResourceGroupRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropResourceGroup': grpc.unary_unary_rpc_method_handler( servicer.DropResourceGroup, request_deserializer=milvus__pb2.DropResourceGroupRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'UpdateResourceGroups': grpc.unary_unary_rpc_method_handler( servicer.UpdateResourceGroups, request_deserializer=milvus__pb2.UpdateResourceGroupsRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'TransferNode': grpc.unary_unary_rpc_method_handler( servicer.TransferNode, request_deserializer=milvus__pb2.TransferNodeRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'TransferReplica': grpc.unary_unary_rpc_method_handler( servicer.TransferReplica, request_deserializer=milvus__pb2.TransferReplicaRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListResourceGroups': grpc.unary_unary_rpc_method_handler( servicer.ListResourceGroups, 
request_deserializer=milvus__pb2.ListResourceGroupsRequest.FromString, response_serializer=milvus__pb2.ListResourceGroupsResponse.SerializeToString, ), 'DescribeResourceGroup': grpc.unary_unary_rpc_method_handler( servicer.DescribeResourceGroup, request_deserializer=milvus__pb2.DescribeResourceGroupRequest.FromString, response_serializer=milvus__pb2.DescribeResourceGroupResponse.SerializeToString, ), 'RenameCollection': grpc.unary_unary_rpc_method_handler( servicer.RenameCollection, request_deserializer=milvus__pb2.RenameCollectionRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListIndexedSegment': grpc.unary_unary_rpc_method_handler( servicer.ListIndexedSegment, request_deserializer=feder__pb2.ListIndexedSegmentRequest.FromString, response_serializer=feder__pb2.ListIndexedSegmentResponse.SerializeToString, ), 'DescribeSegmentIndexData': grpc.unary_unary_rpc_method_handler( servicer.DescribeSegmentIndexData, request_deserializer=feder__pb2.DescribeSegmentIndexDataRequest.FromString, response_serializer=feder__pb2.DescribeSegmentIndexDataResponse.SerializeToString, ), 'Connect': grpc.unary_unary_rpc_method_handler( servicer.Connect, request_deserializer=milvus__pb2.ConnectRequest.FromString, response_serializer=milvus__pb2.ConnectResponse.SerializeToString, ), 'AllocTimestamp': grpc.unary_unary_rpc_method_handler( servicer.AllocTimestamp, request_deserializer=milvus__pb2.AllocTimestampRequest.FromString, response_serializer=milvus__pb2.AllocTimestampResponse.SerializeToString, ), 'CreateDatabase': grpc.unary_unary_rpc_method_handler( servicer.CreateDatabase, request_deserializer=milvus__pb2.CreateDatabaseRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropDatabase': grpc.unary_unary_rpc_method_handler( servicer.DropDatabase, request_deserializer=milvus__pb2.DropDatabaseRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListDatabases': 
grpc.unary_unary_rpc_method_handler( servicer.ListDatabases, request_deserializer=milvus__pb2.ListDatabasesRequest.FromString, response_serializer=milvus__pb2.ListDatabasesResponse.SerializeToString, ), 'AlterDatabase': grpc.unary_unary_rpc_method_handler( servicer.AlterDatabase, request_deserializer=milvus__pb2.AlterDatabaseRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DescribeDatabase': grpc.unary_unary_rpc_method_handler( servicer.DescribeDatabase, request_deserializer=milvus__pb2.DescribeDatabaseRequest.FromString, response_serializer=milvus__pb2.DescribeDatabaseResponse.SerializeToString, ), 'ReplicateMessage': grpc.unary_unary_rpc_method_handler( servicer.ReplicateMessage, request_deserializer=milvus__pb2.ReplicateMessageRequest.FromString, response_serializer=milvus__pb2.ReplicateMessageResponse.SerializeToString, ), 'BackupRBAC': grpc.unary_unary_rpc_method_handler( servicer.BackupRBAC, request_deserializer=milvus__pb2.BackupRBACMetaRequest.FromString, response_serializer=milvus__pb2.BackupRBACMetaResponse.SerializeToString, ), 'RestoreRBAC': grpc.unary_unary_rpc_method_handler( servicer.RestoreRBAC, request_deserializer=milvus__pb2.RestoreRBACMetaRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'CreatePrivilegeGroup': grpc.unary_unary_rpc_method_handler( servicer.CreatePrivilegeGroup, request_deserializer=milvus__pb2.CreatePrivilegeGroupRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropPrivilegeGroup': grpc.unary_unary_rpc_method_handler( servicer.DropPrivilegeGroup, request_deserializer=milvus__pb2.DropPrivilegeGroupRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListPrivilegeGroups': grpc.unary_unary_rpc_method_handler( servicer.ListPrivilegeGroups, request_deserializer=milvus__pb2.ListPrivilegeGroupsRequest.FromString, response_serializer=milvus__pb2.ListPrivilegeGroupsResponse.SerializeToString, ), 
'OperatePrivilegeGroup': grpc.unary_unary_rpc_method_handler( servicer.OperatePrivilegeGroup, request_deserializer=milvus__pb2.OperatePrivilegeGroupRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'RunAnalyzer': grpc.unary_unary_rpc_method_handler( servicer.RunAnalyzer, request_deserializer=milvus__pb2.RunAnalyzerRequest.FromString, response_serializer=milvus__pb2.RunAnalyzerResponse.SerializeToString, ), 'AddFileResource': grpc.unary_unary_rpc_method_handler( servicer.AddFileResource, request_deserializer=milvus__pb2.AddFileResourceRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'RemoveFileResource': grpc.unary_unary_rpc_method_handler( servicer.RemoveFileResource, request_deserializer=milvus__pb2.RemoveFileResourceRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListFileResources': grpc.unary_unary_rpc_method_handler( servicer.ListFileResources, request_deserializer=milvus__pb2.ListFileResourcesRequest.FromString, response_serializer=milvus__pb2.ListFileResourcesResponse.SerializeToString, ), 'AddUserTags': grpc.unary_unary_rpc_method_handler( servicer.AddUserTags, request_deserializer=milvus__pb2.AddUserTagsRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DeleteUserTags': grpc.unary_unary_rpc_method_handler( servicer.DeleteUserTags, request_deserializer=milvus__pb2.DeleteUserTagsRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'GetUserTags': grpc.unary_unary_rpc_method_handler( servicer.GetUserTags, request_deserializer=milvus__pb2.GetUserTagsRequest.FromString, response_serializer=milvus__pb2.GetUserTagsResponse.SerializeToString, ), 'ListUsersWithTag': grpc.unary_unary_rpc_method_handler( servicer.ListUsersWithTag, request_deserializer=milvus__pb2.ListUsersWithTagRequest.FromString, response_serializer=milvus__pb2.ListUsersWithTagResponse.SerializeToString, ), 'CreateRowPolicy': 
grpc.unary_unary_rpc_method_handler( servicer.CreateRowPolicy, request_deserializer=milvus__pb2.CreateRowPolicyRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'DropRowPolicy': grpc.unary_unary_rpc_method_handler( servicer.DropRowPolicy, request_deserializer=milvus__pb2.DropRowPolicyRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'ListRowPolicies': grpc.unary_unary_rpc_method_handler( servicer.ListRowPolicies, request_deserializer=milvus__pb2.ListRowPoliciesRequest.FromString, response_serializer=milvus__pb2.ListRowPoliciesResponse.SerializeToString, ), 'UpdateReplicateConfiguration': grpc.unary_unary_rpc_method_handler( servicer.UpdateReplicateConfiguration, request_deserializer=milvus__pb2.UpdateReplicateConfigurationRequest.FromString, response_serializer=common__pb2.Status.SerializeToString, ), 'GetReplicateInfo': grpc.unary_unary_rpc_method_handler( servicer.GetReplicateInfo, request_deserializer=milvus__pb2.GetReplicateInfoRequest.FromString, response_serializer=milvus__pb2.GetReplicateInfoResponse.SerializeToString, ), 'CreateReplicateStream': grpc.stream_stream_rpc_method_handler( servicer.CreateReplicateStream, request_deserializer=milvus__pb2.ReplicateRequest.FromString, response_serializer=milvus__pb2.ReplicateResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'milvus.proto.milvus.MilvusService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) server.add_registered_method_handlers('milvus.proto.milvus.MilvusService', rpc_method_handlers) # This class is part of an EXPERIMENTAL API.
MilvusServiceServicer
python
airbytehq__airbyte
airbyte-integrations/connectors/source-github/source_github/github_schema.py
{ "start": 589459, "end": 589912 }
class ____(sgqlc.types.Type): """A failed invitation to be a member in an enterprise organization.""" __schema__ = github_schema __field_names__ = ("cursor", "node") cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor") """A cursor for use in pagination.""" node = sgqlc.types.Field("OrganizationInvitation", graphql_name="node") """The item at the end of the edge."""
EnterpriseFailedInvitationEdge
python
microsoft__pyright
packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance2.py
{ "start": 880, "end": 1120 }
class ____: @classmethod def test(cls: type[Self], id: int | Self): if isinstance(id, cls): reveal_type(id, expected_text="Self@ClassE") else: reveal_type(id, expected_text="int | ClassE*")
ClassE
python
huggingface__transformers
src/transformers/models/dpr/tokenization_dpr.py
{ "start": 1438, "end": 6742 }
class ____(BertTokenizer): r""" Constructs a DPRQuestionEncoder tokenizer. [`DPRQuestionEncoderTokenizer`] is identical to [`BertTokenizer`] and runs end-to-end tokenization: punctuation splitting and wordpiece. Refer to superclass [`BertTokenizer`] for usage examples and documentation concerning parameters. """ vocab_files_names = VOCAB_FILES_NAMES DPRSpanPrediction = collections.namedtuple( "DPRSpanPrediction", ["span_score", "relevance_score", "doc_id", "start_index", "end_index", "text"] ) DPRReaderOutput = collections.namedtuple("DPRReaderOutput", ["start_logits", "end_logits", "relevance_logits"]) CUSTOM_DPR_READER_DOCSTRING = r""" Return a dictionary with the token ids of the input strings and other information to give to `.decode_best_spans`. It converts the strings of a question and different passages (title and text) in a sequence of IDs (integers), using the tokenizer and vocabulary. The resulting `input_ids` is a matrix of size `(n_passages, sequence_length)` with the format: ``` [CLS] <question token ids> [SEP] <titles ids> [SEP] <texts ids> ``` Args: questions (`str` or `list[str]`): The questions to be encoded. You can specify one question for many passages. In this case, the question will be duplicated like `[questions] * n_passages`. Otherwise you have to specify as many questions as in `titles` or `texts`. titles (`str` or `list[str]`): The passages titles to be encoded. This can be a string or a list of strings if there are several passages. texts (`str` or `list[str]`): The passages texts to be encoded. This can be a string or a list of strings if there are several passages. padding (`bool`, `str` or [`~utils.PaddingStrategy`], *optional*, defaults to `False`): Activates and controls padding. Accepts the following values: - `True` or `'longest'`: Pad to the longest sequence in the batch (or no padding if only a single sequence if provided). 
- `'max_length'`: Pad to a maximum length specified with the argument `max_length` or to the maximum acceptable input length for the model if that argument is not provided. - `False` or `'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of different lengths). truncation (`bool`, `str` or [`~tokenization_utils_base.TruncationStrategy`], *optional*, defaults to `False`): Activates and controls truncation. Accepts the following values: - `True` or `'longest_first'`: Truncate to a maximum length specified with the argument `max_length` or to the maximum acceptable input length for the model if that argument is not provided. This will truncate token by token, removing a token from the longest sequence in the pair if a pair of sequences (or a batch of pairs) is provided. - `'only_first'`: Truncate to a maximum length specified with the argument `max_length` or to the maximum acceptable input length for the model if that argument is not provided. This will only truncate the first sequence of a pair if a pair of sequences (or a batch of pairs) is provided. - `'only_second'`: Truncate to a maximum length specified with the argument `max_length` or to the maximum acceptable input length for the model if that argument is not provided. This will only truncate the second sequence of a pair if a pair of sequences (or a batch of pairs) is provided. - `False` or `'do_not_truncate'` (default): No truncation (i.e., can output batch with sequence lengths greater than the model maximum admissible input size). max_length (`int`, *optional*): Controls the maximum length to use by one of the truncation/padding parameters. If left unset or set to `None`, this will use the predefined model maximum length if a maximum length is required by one of the truncation/padding parameters. If the model has no specific maximum input length (like XLNet) truncation/padding to a maximum length will be deactivated. 
return_tensors (`str` or [`~utils.TensorType`], *optional*): If set, will return tensors instead of list of python integers. Acceptable values are: - `'pt'`: Return PyTorch `torch.Tensor` objects. - `'np'`: Return Numpy `np.ndarray` objects. return_attention_mask (`bool`, *optional*): Whether or not to return the attention mask. If not set, will return the attention mask according to the specific tokenizer's default, defined by the `return_outputs` attribute. [What are attention masks?](../glossary#attention-mask) Returns: `dict[str, list[list[int]]]`: A dictionary with the following keys: - `input_ids`: List of token ids to be fed to a model. - `attention_mask`: List of indices specifying which tokens should be attended to by the model. """ @add_start_docstrings(CUSTOM_DPR_READER_DOCSTRING)
DPRQuestionEncoderTokenizer
python
PrefectHQ__prefect
src/prefect/server/events/actions.py
{ "start": 3027, "end": 11688 }
class ____(PrefectBaseModel, abc.ABC): """An Action that may be performed when an Automation is triggered""" type: str # Captures any additional information about the result of the action we'd like to # make available in the payload of the executed or failed events _result_details: Dict[str, Any] = PrivateAttr(default_factory=dict) _resulting_related_resources: List[RelatedResource] = PrivateAttr( default_factory=list ) @abc.abstractmethod async def act(self, triggered_action: "TriggeredAction") -> None: """Perform the requested Action""" async def fail(self, triggered_action: "TriggeredAction", reason: str) -> None: from prefect.server.events.schemas.automations import EventTrigger automation = triggered_action.automation action = triggered_action.action action_index = triggered_action.action_index automation_resource_id = f"prefect.automation.{automation.id}" action_details = { "action_index": action_index, "action_type": action.type, "invocation": str(triggered_action.id), } resource = Resource( { "prefect.resource.id": automation_resource_id, "prefect.resource.name": automation.name, "prefect.trigger-type": automation.trigger.type, } ) if isinstance(automation.trigger, EventTrigger): resource["prefect.posture"] = automation.trigger.posture logger.warning( "Action failed: %r", reason, extra={**self.logging_context(triggered_action)}, ) async with PrefectServerEventsClient() as events: triggered_event_id = uuid7() # Link to the triggering event if available and recent to establish causal chain. # Only set follows if timing is tight (within 5 minutes) to avoid unnecessary # waiting at CausalOrdering when events arrive >15 min after their follows event. 
follows_id = None if ( triggered_action.triggering_event and triggered_action.triggering_event.occurred ): time_since_trigger = ( triggered_action.triggered - triggered_action.triggering_event.occurred ) TIGHT_TIMING = timedelta(minutes=5) if abs(time_since_trigger) < TIGHT_TIMING: follows_id = triggered_action.triggering_event.id # Build related resources including triggering event reference related_resources = list(self._resulting_related_resources) if triggered_action.triggering_event: related_resources.append( RelatedResource( { "prefect.resource.id": f"prefect.event.{triggered_action.triggering_event.id}", "prefect.resource.role": "triggering-event", } ) ) await events.emit( Event( occurred=triggered_action.triggered, event="prefect.automation.action.triggered", resource=resource, related=related_resources, payload=action_details, id=triggered_event_id, follows=follows_id, ) ) await events.emit( Event( occurred=now("UTC"), event="prefect.automation.action.failed", resource=resource, related=self._resulting_related_resources, payload={ **action_details, "reason": reason, **self._result_details, }, follows=triggered_event_id, id=uuid7(), ) ) async def succeed(self, triggered_action: "TriggeredAction") -> None: from prefect.server.events.schemas.automations import EventTrigger automation = triggered_action.automation action = triggered_action.action action_index = triggered_action.action_index automation_resource_id = f"prefect.automation.{automation.id}" action_details = { "action_index": action_index, "action_type": action.type, "invocation": str(triggered_action.id), } resource = Resource( { "prefect.resource.id": automation_resource_id, "prefect.resource.name": automation.name, "prefect.trigger-type": automation.trigger.type, } ) if isinstance(automation.trigger, EventTrigger): resource["prefect.posture"] = automation.trigger.posture async with PrefectServerEventsClient() as events: triggered_event_id = uuid7() # Link to the triggering event if available and 
recent to establish causal chain. # Only set follows if timing is tight (within 5 minutes) to avoid unnecessary # waiting at CausalOrdering when events arrive >15 min after their follows event. follows_id = None if ( triggered_action.triggering_event and triggered_action.triggering_event.occurred ): time_since_trigger = ( triggered_action.triggered - triggered_action.triggering_event.occurred ) TIGHT_TIMING = timedelta(minutes=5) if abs(time_since_trigger) < TIGHT_TIMING: follows_id = triggered_action.triggering_event.id # Build related resources including triggering event reference related_resources = list(self._resulting_related_resources) if triggered_action.triggering_event: related_resources.append( RelatedResource( { "prefect.resource.id": f"prefect.event.{triggered_action.triggering_event.id}", "prefect.resource.role": "triggering-event", } ) ) await events.emit( Event( occurred=triggered_action.triggered, event="prefect.automation.action.triggered", resource=Resource( { "prefect.resource.id": automation_resource_id, "prefect.resource.name": automation.name, "prefect.trigger-type": automation.trigger.type, } ), related=related_resources, payload=action_details, id=triggered_event_id, follows=follows_id, ) ) await events.emit( Event( occurred=now("UTC"), event="prefect.automation.action.executed", resource=Resource( { "prefect.resource.id": automation_resource_id, "prefect.resource.name": automation.name, "prefect.trigger-type": automation.trigger.type, } ), related=self._resulting_related_resources, payload={ **action_details, **self._result_details, }, id=uuid7(), follows=triggered_event_id, ) ) def logging_context(self, triggered_action: "TriggeredAction") -> Dict[str, Any]: """Common logging context for all actions""" return { "automation": str(triggered_action.automation.id), "action": self.model_dump(mode="json"), "triggering_event": ( { "id": triggered_action.triggering_event.id, "event": triggered_action.triggering_event.event, } if 
triggered_action.triggering_event else None ), "triggering_labels": triggered_action.triggering_labels, }
Action
python
python-poetry__poetry
src/poetry/console/commands/self/show/__init__.py
{ "start": 364, "end": 1440 }
class ____(SelfCommand, ShowCommand): name = "self show" options: ClassVar[list[Option]] = [ option("addons", None, "List only add-on packages installed."), *[ o for o in ShowCommand.options if o.name in {"tree", "latest", "outdated", "format"} ], ] description = "Show packages from Poetry's runtime environment." help = f"""\ The <c1>self show</c1> command behaves similar to the <c1>show</c1> command, but working within Poetry's runtime environment. This lists all packages installed within the Poetry install environment. To show only additional packages that have been added via <c1>self add</c1> and their dependencies use <c1>self show --addons</c1>. This is managed in the <comment>{SelfCommand.get_default_system_pyproject_file()}</> \ file. """ @property def activated_groups(self) -> set[NormalizedName]: if self.option("addons", False): return {SelfCommand.ADDITIONAL_PACKAGE_GROUP} return super(ShowCommand, self).activated_groups
SelfShowCommand
python
PrefectHQ__prefect
src/prefect/blocks/core.py
{ "start": 2842, "end": 8761 }
class ____(Exception): """ Raised when a block type is not found in the registry. """ def _collect_nested_reference_strings( obj: dict[str, Any] | list[Any], ) -> list[dict[str, Any]]: """ Collects all nested reference strings (e.g. #/definitions/Model) from a given object. """ found_reference_strings: list[dict[str, Any]] = [] if isinstance(obj, dict): if ref := obj.get("$ref"): found_reference_strings.append(ref) for value in obj.values(): found_reference_strings.extend(_collect_nested_reference_strings(value)) if isinstance(obj, list): for item in obj: found_reference_strings.extend(_collect_nested_reference_strings(item)) return found_reference_strings def _get_non_block_reference_definitions( object_definition: dict[str, Any], definitions: dict[str, Any] ) -> dict[str, Any]: """ Given a definition of an object in a block schema OpenAPI spec and the dictionary of all reference definitions in that same block schema OpenAPI spec, return the definitions for objects that are referenced from the object or any children of the object that do not reference a block. """ non_block_definitions: dict[str, Any] = {} reference_strings = _collect_nested_reference_strings(object_definition) for reference_string in reference_strings: if isinstance(reference_string, str): definition_key = reference_string.replace("#/definitions/", "") definition = definitions.get(definition_key) if definition and definition.get("block_type_slug") is None: non_block_definitions = { **non_block_definitions, definition_key: definition, **_get_non_block_reference_definitions(definition, definitions), } return non_block_definitions def _is_subclass(cls: type, parent_cls: type) -> TypeGuard[type[BaseModel]]: """ Checks if a given class is a subclass of another class. Unlike issubclass, this will not throw an exception if cls is an instance instead of a type. """ # For python<=3.11 inspect.isclass() will return True for parametrized types (e.g. 
list[str]) # so we need to check for get_origin() to avoid TypeError for issubclass. return inspect.isclass(cls) and not get_origin(cls) and issubclass(cls, parent_cls) def _collect_secret_fields( name: str, type_: type[BaseModel] | type[SecretStr] | type[SecretBytes] | type[SecretDict], secrets: list[str], ) -> None: """ Recursively collects all secret fields from a given type and adds them to the secrets list, supporting nested Union / Dict / Tuple / List / BaseModel fields. Also, note, this function mutates the input secrets list, thus does not return anything. """ if get_origin(type_) in NestedTypes: for nested_type in get_args(type_): _collect_secret_fields(name, nested_type, secrets) return elif _is_subclass(type_, BaseModel): for field_name, field in type_.model_fields.items(): if field.annotation is not None: _collect_secret_fields( f"{name}.{field_name}", field.annotation, secrets ) return # Check if this is a pydantic Secret type (including generic Secret[T]) is_pydantic_secret = False # Direct check for SecretStr, SecretBytes if type_ in (SecretStr, SecretBytes): is_pydantic_secret = True # Check for base Secret class elif ( isinstance(type_, type) # type: ignore[unnecessaryIsInstance] and getattr(type_, "__module__", None) == "pydantic.types" and getattr(type_, "__name__", None) == "Secret" ): is_pydantic_secret = True # Check for generic Secret[T] (e.g., Secret[str], Secret[int]) elif get_origin(type_) is not None: origin = get_origin(type_) if ( getattr(origin, "__module__", None) == "pydantic.types" and getattr(origin, "__name__", None) == "Secret" ): is_pydantic_secret = True if is_pydantic_secret: secrets.append(name) elif type_ == SecretDict: # Append .* to field name to signify that all values under a given key are secret and should be obfuscated. 
secrets.append(f"{name}.*") elif Block.is_block_class(type_): secrets.extend( f"{name}.{s}" for s in type_.model_json_schema()["secret_fields"] ) def _should_update_block_type( local_block_type: BlockType, server_block_type: BlockType ) -> bool: """ Compares the fields of `local_block_type` and `server_block_type`. Only compare the possible updatable fields as defined by `BlockTypeUpdate.updatable_fields` Returns True if they are different, otherwise False. """ fields = BlockTypeUpdate.updatable_fields() local_block_fields = local_block_type.model_dump(include=fields, exclude_unset=True) server_block_fields = server_block_type.model_dump( include=fields, exclude_unset=True ) if local_block_fields.get("description") is not None: local_block_fields["description"] = html.unescape( local_block_fields["description"] ) if local_block_fields.get("code_example") is not None: local_block_fields["code_example"] = html.unescape( local_block_fields["code_example"] ) if server_block_fields.get("description") is not None: server_block_fields["description"] = html.unescape( server_block_fields["description"] ) if server_block_fields.get("code_example") is not None: server_block_fields["code_example"] = html.unescape( server_block_fields["code_example"] ) return server_block_fields != local_block_fields
UnknownBlockType
python
apache__airflow
providers/tableau/tests/unit/tableau/hooks/test_tableau.py
{ "start": 1086, "end": 19124 }
class ____: """ Test class for TableauHook """ @pytest.fixture(autouse=True) def setup_connections(self, create_connection_without_db): configuration.conf.load_test_config() create_connection_without_db( models.Connection( conn_id="tableau_test_password", conn_type="tableau", host="tableau", login="user", password="password", extra='{"site_id": "my_site"}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_ssl_connection_certificates_path", conn_type="tableau", host="tableau", login="user", password="password", extra='{"verify": "my_cert_path", "cert": "my_client_cert_path"}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_ssl_false_connection", conn_type="tableau", host="tableau", login="user", password="password", extra='{"verify": "False"}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_ssl_bool_param_connection", conn_type="tableau", host="tableau", login="user", password="password", extra='{"verify": false}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_jwt_auth", conn_type="tableau", host="tableau", extra='{"auth": "jwt", "jwt_token": "fake_jwt_token", "site_id": ""}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_ssl_connection_certificates_path_with_jwt", conn_type="tableau", host="tableau", extra='{"auth": "jwt", "jwt_token": "fake_jwt_token", "site_id": "", "verify": "my_cert_path", "cert": "my_client_cert_path"}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_jwt_auth_no_token", conn_type="tableau", host="tableau", extra='{"auth": "jwt", "site_id": ""}', ) ) create_connection_without_db( models.Connection( conn_id="tableau_test_both_auth", conn_type="tableau", host="tableau", login="user", password="password", extra='{"auth": "jwt", "jwt_token": "fake_jwt_token", "site_id": ""}', ) ) @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") 
@patch("airflow.providers.tableau.hooks.tableau.Server") def test_get_conn_auth_via_password_and_site_in_connection(self, mock_server, mock_tableau_auth): """ Test get conn auth via password """ with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_tableau_auth.assert_called_once_with( username=tableau_hook.conn.login, password=tableau_hook.conn.password, site_id=tableau_hook.conn.extra_dejson["site_id"], ) mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.JWTAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_jwt_auth(self, mock_server, mock_tableau_jwt_auth): """ Test get conn using JWT authentication via a token string """ with TableauHook(tableau_conn_id="tableau_test_jwt_auth") as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_tableau_jwt_auth.assert_called_once_with( jwt="fake_jwt_token", site_id="", ) mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_jwt_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.JWTAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_jwt_auth_with_ssl(self, mock_server, mock_tableau_jwt_auth): """ Test get conn using JWT authentication via a token string and ssl """ with TableauHook( tableau_conn_id="tableau_test_ssl_connection_certificates_path_with_jwt" ) as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={ "verify": tableau_hook.conn.extra_dejson["verify"], "cert": tableau_hook.conn.extra_dejson["cert"], } ) mock_tableau_jwt_auth.assert_called_once_with( jwt="fake_jwt_token", site_id="", ) 
mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_jwt_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() def test_jwt_auth_with_no_token_provided(self): """ Test get conn using JWT authentication without providing a token """ with pytest.raises( ValueError, match=r"When auth set to 'jwt' then expected exactly one parameter 'jwt_file' or 'jwt_token' in connection extra, but none of them provided.", ): TableauHook(tableau_conn_id="tableau_test_jwt_auth_no_token").get_conn() @patch("airflow.providers.tableau.hooks.tableau.JWTAuth") def test_jwt_auth_with_two_tokens_provided(self, mock_tableau_jwt_auth, create_connection_without_db): """ Test get conn using JWT authentication while providing both a string token and a path The connection setup is done within this test to handle the creation of a temporary file for the JWT token, keeping the shared setup_connections function focused solely on connection logic """ fake_jwt_token = "fake_jwt_token_from_file" with tempfile.NamedTemporaryFile(mode="w+", delete=False) as jwt_file: jwt_file.write(fake_jwt_token) jwt_file_path = jwt_file.name create_connection_without_db( models.Connection( conn_id="tableau_test_jwt_file_auth_two_tokens", conn_type="tableau", host="tableau", extra=json.dumps( {"auth": "jwt", "jwt_file": jwt_file_path, "jwt_token": "fake_jwt_token", "site_id": ""} ), ) ) with pytest.raises( ValueError, match=r"When auth set to 'jwt' then expected exactly one parameter 'jwt_file' or 'jwt_token' in connection extra, but provided both.", ): TableauHook(tableau_conn_id="tableau_test_jwt_file_auth_two_tokens").get_conn() mock_tableau_jwt_auth.assert_not_called() @patch("airflow.providers.tableau.hooks.tableau.JWTAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_jwt_auth_from_file(self, mock_server, mock_tableau_jwt_auth, create_connection_without_db): """ Test get conn using JWT token read from file The connection setup is done within 
this test to handle the creation of a temporary file for the JWT token, keeping the shared setup_connections function focused solely on connection logic """ fake_jwt_token = "fake_jwt_token_from_file" with tempfile.NamedTemporaryFile(mode="w+", delete=False) as jwt_file: jwt_file.write(fake_jwt_token) jwt_file_path = jwt_file.name create_connection_without_db( models.Connection( conn_id="tableau_test_jwt_file_auth", conn_type="tableau", host="tableau", extra=json.dumps({"auth": "jwt", "jwt_file": jwt_file_path, "site_id": ""}), ) ) with TableauHook(tableau_conn_id="tableau_test_jwt_file_auth") as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_tableau_jwt_auth.assert_called_once_with( jwt=fake_jwt_token, site_id="", ) mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_jwt_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") def test_both_auth(self, mock_tableau_auth): """ Test whether an error is thrown if both auth types are set """ with pytest.raises( AirflowException, match=r"Username/password authentication and JWT authentication cannot be used simultaneously. 
Please specify only one authentication method.", ): TableauHook(tableau_conn_id="tableau_test_both_auth").get_conn() mock_tableau_auth.assert_not_called() @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_get_conn_ssl_cert_path(self, mock_server, mock_tableau_auth): """ Test get conn with SSL parameters, verify as path """ with TableauHook(tableau_conn_id="tableau_test_ssl_connection_certificates_path") as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={ "verify": tableau_hook.conn.extra_dejson["verify"], "cert": tableau_hook.conn.extra_dejson["cert"], } ) mock_tableau_auth.assert_called_once_with( username=tableau_hook.conn.login, password=tableau_hook.conn.password, site_id="", ) mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_get_conn_ssl_default(self, mock_server, mock_tableau_auth): """ Test get conn with default SSL parameters """ with ( TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook, ): mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={"verify": True, "cert": None} ) mock_tableau_auth.assert_called_once_with( username=tableau_hook.conn.login, password=tableau_hook.conn.password, site_id=tableau_hook.conn.extra_dejson["site_id"], ) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_get_conn_ssl_disabled(self, mock_server, mock_tableau_auth): """ Test get conn with default SSL disabled 
parameters """ with TableauHook(tableau_conn_id="tableau_test_ssl_false_connection") as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={"verify": False, "cert": None} ) mock_tableau_auth.assert_called_once_with( username=tableau_hook.conn.login, password=tableau_hook.conn.password, site_id="", ) mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") def test_get_conn_ssl_bool_param(self, mock_server, mock_tableau_auth): """ Test get conn with SSL Verify parameter as bool """ with TableauHook(tableau_conn_id="tableau_test_ssl_bool_param_connection") as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host) mock_server.return_value.add_http_options.assert_called_once_with( options_dict={"verify": False, "cert": None} ) mock_tableau_auth.assert_called_once_with( username=tableau_hook.conn.login, password=tableau_hook.conn.password, site_id="", ) mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() @patch("airflow.providers.tableau.hooks.tableau.TableauAuth") @patch("airflow.providers.tableau.hooks.tableau.Server") @patch("airflow.providers.tableau.hooks.tableau.Pager", return_value=[1, 2, 3]) def test_get_all(self, mock_pager, mock_server, mock_tableau_auth): """ Test get all """ with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: jobs = tableau_hook.get_all(resource_name="jobs") assert jobs == mock_pager.return_value mock_pager.assert_called_once_with(mock_server.return_value.jobs.get) @pytest.mark.parametrize( ("finish_code", "expected_status"), [ pytest.param(0, TableauJobFinishCode.SUCCESS, 
id="SUCCESS"), pytest.param(1, TableauJobFinishCode.ERROR, id="ERROR"), pytest.param(2, TableauJobFinishCode.CANCELED, id="CANCELED"), ], ) @patch("airflow.providers.tableau.hooks.tableau.Server") def test_get_job_status(self, mock_tableau_server, finish_code, expected_status): """ Test get job status """ mock_tableau_server.jobs.get_by_id.return_value.finish_code = finish_code with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: tableau_hook.server = mock_tableau_server jobs_status = tableau_hook.get_job_status(job_id="j1") assert jobs_status == expected_status @patch("time.sleep", return_value=None) @patch("airflow.providers.tableau.hooks.tableau.Server") def test_wait_for_state(self, mock_tableau_server, sleep_mock): """ Test wait_for_state """ # Test SUCCESS Positive with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: tableau_hook.get_job_status = MagicMock( name="get_job_status", side_effect=[TableauJobFinishCode.PENDING, TableauJobFinishCode.SUCCESS], ) assert tableau_hook.wait_for_state( job_id="j1", target_state=TableauJobFinishCode.SUCCESS, check_interval=1 ) # Test SUCCESS Negative with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: tableau_hook.get_job_status = MagicMock( name="get_job_status", side_effect=[ TableauJobFinishCode.PENDING, TableauJobFinishCode.PENDING, TableauJobFinishCode.ERROR, ], ) assert not tableau_hook.wait_for_state( job_id="j1", target_state=TableauJobFinishCode.SUCCESS, check_interval=1 ) # Test ERROR Positive with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: tableau_hook.get_job_status = MagicMock( name="get_job_status", side_effect=[ TableauJobFinishCode.PENDING, TableauJobFinishCode.PENDING, TableauJobFinishCode.ERROR, ], ) assert tableau_hook.wait_for_state( job_id="j1", target_state=TableauJobFinishCode.ERROR, check_interval=1 ) # Test CANCELLED Positive with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: 
tableau_hook.get_job_status = MagicMock( name="get_job_status", side_effect=[ TableauJobFinishCode.PENDING, TableauJobFinishCode.PENDING, TableauJobFinishCode.CANCELED, ], ) assert tableau_hook.wait_for_state( job_id="j1", target_state=TableauJobFinishCode.CANCELED, check_interval=1 ) # Test PENDING Positive with TableauHook(tableau_conn_id="tableau_test_password") as tableau_hook: tableau_hook.get_job_status = MagicMock( name="get_job_status", side_effect=[ TableauJobFinishCode.PENDING, TableauJobFinishCode.ERROR, ], ) assert tableau_hook.wait_for_state( job_id="j1", target_state=TableauJobFinishCode.PENDING, check_interval=1 )
TestTableauHook
python
giampaolo__psutil
tests/test_linux.py
{ "start": 34655, "end": 35326 }
class ____(PsutilTestCase): @pytest.mark.skipif(not HAS_GETLOADAVG, reason="not supported") def test_getloadavg(self): psutil_value = psutil.getloadavg() with open("/proc/loadavg") as f: proc_value = f.read().split() assert abs(float(proc_value[0]) - psutil_value[0]) < 1 assert abs(float(proc_value[1]) - psutil_value[1]) < 1 assert abs(float(proc_value[2]) - psutil_value[2]) < 1 # ===================================================================== # --- system network # ===================================================================== @pytest.mark.skipif(not LINUX, reason="LINUX only")
TestLoadAvg
python
airbytehq__airbyte
airbyte-ci/connectors/metadata_service/orchestrator/orchestrator/models/metadata.py
{ "start": 496, "end": 1136 }
class ____: """ This is to allow us to delay validation until we have all the data we need. Note: To use this mixin you will want to construct your model with the pydantic `construct` method. This will allow you to set the values of the model before validation occurs. """ def validate(self): if hasattr(super(), "validate"): super().validate(self.dict()) @property def is_valid(self) -> Tuple[bool, Optional[Any]]: try: self.validate() return (True, None) except ValidationError as e: return (False, e)
PydanticDelayValidationMixin
python
getsentry__sentry
tests/sentry/integrations/api/endpoints/test_data_forwarding.py
{ "start": 889, "end": 5900 }
class ____(DataForwardingIndexEndpointTest): def test_without_revamp_feature_flag_access(self) -> None: with self.feature( { "organizations:data-forwarding-revamp-access": False, "organizations:data-forwarding": True, } ): response = self.client.get(reverse(self.endpoint, args=(self.organization.slug,))) assert response.status_code == 403 def test_without_data_forwarding_feature_flag_access(self) -> None: with self.feature( { "organizations:data-forwarding-revamp-access": True, "organizations:data-forwarding": False, } ): response = self.client.get(reverse(self.endpoint, args=(self.organization.slug,))) assert response.status_code == 200 def test_get_single_data_forwarder(self) -> None: data_forwarder = self.create_data_forwarder( provider=DataForwarderProviderSlug.SEGMENT, config={"write_key": "test_key"}, is_enabled=True, ) response = self.get_success_response(self.organization.slug) assert len(response.data) == 1 assert response.data[0]["id"] == str(data_forwarder.id) assert response.data[0]["provider"] == DataForwarderProviderSlug.SEGMENT assert response.data[0]["config"] == {"write_key": "test_key"} assert response.data[0]["isEnabled"] is True def test_get_multiple_data_forwarders(self) -> None: segment_forwarder = self.create_data_forwarder( provider=DataForwarderProviderSlug.SEGMENT, config={"write_key": "segment_key"}, ) sqs_forwarder = self.create_data_forwarder( provider=DataForwarderProviderSlug.SQS, config={ "queue_url": "https://sqs.us-east-1.amazonaws.com/123456789012/test-queue", "region": "us-east-1", "access_key": "AKIAIOSFODNN7EXAMPLE", "secret_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", }, ) response = self.get_success_response(self.organization.slug) assert len(response.data) == 2 forwarder_ids = [f["id"] for f in response.data] assert str(segment_forwarder.id) in forwarder_ids assert str(sqs_forwarder.id) in forwarder_ids def test_get_data_forwarder_with_project_configs(self) -> None: data_forwarder = self.create_data_forwarder( 
provider=DataForwarderProviderSlug.SEGMENT, config={"write_key": "test_key"}, ) project1 = self.create_project(organization=self.organization) project2 = self.create_project(organization=self.organization) project_config1 = self.create_data_forwarder_project( data_forwarder=data_forwarder, project=project1, is_enabled=True, overrides={"custom": "value1"}, ) project_config2 = self.create_data_forwarder_project( data_forwarder=data_forwarder, project=project2, is_enabled=False, overrides={"custom": "value2"}, ) response = self.get_success_response(self.organization.slug) assert len(response.data) == 1 project_configs = response.data[0]["projectConfigs"] assert len(project_configs) == 2 project_config_ids = [pc["id"] for pc in project_configs] assert str(project_config1.id) in project_config_ids assert str(project_config2.id) in project_config_ids def test_get_only_returns_organization_data_forwarders(self) -> None: my_forwarder = self.create_data_forwarder( provider=DataForwarderProviderSlug.SEGMENT, config={"write_key": "my_key"}, ) other_org = self.create_organization() self.create_data_forwarder( organization=other_org, provider=DataForwarderProviderSlug.SEGMENT, config={"write_key": "other_key"}, ) response = self.get_success_response(self.organization.slug) assert len(response.data) == 1 assert response.data[0]["id"] == str(my_forwarder.id) def test_get_requires_read_permission(self) -> None: user_without_permission = self.create_user() self.login_as(user=user_without_permission) self.get_error_response(self.organization.slug, status_code=403) def test_get_with_disabled_data_forwarder(self) -> None: data_forwarder = self.create_data_forwarder( provider=DataForwarderProviderSlug.SEGMENT, config={"write_key": "test_key"}, is_enabled=False, ) response = self.get_success_response(self.organization.slug) assert len(response.data) == 1 assert response.data[0]["id"] == str(data_forwarder.id) assert response.data[0]["isEnabled"] is False @region_silo_test
DataForwardingIndexGetTest
python
doocs__leetcode
lcof2/剑指 Offer II 049. 从根节点到叶节点的路径数字之和/Solution.py
{ "start": 192, "end": 550 }
class ____: def sumNumbers(self, root: TreeNode) -> int: def dfs(root, presum): if root is None: return 0 s = 10 * presum + root.val if root.left is None and root.right is None: return s return dfs(root.left, s) + dfs(root.right, s) return dfs(root, 0)
Solution
python
Textualize__textual
docs/examples/app/widgets02.py
{ "start": 66, "end": 277 }
class ____(App): def on_key(self) -> None: self.mount(Welcome()) def on_button_pressed(self) -> None: self.exit() if __name__ == "__main__": app = WelcomeApp() app.run()
WelcomeApp
python
catalyst-team__catalyst
catalyst/callbacks/metrics/cmc_score.py
{ "start": 6206, "end": 7923 }
class ____(LoaderMetricCallback): """ Cumulative Matching Characteristics callback for reID case. More information about cmc-based callbacks in CMCScoreCallback's docs. Args: embeddings_key: embeddings key in output dict pids_key: pids key in output dict cids_key: cids key in output dict is_query_key: bool key True if current object is from query topk: specifies which cmc@K to log. [1] - cmc@1 [1, 3] - cmc@1 and cmc@3 [1, 3, 5] - cmc@1, cmc@3 and cmc@5 prefix: metric prefix suffix: metric suffix """ def __init__( self, embeddings_key: str, pids_key: str, cids_key: str, is_query_key: str, topk: Iterable[int] = None, prefix: str = None, suffix: str = None, ): """Init.""" super().__init__( metric=ReidCMCMetric( embeddings_key=embeddings_key, pids_key=pids_key, cids_key=cids_key, is_query_key=is_query_key, topk=topk, prefix=prefix, suffix=suffix, ), input_key=[embeddings_key, is_query_key], target_key=[pids_key, cids_key], ) def on_experiment_start(self, runner: "IRunner") -> None: """Event handler.""" assert runner.engine.distributed_type not in ( DistributedType.MULTI_GPU, DistributedType.TPU, ), "ReidCMCScoreCallback could not work within ddp training" return super().on_experiment_start(runner) __all__ = ["CMCScoreCallback", "ReidCMCScoreCallback"]
ReidCMCScoreCallback
python
sympy__sympy
sympy/testing/tests/test_runtests_pytest.py
{ "start": 864, "end": 6277 }
class ____: @staticmethod def test_no_paths(): """If no paths are passed, only `sympy` and `doc/src` are appended. `sympy` and `doc/src` are the `testpaths` stated in `pytest.ini`. They need to be manually added as if any path-related arguments are passed to `pytest.main` then the settings in `pytest.ini` may be ignored. """ paths = [] args = update_args_with_paths(paths=paths, keywords=None, args=[]) expected = [ str(pathlib.Path(sympy_dir(), 'sympy')), str(pathlib.Path(sympy_dir(), 'doc/src')), ] assert args == expected @staticmethod @pytest.mark.parametrize( 'path', ['sympy/core/tests/test_basic.py', '_basic'] ) def test_one_file(path: str): """Single files/paths, full or partial, are matched correctly.""" args = update_args_with_paths(paths=[path], keywords=None, args=[]) expected = [ str(pathlib.Path(sympy_dir(), 'sympy/core/tests/test_basic.py')), ] assert args == expected @staticmethod def test_partial_path_from_root(): """Partial paths from the root directly are matched correctly.""" args = update_args_with_paths(paths=['sympy/functions'], keywords=None, args=[]) expected = [str(pathlib.Path(sympy_dir(), 'sympy/functions'))] assert args == expected @staticmethod def test_multiple_paths_from_root(): """Multiple paths, partial or full, are matched correctly.""" paths = ['sympy/core/tests/test_basic.py', 'sympy/functions'] args = update_args_with_paths(paths=paths, keywords=None, args=[]) expected = [ str(pathlib.Path(sympy_dir(), 'sympy/core/tests/test_basic.py')), str(pathlib.Path(sympy_dir(), 'sympy/functions')), ] assert args == expected @staticmethod @pytest.mark.parametrize( 'paths, expected_paths', [ ( ['/core', '/util'], [ 'doc/src/modules/utilities', 'doc/src/reference/public/utilities', 'sympy/core', 'sympy/logic/utilities', 'sympy/utilities', ] ), ] ) def test_multiple_paths_from_non_root(paths: List[str], expected_paths: List[str]): """Multiple partial paths are matched correctly.""" args = update_args_with_paths(paths=paths, keywords=None, 
args=[]) assert len(args) == len(expected_paths) for arg, expected in zip(sorted(args), expected_paths): assert Path(expected).as_posix() in Path(arg).as_posix() @staticmethod @pytest.mark.parametrize( 'paths', [ [], ['sympy/physics'], ['sympy/physics/mechanics'], ['sympy/physics/mechanics/tests'], ['sympy/physics/mechanics/tests/test_kane3.py'], ] ) def test_string_as_keyword(paths: List[str]): """String keywords are matched correctly.""" keywords = ('bicycle', ) args = update_args_with_paths(paths=paths, keywords=keywords, args=[]) expected_args = ['sympy/physics/mechanics/tests/test_kane3.py::test_bicycle'] assert len(args) == len(expected_args) for arg, expected in zip(sorted(args), expected_args): assert Path(expected).as_posix() in Path(arg).as_posix() @staticmethod @pytest.mark.parametrize( 'paths', [ [], ['sympy/core'], ['sympy/core/tests'], ['sympy/core/tests/test_sympify.py'], ] ) def test_integer_as_keyword(paths: List[str]): """Integer keywords are matched correctly.""" keywords = ('3538', ) args = update_args_with_paths(paths=paths, keywords=keywords, args=[]) expected_args = ['sympy/core/tests/test_sympify.py::test_issue_3538'] assert len(args) == len(expected_args) for arg, expected in zip(sorted(args), expected_args): assert Path(expected).as_posix() in Path(arg).as_posix() @staticmethod def test_multiple_keywords(): """Multiple keywords are matched correctly.""" keywords = ('bicycle', '3538') args = update_args_with_paths(paths=[], keywords=keywords, args=[]) expected_args = [ 'sympy/core/tests/test_sympify.py::test_issue_3538', 'sympy/physics/mechanics/tests/test_kane3.py::test_bicycle', ] assert len(args) == len(expected_args) for arg, expected in zip(sorted(args), expected_args): assert Path(expected).as_posix() in Path(arg).as_posix() @staticmethod def test_keyword_match_in_multiple_files(): """Keywords are matched across multiple files.""" keywords = ('1130', ) args = update_args_with_paths(paths=[], keywords=keywords, args=[]) expected_args = 
[ 'sympy/integrals/tests/test_heurisch.py::test_heurisch_symbolic_coeffs_1130', 'sympy/utilities/tests/test_lambdify.py::test_python_div_zero_issue_11306', ] assert len(args) == len(expected_args) for arg, expected in zip(sorted(args), expected_args): assert Path(expected).as_posix() in Path(arg).as_posix()
TestUpdateArgsWithPaths
python
huggingface__transformers
tests/models/granite_speech/test_processing_granite_speech.py
{ "start": 1140, "end": 8300 }
class ____(unittest.TestCase): def setUp(self): self.tmpdirname = tempfile.mkdtemp() self.checkpoint = "ibm-granite/granite-speech-3.3-8b" processor = GraniteSpeechProcessor.from_pretrained(self.checkpoint) processor.save_pretrained(self.tmpdirname) def get_tokenizer(self, **kwargs): return AutoTokenizer.from_pretrained(self.tmpdirname, **kwargs) def get_audio_processor(self, **kwargs): return GraniteSpeechFeatureExtractor.from_pretrained(self.tmpdirname, **kwargs) def tearDown(self): shutil.rmtree(self.tmpdirname) def test_save_load_pretrained_default(self): """Ensure we can save / reload a processor correctly.""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) processor.save_pretrained(self.tmpdirname) processor = GraniteSpeechProcessor.from_pretrained(self.tmpdirname) self.assertEqual(processor.tokenizer.get_vocab(), tokenizer.get_vocab()) self.assertIsInstance(processor.tokenizer, GPT2TokenizerFast) self.assertEqual(processor.audio_processor.to_json_string(), audio_processor.to_json_string()) self.assertIsInstance(processor.audio_processor, GraniteSpeechFeatureExtractor) def test_requires_text(self): """Ensure we require text""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) with pytest.raises(TypeError): processor(text=None) def test_bad_text_fails(self): """Ensure we gracefully fail if text is the wrong type.""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor(tokenizer=tokenizer, audio_processor=audio_processor) with pytest.raises(TypeError): processor(text=424, audio=None) def test_bad_nested_text_fails(self): """Ensure we gracefully fail if text is the wrong nested type.""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() 
processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) with pytest.raises(TypeError): processor(text=[424], audio=None) def test_bad_audio_fails(self): """Ensure we gracefully fail if audio is the wrong type.""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) with pytest.raises(TypeError): processor(text=None, audio="foo") def test_nested_bad_audio_fails(self): """Ensure we gracefully fail if audio is the wrong nested type.""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) with pytest.raises(TypeError): processor(text=None, audio=["foo"]) @parameterized.expand( [ ([1, 269920], [171], torch.rand), ([1, 269920], [171], np.random.rand), ] ) def test_audio_token_filling_same_len_feature_tensors(self, vec_dims, num_expected_features, random_func): """Ensure audio token filling is handled correctly when we have one or more audio inputs whose features are all the same length stacked into a tensor / numpy array. NOTE: Currently we enforce that each sample can only have one audio. 
""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) audio = random_func(*vec_dims) - 0.5 audio_tokens = processor.audio_token * vec_dims[0] inputs = processor(text=f"{audio_tokens} Can you compare this audio?", audio=audio, return_tensors="pt") # Check the number of audio tokens audio_token_id = tokenizer.get_vocab()[processor.audio_token] # Make sure the number of audio tokens matches the number of features num_computed_features = processor.audio_processor._get_num_audio_features( [vec_dims[1] for _ in range(vec_dims[0])], ) num_audio_tokens = int(torch.sum(inputs["input_ids"] == audio_token_id)) assert list(inputs["input_features"].shape) == [vec_dims[0], 844, 160] assert sum(num_computed_features) == num_audio_tokens def test_audio_token_filling_varying_len_feature_list(self): """Ensure audio token filling is handled correctly when we have multiple varying len audio sequences passed as a list. 
""" tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) vec_dims = [[1, 142100], [1, 269920]] num_expected_features = [90, 171] audio = [torch.rand(dims) - 0.5 for dims in vec_dims] inputs = processor( text=[ f"{processor.audio_token} Can you describe this audio?", f"{processor.audio_token} How does it compare with this audio?", ], audio=audio, return_tensors="pt", ) # Check the number of audio tokens audio_token_id = tokenizer.get_vocab()[processor.audio_token] # Make sure the number of audio tokens matches the number of features num_calculated_features = processor.audio_processor._get_num_audio_features( [dims[1] for dims in vec_dims], ) num_audio_tokens = int(torch.sum(inputs["input_ids"] == audio_token_id)) assert num_calculated_features == [90, 171] assert sum(num_expected_features) == num_audio_tokens @require_torch_accelerator def test_device_override(self): """Ensure that we regardless of the processing device, the tensors produced are on the CPU. """ tokenizer = self.get_tokenizer() audio_processor = self.get_audio_processor() processor = GraniteSpeechProcessor( tokenizer=tokenizer, audio_processor=audio_processor, ) vec_dims = [1, 269920] wav = torch.rand(vec_dims) - 0.5 inputs = processor( text=f"{processor.audio_token} Can you transcribe this audio?", audio=wav, return_tensors="pt", device=torch_device, ) assert inputs["input_features"].device.type == "cpu"
GraniteSpeechProcessorTest
python
ansible__ansible
test/integration/targets/cache-plugins/inventory_plugins/test_inventoryconfig.py
{ "start": 459, "end": 2360 }
class ____(BaseInventoryPlugin, Cacheable): NAME = 'test' def populate(self, hosts): for host in list(hosts.keys()): self.inventory.add_host(host, group='all') for hostvar, hostval in hosts[host].items(): self.inventory.set_variable(host, hostvar, hostval) def get_hosts(self): return dict( host1=dict( one='two', my_template=trust_as_template("{{ one }}"), verify='two', ), host2=dict( three='four', my_template=trust_as_template("{{ three }}"), verify='four', ), ) def verify_file(self, path): return path.endswith('.inventoryconfig.yml') def parse(self, inventory, loader, path, cache=True): super(InventoryModule, self).parse(inventory, loader, path) self.load_cache_plugin() cache_key = self.get_cache_key(path) # cache may be True or False at this point to indicate if the inventory is being refreshed # get the user's cache option cache_setting = self.get_option('cache') attempt_to_read_cache = cache_setting and cache cache_needs_update = cache_setting and not cache results = {} # attempt to read the cache if inventory isn't being refreshed and the user has caching enabled if attempt_to_read_cache: try: results = self._cache[cache_key] except KeyError: # This occurs if the cache_key is not in the cache or if the cache_key expired, so the cache needs to be updated cache_needs_update = True if cache_needs_update: results = self.get_hosts() # set the cache self._cache[cache_key] = results self.populate(results)
InventoryModule
python
doocs__leetcode
solution/1900-1999/1916.Count Ways to Build Rooms in an Ant Colony/Solution.py
{ "start": 0, "end": 760 }
class ____: def waysToBuildRooms(self, prevRoom: List[int]) -> int: modulo = 10**9 + 7 ingoing = defaultdict(set) outgoing = defaultdict(set) for i in range(1, len(prevRoom)): ingoing[i].add(prevRoom[i]) outgoing[prevRoom[i]].add(i) ans = [1] def recurse(i): if len(outgoing[i]) == 0: return 1 nodes_in_tree = 0 for v in outgoing[i]: cn = recurse(v) if nodes_in_tree != 0: ans[0] *= comb(nodes_in_tree + cn, cn) ans[0] %= modulo nodes_in_tree += cn return nodes_in_tree + 1 recurse(0) return ans[0] % modulo
Solution
python
davidhalter__jedi
jedi/inference/compiled/value.py
{ "start": 9969, "end": 10565 }
class ____(CompiledValue): file_io = None # For modules def _as_context(self): return CompiledModuleContext(self) def py__path__(self): return self.access_handle.py__path__() def is_package(self): return self.py__path__() is not None @property def string_names(self): # For modules name = self.py__name__() if name is None: return () return tuple(name.split('.')) def py__file__(self) -> Optional[Path]: return self.access_handle.py__file__() # type: ignore[no-any-return]
CompiledModule
python
graphql-python__graphene
graphene/relay/id_type.py
{ "start": 1315, "end": 1841 }
class ____(BaseGlobalIDType): """ Simple global ID type: simply the id of the object. To be used carefully as the user is responsible for ensuring that the IDs are indeed global (otherwise it could cause request caching issues). """ graphene_type = ID @classmethod def resolve_global_id(cls, info, global_id): _type = info.return_type.graphene_type._meta.name return _type, global_id @classmethod def to_global_id(cls, _type, _id): return _id
SimpleGlobalIDType
python
conda__conda
conda/activate.py
{ "start": 35170, "end": 36473 }
class ____(_Activator): pathsep_join = ":".join sep = "/" path_conversion = staticmethod(win_path_to_unix if on_win else _path_identity) script_extension = ".csh" tempfile_extension = None # output to stdout command_join = ";\n" needs_line_ending_fix = True unset_var_tmpl = "unsetenv %s" export_var_tmpl = 'setenv %s "%s"' path_var_tmpl = "setenv %s \"`cygpath '%s'`\"" if on_win else export_var_tmpl set_var_tmpl = "set %s='%s'" run_script_tmpl = "source \"`cygpath '%s'`\"" if on_win else 'source "%s"' hook_source_path = Path( CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.csh", ) # TCSH/CSH removes newlines when doing command substitution (see `man tcsh`), # source conda.csh directly and use line terminators to separate commands inline_hook_source = False def _update_prompt(self, set_vars, conda_prompt_modifier): prompt = os.getenv("prompt", "") current_prompt_modifier = os.getenv("CONDA_PROMPT_MODIFIER") if current_prompt_modifier: prompt = re.sub(re.escape(current_prompt_modifier), r"", prompt) set_vars.update( { "prompt": conda_prompt_modifier + prompt, } )
CshActivator
python
scrapy__scrapy
tests/test_http2_client_protocol.py
{ "start": 4249, "end": 4634 }
class ____(LeafResource): def render_GET(self, request: TxRequest): request.requestHeaders.removeHeader("Content-Length") self.deferRequest(request, 0, self._delayed_render, request) return NOT_DONE_YET @staticmethod def _delayed_render(request: TxRequest): request.write(Data.NO_CONTENT_LENGTH) request.finish()
NoContentLengthHeader
python
tox-dev__tox
src/tox/config/source/api.py
{ "start": 380, "end": 3745 }
class ____(ABC): """Source is able to return a configuration value (for either the core or per environment source).""" FILENAME = "" def __init__(self, path: Path) -> None: self.path: Path = path #: the path to the configuration source self._section_to_loaders: dict[str, list[Loader[Any]]] = {} def __repr__(self) -> str: return f"{self.__class__.__name__}(path={self.path})" def get_loaders( self, section: Section, base: list[str] | None, override_map: OverrideMap, conf: ConfigSet, ) -> Iterator[Loader[Any]]: """ Return a loader that loads settings from a given section name. :param section: the section to load :param base: base sections to fallback to :param override_map: a list of overrides to apply :param conf: the config set to use :returns: the loaders to use """ section = self.transform_section(section) key = section.key if key in self._section_to_loaders: yield from self._section_to_loaders[key] return loaders: list[Loader[Any]] = [] self._section_to_loaders[key] = loaders loader: Loader[Any] | None = self.get_loader(section, override_map) if loader is not None: loaders.append(loader) yield loader if base is not None: conf.add_config( keys="base", of_type=list[str], desc="inherit missing keys from these sections", default=base, ) for base_section in self.get_base_sections(conf["base"], section): child = loader loader = self.get_loader(base_section, override_map) if loader is None: loader = child continue if child is not None and loader is not None: child.parent = loader yield loader loaders.append(loader) @abstractmethod def transform_section(self, section: Section) -> Section: raise NotImplementedError @abstractmethod def get_loader(self, section: Section, override_map: OverrideMap) -> Loader[Any] | None: raise NotImplementedError @abstractmethod def get_base_sections(self, base: list[str], in_section: Section) -> Iterator[Section]: raise NotImplementedError @abstractmethod def sections(self) -> Iterator[Section]: """ Return a loader that loads the core 
configuration values. :returns: the core loader from this source """ raise NotImplementedError @abstractmethod def envs(self, core_conf: CoreConfigSet) -> Iterator[str]: """ :param core_conf: the core configuration set :returns: a list of environments defined within this source """ raise NotImplementedError @abstractmethod def get_tox_env_section(self, item: str) -> tuple[Section, list[str], list[str]]: """:returns: the section for a tox environment""" raise NotImplementedError @abstractmethod def get_core_section(self) -> Section: """:returns: the core section""" raise NotImplementedError __all__ = [ "Section", "Source", ]
Source
python
getsentry__sentry
src/sentry/rules/actions/sentry_apps/notify_event.py
{ "start": 1268, "end": 6750 }
class ____(SentryAppEventAction): """ Used for notifying a *specific* sentry app with a custom webhook payload (i.e. specified UI components). """ id = "sentry.rules.actions.notify_event_sentry_app.NotifyEventSentryAppAction" actionType = "sentryapp" # Required field for EventAction, value is ignored label = "" def _get_sentry_app(self, event: GroupEvent) -> RpcSentryApp | None: extra = {"event_id": event.event_id} sentry_app_installation_uuid = self.get_option("sentryAppInstallationUuid") if not sentry_app_installation_uuid: self.logger.info("rules.fail.is_configured", extra=extra) return None result = app_service.get_many(filter=dict(uuids=[sentry_app_installation_uuid])) if result: return result[0].sentry_app self.logger.info("rules.fail.no_app", extra=extra) return None def _get_setting_value(self, field_name: str) -> str | None: incoming_settings = self.data.get("settings", []) return next( (setting["value"] for setting in incoming_settings if setting["name"] == field_name), None, ) def _get_sentry_app_installation_uuid(self) -> Any: sentry_app_installation_uuid = self.data.get("sentryAppInstallationUuid") if not sentry_app_installation_uuid: raise ValidationError("Missing attribute 'sentryAppInstallationUuid'") return sentry_app_installation_uuid def _get_alert_rule_component( self, sentry_app_id: int, sentry_app_name: str ) -> RpcSentryAppComponent: components = app_service.find_app_components(app_id=sentry_app_id) for component in components: if component.type == "alert-rule-action": return component raise ValidationError( f"Alert Rule Actions are not enabled for the {sentry_app_name} integration." 
) def get_custom_actions(self, project: Project) -> Sequence[Mapping[str, Any]]: return app_service.get_custom_alert_rule_actions( event_data=RpcSentryAppEventData.from_event(self), organization_id=project.organization_id, project_slug=project.slug, ) def self_validate(self) -> None: sentry_app_installation_uuid = self._get_sentry_app_installation_uuid() installations = app_service.get_many(filter=dict(uuids=[sentry_app_installation_uuid])) if not installations: raise ValidationError("Could not identify integration from the installation uuid.") sentry_app = installations[0].sentry_app # Ensure the uuid does not match a deleted installation if installations[0].date_deleted is not None: raise ValidationError( f"The installation provided is out of date, please reinstall the {sentry_app.name} integration." ) alert_rule_component = self._get_alert_rule_component(sentry_app.id, sentry_app.name) incoming_settings = self.data.get("settings") if not incoming_settings: raise ValidationError(f"{sentry_app.name} requires settings to configure alert rules.") # Ensure required fields are provided and valid valid_fields = set() schema = alert_rule_component.app_schema.get("settings", {}) for required_field in schema.get("required_fields", []): field_name = required_field.get("name") field_value = self._get_setting_value(field_name) if not field_value: raise ValidationError( f"{sentry_app.name} is missing required settings field: '{field_name}'" ) validate_field(field_value, required_field, sentry_app.name) valid_fields.add(field_name) # Ensure optional fields are valid for optional_field in schema.get("optional_fields", []): field_name = optional_field.get("name") field_value = self._get_setting_value(field_name) validate_field(field_value, optional_field, sentry_app.name) valid_fields.add(field_name) # Ensure the payload we send matches the expectations set in the schema extra_keys = {setting["name"] for setting in incoming_settings} - valid_fields if extra_keys: 
extra_keys_string = ", ".join(extra_keys) raise ValidationError( f"Unexpected setting(s) '{extra_keys_string}' configured for {sentry_app.name}" ) def after( self, event: GroupEvent, notification_uuid: str | None = None ) -> Generator[CallbackFuture]: sentry_app = self._get_sentry_app(event) yield self.future( notify_sentry_app, sentry_app=sentry_app, schema_defined_settings=self.get_option("settings"), ) def render_label(self) -> str: sentry_app_installation_uuid = self._get_sentry_app_installation_uuid() installations = app_service.get_many(filter=dict(uuids=[sentry_app_installation_uuid])) if not installations: raise ValidationError("Could not identify integration from the installation uuid.") sentry_app = installations[0].sentry_app alert_rule_component = self._get_alert_rule_component(sentry_app.id, sentry_app.name) return str(alert_rule_component.app_schema.get("title"))
NotifyEventSentryAppAction
python
joke2k__faker
faker/providers/person/sv_SE/__init__.py
{ "start": 515, "end": 53256 }
class ____(PersonProvider): formats_female = ( "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}}", "{{first_name_female}} {{last_name}} {{last_name}}", "{{first_name_female}} {{last_name}}-{{last_name}}", ) formats_male = ( "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}}", "{{first_name_male}} {{last_name}} {{last_name}}", "{{first_name_male}} {{last_name}}-{{last_name}}", ) formats = formats_female + formats_male first_names_female = OrderedDict( ( ("Abdi", 0.000288), ("Adele", 0.000343), ("Agnes", 0.003102), ("Agneta", 0.003776), ("Agnieszka", 0.000268), ("Ahmed", 0.000767), ("Aina", 0.001031), ("Aisha", 0.000292), ("Alba", 0.000281), ("Aleksandra", 0.000327), ("Alexandra", 0.003364), ("Ali", 0.000822), ("Alice", 0.005009), ("Alicia", 0.001859), ("Alina", 0.000500), ("Alma", 0.002266), ("Alva", 0.002389), ("Amalia", 0.000354), ("Amanda", 0.004428), ("Amelia", 0.000554), ("Amelie", 0.000405), ("Amina", 0.000493), ("Amira", 0.000276), ("Ana", 0.000458), ("Andrea", 0.001204), ("Anette", 0.003509), ("Angela", 0.000516), ("Angelica", 0.001502), ("Angelika", 0.000288), ("Angelina", 0.000421), ("Anita", 0.006239), ("Anja", 0.000580), ("Ann", 0.004046), ("Ann-Britt", 0.000647), ("Ann-Charlott", 0.000344), ("Ann-Charlotte", 0.001258), ("Ann-Christin", 0.001080), ("Ann-Christine", 0.000548), ("Ann-Kristin", 0.000744), ("Ann-Louise", 0.000565), ("Ann-Mari", 0.000613), ("Ann-Marie", 0.001147), ("Ann-Sofi", 0.000391), ("Ann-Sofie", 0.000803), ("Anna", 0.035051), ("Anna-Karin", 
0.000969), ("Anna-Lena", 0.000910), ("Anna-Lisa", 0.000338), ("Anna-Maria", 0.000337), ("Anne", 0.002188), ("Anne-Marie", 0.000795), ("Anneli", 0.003006), ("Annelie", 0.001400), ("Annette", 0.001264), ("Annica", 0.000943), ("Annie", 0.001500), ("Annika", 0.004633), ("Annikki", 0.000339), ("Antonia", 0.000384), ("Asta", 0.000640), ("Astrid", 0.004994), ("Aurora", 0.000722), ("Barbara", 0.000462), ("Barbro", 0.004156), ("Beata", 0.000410), ("Beatrice", 0.001165), ("Bella", 0.000337), ("Berit", 0.002914), ("Betty", 0.000312), ("Bianca", 0.000529), ("Birgit", 0.002089), ("Birgitta", 0.018070), ("Bodil", 0.000870), ("Boel", 0.000296), ("Brita", 0.000825), ("Britt", 0.003536), ("Britt-Mari", 0.000313), ("Britt-Marie", 0.001744), ("Britta", 0.001336), ("Cajsa", 0.000267), ("Camilla", 0.004123), ("Carin", 0.001345), ("Carina", 0.005446), ("Carmen", 0.000457), ("Carola", 0.000953), ("Carolin", 0.000361), ("Carolina", 0.001906), ("Caroline", 0.003921), ("Cassandra", 0.000381), ("Catarina", 0.001099), ("Catharina", 0.001131), ("Cathrine", 0.000331), ("Catrin", 0.000450), ("Cecilia", 0.007656), ("Celine", 0.000379), ("Charlott", 0.000351), ("Charlotta", 0.003093), ("Charlotte", 0.003647), ("Christel", 0.000572), ("Christin", 0.000787), ("Christina", 0.012326), ("Christine", 0.001486), ("Clara", 0.001563), ("Claudia", 0.000363), ("Cornelia", 0.001234), ("Cristina", 0.000544), ("Dagmar", 0.000515), ("Dagny", 0.000477), ("Daniela", 0.000411), ("Daniella", 0.000390), ("Del", 0.000329), ("Denise", 0.000448), ("Desirée", 0.000548), ("Diana", 0.001097), ("Doris", 0.000941), ("Ebba", 0.003558), ("Edit", 0.000589), ("Edith", 0.001119), ("Eira", 0.000321), ("Eivor", 0.001063), ("Elena", 0.000738), ("Eleonor", 0.000601), ("Eleonora", 0.001382), ("Elin", 0.007281), ("Elina", 0.001308), ("Elinor", 0.000458), ("Elisa", 0.000389), ("Elisabet", 0.013796), ("Elisabeth", 0.024617), ("Elise", 0.001787), ("Elizabeth", 0.000896), ("Ella", 0.002845), ("Ellen", 0.003449), ("Ellie", 0.000778), 
("Ellinor", 0.001547), ("Elly", 0.000454), ("Elma", 0.000294), ("Elna", 0.000606), ("Elsa", 0.005643), ("Else", 0.000294), ("Elsie", 0.000950), ("Elsy", 0.000405), ("Elvira", 0.002178), ("Elvy", 0.000392), ("Emelie", 0.003381), ("Emilia", 0.003444), ("Emilie", 0.000371), ("Emily", 0.000422), ("Emma", 0.008964), ("Emmy", 0.000853), ("Engla", 0.000378), ("Erica", 0.000977), ("Erika", 0.003152), ("Erna", 0.000286), ("Estelle", 0.000322), ("Ester", 0.001881), ("Esther", 0.000641), ("Ethel", 0.000330), ("Eva", 0.021862), ("Eva-Lena", 0.000401), ("Evelina", 0.002218), ("Evelyn", 0.000395), ("Evy", 0.000887), ("Ewa", 0.001347), ("Fanny", 0.001265), ("Fatima", 0.000796), ("Felicia", 0.002700), ("Filippa", 0.001352), ("Florence", 0.000303), ("Fredrika", 0.000796), ("Freja", 0.001180), ("Frida", 0.003599), ("Gabriella", 0.001446), ("Gerd", 0.002025), ("Gerda", 0.000402), ("Gertrud", 0.000928), ("Gisela", 0.000294), ("Greta", 0.001384), ("Gudrun", 0.001447), ("Gun", 0.003207), ("Gun-Britt", 0.000674), ("Gunborg", 0.000689), ("Gunhild", 0.000888), ("Gunilla", 0.005892), ("Gunnel", 0.002305), ("Gunvor", 0.001362), ("Hanna", 0.005986), ("Hannah", 0.000703), ("Hannele", 0.000478), ("Harriet", 0.000870), ("Hassan", 0.000370), ("Hedda", 0.000610), ("Hedvig", 0.000788), ("Heidi", 0.000409), ("Helen", 0.002414), ("Helena", 0.009521), ("Helene", 0.001824), ("Helga", 0.000465), ("Helén", 0.001026), ("Heléne", 0.000609), ("Henrietta", 0.000385), ("Hilda", 0.001025), ("Hillevi", 0.000766), ("Hilma", 0.000726), ("Hjördis", 0.000384), ("Hussein", 0.000306), ("Ibrahim", 0.000320), ("Ida", 0.006289), ("Idun", 0.000275), ("Ilona", 0.000318), ("Ilse", 0.000273), ("Ina", 0.000276), ("Ines", 0.000648), ("Inez", 0.000930), ("Ing-Britt", 0.000343), ("Ing-Marie", 0.000537), ("Inga", 0.002718), ("Inga-Britt", 0.000403), ("Inga-Lill", 0.001058), ("Ingalill", 0.000694), ("Ingeborg", 0.002308), ("Ingegerd", 0.003255), ("Ingegärd", 0.001608), ("Ingela", 0.002281), ("Inger", 0.007595), ("Ingrid", 
0.012833), ("Irene", 0.004408), ("Iris", 0.001575), ("Irma", 0.000949), ("Iréne", 0.001284), ("Isa", 0.000325), ("Isabel", 0.000731), ("Isabell", 0.000854), ("Isabella", 0.001912), ("Isabelle", 0.001963), ("Jane", 0.000811), ("Janet", 0.000332), ("Jasmin", 0.000322), ("Jasmine", 0.000613), ("Jeanette", 0.001928), ("Jennie", 0.001346), ("Jennifer", 0.001465), ("Jenny", 0.005533), ("Jessica", 0.003136), ("Jill", 0.000286), ("Joanna", 0.000639), ("Johanna", 0.008600), ("Joline", 0.000384), ("Jonna", 0.000681), ("Josefin", 0.002565), ("Josefina", 0.000998), ("Josefine", 0.002224), ("Josephine", 0.000611), ("Judith", 0.000376), ("Julia", 0.004958), ("Julie", 0.000380), ("Juni", 0.000568), ("Kaarina", 0.000478), ("Kajsa", 0.001389), ("Karin", 0.017304), ("Karina", 0.000560), ("Karolina", 0.002517), ("Katarina", 0.005727), ("Katarzyna", 0.000329), ("Katharina", 0.000369), ("Katja", 0.000335), ("Katrin", 0.000593), ("Kerstin", 0.009177), ("Kim", 0.000576), ("Klara", 0.001911), ("Kristin", 0.001827), ("Kristina", 0.019218), ("Kristine", 0.000342), ("Laila", 0.001405), ("Lara", 0.000284), ("Laura", 0.000750), ("Lea", 0.000628), ("Leah", 0.000578), ("Leena", 0.000278), ("Leia", 0.000526), ("Leila", 0.000443), ("Lena", 0.009663), ("Leona", 0.000308), ("Li", 0.000418), ("Liisa", 0.000382), ("Lilian", 0.001693), ("Lillemor", 0.001130), ("Lilly", 0.001914), ("Lily", 0.000632), ("Lina", 0.002528), ("Linda", 0.006146), ("Linn", 0.001849), ("Linnea", 0.008769), ("Linnéa", 0.010510), ("Lisa", 0.003817), ("Lisbet", 0.000472), ("Lisbeth", 0.001988), ("Lise-Lott", 0.000275), ("Lise-Lotte", 0.000360), ("Liselott", 0.000490), ("Liselotte", 0.000523), ("Liv", 0.001159), ("Livia", 0.000579), ("Lo", 0.000534), ("Lotta", 0.000683), ("Louise", 0.006464), ("Lova", 0.000754), ("Lovis", 0.000328), ("Lovisa", 0.004157), ("Lucia", 0.000556), ("Luna", 0.000559), ("Lydia", 0.000486), ("Lykke", 0.000476), ("Maarit", 0.000296), ("Madeleine", 0.002729), ("Madelene", 0.000980), ("Magdalena", 0.002464), 
("Maj", 0.001967), ("Maj-Britt", 0.000911), ("Maj-Lis", 0.000407), ("Maja", 0.003334), ("Majken", 0.000784), ("Majvor", 0.000287), ("Malgorzata", 0.000270), ("Malin", 0.005829), ("Malva", 0.000288), ("Margaret", 0.000275), ("Margareta", 0.024360), ("Margaretha", 0.003317), ("Margit", 0.001368), ("Margot", 0.000391), ("Margret", 0.000268), ("Mari", 0.001817), ("Maria", 0.051852), ("Mariam", 0.000506), ("Mariana", 0.000810), ("Mariann", 0.000616), ("Marianne", 0.008709), ("Marie", 0.015282), ("Marie-Louise", 0.001157), ("Marika", 0.000681), ("Marina", 0.001296), ("Marit", 0.000276), ("Marita", 0.001985), ("Marja", 0.000329), ("Marjatta", 0.000441), ("Marlene", 0.000522), ("Marta", 0.000568), ("Martina", 0.001555), ("Mary", 0.001158), ("Maryam", 0.000535), ("Mathilda", 0.001100), ("Matilda", 0.004321), ("Maud", 0.001278), ("May", 0.000501), ("Maya", 0.000691), ("Meja", 0.000527), ("Melina", 0.000462), ("Melissa", 0.000614), ("Mia", 0.001025), ("Michaela", 0.000744), ("Michelle", 0.000795), ("Mikaela", 0.001559), ("Mila", 0.000409), ("Milla", 0.000274), ("Mimmi", 0.000567), ("Mina", 0.000293), ("Minna", 0.000530), ("Mira", 0.000603), ("Miranda", 0.000535), ("Miriam", 0.000471), ("Mirjam", 0.000323), ("Moa", 0.002202), ("Mohamed", 0.000844), ("Mohammed", 0.000566), ("Molly", 0.001214), ("Mona", 0.002555), ("Monica", 0.004784), ("Monika", 0.002292), ("My", 0.001265), ("Märta", 0.002804), ("Märtha", 0.000319), ("Nadia", 0.000387), ("Nadja", 0.000349), ("Nancy", 0.000376), ("Nanna", 0.000296), ("Natalia", 0.000483), ("Natalie", 0.000928), ("Nathalie", 0.001592), ("Nellie", 0.001027), ("Nelly", 0.000578), ("Nicole", 0.000944), ("Nina", 0.001970), ("Nora", 0.001197), ("Nour", 0.000283), ("Nova", 0.001132), ("Olga", 0.000613), ("Olivia", 0.003357), ("Ottilia", 0.000387), ("Patricia", 0.000816), ("Paula", 0.000726), ("Paulina", 0.000964), ("Pauline", 0.000423), ("Pernilla", 0.002158), ("Petra", 0.001815), ("Pia", 0.002707), ("Ragnhild", 0.000651), ("Rebecca", 0.001856), 
("Rebecka", 0.001777), ("Regina", 0.000526), ("Renée", 0.000277), ("Rigmor", 0.000366), ("Rita", 0.000533), ("Ronja", 0.000880), ("Rosa", 0.000482), ("Rose", 0.000418), ("Rose-Marie", 0.001020), ("Rosita", 0.000312), ("Rut", 0.001986), ("Ruth", 0.001790), ("Sabina", 0.000822), ("Saga", 0.002219), ("Sally", 0.000595), ("Samira", 0.000329), ("Sandra", 0.003644), ("Sanna", 0.001309), ("Sara", 0.007990), ("Sarah", 0.001064), ("Selma", 0.001525), ("Signe", 0.002671), ("Sigrid", 0.001842), ("Simone", 0.000416), ("Siri", 0.001162), ("Siv", 0.003702), ("Sofi", 0.000574), ("Sofia", 0.011084), ("Sofie", 0.003631), ("Solveig", 0.001808), ("Sonia", 0.000285), ("Sonja", 0.002359), ("Sophia", 0.000768), ("Sophie", 0.000926), ("Stella", 0.001372), ("Stephanie", 0.000404), ("Stina", 0.002429), ("Susan", 0.000331), ("Susann", 0.000470), ("Susanna", 0.001526), ("Susanne", 0.006000), ("Suzanne", 0.000359), ("Svea", 0.001409), ("Sylvia", 0.000960), ("Tanja", 0.000314), ("Teresa", 0.000537), ("Terese", 0.000712), ("Teresia", 0.000892), ("Thea", 0.000955), ("Theres", 0.000287), ("Therese", 0.004216), ("Theresia", 0.000469), ("Therése", 0.001111), ("Thi", 0.000634), ("Tilda", 0.000927), ("Tilde", 0.000869), ("Tina", 0.001056), ("Tindra", 0.000829), ("Tora", 0.000338), ("Tova", 0.000405), ("Tove", 0.000971), ("Tuulikki", 0.000300), ("Tuva", 0.000927), ("Tyra", 0.001099), ("Ulla", 0.005454), ("Ulla-Britt", 0.000882), ("Ulrica", 0.000479), ("Ulrika", 0.004778), ("Valborg", 0.000396), ("Valentina", 0.000481), ("Vanessa", 0.000515), ("Vanja", 0.000513), ("Vega", 0.000277), ("Vendela", 0.000493), ("Vera", 0.001994), ("Veronica", 0.001910), ("Veronika", 0.000473), ("Victoria", 0.003326), ("Viktoria", 0.004526), ("Vilhelmina", 0.000529), ("Vilma", 0.000648), ("Viola", 0.005577), ("Viveka", 0.000333), ("Vivianne", 0.000321), ("Wilhelmina", 0.000307), ("Wilma", 0.001918), ("Yasmin", 0.000286), ("Ylva", 0.001264), ("Yvonne", 0.003868), ("Zahra", 0.000491), ("Åsa", 0.004350), ("Åse", 0.000397), ) ) 
first_names_male = OrderedDict( ( ("Aaron", 0.000273), ("Abbas", 0.000313), ("Abdi", 0.000412), ("Abdirahman", 0.000318), ("Abdul", 0.000418), ("Abdullah", 0.000462), ("Abdullahi", 0.000326), ("Adam", 0.003762), ("Adnan", 0.000349), ("Adrian", 0.001660), ("Agne", 0.000400), ("Ahmad", 0.001476), ("Ahmed", 0.001952), ("Alan", 0.000266), ("Albert", 0.001255), ("Alberto", 0.000235), ("Albin", 0.002744), ("Alejandro", 0.000267), ("Alex", 0.000990), ("Alexander", 0.009795), ("Alexis", 0.000252), ("Alf", 0.002320), ("Alfons", 0.000416), ("Alfred", 0.002058), ("Algot", 0.000653), ("Ali", 0.003231), ("Allan", 0.002085), ("Alvar", 0.000827), ("Alve", 0.000289), ("Alvin", 0.000773), ("Amadeus", 0.000243), ("Amin", 0.000282), ("Amir", 0.000693), ("Anas", 0.000220), ("Anders", 0.020522), ("Andreas", 0.008256), ("Andres", 0.000241), ("Andrew", 0.000280), ("Andrzej", 0.000287), ("André", 0.001527), ("Andréas", 0.000291), ("Antero", 0.000558), ("Anthony", 0.000378), ("Anton", 0.004343), ("Antonio", 0.000676), ("Arne", 0.005777), ("Arnold", 0.000498), ("Aron", 0.000688), ("Arthur", 0.000662), ("Artur", 0.000661), ("Arvid", 0.002871), ("Assar", 0.000265), ("Aston", 0.000221), ("August", 0.001489), ("Axel", 0.006717), ("Ben", 0.000319), ("Bengt", 0.009296), ("Benjamin", 0.001967), ("Benny", 0.001130), ("Berndt", 0.000338), ("Bernhard", 0.000524), ("Bernt", 0.001677), ("Bert", 0.000665), ("Bertil", 0.005907), ("Bill", 0.000339), ("Billy", 0.000371), ("Birger", 0.001644), ("Bjarne", 0.000277), ("Björn", 0.006852), ("Bo", 0.009425), ("Boris", 0.000264), ("Bror", 0.001796), ("Bruno", 0.000529), ("Börje", 0.002099), ("Carl", 0.015879), ("Carl-Johan", 0.000326), ("Carlos", 0.000403), ("Casper", 0.000797), ("Charles", 0.000719), ("Charlie", 0.001452), ("Christer", 0.006241), ("Christian", 0.004402), ("Christofer", 0.000343), ("Christoffer", 0.002262), ("Christopher", 0.001055), ("Claes", 0.002372), ("Clas", 0.000461), ("Colin", 0.000406), ("Conny", 0.001551), ("Curt", 0.000584), ("Dag", 
0.000492), ("Dan", 0.002496), ("Daniel", 0.010147), ("Dante", 0.000422), ("David", 0.006344), ("Dennis", 0.001896), ("Dick", 0.000530), ("Douglas", 0.000572), ("Ebbe", 0.000915), ("Eddie", 0.000628), ("Edgar", 0.000220), ("Edvard", 0.000883), ("Edvin", 0.002110), ("Edward", 0.000769), ("Edwin", 0.000525), ("Egon", 0.000539), ("Einar", 0.001625), ("Elias", 0.003786), ("Elis", 0.000906), ("Elliot", 0.001271), ("Elmer", 0.000251), ("Elof", 0.000372), ("Elton", 0.000552), ("Elvin", 0.000317), ("Elvis", 0.000289), ("Emanuel", 0.003357), ("Emil", 0.006301), ("Emilio", 0.000320), ("Enar", 0.000278), ("Eric", 0.003986), ("Erik", 0.032401), ("Erland", 0.000832), ("Erling", 0.000723), ("Ernst", 0.001118), ("Eskil", 0.000583), ("Eugen", 0.000251), ("Evald", 0.000411), ("Evert", 0.001602), ("Fabian", 0.000729), ("Felix", 0.001707), ("Ferdinand", 0.000235), ("Filip", 0.003200), ("Folke", 0.001953), ("Frank", 0.001242), ("Frans", 0.001201), ("Fred", 0.000459), ("Fredric", 0.000263), ("Fredrik", 0.010720), ("Fritz", 0.000313), ("Gabriel", 0.002474), ("Georg", 0.001924), ("George", 0.000780), ("Gerhard", 0.000554), ("Gert", 0.000965), ("Gillis", 0.000220), ("Glenn", 0.000609), ("Gottfrid", 0.000288), ("Greger", 0.000317), ("Gunnar", 0.010797), ("Gustaf", 0.003906), ("Gustav", 0.009303), ("Göran", 0.008240), ("Gösta", 0.002634), ("Göte", 0.001058), ("Hamid", 0.000266), ("Hampus", 0.001492), ("Hamza", 0.000304), ("Hannes", 0.000804), ("Hans", 0.012544), ("Harald", 0.001547), ("Harry", 0.002438), ("Hasan", 0.000455), ("Hassan", 0.000984), ("Helge", 0.001028), ("Helmer", 0.000491), ("Henning", 0.000927), ("Henric", 0.000347), ("Henrik", 0.006764), ("Henry", 0.002499), ("Herbert", 0.000567), ("Herman", 0.000956), ("Hilding", 0.000578), ("Hjalmar", 0.001146), ("Holger", 0.000983), ("Hugo", 0.003867), ("Hussein", 0.000775), ("Håkan", 0.005705), ("Ian", 0.000321), ("Ibrahim", 0.001173), ("Inge", 0.001754), ("Ingemar", 0.005694), ("Ingmar", 0.000698), ("Ingvar", 0.003646), ("Isaac", 
0.000248), ("Isac", 0.000588), ("Isak", 0.001921), ("Ismail", 0.000355), ("Ivan", 0.001344), ("Ivar", 0.002279), ("Jack", 0.001067), ("Jacob", 0.001663), ("Jakob", 0.001717), ("Jamal", 0.000253), ("James", 0.000901), ("Jan", 0.013697), ("Jan-Erik", 0.000724), ("Jan-Olof", 0.000383), ("Jan-Åke", 0.000251), ("Jari", 0.000267), ("Jarl", 0.000368), ("Jean", 0.000346), ("Jens", 0.002036), ("Jerker", 0.000292), ("Jerry", 0.000643), ("Jesper", 0.002373), ("Jim", 0.000697), ("Jimmie", 0.000238), ("Jimmy", 0.001978), ("Joachim", 0.000617), ("Joacim", 0.000407), ("Joakim", 0.004417), ("Joel", 0.002406), ("Johan", 0.019352), ("Johannes", 0.003621), ("John", 0.007684), ("Johnny", 0.001309), ("Jon", 0.000733), ("Jonas", 0.006796), ("Jonatan", 0.000895), ("Jonathan", 0.002685), ("Jonny", 0.001117), ("Jose", 0.000220), ("Josef", 0.001312), ("Joseph", 0.000486), ("José", 0.000261), ("Juan", 0.000311), ("Juhani", 0.000978), ("Julian", 0.000606), ("Julius", 0.000844), ("Junior", 0.000328), ("Jörgen", 0.003116), ("Kai", 0.000232), ("Kaj", 0.000627), ("Kalevi", 0.000331), ("Kalle", 0.000423), ("Kari", 0.000258), ("Karim", 0.000284), ("Karl", 0.022011), ("Karl-Erik", 0.000439), ("Kasper", 0.000380), ("Kennet", 0.000529), ("Kenneth", 0.002848), ("Kenny", 0.000345), ("Kent", 0.003337), ("Kenth", 0.000581), ("Kevin", 0.001685), ("Khaled", 0.000403), ("Khalid", 0.000250), ("Kian", 0.000309), ("Kim", 0.001372), ("Kjell", 0.005442), ("Klas", 0.001469), ("Knut", 0.001557), ("Konrad", 0.000325), ("Krister", 0.001868), ("Kristian", 0.001698), ("Kristofer", 0.000494), ("Kristoffer", 0.001512), ("Krzysztof", 0.000305), ("Kumar", 0.000238), ("Kurt", 0.002236), ("Lage", 0.000216), ("Lars", 0.024802), ("Lars-Erik", 0.000679), ("Lars-Göran", 0.000466), ("Lars-Olof", 0.000289), ("Lars-Åke", 0.000334), ("Lasse", 0.000278), ("Leif", 0.006716), ("Lennart", 0.011350), ("Leo", 0.002393), ("Leon", 0.001287), ("Leonard", 0.000800), ("Leonardo", 0.000234), ("Leopold", 0.000272), ("Levi", 0.000412), ("Liam", 
0.002057), ("Linus", 0.002757), ("Loke", 0.000636), ("Loui", 0.000278), ("Louie", 0.000273), ("Louis", 0.000273), ("Love", 0.001194), ("Lowe", 0.000263), ("Lucas", 0.001952), ("Ludvig", 0.002036), ("Ludwig", 0.000874), ("Luis", 0.000371), ("Lukas", 0.001683), ("Magnus", 0.008230), ("Mahdi", 0.000281), ("Mahmoud", 0.000610), ("Malte", 0.001148), ("Manfred", 0.000345), ("Manuel", 0.000334), ("Marcin", 0.000236), ("Marco", 0.000257), ("Marcus", 0.004288), ("Marek", 0.000234), ("Mario", 0.000323), ("Mark", 0.000325), ("Marko", 0.000330), ("Markus", 0.002111), ("Martin", 0.008193), ("Matheo", 0.000250), ("Mathias", 0.001640), ("Matias", 0.000283), ("Mats", 0.007088), ("Matteo", 0.000330), ("Matti", 0.000405), ("Mattias", 0.005588), ("Matts", 0.000227), ("Mauritz", 0.000538), ("Max", 0.002422), ("Maximilian", 0.000624), ("Mehmet", 0.000221), ("Melker", 0.001059), ("Melvin", 0.000968), ("Melwin", 0.000310), ("Micael", 0.000587), ("Michael", 0.004774), ("Michal", 0.000231), ("Michel", 0.000323), ("Miguel", 0.000241), ("Mika", 0.000225), ("Mikael", 0.014597), ("Milan", 0.000280), ("Milo", 0.000576), ("Milton", 0.000566), ("Mio", 0.000365), ("Mohamad", 0.000995), ("Mohamed", 0.002142), ("Mohammad", 0.001974), ("Mohammed", 0.001743), ("Morgan", 0.001222), ("Muhammad", 0.000677), ("Mustafa", 0.000727), ("Måns", 0.000684), ("Mårten", 0.000645), ("Natanael", 0.000272), ("Neo", 0.000514), ("Nicholas", 0.000362), ("Nicklas", 0.001137), ("Niclas", 0.001543), ("Nicolas", 0.000375), ("Niklas", 0.003501), ("Nikola", 0.000227), ("Nils", 0.015519), ("Noa", 0.000217), ("Noah", 0.001539), ("Noel", 0.000890), ("Ola", 0.002294), ("Olav", 0.000218), ("Olavi", 0.000547), ("Ole", 0.000229), ("Oliver", 0.002808), ("Oliwer", 0.000253), ("Olle", 0.002717), ("Olof", 0.012025), ("Olov", 0.003594), ("Omar", 0.001037), ("Oscar", 0.004540), ("Oskar", 0.005082), ("Osman", 0.000339), ("Ossian", 0.000417), ("Otto", 0.001634), ("Ove", 0.003120), ("Owe", 0.000327), ("Patric", 0.000406), ("Patrick", 
0.000791), ("Patrik", 0.004666), ("Paul", 0.002075), ("Pawel", 0.000308), ("Peder", 0.000659), ("Pekka", 0.000237), ("Pelle", 0.000364), ("Per", 0.018031), ("Per-Erik", 0.000378), ("Per-Olof", 0.000512), ("Peter", 0.012580), ("Petter", 0.001144), ("Philip", 0.002050), ("Pierre", 0.001002), ("Piotr", 0.000427), ("Pontus", 0.001825), ("Pär", 0.001882), ("Rafael", 0.000302), ("Ragnar", 0.001635), ("Ralf", 0.000402), ("Ralph", 0.000221), ("Rasmus", 0.002126), ("Raymond", 0.000253), ("Reine", 0.000234), ("Reinhold", 0.000458), ("Reza", 0.000473), ("Richard", 0.001983), ("Rickard", 0.002477), ("Rikard", 0.001124), ("Robert", 0.006260), ("Roberto", 0.000225), ("Robin", 0.003330), ("Roger", 0.004124), ("Roland", 0.004299), ("Rolf", 0.005100), ("Ronald", 0.000256), ("Ronnie", 0.000417), ("Ronny", 0.001230), ("Roy", 0.000512), ("Ruben", 0.000562), ("Rudolf", 0.000328), ("Rune", 0.002796), ("Said", 0.000453), ("Saleh", 0.000218), ("Sam", 0.001044), ("Sami", 0.000435), ("Samir", 0.000398), ("Samuel", 0.002443), ("Sebastian", 0.004364), ("Seth", 0.000312), ("Sigfrid", 0.000312), ("Sigge", 0.000602), ("Sigurd", 0.000471), ("Sigvard", 0.001041), ("Simon", 0.004479), ("Sivert", 0.000249), ("Sixten", 0.001457), ("Sonny", 0.000344), ("Staffan", 0.001302), ("Stefan", 0.008105), ("Stellan", 0.000453), ("Sten", 0.002342), ("Stig", 0.005104), ("Sture", 0.001358), ("Sune", 0.001284), ("Svante", 0.000955), ("Sven", 0.010617), ("Sven-Erik", 0.000376), ("Sven-Olof", 0.000221), ("Sören", 0.001636), ("Tage", 0.001640), ("Tapani", 0.000391), ("Tapio", 0.000312), ("Ted", 0.000719), ("Teodor", 0.000636), ("Theo", 0.001239), ("Theodor", 0.001268), ("Thomas", 0.006897), ("Thor", 0.000377), ("Thore", 0.000259), ("Thure", 0.000295), ("Tim", 0.001212), ("Timo", 0.000223), ("Tobias", 0.003625), ("Tom", 0.001059), ("Tomas", 0.003809), ("Tomasz", 0.000289), ("Tommy", 0.004554), ("Toni", 0.000220), ("Tony", 0.001667), ("Tor", 0.000617), ("Torbjörn", 0.002283), ("Tord", 0.000988), ("Tore", 0.001554), 
("Torgny", 0.000499), ("Torsten", 0.001458), ("Tristan", 0.000221), ("Ture", 0.000873), ("Ulf", 0.006815), ("Ulrik", 0.000288), ("Uno", 0.001029), ("Urban", 0.001217), ("Valdemar", 0.001371), ("Valentin", 0.001042), ("Valter", 0.001241), ("Verner", 0.000523), ("Victor", 0.002379), ("Vidar", 0.000743), ("Vide", 0.000345), ("Viggo", 0.000988), ("Viking", 0.000532), ("Viktor", 0.003799), ("Vilgot", 0.000446), ("Vilhelm", 0.002303), ("Ville", 0.000433), ("Vilmer", 0.000330), ("Vincent", 0.001816), ("Waldemar", 0.000397), ("Walter", 0.000931), ("Werner", 0.000261), ("Wilhelm", 0.002871), ("Wille", 0.000275), ("William", 0.005564), ("Willy", 0.000434), ("Wilmer", 0.000534), ("Yngve", 0.001416), ("Yousef", 0.000308), ("Yusuf", 0.000321), ("Åke", 0.008156), ("Örjan", 0.000613), ("Östen", 0.000362), ) ) first_names = first_names_male.copy() first_names.update(first_names_female) last_names = OrderedDict( ( ("Abbas", 0.000488), ("Abdi", 0.001225), ("Abdullah", 0.000551), ("Abrahamsson", 0.001998), ("Adolfsson", 0.001604), ("Ahlberg", 0.000872), ("Ahlgren", 0.000861), ("Ahlin", 0.000609), ("Ahlqvist", 0.000616), ("Ahlström", 0.001032), ("Ahmad", 0.001023), ("Ahmadi", 0.000921), ("Ahmed", 0.002943), ("Alexandersson", 0.000913), ("Alfredsson", 0.000935), ("Ali", 0.004060), ("Alm", 0.001399), ("Almgren", 0.000579), ("Almqvist", 0.000863), ("Anderberg", 0.000541), ("Andersen", 0.001066), ("Andersson", 0.050317), ("Andreasson", 0.001849), ("Andrén", 0.000787), ("Antonsson", 0.000764), ("Aronsson", 0.001422), ("Arvidsson", 0.002861), ("Asp", 0.000730), ("Asplund", 0.001103), ("Augustsson", 0.000979), ("Axelsson", 0.004888), ("Backlund", 0.000896), ("Backman", 0.000957), ("Bengtsson", 0.007000), ("Berg", 0.004777), ("Bergdahl", 0.000487), ("Berggren", 0.002593), ("Bergkvist", 0.000852), ("Berglund", 0.004363), ("Bergman", 0.003326), ("Bergqvist", 0.001890), ("Bergsten", 0.000582), ("Bergstrand", 0.000534), ("Bergström", 0.004867), ("Bergvall", 0.000548), ("Berntsson", 0.000984), 
("Bertilsson", 0.000898), ("Björk", 0.003130), ("Björklund", 0.002640), ("Björkman", 0.001610), ("Blixt", 0.000709), ("Blom", 0.002153), ("Blomberg", 0.001388), ("Blomgren", 0.000722), ("Blomkvist", 0.000497), ("Blomqvist", 0.002051), ("Bodin", 0.000720), ("Bohlin", 0.000666), ("Bohman", 0.000503), ("Bolin", 0.000612), ("Boman", 0.001171), ("Borg", 0.001885), ("Borgström", 0.000631), ("Boström", 0.001687), ("Brandt", 0.001121), ("Brink", 0.000501), ("Broberg", 0.000794), ("Brodin", 0.000613), ("Brolin", 0.000551), ("Broman", 0.000737), ("Brännström", 0.000676), ("Burman", 0.000661), ("Bylund", 0.000601), ("Byström", 0.000924), ("Bäck", 0.000856), ("Bäckman", 0.000939), ("Bäckström", 0.001705), ("Börjesson", 0.001568), ("Carlberg", 0.000521), ("Carlson", 0.000583), ("Carlsson", 0.007093), ("Chen", 0.000555), ("Christensen", 0.000762), ("Claesson", 0.001471), ("Dahl", 0.002055), ("Dahlberg", 0.002239), ("Dahlgren", 0.001421), ("Dahlin", 0.001116), ("Dahlqvist", 0.000949), ("Dahlström", 0.001410), ("Dahlén", 0.000525), ("Danielsson", 0.003335), ("Davidsson", 0.001723), ("Edberg", 0.000550), ("Edin", 0.000618), ("Edlund", 0.001516), ("Edman", 0.000612), ("Edström", 0.001014), ("Edvardsson", 0.000862), ("Einarsson", 0.000618), ("Ek", 0.002203), ("Ekberg", 0.001172), ("Ekdahl", 0.000670), ("Ekelund", 0.000717), ("Ekholm", 0.000840), ("Eklund", 0.003541), ("Eklöf", 0.000727), ("Ekman", 0.001816), ("Ekstrand", 0.000766), ("Ekström", 0.002406), ("Eliasson", 0.002587), ("Elofsson", 0.000570), ("Emanuelsson", 0.000812), ("Emilsson", 0.000492), ("Engberg", 0.000727), ("Engdahl", 0.000748), ("Englund", 0.001828), ("Engman", 0.000809), ("Engström", 0.003639), ("Engvall", 0.000525), ("Ericson", 0.000845), ("Ericsson", 0.001421), ("Eriksson", 0.028017), ("Erlandsson", 0.001527), ("Fagerström", 0.000494), ("Falk", 0.002234), ("Farah", 0.000524), ("Ferm", 0.000527), ("Flink", 0.000517), ("Folkesson", 0.000563), ("Fors", 0.001060), ("Forsberg", 0.003822), ("Forsell", 0.000599), 
("Forsgren", 0.000615), ("Forslund", 0.001029), ("Forsman", 0.000935), ("Frank", 0.000681), ("Fransson", 0.002975), ("Franzén", 0.001505), ("Fredriksson", 0.003919), ("Friberg", 0.001715), ("Frid", 0.000522), ("Frisk", 0.001002), ("Gabrielsson", 0.000704), ("Gashi", 0.000506), ("Grahn", 0.001012), ("Granath", 0.000674), ("Granberg", 0.000881), ("Granlund", 0.000621), ("Granström", 0.000658), ("Green", 0.000736), ("Gren", 0.000529), ("Grönlund", 0.000524), ("Gullberg", 0.000487), ("Gunnarsson", 0.003164), ("Gustafsson", 0.014569), ("Gustavsson", 0.005205), ("Göransson", 0.001786), ("Hagberg", 0.000925), ("Haglund", 0.001481), ("Hagman", 0.000901), ("Hagström", 0.001212), ("Hall", 0.001005), ("Hallberg", 0.001804), ("Hallgren", 0.000846), ("Hallin", 0.000720), ("Halvarsson", 0.000575), ("Hammar", 0.000854), ("Hammarström", 0.000719), ("Hanna", 0.000823), ("Hansen", 0.001914), ("Hansson", 0.008987), ("Haraldsson", 0.000638), ("Hasan", 0.000775), ("Hassan", 0.002012), ("Hedberg", 0.001695), ("Hedin", 0.001081), ("Hedlund", 0.002288), ("Hedman", 0.001284), ("Hedström", 0.001241), ("Helgesson", 0.000643), ("Hellberg", 0.001132), ("Hellgren", 0.000801), ("Hellman", 0.000875), ("Hellström", 0.002141), ("Henningsson", 0.000708), ("Henriksson", 0.003664), ("Hermansson", 0.002318), ("Hjalmarsson", 0.001038), ("Hjelm", 0.000947), ("Hjort", 0.000503), ("Holgersson", 0.000799), ("Holm", 0.003357), ("Holmberg", 0.003028), ("Holmgren", 0.002334), ("Holmqvist", 0.001361), ("Holmström", 0.001657), ("Holst", 0.000620), ("Hosseini", 0.000553), ("Hult", 0.000792), ("Hultgren", 0.000727), ("Hultman", 0.000744), ("Hussein", 0.001442), ("Hägg", 0.000790), ("Hägglund", 0.000984), ("Häggström", 0.000583), ("Håkansson", 0.003234), ("Högberg", 0.001341), ("Höglund", 0.001621), ("Ibrahim", 0.002039), ("Ingvarsson", 0.000503), ("Isaksson", 0.002827), ("Ismail", 0.000721), ("Israelsson", 0.000619), ("Issa", 0.000728), ("Ivarsson", 0.001875), ("Jacobsson", 0.001207), ("Jafari", 0.000493), 
("Jakobsson", 0.004351), ("Jansson", 0.009964), ("Jarl", 0.000521), ("Jensen", 0.002004), ("Jeppsson", 0.000681), ("Johannesson", 0.001418), ("Johansen", 0.000525), ("Johansson", 0.049876), ("Johnson", 0.000664), ("Johnsson", 0.002873), ("Jonasson", 0.001889), ("Jonsson", 0.011662), ("Josefsson", 0.001719), ("Juhlin", 0.000531), ("Jönsson", 0.006349), ("Jörgensen", 0.000503), ("Karlberg", 0.000720), ("Karlsson", 0.037073), ("Karlström", 0.000885), ("Khalil", 0.000488), ("Khan", 0.001060), ("Kjellberg", 0.000847), ("Klasson", 0.000745), ("Kling", 0.000570), ("Knutsson", 0.001343), ("Krantz", 0.000853), ("Kristensson", 0.000617), ("Kristiansson", 0.000910), ("Kristoffersson", 0.000599), ("Kvist", 0.000666), ("Källström", 0.000578), ("Landin", 0.000604), ("Landström", 0.000559), ("Lantz", 0.000910), ("Larsen", 0.000930), ("Larsson", 0.025694), ("Lennartsson", 0.000886), ("Levin", 0.001025), ("Li", 0.000645), ("Lidström", 0.000539), ("Lidén", 0.000687), ("Lilja", 0.001697), ("Liljegren", 0.000592), ("Lind", 0.003811), ("Lindahl", 0.001756), ("Lindberg", 0.006391), ("Lindblad", 0.001218), ("Lindblom", 0.001817), ("Linde", 0.000681), ("Lindell", 0.001370), ("Linder", 0.001256), ("Lindfors", 0.000623), ("Lindgren", 0.005226), ("Lindh", 0.001269), ("Lindholm", 0.002064), ("Lindkvist", 0.001056), ("Lindmark", 0.000788), ("Lindquist", 0.000580), ("Lindqvist", 0.003660), ("Lindskog", 0.000662), ("Lindström", 0.005711), ("Lindvall", 0.000965), ("Lindén", 0.001586), ("Liu", 0.000484), ("Ljung", 0.001217), ("Ljungberg", 0.001216), ("Ljunggren", 0.000916), ("Ljungqvist", 0.000537), ("Lund", 0.001893), ("Lundberg", 0.004833), ("Lundblad", 0.000583), ("Lundell", 0.000826), ("Lundgren", 0.004680), ("Lundh", 0.000669), ("Lundholm", 0.000507), ("Lundin", 0.003530), ("Lundkvist", 0.001018), ("Lundmark", 0.001263), ("Lundquist", 0.000559), ("Lundqvist", 0.003025), ("Lundström", 0.002683), ("Löf", 0.000558), ("Löfgren", 0.002016), ("Lönn", 0.000524), ("Lövgren", 0.000931), ("Magnusson", 
0.005757), ("Malm", 0.001591), ("Malmberg", 0.001160), ("Malmgren", 0.000739), ("Malmqvist", 0.000776), ("Malmström", 0.000903), ("Marklund", 0.000972), ("Martinsson", 0.002087), ("Mattsson", 0.003885), ("Melander", 0.000681), ("Melin", 0.001481), ("Moberg", 0.001491), ("Modig", 0.000512), ("Mohamed", 0.002454), ("Mohammad", 0.000748), ("Mohammadi", 0.000812), ("Mohammed", 0.001278), ("Mohamud", 0.000491), ("Molin", 0.001245), ("Mustafa", 0.000691), ("Månsson", 0.002053), ("Mårtensson", 0.002615), ("Möller", 0.001852), ("Nguyen", 0.001422), ("Nielsen", 0.001591), ("Niklasson", 0.000900), ("Nilsson", 0.034636), ("Norberg", 0.001997), ("Nord", 0.001297), ("Nordberg", 0.000693), ("Nordgren", 0.000773), ("Nordin", 0.002530), ("Nordlander", 0.000544), ("Nordlund", 0.000932), ("Nordqvist", 0.000946), ("Nordström", 0.002816), ("Norgren", 0.000581), ("Norlin", 0.000520), ("Norling", 0.000660), ("Norman", 0.001089), ("Norrman", 0.000596), ("Norén", 0.001486), ("Nyberg", 0.002917), ("Nygren", 0.001672), ("Nylander", 0.000742), ("Nylén", 0.000520), ("Nyman", 0.001940), ("Nyström", 0.003064), ("Näslund", 0.001094), ("Ohlsson", 0.001144), ("Olander", 0.000521), ("Olausson", 0.001225), ("Olofsson", 0.005180), ("Olsen", 0.000674), ("Olsson", 0.021540), ("Omar", 0.000946), ("Oskarsson", 0.001179), ("Osman", 0.000917), ("Ottosson", 0.001680), ("Palm", 0.001916), ("Palmgren", 0.000558), ("Palmqvist", 0.000823), ("Paulsson", 0.001123), ("Pedersen", 0.001150), ("Persson", 0.021497), ("Petersen", 0.000494), ("Petersson", 0.005904), ("Pettersson", 0.012948), ("Pihl", 0.000577), ("Pålsson", 0.001234), ("Rask", 0.000500), ("Rasmussen", 0.000557), ("Rehn", 0.000499), ("Robertsson", 0.000596), ("Roos", 0.001645), ("Rosenberg", 0.000571), ("Rosengren", 0.000868), ("Rosenqvist", 0.000598), ("Rosén", 0.001878), ("Roth", 0.000527), ("Rydberg", 0.000882), ("Rydén", 0.000806), ("Sahlin", 0.000815), ("Said", 0.000723), ("Saleh", 0.000658), ("Salomonsson", 0.000775), ("Samuelsson", 0.003083), 
("Sandberg", 0.004135), ("Sandell", 0.000710), ("Sandgren", 0.000712), ("Sandin", 0.000826), ("Sandström", 0.002540), ("Schmidt", 0.000601), ("Selin", 0.000498), ("Simonsson", 0.000870), ("Singh", 0.000791), ("Sjöberg", 0.003817), ("Sjöblom", 0.000893), ("Sjödin", 0.001224), ("Sjögren", 0.002256), ("Sjöholm", 0.000877), ("Sjölander", 0.000611), ("Sjölin", 0.000600), ("Sjölund", 0.000733), ("Sjöstedt", 0.000663), ("Sjöstrand", 0.000940), ("Sjöström", 0.001764), ("Skog", 0.000847), ("Skoglund", 0.001678), ("Skoog", 0.000707), ("Sköld", 0.001275), ("Smith", 0.000623), ("Stark", 0.000727), ("Steen", 0.000537), ("Stenberg", 0.001733), ("Stenlund", 0.000496), ("Stenström", 0.000584), ("Storm", 0.000601), ("Strand", 0.001764), ("Strandberg", 0.001645), ("Strid", 0.000751), ("Ström", 0.002411), ("Strömberg", 0.002108), ("Ståhl", 0.001265), ("Sundberg", 0.002240), ("Sundell", 0.000562), ("Sundin", 0.001292), ("Sundqvist", 0.001352), ("Sundström", 0.001961), ("Svahn", 0.000771), ("Svanberg", 0.000802), ("Svantesson", 0.000524), ("Svedberg", 0.000831), ("Svensson", 0.020050), ("Svärd", 0.001241), ("Söderberg", 0.002920), ("Södergren", 0.000553), ("Söderholm", 0.000652), ("Söderlund", 0.001695), ("Söderman", 0.000563), ("Söderqvist", 0.000690), ("Söderström", 0.001939), ("Sörensen", 0.000589), ("Thor", 0.000493), ("Thorén", 0.000819), ("Thulin", 0.000594), ("Torstensson", 0.000915), ("Tran", 0.000748), ("Turesson", 0.000539), ("Törnqvist", 0.001096), ("Vallin", 0.000550), ("Vesterlund", 0.000608), ("Vestin", 0.000521), ("Viberg", 0.000564), ("Viklund", 0.001368), ("Vikström", 0.001310), ("Vilhelmsson", 0.000597), ("Wahlberg", 0.000829), ("Wahlgren", 0.000553), ("Wahlström", 0.001108), ("Wall", 0.000891), ("Wallgren", 0.000556), ("Wallin", 0.003219), ("Wallén", 0.000540), ("Wang", 0.000723), ("Wennberg", 0.000651), ("Werner", 0.000629), ("Westberg", 0.000822), ("Wester", 0.000667), ("Westerberg", 0.001035), ("Westerlund", 0.001071), ("Westin", 0.000927), ("Westlund", 0.000574), 
("Westman", 0.001005), ("Wiberg", 0.001096), ("Widén", 0.000615), ("Wiklund", 0.001296), ("Wikström", 0.001628), ("Wilhelmsson", 0.000628), ("Winberg", 0.000511), ("Zetterberg", 0.000539), ("Zhang", 0.000593), ("Åberg", 0.002406), ("Ågren", 0.001078), ("Åkerlund", 0.000806), ("Åkerman", 0.000500), ("Åkesson", 0.001948), ("Åsberg", 0.000510), ("Åslund", 0.000555), ("Åström", 0.001978), ("Öberg", 0.002142), ("Öhman", 0.001348), ("Östberg", 0.000679), ("Österberg", 0.000994), ("Östling", 0.000563), ("Östlund", 0.001400), ("Östman", 0.000854), ) )
Provider
python
django__django
django/contrib/sites/requests.py
{ "start": 0, "end": 641 }
class ____:
    """
    Drop-in stand-in for a Site object built from the current request.

    Exposes the same ``domain``/``name`` attribute interface as Site, but
    derives both from ``request.get_host()`` instead of the database.
    Persistence operations are deliberately unsupported.
    """

    def __init__(self, request):
        # Both attributes mirror the Host header of the incoming request.
        host = request.get_host()
        self.domain = host
        self.name = host

    def __str__(self):
        return self.domain

    def save(self, force_insert=False, force_update=False):
        # Not backed by a database row, so saving is meaningless.
        raise NotImplementedError("RequestSite cannot be saved.")

    def delete(self):
        # Not backed by a database row, so deleting is meaningless.
        raise NotImplementedError("RequestSite cannot be deleted.")
RequestSite
python
pennersr__django-allauth
allauth/account/views.py
{ "start": 21852, "end": 25349 }
class ____(
    AjaxCapableProcessFormViewMixin,
    NextRedirectMixin,
    LogoutFunctionalityMixin,
    FormView,
):
    """
    Second step of a password reset: the user follows the e-mailed link
    (``uidb36``/``key``) and submits a new password via ResetPasswordKeyForm.
    """

    template_name = "account/password_reset_from_key." + app_settings.TEMPLATE_EXTENSION
    form_class = ResetPasswordKeyForm
    success_url = reverse_lazy("account_reset_password_from_key_done")
    # Placeholder substituted for the real key in the URL (see dispatch):
    # the real key is kept in the session instead of the address bar.
    reset_url_key = "set-password"

    def get_form_class(self):
        # Allow projects to override the form via app_settings.FORMS.
        return get_form_class(
            app_settings.FORMS, "reset_password_from_key", self.form_class
        )

    def dispatch(self, request, uidb36, key, **kwargs):
        self.request = request
        self.key = key
        user_token_form_class = get_form_class(
            app_settings.FORMS, "user_token", UserTokenForm
        )
        is_ajax = get_adapter().is_ajax(request)
        if self.key == self.reset_url_key or is_ajax:
            # Second pass (or XHR): recover the real key from the session.
            if not is_ajax:
                self.key = self.request.session.get(INTERNAL_RESET_SESSION_KEY, "")
            # (Ab)using forms here to be able to handle errors in XHR #890
            token_form = user_token_form_class(data={"uidb36": uidb36, "key": self.key})
            if token_form.is_valid():
                self.reset_user = token_form.reset_user

                # In the event someone clicks on a password reset link
                # for one account while logged into another account,
                # logout of the currently logged in account.
                if (
                    self.request.user.is_authenticated
                    and self.request.user.pk != self.reset_user.pk
                ):
                    self.logout()
                self.request.session[INTERNAL_RESET_SESSION_KEY] = self.key

                return super().dispatch(request, uidb36, self.key, **kwargs)
        else:
            token_form = user_token_form_class(data={"uidb36": uidb36, "key": self.key})
            if token_form.is_valid():
                # Store the key in the session and redirect to the
                # password reset form at a URL without the key. That
                # avoids the possibility of leaking the key in the
                # HTTP Referer header.
                self.request.session[INTERNAL_RESET_SESSION_KEY] = self.key
                redirect_url = self.passthrough_next_url(
                    self.request.path.replace(self.key, self.reset_url_key)
                )
                return redirect(redirect_url)

        # Invalid/expired token: render the failure template (or its AJAX
        # equivalent) without exposing a reset form.
        self.reset_user = None
        response = self.render_to_response(self.get_context_data(token_fail=True))
        return _ajax_response(self.request, response, form=token_form)

    def get_context_data(self, **kwargs):
        ret = super().get_context_data(**kwargs)
        # Post back to the original key-bearing URL captured in self.kwargs.
        ret["action_url"] = reverse(
            "account_reset_password_from_key",
            kwargs={
                "uidb36": self.kwargs["uidb36"],
                "key": self.kwargs["key"],
            },
        )
        return ret

    def get_form_kwargs(self):
        # The form needs the target user and the one-time key to validate.
        kwargs = super().get_form_kwargs()
        kwargs["user"] = self.reset_user
        kwargs["temp_key"] = self.key
        return kwargs

    def form_valid(self, form):
        form.save()
        # The flow may return its own response (e.g. auto-login redirect);
        # otherwise fall back to the regular success_url redirect.
        resp = flows.password_reset.finalize_password_reset(
            self.request, self.reset_user
        )
        if resp:
            return resp
        return super().form_valid(form)


password_reset_from_key = PasswordResetFromKeyView.as_view()


# NOTE(review): this decorator applies to the next class defined below
# this chunk.
@method_decorator(login_not_required, name="dispatch")
PasswordResetFromKeyView
python
django__django
tests/backends/models.py
{ "start": 918, "end": 1060 }
class ____(models.Manager):
    """Manager whose querysets always prefetch the ``schoolclasses`` relation."""

    def get_queryset(self):
        # Start from the default queryset, then eagerly load the related
        # objects so iterating results does not trigger per-row queries.
        base_qs = super().get_queryset()
        return base_qs.prefetch_related("schoolclasses")
SchoolBusManager
python
sqlalchemy__sqlalchemy
lib/sqlalchemy/engine/interfaces.py
{ "start": 12534, "end": 13053 }
class ____(ReflectedConstraint):
    """Dictionary representing the reflected elements corresponding to
    :class:`.PrimaryKeyConstraint`.

    The :class:`.ReflectedPrimaryKeyConstraint` structure is returned by the
    :meth:`.Inspector.get_pk_constraint` method.
    """

    constrained_columns: List[str]
    """column names which comprise the primary key"""

    # NotRequired: this key may be absent entirely when the backend reports
    # no dialect-specific information for the constraint.
    dialect_options: NotRequired[Dict[str, Any]]
    """Additional dialect-specific options detected for this primary key"""
ReflectedPrimaryKeyConstraint
python
jazzband__django-polymorphic
src/polymorphic/tests/models.py
{ "start": 5218, "end": 5371 }
class ____(ShowFieldType, PolymorphicModel):
    """Polymorphic test model wired to a custom default manager."""

    # Custom manager; it relies on ``field1`` existing on this model.
    objects = MyManager()
    field1 = models.CharField(max_length=30)  # needed as MyManager uses it
MROBase1
python
getsentry__sentry
tests/sentry/integrations/repository/notification_action/test_notification_action_notification_message_repository.py
{ "start": 460, "end": 924 }
class ____(TestCase):
    """Shared fixtures for notification-action notification-message tests."""

    def setUp(self) -> None:
        # The parent message hangs off an action/group pair.
        self.action = self.create_action()
        self.group = self.create_group()
        parent = NotificationMessage.objects.create(
            action=self.action,
            group=self.group,
            message_identifier="123abc",
        )
        self.parent_notification_message = parent
        # Repository under test, built with its default configuration.
        self.repository = NotificationActionNotificationMessageRepository.default()
BaseNotificationActionNotificationMessageRepositoryTest
python
run-llama__llama_index
llama-index-core/llama_index/core/base/llms/types.py
{ "start": 15733, "end": 16467 }
class ____(BaseModel):
    """Content block describing a tool invocation requested by the model."""

    # Discriminator value consumed by the ContentBlock union below.
    block_type: Literal["tool_call"] = "tool_call"
    tool_call_id: Optional[str] = Field(
        default=None, description="ID of the tool call, if provided"
    )
    tool_name: str = Field(description="Name of the called tool")
    # NOTE(review): the ``str`` alternative presumably carries provider
    # arguments that were not parsed into a dict — confirm against callers.
    tool_kwargs: dict[str, Any] | str = Field(
        default_factory=dict,  # type: ignore
        description="Arguments provided to the tool, if available",
    )


# Tagged union of every block type; pydantic dispatches on ``block_type``.
ContentBlock = Annotated[
    Union[
        TextBlock,
        ImageBlock,
        AudioBlock,
        VideoBlock,
        DocumentBlock,
        CachePoint,
        CitableBlock,
        CitationBlock,
        ThinkingBlock,
        ToolCallBlock,
    ],
    Field(discriminator="block_type"),
]
ToolCallBlock
python
TheAlgorithms__Python
computer_vision/harris_corner.py
{ "start": 117, "end": 2209 }
class ____:
    """Harris corner detector.

    Scores each pixel with ``R = det(M) - k * trace(M)**2`` computed over a
    square window of image-gradient products, and marks high-scoring pixels
    as corners.
    """

    def __init__(self, k: float, window_size: int):
        """
        k : is an empirically determined constant in [0.04,0.06]
        window_size : neighbourhoods considered
        """
        # Accept the whole documented range, not just its two endpoints
        # (the original check ``k in (0.04, 0.06)`` rejected e.g. 0.05).
        if 0.04 <= k <= 0.06:
            self.k = k
            self.window_size = window_size
        else:
            raise ValueError("invalid k value")

    def __str__(self) -> str:
        return str(self.k)

    def detect(self, img_path: str) -> tuple[cv2.Mat, list[list[int]]]:
        """
        Returns the image with corners identified
        img_path : path of the image
        output : list of the corner positions, image
        """

        img = cv2.imread(img_path, 0)
        h, w = img.shape
        corner_list: list[list[int]] = []
        color_img = img.copy()
        color_img = cv2.cvtColor(color_img, cv2.COLOR_GRAY2RGB)
        dy, dx = np.gradient(img)
        # Gradient products feeding the structure tensor M.
        ixx = dx**2
        iyy = dy**2
        ixy = dx * dy
        offset = self.window_size // 2
        for y in range(offset, h - offset):
            for x in range(offset, w - offset):
                wxx = ixx[
                    y - offset : y + offset + 1, x - offset : x + offset + 1
                ].sum()
                wyy = iyy[
                    y - offset : y + offset + 1, x - offset : x + offset + 1
                ].sum()
                wxy = ixy[
                    y - offset : y + offset + 1, x - offset : x + offset + 1
                ].sum()
                det = (wxx * wyy) - (wxy**2)
                trace = wxx + wyy
                # Use the configured sensitivity; the original shadowed
                # self.k with a hard-coded local k = 0.04.
                r = det - self.k * (trace**2)
                # Can change the value
                if r > 0.5:
                    corner_list.append([x, y, r])
                    # ndarray.itemset was removed in NumPy 2.0; plain
                    # indexing writes all three channels in one assignment
                    # (same values as the original per-channel itemset calls).
                    color_img[y, x] = (0, 0, 255)
        return color_img, corner_list


if __name__ == "__main__":
    edge_detect = HarrisCorner(0.04, 3)
    color_img, _ = edge_detect.detect("path_to_image")
    cv2.imwrite("detect.png", color_img)
HarrisCorner
python
paramiko__paramiko
tests/test_sftp_big.py
{ "start": 1166, "end": 15272 }
class ____: def test_lots_of_files(self, sftp): """ create a bunch of files over the same session. """ numfiles = 100 try: for i in range(numfiles): target = f"{sftp.FOLDER}/file{i}.txt" with sftp.open(target, "w", 1) as f: f.write(f"this is file #{i}.\n") sftp.chmod(target, o660) # now make sure every file is there, by creating a list of filenmes # and reading them in random order. numlist = list(range(numfiles)) while len(numlist) > 0: r = numlist[random.randint(0, len(numlist) - 1)] with sftp.open(f"{sftp.FOLDER}/file{r}.txt") as f: assert f.readline() == f"this is file #{r}.\n" numlist.remove(r) finally: for i in range(numfiles): try: sftp.remove(f"{sftp.FOLDER}/file{i}.txt") except: pass def test_big_file(self, sftp): """ write a 1MB file with no buffering. """ kblob = 1024 * b"x" start = time.time() try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f: for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) end = time.time() sys.stderr.write(f"{round(end - start)}s") start = time.time() with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f: for n in range(1024): data = f.read(1024) assert data == kblob end = time.time() sys.stderr.write(f"{round(end - start)}s") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_big_file_pipelined(self, sftp): """ write a 1MB file, with no linefeeds, using pipelining. 
""" kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) start = time.time() try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) end = time.time() sys.stderr.write(f"{round(end - start)}s") start = time.time() with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) # read on odd boundaries to make sure the bytes aren't getting # scrambled n = 0 k2blob = kblob + kblob chunk = 629 size = 1024 * 1024 while n < size: if n + chunk > size: chunk = size - n data = f.read(chunk) offset = n % 1024 assert data == k2blob[offset : offset + chunk] n += chunk end = time.time() sys.stderr.write(f"{round(end - start)}s") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_prefetch_seek(self, sftp): kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) start = time.time() k2blob = kblob + kblob chunk = 793 for i in range(10): with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) base_offset = (512 * 1024) + 17 * random.randint( 1000, 2000 ) offsets = [base_offset + j * chunk for j in range(100)] # randomly seek around and read them out for j in range(100): offset = offsets[random.randint(0, len(offsets) - 1)] offsets.remove(offset) f.seek(offset) data = f.read(chunk) n_offset = offset % 1024 assert data == k2blob[n_offset : n_offset + chunk] offset += chunk end = time.time() sys.stderr.write(f"{round(end - start)}s") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_readv_seek(self, sftp): 
kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) start = time.time() k2blob = kblob + kblob chunk = 793 for i in range(10): with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: base_offset = (512 * 1024) + 17 * random.randint( 1000, 2000 ) # make a bunch of offsets and put them in random order offsets = [base_offset + j * chunk for j in range(100)] readv_list = [] for j in range(100): o = offsets[random.randint(0, len(offsets) - 1)] offsets.remove(o) readv_list.append((o, chunk)) ret = f.readv(readv_list) for i in range(len(readv_list)): offset = readv_list[i][0] n_offset = offset % 1024 assert next(ret) == k2blob[n_offset : n_offset + chunk] end = time.time() sys.stderr.write(f"{round(end - start)}s") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_lots_of_prefetching(self, sftp): """ prefetch a 1MB file a bunch of times, discarding the file object without using it, to verify that paramiko doesn't get confused. """ kblob = 1024 * b"x" try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) for i in range(10): with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f: file_size = f.stat().st_size f.prefetch(file_size) with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r") as f: file_size = f.stat().st_size f.prefetch(file_size) for n in range(1024): data = f.read(1024) assert data == kblob if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_prefetch_readv(self, sftp): """ verify that prefetch and readv don't conflict with each other. 
""" kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) data = f.read(1024) assert data == kblob chunk_size = 793 base_offset = 512 * 1024 k2blob = kblob + kblob chunks = [ (base_offset + (chunk_size * i), chunk_size) for i in range(20) ] for data in f.readv(chunks): offset = base_offset % 1024 assert chunk_size == len(data) assert k2blob[offset : offset + chunk_size] == data base_offset += chunk_size sys.stderr.write(" ") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_large_readv(self, sftp): """ verify that a very large readv is broken up correctly and still returned as a single blob. """ kblob = bytes().join([struct.pack(">H", n) for n in range(512)]) try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "wb") as f: f.set_pipelined(True) for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: data = list(f.readv([(23 * 1024, 128 * 1024)])) assert len(data) == 1 data = data[0] assert len(data) == 128 * 1024 sys.stderr.write(" ") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_big_file_big_buffer(self, sftp): """ write a 1MB file, with no linefeeds, and a big buffer. """ mblob = 1024 * 1024 * "x" try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w", 128 * 1024) as f: f.write(mblob) assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") def test_big_file_renegotiate(self, sftp): """ write a 1MB file, forcing key renegotiation in the middle. 
""" t = sftp.sock.get_transport() t.packetizer.REKEY_BYTES = 512 * 1024 k32blob = 32 * 1024 * "x" try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w", 128 * 1024) as f: for i in range(32): f.write(k32blob) assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) assert t.H != t.session_id # try to read it too. with sftp.open(f"{sftp.FOLDER}/hongry.txt", "r", 128 * 1024) as f: file_size = f.stat().st_size f.prefetch(file_size) total = 0 while total < 1024 * 1024: total += len(f.read(32 * 1024)) finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt") t.packetizer.REKEY_BYTES = pow(2, 30) def test_prefetch_limit(self, sftp): """ write a 1MB file and prefetch with a limit """ kblob = 1024 * b"x" start = time.time() def expect_prefetch_extents(file, expected_extents): with file._prefetch_lock: assert len(file._prefetch_extents) == expected_extents try: with sftp.open(f"{sftp.FOLDER}/hongry.txt", "w") as f: for n in range(1024): f.write(kblob) if n % 128 == 0: sys.stderr.write(".") sys.stderr.write(" ") assert ( sftp.stat(f"{sftp.FOLDER}/hongry.txt").st_size == 1024 * 1024 ) end = time.time() sys.stderr.write(f"{round(end - start)}s") # read with prefetch, no limit # expecting 32 requests (32k * 32 == 1M) with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: file_size = f.stat().st_size f.prefetch(file_size) wait_until(lambda: expect_prefetch_extents(f, 32)) # read with prefetch, limiting to 5 simultaneous requests with sftp.open(f"{sftp.FOLDER}/hongry.txt", "rb") as f: file_size = f.stat().st_size f.prefetch(file_size, 5) wait_until(lambda: expect_prefetch_extents(f, 5)) for n in range(1024): with f._prefetch_lock: assert len(f._prefetch_extents) <= 5 data = f.read(1024) assert data == kblob if n % 128 == 0: sys.stderr.write(".") finally: sftp.remove(f"{sftp.FOLDER}/hongry.txt")
TestBigSFTP
python
kevin1024__vcrpy
tests/integration/test_proxy.py
{ "start": 228, "end": 3396 }
class ____(http.server.SimpleHTTPRequestHandler):
    """
    Simple proxy server.

    (Inspired by: http://effbot.org/librarybook/simplehttpserver.htm).
    """

    def do_GET(self):
        # Fetch the requested URL upstream and relay status/headers/body.
        upstream_response = urlopen(self.path)
        try:
            status = upstream_response.status
            headers = upstream_response.headers.items()
        except AttributeError:
            # In Python 2 the response is an addinfourl instance.
            status = upstream_response.code
            headers = upstream_response.info().items()
        self.log_request(status)
        self.send_response_only(status, upstream_response.msg)
        for header in headers:
            self.send_header(*header)
        self.end_headers()
        self.copyfile(upstream_response, self.wfile)

    def do_CONNECT(self):
        # HTTPS tunneling: open a raw TCP relay to host:port.
        host, port = self.path.split(":")
        asyncio.run(self._tunnel(host, port, self.connection))

    async def _tunnel(self, host, port, client_sock):
        target_r, target_w = await asyncio.open_connection(host=host, port=port)
        # Tell the client the tunnel is established before relaying bytes.
        self.send_response(http.HTTPStatus.OK)
        self.end_headers()
        source_r, source_w = await asyncio.open_connection(sock=client_sock)

        async def channel(reader, writer):
            # Pump bytes one direction until EOF, then close that side.
            while True:
                data = await reader.read(1024)
                if not data:
                    break
                writer.write(data)
                await writer.drain()
            writer.close()
            await writer.wait_closed()

        # Relay both directions concurrently.
        await asyncio.gather(
            channel(target_r, source_w),
            channel(source_r, target_w),
        )


@pytest.fixture(scope="session")
def proxy_server():
    # Port 0 lets the OS pick a free port; serve from a background thread.
    with socketserver.ThreadingTCPServer(("", 0), Proxy) as httpd:
        proxy_process = threading.Thread(target=httpd.serve_forever)
        proxy_process.start()
        yield "http://{}:{}".format(*httpd.server_address)
        httpd.shutdown()
        proxy_process.join()


def test_use_proxy(tmpdir, httpbin, proxy_server):
    """Ensure that it works with a proxy."""
    with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
        response = requests.get(httpbin.url, proxies={"http": proxy_server})

    # Replay in "none" mode: the second request must come from the cassette.
    with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
        cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})

    assert cassette_response.headers == response.headers
    assert cassette.play_count == 1


def test_use_https_proxy(tmpdir, httpbin_secure, proxy_server):
    """Ensure that it works with an HTTPS proxy."""
    with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
        response = requests.get(httpbin_secure.url, proxies={"https": proxy_server})

    with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
        cassette_response = requests.get(
            httpbin_secure.url,
            proxies={"https": proxy_server},
        )

    assert cassette_response.headers == response.headers
    assert cassette.play_count == 1

    # The cassette URL points to httpbin, not the proxy
    assert cassette.requests[0].url == httpbin_secure.url + "/"
Proxy
python
django-extensions__django-extensions
tests/test_management_command.py
{ "start": 19323, "end": 22123 }
class ____(TestCase):
    """
    Tests for the `runjobs` management command.
    """

    @mock.patch(
        "django_extensions.management.commands.runjobs.Command.runjobs_by_signals"
    )
    @mock.patch("django_extensions.management.commands.runjobs.Command.runjobs")
    @mock.patch("django_extensions.management.commands.runjobs.Command.usage_msg")
    def test_runjobs_management_command(self, usage_msg, runjobs, runjobs_by_signals):
        # A valid interval dispatches to both job runners and never prints
        # the usage message.
        when = "daily"
        call_command("runjobs", when)
        usage_msg.assert_not_called()
        runjobs.assert_called_once()
        runjobs_by_signals.assert_called_once()
        self.assertEqual(runjobs.call_args[0][0], when)

    @mock.patch(
        "django_extensions.management.commands.runjobs.Command.runjobs_by_signals"
    )
    @mock.patch("django_extensions.management.commands.runjobs.Command.runjobs")
    @mock.patch("django_extensions.management.commands.runjobs.Command.usage_msg")
    def test_runjobs_management_command_invalid_when(
        self, usage_msg, runjobs, runjobs_by_signals
    ):
        # An unknown interval only prints usage; no jobs are run.
        when = "invalid"
        call_command("runjobs", when)
        usage_msg.assert_called_once_with()
        runjobs.assert_not_called()
        runjobs_by_signals.assert_not_called()

    def test_runjobs_integration_test(self):
        # (interval name, mock job) pairs, exercised in this order.
        jobs = [
            ("hourly", HOURLY_JOB_MOCK),
            ("daily", DAILY_JOB_MOCK),
            ("monthly", MONTHLY_JOB_MOCK),
            ("weekly", WEEKLY_JOB_MOCK),
            ("yearly", YEARLY_JOB_MOCK),
        ]

        # Reset all mocks in case they have been called elsewhere.
        for job in jobs:
            job[1].reset_mock()

        # After the N-th call, the first N mocks must each have fired exactly
        # once and the remaining mocks not at all.
        counter = 1
        for job in jobs:
            call_command("runjobs", job[0], verbosity=2)
            for already_called in jobs[:counter]:
                already_called[1].assert_called_once_with()
            for not_yet_called in jobs[counter:]:
                not_yet_called[1].assert_not_called()
            counter += 1

    def test_runjob_integration_test(self):
        # Same pattern as above, but addressing individual jobs by name via
        # the singular `runjob` command.
        jobs = [
            ("test_hourly_job", HOURLY_JOB_MOCK),
            ("test_daily_job", DAILY_JOB_MOCK),
            ("test_monthly_job", MONTHLY_JOB_MOCK),
            ("test_weekly_job", WEEKLY_JOB_MOCK),
            ("test_yearly_job", YEARLY_JOB_MOCK),
        ]

        # Reset all mocks in case they have been called elsewhere.
        for job in jobs:
            job[1].reset_mock()

        counter = 1
        for job in jobs:
            call_command("runjob", job[0], verbosity=2)
            for already_called in jobs[:counter]:
                already_called[1].assert_called_once_with()
            for not_yet_called in jobs[counter:]:
                not_yet_called[1].assert_not_called()
            counter += 1
RunJobsTests
python
openai__openai-python
tests/test_utils/test_proxy.py
{ "start": 175, "end": 1072 }
class ____(LazyProxy[Any]):
    """Proxy whose ``__load__`` returns itself, so it never resolves to a
    concrete object; any attribute chain keeps yielding the proxy type."""

    @override
    def __load__(self) -> Any:
        return self

    def __call__(self, *_args: Any, **_kwds: Any) -> Any:
        raise RuntimeError("This should never be called!")


def test_recursive_proxy() -> None:
    proxy = RecursiveLazyProxy()
    assert repr(proxy) == "RecursiveLazyProxy"
    assert str(proxy) == "RecursiveLazyProxy"
    assert dir(proxy) == []
    assert type(proxy).__name__ == "RecursiveLazyProxy"
    # Deep attribute access stays wrapped in the proxy type.
    assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy"


def test_isinstance_does_not_error() -> None:
    # isinstance() must not trigger __load__ (which would raise here).
    class MissingDepsProxy(LazyProxy[Any]):
        @override
        def __load__(self) -> Any:
            raise MissingDependencyError("Mocking missing dependency")

    proxy = MissingDepsProxy()
    assert not isinstance(proxy, dict)
    assert isinstance(proxy, LazyProxy)
RecursiveLazyProxy
python
getsentry__sentry
tests/sentry/monitors/logic/test_incident_occurrence.py
{ "start": 887, "end": 16522 }
class ____(TestCase): def build_occurrence_test_data(self): self.monitor = Monitor.objects.create( name="test monitor", organization_id=self.organization.id, project_id=self.project.id, config={ "schedule": [1, "month"], "schedule_type": ScheduleType.INTERVAL, "max_runtime": None, "checkin_margin": None, }, ) self.monitor_environment = MonitorEnvironment.objects.create( monitor=self.monitor, environment_id=self.environment.id, status=MonitorStatus.ERROR, ) self.successful_checkin = MonitorCheckIn.objects.create( monitor=self.monitor, monitor_environment=self.monitor_environment, project_id=self.project.id, status=CheckInStatus.OK, ) self.last_checkin = timezone.now() self.trace_id = uuid.uuid4() self.timeout_checkin = MonitorCheckIn.objects.create( monitor=self.monitor, monitor_environment=self.monitor_environment, project_id=self.project.id, status=CheckInStatus.TIMEOUT, trace_id=uuid.uuid4(), date_added=self.last_checkin - timedelta(minutes=1), ) self.failed_checkin = MonitorCheckIn.objects.create( monitor=self.monitor, monitor_environment=self.monitor_environment, project_id=self.project.id, status=CheckInStatus.ERROR, trace_id=self.trace_id, date_added=self.last_checkin, ) self.incident = MonitorIncident.objects.create( monitor=self.monitor, monitor_environment=self.monitor_environment, starting_checkin=self.failed_checkin, starting_timestamp=self.last_checkin, grouphash="abcd", ) @mock.patch("sentry.monitors.logic.incident_occurrence.produce_occurrence_to_kafka") def test_send_incident_occurrence( self, mock_produce_occurrence_to_kafka: mock.MagicMock ) -> None: self.build_occurrence_test_data() send_incident_occurrence( self.failed_checkin, [self.timeout_checkin, self.failed_checkin], self.incident, self.last_checkin, ) assert mock_produce_occurrence_to_kafka.call_count == 1 kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs occurrence = kwargs["occurrence"] event = kwargs["event_data"] occurrence = occurrence.to_dict() assert dict( occurrence, **{ 
"project_id": self.project.id, "fingerprint": [self.incident.grouphash], "issue_title": f"Cron failure: {self.monitor.name}", "subtitle": "Your monitor is failing: 1 timeout and 1 error check-ins detected.", "resource_id": None, "evidence_data": {}, "evidence_display": [ { "name": "Failure reason", "value": "1 timeout and 1 error check-ins detected", "important": True, }, { "name": "Environment", "value": self.monitor_environment.get_environment().name, "important": False, }, { "name": "Last successful check-in", "value": self.successful_checkin.date_added.isoformat(), "important": False, }, ], "type": MonitorIncidentType.type_id, "level": "error", "culprit": "", "detection_time": self.failed_checkin.date_added.timestamp(), }, ) == dict(occurrence) assert dict( event, **{ "contexts": { "monitor": { "status": "error", "config": self.monitor.config, "id": str(self.monitor.guid), "name": self.monitor.name, "slug": self.monitor.slug, }, "trace": { "trace_id": self.trace_id.hex, "span_id": None, }, }, "environment": self.monitor_environment.get_environment().name, "event_id": occurrence["event_id"], "fingerprint": [self.incident.grouphash], "platform": "other", "project_id": self.monitor.project_id, "sdk": None, "tags": { "monitor.id": str(self.monitor.guid), "monitor.slug": str(self.monitor.slug), "monitor.incident": str(self.incident.id), }, }, ) == dict(event) @mock.patch("sentry.monitors.logic.incident_occurrence.produce_occurrence_to_kafka") def test_send_incident_occurrence_detector( self, mock_produce_occurrence_to_kafka: mock.MagicMock ) -> None: self.build_occurrence_test_data() ensure_cron_detector(self.monitor) send_incident_occurrence( self.failed_checkin, [self.timeout_checkin, self.failed_checkin], self.incident, self.last_checkin, ) assert mock_produce_occurrence_to_kafka.call_count == 1 kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs occurrence = kwargs["occurrence"] event = kwargs["event_data"] occurrence = occurrence.to_dict() detector = 
get_detector_for_monitor(self.monitor) assert detector assert dict( occurrence, **{ "project_id": self.project.id, "fingerprint": [self.incident.grouphash], "issue_title": f"Cron failure: {self.monitor.name}", "subtitle": "Your monitor is failing: 1 timeout and 1 error check-ins detected.", "resource_id": None, "evidence_data": {"detector_id": detector.id}, "evidence_display": [ { "name": "Failure reason", "value": "1 timeout and 1 error check-ins detected", "important": True, }, { "name": "Environment", "value": self.monitor_environment.get_environment().name, "important": False, }, { "name": "Last successful check-in", "value": self.successful_checkin.date_added.isoformat(), "important": False, }, ], "type": MonitorIncidentType.type_id, "level": "error", "culprit": "", "detection_time": self.failed_checkin.date_added.timestamp(), }, ) == dict(occurrence) assert dict( event, **{ "contexts": { "monitor": { "status": "error", "config": self.monitor.config, "id": str(self.monitor.guid), "name": self.monitor.name, "slug": self.monitor.slug, }, "trace": { "trace_id": self.trace_id.hex, "span_id": None, }, }, "environment": self.monitor_environment.get_environment().name, "event_id": occurrence["event_id"], "fingerprint": [self.incident.grouphash], "platform": "other", "project_id": self.monitor.project_id, "sdk": None, "tags": { "monitor.id": str(self.monitor.guid), "monitor.slug": str(self.monitor.slug), "monitor.incident": str(self.incident.id), }, }, ) == dict(event) def test_failure_reason(self) -> None: monitor = self.create_monitor() monitor_environment = MonitorEnvironment.objects.create( monitor=monitor, environment_id=self.environment.id, ) timeout_checkin = MonitorCheckIn.objects.create( monitor=monitor, monitor_environment=monitor_environment, project_id=self.project.id, status=CheckInStatus.TIMEOUT, ) error_checkin = MonitorCheckIn.objects.create( monitor=monitor, monitor_environment=monitor_environment, project_id=self.project.id, 
status=CheckInStatus.ERROR, ) miss_checkin = MonitorCheckIn.objects.create( monitor=monitor, monitor_environment=monitor_environment, project_id=self.project.id, status=CheckInStatus.MISSED, ) assert get_failure_reason([error_checkin]) == "An error check-in was detected" assert get_failure_reason([timeout_checkin]) == "A timeout check-in was detected" assert get_failure_reason([miss_checkin]) == "A missed check-in was detected" assert ( get_failure_reason([error_checkin, miss_checkin, timeout_checkin]) == "1 error, 1 missed and 1 timeout check-ins detected" ) assert ( get_failure_reason([miss_checkin, timeout_checkin]) == "1 missed and 1 timeout check-ins detected" ) @override_settings( KAFKA_TOPIC_OVERRIDES={"monitors-incident-occurrences": "monitors-test-topic"} ) @mock.patch("sentry.monitors.logic.incident_occurrence._incident_occurrence_producer") def test_queue_incident_occurrence(self, mock_producer: mock.MagicMock) -> None: tick = timezone.now().replace(second=0, microsecond=0) monitor = self.create_monitor() monitor_environment = MonitorEnvironment.objects.create( monitor=monitor, environment_id=self.environment.id, status=MonitorStatus.ERROR, ) last_checkin = timezone.now() trace_id = uuid.uuid4() failed_checkin = MonitorCheckIn.objects.create( monitor=monitor, monitor_environment=monitor_environment, project_id=self.project.id, status=CheckInStatus.ERROR, trace_id=trace_id, date_added=last_checkin, ) incident = MonitorIncident.objects.create( monitor=monitor, monitor_environment=monitor_environment, starting_checkin=failed_checkin, starting_timestamp=last_checkin, ) queue_incident_occurrence( failed_checkin, [failed_checkin], incident, last_checkin, tick, ) incident_occurrence: IncidentOccurrence = { "incident_id": incident.id, "failed_checkin_id": failed_checkin.id, "previous_checkin_ids": [failed_checkin.id], "received_ts": int(last_checkin.timestamp()), "clock_tick_ts": int(tick.timestamp()), } test_payload = KafkaPayload( 
str(monitor_environment.id).encode(), MONITORS_INCIDENT_OCCURRENCES.encode(incident_occurrence), [], ) assert mock_producer.produce.call_count == 1 assert mock_producer.produce.mock_calls[0] == mock.call( Topic("monitors-test-topic"), test_payload ) @mock.patch("sentry.monitors.logic.incident_occurrence.produce_occurrence_to_kafka") def test_send_incident_occurrence_invalid_owner( self, mock_produce_occurrence_to_kafka: mock.MagicMock ) -> None: self.build_occurrence_test_data() team = self.create_team(organization=self.organization) self.monitor.update(owner_team_id=team.id) self.monitor.refresh_from_db() other_org = self.create_organization() team.update(organization_id=other_org.id) send_incident_occurrence( self.failed_checkin, [self.timeout_checkin, self.failed_checkin], self.incident, self.last_checkin, ) self.monitor.refresh_from_db() assert self.monitor.owner_team_id is None assert self.monitor.owner_user_id is None assert mock_produce_occurrence_to_kafka.call_count == 1 kwargs = mock_produce_occurrence_to_kafka.call_args.kwargs occurrence = kwargs["occurrence"] assert occurrence.assignee is None @mock.patch("sentry.monitors.logic.incident_occurrence.send_incident_occurrence") @mock.patch("sentry.monitors.logic.incident_occurrence.queue_incident_occurrence") def test_dispatch_incident_occurrence( self, mock_queue_incident_occurrence, mock_send_incident_occurrence, ): monitor = self.create_monitor() monitor_environment = MonitorEnvironment.objects.create( monitor=monitor, environment_id=self.environment.id, status=MonitorStatus.ERROR, ) failed_checkin = MonitorCheckIn.objects.create( monitor=monitor, monitor_environment=monitor_environment, project_id=self.project.id, status=CheckInStatus.ERROR, date_added=timezone.now(), ) incident = MonitorIncident.objects.create( monitor=monitor, monitor_environment=monitor_environment, starting_checkin=failed_checkin, starting_timestamp=failed_checkin.date_added, ) # Sending without tick triggers send_incident_occurrence 
dispatch_incident_occurrence( failed_checkin, [failed_checkin], incident, received=failed_checkin.date_added, clock_tick=None, ) assert mock_send_incident_occurrence.call_count == 1 mock_send_incident_occurrence.reset_mock() # Sending with tick triggers send_incident_occurrence unless we enable # the crons.dispatch_incident_occurrences_to_consumer option dispatch_incident_occurrence( failed_checkin, [failed_checkin], incident, received=failed_checkin.date_added, clock_tick=timezone.now(), ) assert mock_send_incident_occurrence.call_count == 1 mock_send_incident_occurrence.reset_mock() # Sending with tick and option set dispatches via # queue_incident_occurrence with self.options({"crons.dispatch_incident_occurrences_to_consumer": True}): dispatch_incident_occurrence( failed_checkin, [failed_checkin], incident, received=failed_checkin.date_added, clock_tick=timezone.now(), ) assert mock_queue_incident_occurrence.call_count == 1 assert mock_send_incident_occurrence.call_count == 0
IncidentOccurrenceTestCase
python
apache__airflow
providers/google/src/airflow/providers/google/cloud/utils/external_token_supplier.py
{ "start": 3343, "end": 4006 }
class ____(LoggingMixin, SubjectTokenSupplier): """ A superclass for all Subject Token Supplier classes that wish to implement a caching mechanism. Child classes must implement the ``get_subject_key`` method to generate a string that serves as the cache key, ensuring that tokens are shared appropriately among instances. Methods: get_subject_key: Abstract method to be implemented by child classes. It should return a string that serves as the cache key. """ def __init__(self): super().__init__() @abc.abstractmethod def get_subject_key(self) -> str: raise NotImplementedError("")
CacheTokenSupplier
python
django__django
tests/migrations/test_migrations_squashed_ref_squashed/app1/4_auto.py
{ "start": 35, "end": 120 }
class ____(migrations.Migration): dependencies = [("app1", "2_squashed_3")]
Migration
python
huggingface__transformers
tests/models/bros/test_modeling_bros.py
{ "start": 1280, "end": 9385 }
class ____: def __init__( self, parent, batch_size=13, seq_length=7, is_training=True, use_input_mask=True, use_token_type_ids=True, use_bbox_first_token_mask=True, use_labels=True, vocab_size=99, hidden_size=64, num_hidden_layers=2, num_attention_heads=4, intermediate_size=37, hidden_act="gelu", hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=512, type_vocab_size=16, type_sequence_label_size=2, initializer_range=0.02, num_labels=3, num_choices=4, scope=None, ): self.parent = parent self.batch_size = batch_size self.seq_length = seq_length self.is_training = is_training self.use_input_mask = use_input_mask self.use_bbox_first_token_mask = use_bbox_first_token_mask self.use_token_type_ids = use_token_type_ids self.use_labels = use_labels self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.hidden_act = hidden_act self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.type_vocab_size = type_vocab_size self.type_sequence_label_size = type_sequence_label_size self.initializer_range = initializer_range self.num_labels = num_labels def prepare_config_and_inputs(self): input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) bbox = ids_tensor([self.batch_size, self.seq_length, 8], 1) # Ensure that bbox is legal for i in range(bbox.shape[0]): for j in range(bbox.shape[1]): if bbox[i, j, 3] < bbox[i, j, 1]: t = bbox[i, j, 3] bbox[i, j, 3] = bbox[i, j, 1] bbox[i, j, 1] = t if bbox[i, j, 2] < bbox[i, j, 0]: t = bbox[i, j, 2] bbox[i, j, 2] = bbox[i, j, 0] bbox[i, j, 0] = t input_mask = None if self.use_input_mask: input_mask = random_attention_mask([self.batch_size, self.seq_length]) bbox_first_token_mask = None if self.use_bbox_first_token_mask: 
bbox_first_token_mask = torch.ones([self.batch_size, self.seq_length], dtype=torch.bool).to(torch_device) token_type_ids = None if self.use_token_type_ids: token_type_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size) token_labels = None if self.use_labels: token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels) initial_token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels) subsequent_token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels) config = self.get_config() return ( config, input_ids, bbox, token_type_ids, input_mask, bbox_first_token_mask, token_labels, initial_token_labels, subsequent_token_labels, ) def get_config(self): return BrosConfig( vocab_size=self.vocab_size, hidden_size=self.hidden_size, num_hidden_layers=self.num_hidden_layers, num_attention_heads=self.num_attention_heads, intermediate_size=self.intermediate_size, hidden_act=self.hidden_act, hidden_dropout_prob=self.hidden_dropout_prob, attention_probs_dropout_prob=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, type_vocab_size=self.type_vocab_size, is_decoder=False, initializer_range=self.initializer_range, ) def create_and_check_model( self, config, input_ids, bbox, token_type_ids, input_mask, bbox_first_token_mask, token_labels, initial_token_labels, subsequent_token_labels, ): model = BrosModel(config=config) model.to(torch_device) model.eval() result = model(input_ids, bbox=bbox, attention_mask=input_mask, token_type_ids=token_type_ids) result = model(input_ids, bbox=bbox, token_type_ids=token_type_ids) result = model(input_ids, bbox=bbox) self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size)) def create_and_check_for_token_classification( self, config, input_ids, bbox, token_type_ids, input_mask, bbox_first_token_mask, token_labels, initial_token_labels, subsequent_token_labels, ): config.num_labels = 
self.num_labels model = BrosForTokenClassification(config=config) model.to(torch_device) model.eval() result = model( input_ids, bbox=bbox, attention_mask=input_mask, token_type_ids=token_type_ids, labels=token_labels ) self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.num_labels)) def create_and_check_for_spade_ee_token_classification( self, config, input_ids, bbox, token_type_ids, input_mask, bbox_first_token_mask, token_labels, initial_token_labels, subsequent_token_labels, ): config.num_labels = self.num_labels model = BrosSpadeEEForTokenClassification(config=config) model.to(torch_device) model.eval() result = model( input_ids, bbox=bbox, attention_mask=input_mask, bbox_first_token_mask=bbox_first_token_mask, token_type_ids=token_type_ids, initial_token_labels=token_labels, subsequent_token_labels=token_labels, ) self.parent.assertEqual(result.initial_token_logits.shape, (self.batch_size, self.seq_length, self.num_labels)) self.parent.assertEqual( result.subsequent_token_logits.shape, (self.batch_size, self.seq_length, self.seq_length + 1) ) def create_and_check_for_spade_el_token_classification( self, config, input_ids, bbox, token_type_ids, input_mask, bbox_first_token_mask, token_labels, initial_token_labels, subsequent_token_labels, ): config.num_labels = self.num_labels model = BrosSpadeELForTokenClassification(config=config) model.to(torch_device) model.eval() result = model( input_ids, bbox=bbox, attention_mask=input_mask, bbox_first_token_mask=bbox_first_token_mask, token_type_ids=token_type_ids, labels=token_labels, ) self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.seq_length + 1)) def prepare_config_and_inputs_for_common(self): config_and_inputs = self.prepare_config_and_inputs() ( config, input_ids, bbox, token_type_ids, input_mask, bbox_first_token_mask, token_labels, initial_token_labels, subsequent_token_labels, ) = config_and_inputs inputs_dict = { "input_ids": input_ids, 
"bbox": bbox, "token_type_ids": token_type_ids, "attention_mask": input_mask, } return config, inputs_dict @require_torch
BrosModelTester
python
spyder-ide__spyder
external-deps/spyder-remote-services/spyder_remote_services/services/spyder_kernels/patches.py
{ "start": 1007, "end": 5255 }
class ____(MainKernelHandler): """Handler to integrate Spyder kernels with Jupyter Server. Adds a `spyder_kernel` parameter to the kernel start request. This parameter is used to indicate that the kernel should be started using the Spyder kernel provisioner. This handler also allows for the use of a custom kernel ID or specifying environment variables to be used in the kernel. """ @web.authenticated @authorized async def post(self): """Start a kernel.""" km = self.kernel_manager model = self.get_json_body() if model is None: model = {"name": km.default_kernel_name} else: model.setdefault("name", km.default_kernel_name) kernel_id = await ensure_async( km.start_kernel( # type:ignore[has-type] kernel_name=model["name"], path=model.get("path"), spyder_kernel=model.get("spyder_kernel", False), env=model.get("env", {}), kernel_id=model.get("kernel_id", str(uuid.uuid4())), ) ) model = await ensure_async(km.kernel_model(kernel_id)) location = url_path_join(self.base_url, "api", "kernels", url_escape(kernel_id)) self.set_header("Location", location) self.set_status(201) self.finish(json.dumps(model, default=json_default)) def __kernel_model(self, kernel_id): """ Return a JSON-safe dict representing a kernel. For use in representing kernels in the JSON APIs. """ self._check_kernel_id(kernel_id) kernel = self._kernels[kernel_id] conn_info = kernel.get_connection_info() # convert key bytes to str conn_info["key"] = conn_info["key"].decode() model = { "id": kernel_id, "name": kernel.kernel_name, "last_activity": isoformat(kernel.last_activity), "execution_state": kernel.execution_state, "connections": self._kernel_connections.get(kernel_id, 0), "connection_info": conn_info, } if getattr(kernel, "reason", None): model["reason"] = kernel.reason return model def __patch_async_start_kernel(func: Callable[P, T]) -> Callable[P, T]: """Patch the async start kernel method to add Spyder kernel support. 
This method is used to start a kernel and add the spyder_kernel parameter to the kernel start request. This parameter is used to indicate that the kernel should be started using the Spyder kernel provisioner. """ @wraps(func) async def wrapper(self: KernelManager, *args: P.args, **kw: P.kwargs) -> T: self.kernel_id = self.kernel_id or kw.pop("kernel_id", str(uuid.uuid4())) if kw.pop("spyder_kernel", False): # Overwrite provisioner with Spyder kernel provisioner self.provisioner = SpyderKernelProvisioner( kernel_id=self.kernel_id, kernel_spec=self.kernel_spec, parent=self, ) return await func(*args, **kw) return wrapper def _patch_kernel_manager( kernel_manager_factory: Callable[P, KernelManager] ) -> Callable[P, KernelManager]: """Patch the kernel manager factory to add Spyder kernel support.""" @wraps(kernel_manager_factory) def wrapper(*args: P.args, **kwargs: P.kwargs) -> KernelManager: kernel_manager = kernel_manager_factory(*args, **kwargs) kernel_manager._async_pre_start_kernel = MethodType( __patch_async_start_kernel(kernel_manager._async_pre_start_kernel), kernel_manager, ) return kernel_manager return wrapper def patch_maping_kernel_manager(obj: AsyncMappingKernelManager): obj.kernel_model = MethodType(__kernel_model, obj) obj.default_kernel_name = "spyder-kernel" obj.kernel_manager_factory = _patch_kernel_manager(obj.kernel_manager_factory) def patch_main_kernel_handler(router: Router): for idx, rule in enumerate(router.rules): if isinstance(rule.target, Router): patch_main_kernel_handler(rule.target) elif rule.target is MainKernelHandler: router.rules[idx].target = SpyderMainKernelHandler break
SpyderMainKernelHandler
python
tensorflow__tensorflow
tensorflow/python/module/module_test.py
{ "start": 13296, "end": 13565 }
class ____(module.Module): def __init__(self, name=None): super().__init__(name=name) self._leaves = [] @module.Module.with_name_scope def new_leaf(self, name=None): leaf = TreeModule(name=name) self._leaves.append(leaf) return leaf
TreeModule
python
openai__openai-python
src/openai/types/graders/python_grader.py
{ "start": 217, "end": 534 }
class ____(BaseModel): name: str """The name of the grader.""" source: str """The source code of the python script.""" type: Literal["python"] """The object type, which is always `python`.""" image_tag: Optional[str] = None """The image tag to use for the python script."""
PythonGrader
python
tensorflow__tensorflow
tensorflow/python/autograph/converters/return_statements.py
{ "start": 5458, "end": 5711 }
class ____(object): def __init__(self): self.is_function = False self.return_used = False self.create_guard_next = False self.create_guard_now = False def __repr__(self): return 'used: {}'.format( self.return_used)
_Block
python
huggingface__transformers
src/transformers/models/layoutlmv2/processing_layoutlmv2.py
{ "start": 883, "end": 7338 }
class ____(ProcessorMixin): r""" Constructs a LayoutLMv2 processor which combines a LayoutLMv2 image processor and a LayoutLMv2 tokenizer into a single processor. [`LayoutLMv2Processor`] offers all the functionalities you need to prepare data for the model. It first uses [`LayoutLMv2ImageProcessor`] to resize document images to a fixed size, and optionally applies OCR to get words and normalized bounding boxes. These are then provided to [`LayoutLMv2Tokenizer`] or [`LayoutLMv2TokenizerFast`], which turns the words and bounding boxes into token-level `input_ids`, `attention_mask`, `token_type_ids`, `bbox`. Optionally, one can provide integer `word_labels`, which are turned into token-level `labels` for token classification tasks (such as FUNSD, CORD). Args: image_processor (`LayoutLMv2ImageProcessor`, *optional*): An instance of [`LayoutLMv2ImageProcessor`]. The image processor is a required input. tokenizer (`LayoutLMv2Tokenizer` or `LayoutLMv2TokenizerFast`, *optional*): An instance of [`LayoutLMv2Tokenizer`] or [`LayoutLMv2TokenizerFast`]. The tokenizer is a required input. 
""" def __init__(self, image_processor=None, tokenizer=None, **kwargs): super().__init__(image_processor, tokenizer) def __call__( self, images, text: Union[TextInput, PreTokenizedInput, list[TextInput], list[PreTokenizedInput]] = None, text_pair: Optional[Union[PreTokenizedInput, list[PreTokenizedInput]]] = None, boxes: Optional[Union[list[list[int]], list[list[list[int]]]]] = None, word_labels: Optional[Union[list[int], list[list[int]]]] = None, add_special_tokens: bool = True, padding: Union[bool, str, PaddingStrategy] = False, truncation: Union[bool, str, TruncationStrategy] = False, max_length: Optional[int] = None, stride: int = 0, pad_to_multiple_of: Optional[int] = None, return_token_type_ids: Optional[bool] = None, return_attention_mask: Optional[bool] = None, return_overflowing_tokens: bool = False, return_special_tokens_mask: bool = False, return_offsets_mapping: bool = False, return_length: bool = False, verbose: bool = True, return_tensors: Optional[Union[str, TensorType]] = None, **kwargs, ) -> BatchEncoding: """ This method first forwards the `images` argument to [`~LayoutLMv2ImageProcessor.__call__`]. In case [`LayoutLMv2ImageProcessor`] was initialized with `apply_ocr` set to `True`, it passes the obtained words and bounding boxes along with the additional arguments to [`~LayoutLMv2Tokenizer.__call__`] and returns the output, together with resized `images`. In case [`LayoutLMv2ImageProcessor`] was initialized with `apply_ocr` set to `False`, it passes the words (`text`/``text_pair`) and `boxes` specified by the user along with the additional arguments to [`~LayoutLMv2Tokenizer.__call__`] and returns the output, together with resized `images``. Please refer to the docstring of the above two methods for more information. """ # verify input if self.image_processor.apply_ocr and (boxes is not None): raise ValueError( "You cannot provide bounding boxes if you initialized the image processor with apply_ocr set to True." 
) if self.image_processor.apply_ocr and (word_labels is not None): raise ValueError( "You cannot provide word labels if you initialized the image processor with apply_ocr set to True." ) if return_overflowing_tokens is True and return_offsets_mapping is False: raise ValueError("You cannot return overflowing tokens without returning the offsets mapping.") # first, apply the image processor features = self.image_processor(images=images, return_tensors=return_tensors) # second, apply the tokenizer if text is not None and self.image_processor.apply_ocr and text_pair is None: if isinstance(text, str): text = [text] # add batch dimension (as the image processor always adds a batch dimension) text_pair = features["words"] encoded_inputs = self.tokenizer( text=text if text is not None else features["words"], text_pair=text_pair if text_pair is not None else None, boxes=boxes if boxes is not None else features["boxes"], word_labels=word_labels, add_special_tokens=add_special_tokens, padding=padding, truncation=truncation, max_length=max_length, stride=stride, pad_to_multiple_of=pad_to_multiple_of, return_token_type_ids=return_token_type_ids, return_attention_mask=return_attention_mask, return_overflowing_tokens=return_overflowing_tokens, return_special_tokens_mask=return_special_tokens_mask, return_offsets_mapping=return_offsets_mapping, return_length=return_length, verbose=verbose, return_tensors=return_tensors, **kwargs, ) # add pixel values images = features.pop("pixel_values") if return_overflowing_tokens is True: images = self.get_overflowing_images(images, encoded_inputs["overflow_to_sample_mapping"]) encoded_inputs["image"] = images return encoded_inputs def get_overflowing_images(self, images, overflow_to_sample_mapping): # in case there's an overflow, ensure each `input_ids` sample is mapped to its corresponding image images_with_overflow = [] for sample_idx in overflow_to_sample_mapping: images_with_overflow.append(images[sample_idx]) if len(images_with_overflow) 
!= len(overflow_to_sample_mapping): raise ValueError( "Expected length of images to be the same as the length of `overflow_to_sample_mapping`, but got" f" {len(images_with_overflow)} and {len(overflow_to_sample_mapping)}" ) return images_with_overflow @property def model_input_names(self): return ["input_ids", "bbox", "token_type_ids", "attention_mask", "image"] __all__ = ["LayoutLMv2Processor"]
LayoutLMv2Processor
python
sqlalchemy__sqlalchemy
lib/sqlalchemy/sql/sqltypes.py
{ "start": 122421, "end": 122557 }
class ____(Text): """The CLOB type. This type is found in Oracle Database and Informix. """ __visit_name__ = "CLOB"
CLOB
python
ray-project__ray
rllib/env/wrappers/atari_wrappers.py
{ "start": 4019, "end": 5106 }
class ____(gym.Wrapper): def __init__(self, env, k): """Stack k last frames.""" gym.Wrapper.__init__(self, env) self.k = k self.frames = deque([], maxlen=k) shp = env.observation_space.shape self.observation_space = spaces.Box( low=np.repeat(env.observation_space.low, repeats=k, axis=-1), high=np.repeat(env.observation_space.high, repeats=k, axis=-1), shape=(shp[0], shp[1], shp[2] * k), dtype=env.observation_space.dtype, ) def reset(self, *, seed=None, options=None): ob, infos = self.env.reset(seed=seed, options=options) for _ in range(self.k): self.frames.append(ob) return self._get_ob(), infos def step(self, action): ob, reward, terminated, truncated, info = self.env.step(action) self.frames.append(ob) return self._get_ob(), reward, terminated, truncated, info def _get_ob(self): assert len(self.frames) == self.k return np.concatenate(self.frames, axis=2) @PublicAPI
FrameStack
python
pandas-dev__pandas
asv_bench/benchmarks/hash_functions.py
{ "start": 576, "end": 930 }
class ____: params = range(4, 16) param_names = ["exponent"] def setup(self, exponent): a = np.arange(10**4, dtype="float64") self.a2 = (a + 10**exponent).repeat(100) def time_factorize(self, exponent): pd.factorize(self.a2) def time_unique(self, exponent): pd.unique(self.a2)
UniqueAndFactorizeArange
python
pyinstaller__pyinstaller
bootloader/waflib/Build.py
{ "start": 25061, "end": 25285 }
class ____(InstallContext): '''removes the targets installed''' cmd = 'uninstall' def __init__(self, **kw): super(UninstallContext, self).__init__(**kw) self.is_install = UNINSTALL
UninstallContext
python
pytest-dev__pytest
src/_pytest/nodes.py
{ "start": 19231, "end": 21355 }
class ____(Collector, abc.ABC): """Base class for filesystem collectors.""" def __init__( self, fspath: LEGACY_PATH | None = None, path_or_parent: Path | Node | None = None, path: Path | None = None, name: str | None = None, parent: Node | None = None, config: Config | None = None, session: Session | None = None, nodeid: str | None = None, ) -> None: if path_or_parent: if isinstance(path_or_parent, Node): assert parent is None parent = cast(FSCollector, path_or_parent) elif isinstance(path_or_parent, Path): assert path is None path = path_or_parent path = _imply_path(type(self), path, fspath=fspath) if name is None: name = path.name if parent is not None and parent.path != path: try: rel = path.relative_to(parent.path) except ValueError: pass else: name = str(rel) name = name.replace(os.sep, SEP) self.path = path if session is None: assert parent is not None session = parent.session if nodeid is None: try: nodeid = str(self.path.relative_to(session.config.rootpath)) except ValueError: nodeid = _check_initialpaths_for_relpath(session._initialpaths, path) if nodeid and os.sep != SEP: nodeid = nodeid.replace(os.sep, SEP) super().__init__( name=name, parent=parent, config=config, session=session, nodeid=nodeid, path=path, ) @classmethod def from_parent( cls, parent, *, fspath: LEGACY_PATH | None = None, path: Path | None = None, **kw, ) -> Self: """The public constructor.""" return super().from_parent(parent=parent, fspath=fspath, path=path, **kw)
FSCollector
python
dask__dask
dask/dataframe/dask_expr/_quantile.py
{ "start": 2228, "end": 2998 }
class ____(SeriesQuantile): @functools.cached_property def _meta(self): import_required( "crick", "crick is a required dependency for using the tdigest method." ) return super()._meta def _layer(self) -> dict: from dask.array.percentile import _percentiles_from_tdigest, _tdigest_chunk dsk = {} for i in range(self.frame.npartitions): dsk[(f"chunk-{self._name}", i)] = ( _tdigest_chunk, (getattr, (self.frame._name, i), "values"), ) dsk[(self._name, 0)] = self._finalizer( (_percentiles_from_tdigest, self.q * 100, sorted(dsk)) ) return dsk def _lower(self): return None
SeriesQuantileTdigest
python
pypa__pipenv
pipenv/installers.py
{ "start": 275, "end": 1596 }
class ____: major: int minor: int patch: Optional[int] = field(default=None) def __str__(self): parts = [self.major, self.minor] if self.patch is not None: parts.append(self.patch) return ".".join(str(p) for p in parts) @classmethod def parse(cls, name: str): """Parse an X.Y.Z, X.Y, or pre-release version string into a version tuple.""" match = re.match(r"^(\d+)\.(\d+)(?:\.(\d+))?(a|b|rc)?(\d+)?$", name) if not match: raise ValueError(f"invalid version name {name!r}") major = int(match.group(1)) minor = int(match.group(2)) patch = match.group(3) if patch is not None: patch = int(patch) return cls(major=major, minor=minor, patch=patch) @property def cmpkey(self): """Make the version a comparable tuple. Some old Python versions do not have a patch part, e.g., 2.7.0 is named "2.7" in pyenv. Fix that; otherwise, `None` will fail to compare with int. """ return (self.major, self.minor, self.patch or 0) def matches_minor(self, other: "Version"): """Check whether this version matches the other in (major, minor).""" return (self.major, self.minor) == (other.major, other.minor)
Version
python
dagster-io__dagster
examples/docs_snippets/docs_snippets/concepts/partitions_schedules_sensors/backfills/single_run_backfill_io_manager.py
{ "start": 38, "end": 1047 }
class ____(dg.IOManager): def load_input(self, context: dg.InputContext): start_datetime, end_datetime = context.asset_partitions_time_window return read_data_in_datetime_range(start_datetime, end_datetime) def handle_output(self, context: dg.OutputContext, obj): start_datetime, end_datetime = context.asset_partitions_time_window return overwrite_data_in_datetime_range(start_datetime, end_datetime, obj) daily_partition = dg.DailyPartitionsDefinition(start_date="2020-01-01") raw_events = dg.AssetSpec("raw_events", partitions_def=daily_partition) @dg.asset( partitions_def=daily_partition, backfill_policy=dg.BackfillPolicy.single_run(), ) def events(context: dg.AssetExecutionContext, raw_events): output_data = compute_events_from_raw_events(raw_events) return output_data # end_marker def compute_events_from_raw_events(*args): ... def read_data_in_datetime_range(*args): ... def overwrite_data_in_datetime_range(*args): ...
MyIOManager
python
apache__airflow
task-sdk/src/airflow/sdk/definitions/asset/__init__.py
{ "start": 19844, "end": 20571 }
class ____(AssetBooleanCondition): """Use to combine assets schedule references in an "or" relationship.""" agg_func = any # type: ignore[assignment] def __or__(self, other: BaseAsset) -> BaseAsset: if not isinstance(other, BaseAsset): return NotImplemented # Optimization: X | (Y | Z) is equivalent to X | Y | Z. return AssetAny(*self.objects, other) def __repr__(self) -> str: return f"AssetAny({', '.join(map(str, self.objects))})" def as_expression(self) -> dict[str, Any]: """ Serialize the asset into its scheduling expression. :meta private: """ return {"any": [o.as_expression() for o in self.objects]}
AssetAny
python
realpython__materials
itertools-in-python3/sp500.py
{ "start": 124, "end": 2051 }
class ____(namedtuple("DataPoint", ["date", "value"])): __slots__ = () def __le__(self, other): return self.value <= other.value def __lt__(self, other): return self.value < other.value def __gt__(self, other): return self.value > other.value def consecutive_positives(sequence, zero=0): def _consecutives(): for itr in it.repeat(iter(sequence)): yield tuple( it.takewhile( lambda p: p > zero, it.dropwhile(lambda p: p <= zero, itr) ) ) return it.takewhile(lambda t: len(t), _consecutives()) def read_prices(csvfile, _strptime=datetime.strptime): with open(csvfile) as infile: reader = csv.DictReader(infile) for row in reader: yield DataPoint( date=_strptime(row["Date"], "%Y-%m-%d").date(), value=float(row["Adj Close"]), ) # Read prices and calculate daily percent change. prices = tuple(read_prices("SP500.csv")) gains = tuple( DataPoint(day.date, 100 * (day.value / prev_day.value - 1.0)) for day, prev_day in zip(prices[1:], prices, strict=False) ) # Find maximum daily gain/loss. zdp = DataPoint(None, 0) # zero DataPoint max_gain = ft.reduce(max, it.filterfalse(lambda p: p <= zdp, gains)) max_loss = ft.reduce(min, it.filterfalse(lambda p: p > zdp, gains), zdp) # Find longest growth streak. growth_streaks = consecutive_positives(gains, zero=DataPoint(None, 0)) longest_streak = ft.reduce( lambda x, y: x if len(x) > len(y) else y, growth_streaks ) # Display results. print("Max gain: {1:.2f}% on {0}".format(*max_gain)) print("Max loss: {1:.2f}% on {0}".format(*max_loss)) print( "Longest growth streak: {num_days} days ({first} to {last})".format( num_days=len(longest_streak), first=longest_streak[0].date, last=longest_streak[-1].date, ) )
DataPoint
python
python__mypy
mypy/semanal.py
{ "start": 352554, "end": 357349 }
class ____(TrivialSyntheticTypeTranslator): def visit_any(self, t: AnyType) -> Type: if t.type_of_any == TypeOfAny.from_unimported_type: return t.copy_modified(TypeOfAny.special_form, missing_import_name=None) return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: return t.copy_modified(args=[a.accept(self) for a in t.args]) def apply_semantic_analyzer_patches(patches: list[tuple[int, Callable[[], None]]]) -> None: """Call patch callbacks in the right order. This should happen after semantic analyzer pass 3. """ patches_by_priority = sorted(patches, key=lambda x: x[0]) for priority, patch_func in patches_by_priority: patch_func() def names_modified_by_assignment(s: AssignmentStmt) -> list[NameExpr]: """Return all unqualified (short) names assigned to in an assignment statement.""" result: list[NameExpr] = [] for lvalue in s.lvalues: result += names_modified_in_lvalue(lvalue) return result def names_modified_in_lvalue(lvalue: Lvalue) -> list[NameExpr]: """Return all NameExpr assignment targets in an Lvalue.""" if isinstance(lvalue, NameExpr): return [lvalue] elif isinstance(lvalue, StarExpr): return names_modified_in_lvalue(lvalue.expr) elif isinstance(lvalue, (ListExpr, TupleExpr)): result: list[NameExpr] = [] for item in lvalue.items: result += names_modified_in_lvalue(item) return result return [] def is_same_var_from_getattr(n1: SymbolNode | None, n2: SymbolNode | None) -> bool: """Do n1 and n2 refer to the same Var derived from module-level __getattr__?""" return ( isinstance(n1, Var) and n1.from_module_getattr and isinstance(n2, Var) and n2.from_module_getattr and n1.fullname == n2.fullname ) def dummy_context() -> Context: return TempNode(AnyType(TypeOfAny.special_form)) def is_valid_replacement(old: SymbolTableNode, new: SymbolTableNode) -> bool: """Can symbol table node replace an existing one? These are the only valid cases: 1. Placeholder gets replaced with a non-placeholder 2. 
Placeholder that isn't known to become type replaced with a placeholder that can become a type """ if isinstance(old.node, PlaceholderNode): if isinstance(new.node, PlaceholderNode): return not old.node.becomes_typeinfo and new.node.becomes_typeinfo else: return True return False def is_same_symbol(a: SymbolNode | None, b: SymbolNode | None) -> bool: return ( a == b or (isinstance(a, PlaceholderNode) and isinstance(b, PlaceholderNode)) or is_same_var_from_getattr(a, b) ) def is_trivial_body(block: Block) -> bool: """Returns 'true' if the given body is "trivial" -- if it contains just a "pass", "..." (ellipsis), or "raise NotImplementedError()". A trivial body may also start with a statement containing just a string (e.g. a docstring). Note: Functions that raise other kinds of exceptions do not count as "trivial". We use this function to help us determine when it's ok to relax certain checks on body, but functions that raise arbitrary exceptions are more likely to do non-trivial work. For example: def halt(self, reason: str = ...) -> NoReturn: raise MyCustomError("Fatal error: " + reason, self.line, self.context) A function that raises just NotImplementedError is much less likely to be this complex. Note: If you update this, you may also need to update mypy.fastparse.is_possible_trivial_body! """ body = block.body if not body: # Functions have empty bodies only if the body is stripped or the function is # generated or deserialized. In these cases the body is unknown. return False # Skip a docstring if isinstance(body[0], ExpressionStmt) and isinstance(body[0].expr, StrExpr): body = block.body[1:] if len(body) == 0: # There's only a docstring (or no body at all). 
return True elif len(body) > 1: return False stmt = body[0] if isinstance(stmt, RaiseStmt): expr = stmt.expr if expr is None: return False if isinstance(expr, CallExpr): expr = expr.callee return isinstance(expr, NameExpr) and expr.fullname == "builtins.NotImplementedError" return isinstance(stmt, PassStmt) or ( isinstance(stmt, ExpressionStmt) and isinstance(stmt.expr, EllipsisExpr) ) def is_init_only(node: Var) -> bool: return ( isinstance(type := get_proper_type(node.type), Instance) and type.type.fullname == "dataclasses.InitVar" )
MakeAnyNonUnimported
python
walkccc__LeetCode
solutions/3005. Count Elements With Maximum Frequency/3005.py
{ "start": 0, "end": 215 }
class ____: def maxFrequencyElements(self, nums: list[int]) -> int: count = collections.Counter(nums) maxFreq = max(count.values()) return sum(freq == maxFreq for freq in count.values()) * maxFreq
Solution
python
gevent__gevent
src/gevent/tests/test__greenletset.py
{ "start": 291, "end": 603 }
class ____(object): def __init__(self): self.shot_count = 0 def __call__(self): while True: try: gevent.sleep(1) except SpecialError: break except: # pylint:disable=bare-except self.shot_count += 1
Undead
python
apache__airflow
providers/google/tests/unit/google/cloud/hooks/test_bigquery.py
{ "start": 59490, "end": 61771 }
class ____(_BigQueryBaseTestClass): """Ensure `use_legacy_sql` param in `BigQueryHook` propagates properly.""" @mock.patch("airflow.providers.google.cloud.hooks.bigquery.build") @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_job") @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryCursor._get_query_result") def test_hook_uses_legacy_sql_by_default(self, mock_get_query_result, mock_insert, _): mock_get_query_result.return_value = {} self.hook.get_first("query") _, kwargs = mock_insert.call_args assert kwargs["configuration"]["query"]["useLegacySql"] is True @mock.patch( "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials_and_project_id", return_value=(CREDENTIALS, PROJECT_ID), ) @mock.patch("airflow.providers.google.cloud.hooks.bigquery.build") @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_job") @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryCursor._get_query_result") def test_legacy_sql_override_propagates_properly( self, mock_get_query_result, mock_insert, mock_build, mock_get_creds_and_proj_id ): mock_get_query_result.return_value = {} bq_hook = BigQueryHook(use_legacy_sql=False) bq_hook.get_first("query") _, kwargs = mock_insert.call_args assert kwargs["configuration"]["query"]["useLegacySql"] is False @mock.patch("airflow.providers.common.compat.sdk.BaseHook.get_connection") @mock.patch( "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.get_credentials_and_project_id", return_value=(CREDENTIALS, PROJECT_ID), ) def test_use_legacy_sql_from_connection_extra_false( self, mock_get_creds_and_proj_id, mock_get_connection ): """Test that use_legacy_sql=False in connection extras is respected.""" mock_connection = mock.MagicMock() mock_connection.extra_dejson = {"use_legacy_sql": False} mock_get_connection.return_value = mock_connection bq_hook = BigQueryHook(gcp_conn_id="test_conn") assert bq_hook.use_legacy_sql is False 
@pytest.mark.db_test
TestBigQueryHookLegacySql
python
charliermarsh__ruff
crates/ruff_linter/resources/test/fixtures/isort/insert_empty_lines.py
{ "start": 123, "end": 598 }
class ____: pass y = 1 import os import sys """Docstring""" if True: import os def f(): pass if True: import os def f(): pass if True: x = 1 import collections import typing class X: pass if True: x = 1 import collections import typing def f(): pass import os # Comment goes here. def f(): pass import os # Comment goes here. def f(): pass import os # Comment goes here. # And another. def f(): pass
X
python
tensorflow__tensorflow
tensorflow/python/ops/control_flow_state.py
{ "start": 4569, "end": 19505 }
class ____: """The state used for constructing the gradient graph for a while loop. We create a _GradLoopState for each while loop in forward and its corresponding while loop in backprop. This gives us access to both the forward and the backprop WhileContexts. During the construction of gradient graph, any time when we detect a forward value that is needed for backprop, we create a history accumulator and add it to `history_map`. Any time when we backprop a loop switch op (in _SwitchGrad), we add the grad merge op in `switch_map`. """ def __init__(self, forward_ctxt, outer_grad_state): # The grad loop state for the outer while loop. self._outer_grad_state = None # The while loop context for forward. self._forward_context = None # The loop counter added by AddForwardLoopCounter. It is the value # of the loop counter for the next iteration. self._forward_index = None # A sync op for forward. self._forward_sync = None # The while loop context for backprop. self._grad_context = None # The loop counter added by AddBackpropLoopCounter. It is the value # of the loop counter for the current iteration. self._grad_index = None # A sync op for backprop. self._grad_sync = None # Information needed by backprop. self._history_map = {} self._switch_map = {} self._unused_exits = [] self._deferred_exits = [] self._forward_loop_exits = list(forward_ctxt.loop_exits) self._pending_exits_count = len(forward_ctxt.loop_exits) self._outer_grad_state = outer_grad_state if outer_grad_state: outer_forward_ctxt = outer_grad_state.forward_context else: if not hasattr(forward_ctxt, "outer_context"): raise ValueError("Failed to call gradients on a while loop without" "properly serializing graph via MetaGraphDef") outer_forward_ctxt = forward_ctxt.outer_context # Add the forward loop counter. 
with forward_ctxt._graph.as_default(): # pylint: disable=protected-access if outer_forward_ctxt: outer_forward_ctxt.Enter() cnt, forward_index = forward_ctxt.AddForwardLoopCounter(outer_grad_state) if outer_forward_ctxt: outer_forward_ctxt.Exit() self._forward_context = forward_ctxt self._forward_index = forward_index # Add the backprop WhileContext, and the backprop loop counter. if outer_grad_state: # This is a nested loop. Remember the iteration counts for each # execution of this inner loop. outer_forward_ctxt.AddName(cnt.name) history_cnt = outer_grad_state.AddForwardAccumulator(cnt) outer_grad_ctxt = outer_grad_state.grad_context outer_grad_ctxt.Enter() self._grad_context = control_flow_ops.WhileContext( maximum_iterations=forward_ctxt.maximum_iterations, parallel_iterations=forward_ctxt.parallel_iterations, back_prop=forward_ctxt.back_prop, swap_memory=forward_ctxt.swap_memory, name=forward_ctxt.name, grad_state=self) real_cnt = outer_grad_state.AddBackpropAccumulatedValue(history_cnt, cnt) self._grad_index = self._grad_context.AddBackpropLoopCounter( real_cnt, outer_grad_state) outer_grad_ctxt.Exit() else: if outer_forward_ctxt: outer_forward_ctxt.Enter() self._grad_context = control_flow_ops.WhileContext( maximum_iterations=forward_ctxt.maximum_iterations, parallel_iterations=forward_ctxt.parallel_iterations, back_prop=forward_ctxt.back_prop, swap_memory=forward_ctxt.swap_memory, name=forward_ctxt.name, grad_state=self) self._grad_index = self._grad_context.AddBackpropLoopCounter( cnt, outer_grad_state) if outer_forward_ctxt: outer_forward_ctxt.Exit() @property def outer_grad_state(self): """The grad loop state for outer loop.""" return self._outer_grad_state @property def forward_context(self): """The while loop context for forward.""" return self._forward_context @property def forward_index(self): """The loop index of forward loop.""" return self._forward_index @property def forward_sync(self): """A control trigger node for synchronization in the forward 
loop. One main use is to keep the push ops of a stack executed in the iteration order. """ if self._forward_sync is None: with ops.control_dependencies(None): self._forward_sync = control_flow_ops.control_trigger(name="f_sync") self._forward_sync._set_control_flow_context(self._forward_context) self._forward_index.op._add_control_input(self._forward_sync) return self._forward_sync @property def grad_context(self): """The corresponding WhileContext for gradient.""" return self._grad_context @property def grad_index(self): """The loop index of backprop loop.""" return self._grad_index @property def grad_sync(self): """A control trigger node for synchronization in the grad loop. One main use is to keep the pop ops of a stack executed in the iteration order. """ if self._grad_sync is None: with ops.control_dependencies(None): self._grad_sync = control_flow_ops.control_trigger(name="b_sync") self._grad_sync._set_control_flow_context(self._grad_context) self._grad_index.op._add_control_input(self._grad_sync) if self._grad_context.outer_context: self._grad_context.outer_context.AddInnerOp(self._grad_sync) return self._grad_sync @property def history_map(self): """The map that records all the tensors needed for backprop.""" return self._history_map @property def switch_map(self): """The map that records all the Switch ops for the while loop.""" return self._switch_map @property def unused_exits(self): """The list of "unused" exits.""" return self._unused_exits @property def deferred_exits(self): """The list of "deferred" exits.""" return self._deferred_exits @property def forward_loop_exits(self): """The list of exits of the forward loop.""" return self._forward_loop_exits @property def pending_exits_count(self): """The number of exits we expect to see but haven't.""" return self._pending_exits_count @pending_exits_count.setter def pending_exits_count(self, cnt): """Set the pending count to cnt.""" self._pending_exits_count = cnt def AddForwardAccumulator(self, value, 
dead_branch=False): """Add an accumulator for each forward tensor that is needed in backprop. This is added to the forward loop at the first time when a tensor in the forward loop is used by backprop gradient computation loop. We create an accumulator that accumulates the value of tensor at each iteration. Called in the control flow context where gradients() is called. The pseudocode is: ``` acc = stack(); while (_pivot) { acc = stack_push(acc, value); } ``` We make sure that the stack push op in one iteration is executed before next iteration. This is achieved by adding a control edge from `forward_index.op.inputs[0].op` to the push op, and another control edge from the push op to either `forward_index.op` or `forward_sync`. Args: value: The source tensor in forward that is to be accumulated. dead_branch: True iff the tensor is on a dead branch of a cond. Returns: The stack that contains the accumulated history of the tensor. Raises: TypeError: For internal errors involving the value condition context. ValueError: If `value` is inside a XLA scope and a valid max size for the stack can't be found. """ # curr_ctxt is the context that tf.gradients was called in. with self._forward_index.graph.as_default(): curr_ctxt = ops.get_default_graph()._get_control_flow_context() # pylint: disable=protected-access with ops.control_dependencies(None): if curr_ctxt: curr_ctxt.Enter() with ops.colocate_with(value): # We only need to pass maximum_iterations to the stack if # we're inside an XLA context. if not util.IsInXLAContext(value.op): max_size = constant_op.constant(-1, dtypes.int32) else: max_size = _GetMaxSizeFromNestedMaximumIterations( value, self.forward_context) acc = gen_data_flow_ops.stack_v2( max_size=max_size, elem_type=value.dtype.base_dtype, name="f_acc") if curr_ctxt: curr_ctxt.Exit() # Make acc available in the forward context. enter_acc = self.forward_context.AddValue(acc) # Add the stack_push op in the context of value.op. 
swap_enabled = self.forward_context.swap_memory value_ctxt = util.GetOutputContext(value.op) if value_ctxt == self.forward_context: # value is not nested in the forward context. self.forward_context.Enter() push = gen_data_flow_ops.stack_push_v2( enter_acc, value, swap_memory=swap_enabled) self.forward_context.Exit() # Protect stack push and order it before forward_index. self.forward_index.op._add_control_input(push.op) else: # value is in a cond context within the forward context. if not isinstance(value_ctxt, control_flow_ops.CondContext): raise TypeError("value_ctxt is not a CondContext: %s" % value_ctxt) if dead_branch: # The special case for creating a zero tensor for a dead # branch of a switch. See _ControlFlowState.ZerosLikeV1WhileLoop(). value_ctxt.outer_context.Enter() push = gen_data_flow_ops.stack_push_v2( enter_acc, value, swap_memory=swap_enabled) value_ctxt.outer_context.Exit() push.op._set_control_flow_context(value_ctxt) else: value_ctxt.Enter() push = gen_data_flow_ops.stack_push_v2( enter_acc, value, swap_memory=swap_enabled) value_ctxt.Exit() # Protect stack push and order it before forward_sync. self.forward_sync._add_control_input(push.op) # Order stack push after the successor of forward_index add_op = self.forward_index.op.inputs[0].op push.op._add_control_input(add_op) return acc def AddBackpropAccumulatedValue(self, history_value, value, dead_branch=False): """Add the getter for an accumulated value in the grad context. This is added to the backprop loop. Called in the grad context to get the value of an accumulated value. The stack pop op must be guarded by the pred of the controlling cond. Args: history_value: The history (a stack) of a value. value: The value that is pushed onto the stack. dead_branch: True iff the tensor is on a dead branch of a cond. Returns: The current value (the top of the stack). """ history_ctxt = history_value.op._get_control_flow_context() # Find the cond context that controls history_value if any. 
cond_ctxt = None value_ctxt = value.op._get_control_flow_context() while value_ctxt and value_ctxt != history_ctxt: if isinstance(value_ctxt, control_flow_ops.CondContext): cond_ctxt = value_ctxt break value_ctxt = value_ctxt.outer_context with ops.control_dependencies(None): self.grad_context.Enter() if cond_ctxt: # Guard stack pop with a switch if it is controlled by a cond. grad_state = self pred = None while pred is None and grad_state: pred = grad_state.history_map.get(cond_ctxt.pred.name) grad_state = grad_state.outer_grad_state if pred is None: pred = cond_ctxt.pred branch = (1 - cond_ctxt.branch) if dead_branch else cond_ctxt.branch history_value = control_flow_ops._SwitchRefOrTensor( history_value, pred)[branch] pop = gen_data_flow_ops.stack_pop_v2(history_value, value.dtype.base_dtype) pop.set_shape(value.get_shape()) self.grad_context.Exit() parallel_iterations = self.grad_context.parallel_iterations if parallel_iterations > 1: # All pops are ordered after pivot_for_body and before grad_sync. self.grad_sync._add_control_input(pop.op) return pop def GetRealValue(self, value): """Get the real value of `value`. If backprop "uses" a value produced by forward inference, an accumulator is added in the forward loop to accumulate its values. We use the accumulated value. This method must be called in the grad loop context. `value` must be in forward and needed for backprop. Args: value: A tensor to be captured. Returns: The same tensor obtained from the saved history. """ assert value.op.type not in ["Variable", "VariableV2"] real_value = self._history_map.get(value.name) if real_value is None: cur_value = value cur_grad_state = self while True: enter_op = util.GetLoopConstantEnter(cur_value) if enter_op: # Special case: cur_value comes from a constant Enter node. 
cur_value = enter_op.inputs[0] cur_grad_state = cur_grad_state.outer_grad_state if cur_grad_state is None: # We are now outside all nested loops for this gradient(), # so `value` is a loop invariant and there is no need to # save the history of value. Just make cur_value to enter # the right control flow context. real_value = self._grad_context.AddValue(cur_value) break elif constant_op.is_constant(cur_value): # If the value to be forwarded is a constant, clone the constant in # the gradient loop rather than using a stack. # TODO(phawkins): consider hoisting the constant out of the loop # instead. real_value = constant_op.constant( tensor_util.constant_value(cur_value), dtype=cur_value.dtype) break else: # Record the history of this value in forward_ctxt. self._grad_context.Exit() history_value = cur_grad_state.AddForwardAccumulator(cur_value) self._grad_context.Enter() break if real_value is None: # Add the stack pop op in the grad context. real_value = cur_grad_state.AddBackpropAccumulatedValue( history_value, cur_value) if cur_grad_state != self: real_value = self._grad_context.AddValue(real_value) self._history_map[value.name] = real_value return real_value
_GradLoopState
python
pandas-dev__pandas
pandas/tests/scalar/timestamp/test_constructors.py
{ "start": 11645, "end": 15243 }
class ____: # Timestamp constructors other than __new__ def test_utcnow_deprecated(self): # GH#56680 msg = "Timestamp.utcnow is deprecated" with tm.assert_produces_warning(Pandas4Warning, match=msg): Timestamp.utcnow() def test_utcfromtimestamp_deprecated(self): # GH#56680 msg = "Timestamp.utcfromtimestamp is deprecated" with tm.assert_produces_warning(Pandas4Warning, match=msg): Timestamp.utcfromtimestamp(43) def test_constructor_strptime(self): # GH#25016 # Test support for Timestamp.strptime fmt = "%Y%m%d-%H%M%S-%f%z" ts = "20190129-235348-000001+0000" msg = r"Timestamp.strptime\(\) is not implemented" with pytest.raises(NotImplementedError, match=msg): Timestamp.strptime(ts, fmt) def test_constructor_fromisocalendar(self): # GH#30395 expected_timestamp = Timestamp("2000-01-03 00:00:00") expected_stdlib = datetime.fromisocalendar(2000, 1, 1) result = Timestamp.fromisocalendar(2000, 1, 1) assert result == expected_timestamp assert result == expected_stdlib assert isinstance(result, Timestamp) def test_constructor_fromordinal(self): base = datetime(2000, 1, 1) ts = Timestamp.fromordinal(base.toordinal()) assert base == ts assert base.toordinal() == ts.toordinal() ts = Timestamp.fromordinal(base.toordinal(), tz="US/Eastern") assert Timestamp("2000-01-01", tz="US/Eastern") == ts assert base.toordinal() == ts.toordinal() # GH#3042 dt = datetime(2011, 4, 16, 0, 0) ts = Timestamp.fromordinal(dt.toordinal()) assert ts.to_pydatetime() == dt # with a tzinfo stamp = Timestamp("2011-4-16", tz="US/Eastern") dt_tz = stamp.to_pydatetime() ts = Timestamp.fromordinal(dt_tz.toordinal(), tz="US/Eastern") assert ts.to_pydatetime() == dt_tz def test_now(self): # GH#9000 ts_from_string = Timestamp("now") ts_from_method = Timestamp.now() ts_datetime = datetime.now() ts_from_string_tz = Timestamp("now", tz="US/Eastern") ts_from_method_tz = Timestamp.now(tz="US/Eastern") # Check that the delta between the times is less than 1s (arbitrarily # small) delta = Timedelta(seconds=1) assert 
abs(ts_from_method - ts_from_string) < delta assert abs(ts_datetime - ts_from_method) < delta assert abs(ts_from_method_tz - ts_from_string_tz) < delta assert ( abs( ts_from_string_tz.tz_localize(None) - ts_from_method_tz.tz_localize(None) ) < delta ) def test_today(self): ts_from_string = Timestamp("today") ts_from_method = Timestamp.today() ts_datetime = datetime.today() ts_from_string_tz = Timestamp("today", tz="US/Eastern") ts_from_method_tz = Timestamp.today(tz="US/Eastern") # Check that the delta between the times is less than 1s (arbitrarily # small) delta = Timedelta(seconds=1) assert abs(ts_from_method - ts_from_string) < delta assert abs(ts_datetime - ts_from_method) < delta assert abs(ts_from_method_tz - ts_from_string_tz) < delta assert ( abs( ts_from_string_tz.tz_localize(None) - ts_from_method_tz.tz_localize(None) ) < delta )
TestTimestampClassMethodConstructors
python
ray-project__ray
python/ray/tune/search/_mock.py
{ "start": 1168, "end": 1744 }
class ____(SearchGenerator): def __init__(self, max_concurrent: Optional[int] = None, **kwargs): self.searcher = _MockSearcher(**kwargs) if max_concurrent: self.searcher = ConcurrencyLimiter( self.searcher, max_concurrent=max_concurrent ) super(_MockSuggestionAlgorithm, self).__init__(self.searcher) @property def live_trials(self) -> List[Trial]: return self.searcher.live_trials @property def results(self) -> List[Dict]: return self.searcher.results
_MockSuggestionAlgorithm
python
altair-viz__altair
altair/vegalite/v6/schema/core.py
{ "start": 442506, "end": 442705 }
class ____(VegaLiteSchema): """Gradient schema wrapper.""" _schema = {"$ref": "#/definitions/Gradient"} def __init__(self, *args, **kwds): super().__init__(*args, **kwds)
Gradient
python
huggingface__transformers
tests/models/llama/test_modeling_llama.py
{ "start": 1430, "end": 1896 }
class ____(CausalLMModelTest, unittest.TestCase): model_tester_class = LlamaModelTester # Need to use `0.8` instead of `0.9` for `test_cpu_offload` # This is because we are hitting edge cases with the causal_mask buffer model_split_percents = [0.5, 0.7, 0.8] # used in `test_torch_compile_for_training` _torch_compile_train_cls = LlamaForCausalLM if is_torch_available() else None @require_torch_accelerator @require_read_token
LlamaModelTest
python
eventlet__eventlet
eventlet/hubs/hub.py
{ "start": 2364, "end": 3094 }
class ____(FdListener): def __init__(self, evtype, fileno, cb, tb, mark_as_closed): self.where_called = traceback.format_stack() self.greenlet = greenlet.getcurrent() super().__init__(evtype, fileno, cb, tb, mark_as_closed) def __repr__(self): return "DebugListener(%r, %r, %r, %r, %r, %r)\n%sEndDebugFdListener" % ( self.evtype, self.fileno, self.cb, self.tb, self.mark_as_closed, self.greenlet, ''.join(self.where_called)) __str__ = __repr__ def alarm_handler(signum, frame): import inspect raise RuntimeError("Blocking detector ALARMED at" + str(inspect.getframeinfo(frame)))
DebugListener
python
getsentry__sentry
src/sentry/release_health/metrics_sessions_v2.py
{ "start": 2345, "end": 3321 }
class ____: """Hashable version of group key dict""" project: int | None = None release: str | None = None environment: str | None = None session_status: SessionStatus | None = None @staticmethod def from_input_dict(dct: MetricsGroupKeyDict) -> "GroupKey": """Construct from a metrics group["by"] result""" return GroupKey( project=dct.get("project_id", None), release=dct.get("release", None), environment=dct.get("environment", None), ) def to_output_dict(self) -> GroupKeyDict: dct: GroupKeyDict = {} if self.project: dct["project"] = self.project if self.release is not None: dct["release"] = self.release if self.environment is not None: dct["environment"] = self.environment if self.session_status is not None: dct["session.status"] = self.session_status.value return dct
GroupKey
python
tensorflow__tensorflow
tensorflow/python/ops/weak_tensor_image_ops_test.py
{ "start": 2454, "end": 4898 }
class ____(test.TestCase): def test_adjust_gamma_less_zero_float32(self): """White image should be returned for gamma equal to zero.""" with self.cached_session(): x_data = np.random.uniform(0, 1.0, (8, 8)) x_np = np.array(x_data, dtype=np.float32) x = _get_weak_tensor(x_np, shape=x_np.shape) err_msg = "Gamma should be a non-negative real number" with self.assertRaisesRegex( (ValueError, errors.InvalidArgumentError), err_msg): image_ops.adjust_gamma(x, gamma=-1) def test_adjust_gamma_less_zero_tensor(self): """White image should be returned for gamma equal to zero.""" with self.cached_session(): x_data = np.random.uniform(0, 1.0, (8, 8)) x_np = np.array(x_data, dtype=np.float32) x = _get_weak_tensor(x_np, shape=x_np.shape) y = constant_op.constant(-1.0, dtype=dtypes.float32) err_msg = "Gamma should be a non-negative real number" with self.assertRaisesRegex( (ValueError, errors.InvalidArgumentError), err_msg): image = image_ops.adjust_gamma(x, gamma=y) self.evaluate(image) def _test_adjust_gamma_float32(self, gamma): """Verifying the output with expected results for gamma correction for float32 images.""" with self.cached_session(): x_np = np.random.uniform(0, 1.0, (8, 8)) x = _get_weak_tensor(x_np, shape=x_np.shape) y = image_ops.adjust_gamma(x, gamma=gamma) y_tf = self.evaluate(y) self.assertIsInstance(y, WeakTensor) y_np = np.clip(np.power(x_np, gamma), 0, 1.0) self.assertAllClose(y_tf, y_np, 1e-6) def test_adjust_gamma_one_float32(self): """Same image should be returned for gamma equal to one.""" self._test_adjust_gamma_float32(1.0) def test_adjust_gamma_less_one_float32(self): """Verifying the output with expected results for gamma correction with gamma equal to half for float32 images.""" self._test_adjust_gamma_float32(0.5) def test_adjust_gamma_greater_one_float32(self): """Verifying the output with expected results for gamma correction with gamma equal to two for float32 images.""" self._test_adjust_gamma_float32(1.0) def 
test_adjust_gamma_zero_float32(self): """White image should be returned for gamma equal to zero for float32 images.""" self._test_adjust_gamma_float32(0.0) if __name__ == "__main__": ops.set_dtype_conversion_mode("all") test.main()
AdjustGamma