body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
676e1ce32822d494954e4f37440e2fb86bb44c520108d7e4fecf9034b7acbb65
def send_multicast(self, data: List[str]) -> None: 'Send multicast packets for the given data.\n\n The data is pre-encoded to multicast IPv4 addresses.\n ' for ipaddr in data: self._socket.sendto(b'\x00', (ipaddr, 30012)) time.sleep(self._gap)
Send multicast packets for the given data. The data is pre-encoded to multicast IPv4 addresses.
smarthack/smartconfig/smartconfig.py
send_multicast
paravoid/tuya-convert
0
python
def send_multicast(self, data: List[str]) -> None: 'Send multicast packets for the given data.\n\n The data is pre-encoded to multicast IPv4 addresses.\n ' for ipaddr in data: self._socket.sendto(b'\x00', (ipaddr, 30012)) time.sleep(self._gap)
def send_multicast(self, data: List[str]) -> None: 'Send multicast packets for the given data.\n\n The data is pre-encoded to multicast IPv4 addresses.\n ' for ipaddr in data: self._socket.sendto(b'\x00', (ipaddr, 30012)) time.sleep(self._gap)<|docstring|>Send multicast packets for the given data. The data is pre-encoded to multicast IPv4 addresses.<|endoftext|>
ad60a5ffc5e4184ec409b5ad16d6c50899b211fb5217f14bdd76a3260648848e
@classmethod def setUpClass(cls): 'Creates a Portia configuration to test the adapter.\n\t\t' base_url = os.getenv('PORTIA_URL', 'https://api-portia.agriness.io/v3') token = os.getenv('PORTIA_TOKEN') cls.portia_config = {'baseurl': base_url, 'authorization': token, 'debug': False, 'Accept': 'text/csv'}
Creates a Portia configuration to test the adapter.
tests/integration/test_events.py
setUpClass
AgrinessEdgeIoT/portiapy
1
python
@classmethod def setUpClass(cls): '\n\t\t' base_url = os.getenv('PORTIA_URL', 'https://api-portia.agriness.io/v3') token = os.getenv('PORTIA_TOKEN') cls.portia_config = {'baseurl': base_url, 'authorization': token, 'debug': False, 'Accept': 'text/csv'}
@classmethod def setUpClass(cls): '\n\t\t' base_url = os.getenv('PORTIA_URL', 'https://api-portia.agriness.io/v3') token = os.getenv('PORTIA_TOKEN') cls.portia_config = {'baseurl': base_url, 'authorization': token, 'debug': False, 'Accept': 'text/csv'}<|docstring|>Creates a Portia configuration to test the adapter.<|endoftext|>
5348827cd879c3d10c52eeed125446c944722f024c2d453f3fa84ca1848a63c7
def arrayaxis(dtypes=bn.dtypes): 'Iterator that yield arrays and axis to use for unit testing.' ss = {} ss[1] = {'size': 4, 'shapes': [(4,)]} ss[2] = {'size': 6, 'shapes': [(2, 3)]} ss[3] = {'size': 24, 'shapes': [(2, 3, 4)]} for ndim in ss: size = ss[ndim]['size'] shapes = ss[ndim]['shapes'] for dtype in dtypes: a = np.arange(size, dtype=dtype) if (not issubclass(a.dtype.type, np.inexact)): for shape in shapes: a = a.reshape(shape) for axis in (list(range((- a.ndim), a.ndim)) + [None]): (yield (a.copy(), axis))
Iterator that yield arrays and axis to use for unit testing.
bottleneck/tests/fast_test.py
arrayaxis
stroxler/bottleneck
2
python
def arrayaxis(dtypes=bn.dtypes): ss = {} ss[1] = {'size': 4, 'shapes': [(4,)]} ss[2] = {'size': 6, 'shapes': [(2, 3)]} ss[3] = {'size': 24, 'shapes': [(2, 3, 4)]} for ndim in ss: size = ss[ndim]['size'] shapes = ss[ndim]['shapes'] for dtype in dtypes: a = np.arange(size, dtype=dtype) if (not issubclass(a.dtype.type, np.inexact)): for shape in shapes: a = a.reshape(shape) for axis in (list(range((- a.ndim), a.ndim)) + [None]): (yield (a.copy(), axis))
def arrayaxis(dtypes=bn.dtypes): ss = {} ss[1] = {'size': 4, 'shapes': [(4,)]} ss[2] = {'size': 6, 'shapes': [(2, 3)]} ss[3] = {'size': 24, 'shapes': [(2, 3, 4)]} for ndim in ss: size = ss[ndim]['size'] shapes = ss[ndim]['shapes'] for dtype in dtypes: a = np.arange(size, dtype=dtype) if (not issubclass(a.dtype.type, np.inexact)): for shape in shapes: a = a.reshape(shape) for axis in (list(range((- a.ndim), a.ndim)) + [None]): (yield (a.copy(), axis))<|docstring|>Iterator that yield arrays and axis to use for unit testing.<|endoftext|>
df153a282023d05e037daead6c03c50a758ef26328f83907f7116e584665a98e
def test_median_selector(): 'Test median_selector.' fast_checker(bn.func.median_selector)
Test median_selector.
bottleneck/tests/fast_test.py
test_median_selector
stroxler/bottleneck
2
python
def test_median_selector(): fast_checker(bn.func.median_selector)
def test_median_selector(): fast_checker(bn.func.median_selector)<|docstring|>Test median_selector.<|endoftext|>
e9883cca1c3fe4133e77accc15b2ddcda4be2c080ac4a4d60b524639bd207e34
def test_nanmedian_selector(): 'Test nanmedian_selector.' fast_checker(bn.func.nanmedian_selector)
Test nanmedian_selector.
bottleneck/tests/fast_test.py
test_nanmedian_selector
stroxler/bottleneck
2
python
def test_nanmedian_selector(): fast_checker(bn.func.nanmedian_selector)
def test_nanmedian_selector(): fast_checker(bn.func.nanmedian_selector)<|docstring|>Test nanmedian_selector.<|endoftext|>
044074794eadc6017b260e56c98e4e6e8b73db5f213362bb1ddd70a20c40dd45
def test_nansum_selector(): 'Test nansum_selector.' fast_checker(bn.func.nansum_selector)
Test nansum_selector.
bottleneck/tests/fast_test.py
test_nansum_selector
stroxler/bottleneck
2
python
def test_nansum_selector(): fast_checker(bn.func.nansum_selector)
def test_nansum_selector(): fast_checker(bn.func.nansum_selector)<|docstring|>Test nansum_selector.<|endoftext|>
dd8a4faeeb3801e925e383505047e5699adc752aa67485538305a21855bdc3ca
def test_nanmin_selector(): 'Test nanmin_selector.' fast_checker(bn.func.nanmin_selector)
Test nanmin_selector.
bottleneck/tests/fast_test.py
test_nanmin_selector
stroxler/bottleneck
2
python
def test_nanmin_selector(): fast_checker(bn.func.nanmin_selector)
def test_nanmin_selector(): fast_checker(bn.func.nanmin_selector)<|docstring|>Test nanmin_selector.<|endoftext|>
3463bf8db16882b1fd294ec6e0a982f85dba1a203a072a11795cb2a409ee3b10
def test_nanmax_selector(): 'Test nanmax_selector.' fast_checker(bn.func.nanmax_selector)
Test nanmax_selector.
bottleneck/tests/fast_test.py
test_nanmax_selector
stroxler/bottleneck
2
python
def test_nanmax_selector(): fast_checker(bn.func.nanmax_selector)
def test_nanmax_selector(): fast_checker(bn.func.nanmax_selector)<|docstring|>Test nanmax_selector.<|endoftext|>
5098aa448d5fd3bedc479c52128dae2bc3e922538db9e6fee04bf4f809691dbe
def test_nanmean_selector(): 'Test nanmean_selector.' fast_checker(bn.func.nanmean_selector)
Test nanmean_selector.
bottleneck/tests/fast_test.py
test_nanmean_selector
stroxler/bottleneck
2
python
def test_nanmean_selector(): fast_checker(bn.func.nanmean_selector)
def test_nanmean_selector(): fast_checker(bn.func.nanmean_selector)<|docstring|>Test nanmean_selector.<|endoftext|>
ffafdcfab84c25fc4b58afc57b37306d1a3c385c857b44b6310a902054f069da
def test_nanstd_selector(): 'Test nanstd_selector.' fast_checker(bn.func.nanstd_selector)
Test nanstd_selector.
bottleneck/tests/fast_test.py
test_nanstd_selector
stroxler/bottleneck
2
python
def test_nanstd_selector(): fast_checker(bn.func.nanstd_selector)
def test_nanstd_selector(): fast_checker(bn.func.nanstd_selector)<|docstring|>Test nanstd_selector.<|endoftext|>
f835607af853a0419c60dcc929f3130bd679013fe9dbe7607ba0432b283bb506
def test_nanargmin_selector(): 'Test nanargmin_selector.' fast_checker(bn.func.nanargmin_selector)
Test nanargmin_selector.
bottleneck/tests/fast_test.py
test_nanargmin_selector
stroxler/bottleneck
2
python
def test_nanargmin_selector(): fast_checker(bn.func.nanargmin_selector)
def test_nanargmin_selector(): fast_checker(bn.func.nanargmin_selector)<|docstring|>Test nanargmin_selector.<|endoftext|>
1a5233270ae0cdf66d622cc7a7dc69abe2538cfbcf450ec6f666a32935212ddb
def test_nanargmax_selector(): 'Test nanargmax_selector.' fast_checker(bn.func.nanargmax_selector)
Test nanargmax_selector.
bottleneck/tests/fast_test.py
test_nanargmax_selector
stroxler/bottleneck
2
python
def test_nanargmax_selector(): fast_checker(bn.func.nanargmax_selector)
def test_nanargmax_selector(): fast_checker(bn.func.nanargmax_selector)<|docstring|>Test nanargmax_selector.<|endoftext|>
a77756e893da8e4e2f8e02d790e607aa538db267bd442dee987425728e01de6a
def test_nanvar_selector(): 'Test nanvar_selector.' fast_checker(bn.func.nanvar_selector)
Test nanvar_selector.
bottleneck/tests/fast_test.py
test_nanvar_selector
stroxler/bottleneck
2
python
def test_nanvar_selector(): fast_checker(bn.func.nanvar_selector)
def test_nanvar_selector(): fast_checker(bn.func.nanvar_selector)<|docstring|>Test nanvar_selector.<|endoftext|>
d9b2b7b415a63cc08d4311acea84c5d846a3aff60f2e086a5f39fe88b28070f8
def test_rankdata_selector(): 'Test rankdata_selector.' fast_checker(bn.func.rankdata_selector)
Test rankdata_selector.
bottleneck/tests/fast_test.py
test_rankdata_selector
stroxler/bottleneck
2
python
def test_rankdata_selector(): fast_checker(bn.func.rankdata_selector)
def test_rankdata_selector(): fast_checker(bn.func.rankdata_selector)<|docstring|>Test rankdata_selector.<|endoftext|>
019f317515396dc0eaa5b812d120a924eea02670352f1b293def91e57f194705
def test_nanrankdata_selector(): 'Test nanrankdata_selector.' fast_checker(bn.func.nanrankdata_selector)
Test nanrankdata_selector.
bottleneck/tests/fast_test.py
test_nanrankdata_selector
stroxler/bottleneck
2
python
def test_nanrankdata_selector(): fast_checker(bn.func.nanrankdata_selector)
def test_nanrankdata_selector(): fast_checker(bn.func.nanrankdata_selector)<|docstring|>Test nanrankdata_selector.<|endoftext|>
d98039f780840f1e9dceb45f7aa44819b448a0100e1505c189cbb84ad0655648
def test_ss_selector(): 'Test ss_selector.' fast_checker(bn.func.ss_selector)
Test ss_selector.
bottleneck/tests/fast_test.py
test_ss_selector
stroxler/bottleneck
2
python
def test_ss_selector(): fast_checker(bn.func.ss_selector)
def test_ss_selector(): fast_checker(bn.func.ss_selector)<|docstring|>Test ss_selector.<|endoftext|>
a7db15d3ed6dd34e3fc11133252faa65d37c08ad56db71707ed12e6254ee07bb
def test_partsort_selector(): 'Test partsort_selector.' fast_checker(bn.func.partsort_selector)
Test partsort_selector.
bottleneck/tests/fast_test.py
test_partsort_selector
stroxler/bottleneck
2
python
def test_partsort_selector(): fast_checker(bn.func.partsort_selector)
def test_partsort_selector(): fast_checker(bn.func.partsort_selector)<|docstring|>Test partsort_selector.<|endoftext|>
e38088718fd838f65a8d53d3b251963ce64d87d728f8db197976a5adeac36719
def test_argpartsort_selector(): 'Test argpartsort_selector.' fast_checker(bn.func.argpartsort_selector)
Test argpartsort_selector.
bottleneck/tests/fast_test.py
test_argpartsort_selector
stroxler/bottleneck
2
python
def test_argpartsort_selector(): fast_checker(bn.func.argpartsort_selector)
def test_argpartsort_selector(): fast_checker(bn.func.argpartsort_selector)<|docstring|>Test argpartsort_selector.<|endoftext|>
d00e1e86cec06244fb9265a5d5767b86c6df338e138056bac6c4b0ed05ce7e32
def test_replace_selector(): 'Test replace_selector.' fast_checker(bn.func.replace_selector, mode='replace')
Test replace_selector.
bottleneck/tests/fast_test.py
test_replace_selector
stroxler/bottleneck
2
python
def test_replace_selector(): fast_checker(bn.func.replace_selector, mode='replace')
def test_replace_selector(): fast_checker(bn.func.replace_selector, mode='replace')<|docstring|>Test replace_selector.<|endoftext|>
ef4b6e18195b36760988c090ade78f3ed7e8b5b59b3152393b7d0822c88a1afa
def test_anynan_selector(): 'Test anynan_selector.' fast_checker(bn.func.anynan_selector)
Test anynan_selector.
bottleneck/tests/fast_test.py
test_anynan_selector
stroxler/bottleneck
2
python
def test_anynan_selector(): fast_checker(bn.func.anynan_selector)
def test_anynan_selector(): fast_checker(bn.func.anynan_selector)<|docstring|>Test anynan_selector.<|endoftext|>
d38ccb02bf502c09c73c719837fbfa41212ce0070d99f5a1ec2b6457ac3db458
def test_move_sum_selector(): 'Test move_sum_selector.' fast_checker(bn.move.move_sum_selector, mode='move')
Test move_sum_selector.
bottleneck/tests/fast_test.py
test_move_sum_selector
stroxler/bottleneck
2
python
def test_move_sum_selector(): fast_checker(bn.move.move_sum_selector, mode='move')
def test_move_sum_selector(): fast_checker(bn.move.move_sum_selector, mode='move')<|docstring|>Test move_sum_selector.<|endoftext|>
aa2a26b4aba157dffc46b4f443ce786debdf0835aa66961dff5f2e62ffe0ba23
def test_move_nansum_selector(): 'Test move_nansum_selector.' fast_checker(bn.move.move_nansum_selector, mode='move')
Test move_nansum_selector.
bottleneck/tests/fast_test.py
test_move_nansum_selector
stroxler/bottleneck
2
python
def test_move_nansum_selector(): fast_checker(bn.move.move_nansum_selector, mode='move')
def test_move_nansum_selector(): fast_checker(bn.move.move_nansum_selector, mode='move')<|docstring|>Test move_nansum_selector.<|endoftext|>
7f593f114f9b19eef572717c2c9d9db3eec311c28a357c011d7ca09ed42e50a8
def test_move_mean_selector(): 'Test move_mean_selector.' fast_checker(bn.move.move_mean_selector, mode='move')
Test move_mean_selector.
bottleneck/tests/fast_test.py
test_move_mean_selector
stroxler/bottleneck
2
python
def test_move_mean_selector(): fast_checker(bn.move.move_mean_selector, mode='move')
def test_move_mean_selector(): fast_checker(bn.move.move_mean_selector, mode='move')<|docstring|>Test move_mean_selector.<|endoftext|>
4d8483321a228e8f77e32e06a1219f055c36a54f2a398d968bd7a8903d125018
def test_move_median_selector(): 'Test move_median_selector.' fast_checker(bn.move.move_median_selector, mode='move')
Test move_median_selector.
bottleneck/tests/fast_test.py
test_move_median_selector
stroxler/bottleneck
2
python
def test_move_median_selector(): fast_checker(bn.move.move_median_selector, mode='move')
def test_move_median_selector(): fast_checker(bn.move.move_median_selector, mode='move')<|docstring|>Test move_median_selector.<|endoftext|>
173d22be94e3fbca643570004c80270cea217539ea3aca862a3b5358edac6788
def test_move_nanmean_selector(): 'Test move_nanmean_selector.' fast_checker(bn.move.move_nanmean_selector, mode='move')
Test move_nanmean_selector.
bottleneck/tests/fast_test.py
test_move_nanmean_selector
stroxler/bottleneck
2
python
def test_move_nanmean_selector(): fast_checker(bn.move.move_nanmean_selector, mode='move')
def test_move_nanmean_selector(): fast_checker(bn.move.move_nanmean_selector, mode='move')<|docstring|>Test move_nanmean_selector.<|endoftext|>
7af089787af2e8090337bb887130d525439f1f7ecd46191ea6b2010cb5391316
def test_move_std_selector(): 'Test move_std_selector.' fast_checker(bn.move.move_std_selector, mode='move')
Test move_std_selector.
bottleneck/tests/fast_test.py
test_move_std_selector
stroxler/bottleneck
2
python
def test_move_std_selector(): fast_checker(bn.move.move_std_selector, mode='move')
def test_move_std_selector(): fast_checker(bn.move.move_std_selector, mode='move')<|docstring|>Test move_std_selector.<|endoftext|>
7225b727cf1bc5e039ffa90988303c91b6a7bdaf023f852e70f401cfefd19b71
def test_move_nanstd_selector(): 'Test move_nanstd_selector.' fast_checker(bn.move.move_nanstd_selector, mode='move')
Test move_nanstd_selector.
bottleneck/tests/fast_test.py
test_move_nanstd_selector
stroxler/bottleneck
2
python
def test_move_nanstd_selector(): fast_checker(bn.move.move_nanstd_selector, mode='move')
def test_move_nanstd_selector(): fast_checker(bn.move.move_nanstd_selector, mode='move')<|docstring|>Test move_nanstd_selector.<|endoftext|>
7e15fbf7af5eb6acc9e80ee134baf3507402d7ca150b4d2cd59962c096d9ab5b
def test_move_min_selector(): 'Test move_min_selector.' fast_checker(bn.move.move_min_selector, mode='move')
Test move_min_selector.
bottleneck/tests/fast_test.py
test_move_min_selector
stroxler/bottleneck
2
python
def test_move_min_selector(): fast_checker(bn.move.move_min_selector, mode='move')
def test_move_min_selector(): fast_checker(bn.move.move_min_selector, mode='move')<|docstring|>Test move_min_selector.<|endoftext|>
76794061194abc54296e5fe56d1b3b73b5a68ebcf5a55c632df5037519faec14
def test_move_max_selector(): 'Test move_max_selector.' fast_checker(bn.move.move_max_selector, mode='move')
Test move_max_selector.
bottleneck/tests/fast_test.py
test_move_max_selector
stroxler/bottleneck
2
python
def test_move_max_selector(): fast_checker(bn.move.move_max_selector, mode='move')
def test_move_max_selector(): fast_checker(bn.move.move_max_selector, mode='move')<|docstring|>Test move_max_selector.<|endoftext|>
938e8e953ed820d75b3ee06572407a8c698164632a9e1d0723e1f62688dcf3e0
def test_move_nanmin_selector(): 'Test move_nanmin_selector.' fast_checker(bn.move.move_nanmin_selector, mode='move')
Test move_nanmin_selector.
bottleneck/tests/fast_test.py
test_move_nanmin_selector
stroxler/bottleneck
2
python
def test_move_nanmin_selector(): fast_checker(bn.move.move_nanmin_selector, mode='move')
def test_move_nanmin_selector(): fast_checker(bn.move.move_nanmin_selector, mode='move')<|docstring|>Test move_nanmin_selector.<|endoftext|>
867292f2a9c9421d7dd1e28c734716e6c0af01af78a31d3702da94b1ddf34d84
def test_move_nanmixn_selector(): 'Test move_nanmax_selector.' fast_checker(bn.move.move_nanmax_selector, mode='move')
Test move_nanmax_selector.
bottleneck/tests/fast_test.py
test_move_nanmixn_selector
stroxler/bottleneck
2
python
def test_move_nanmixn_selector(): fast_checker(bn.move.move_nanmax_selector, mode='move')
def test_move_nanmixn_selector(): fast_checker(bn.move.move_nanmax_selector, mode='move')<|docstring|>Test move_nanmax_selector.<|endoftext|>
7821a04dabdbd18690b13aa43a72bb3ad7ad87d46afb0be78447a28c4bfb67e8
def __init__(self, classifier: 'CLASSIFIER_TYPE', batch_size: int=128, nb_epochs: int=10) -> None: '\n Create an instance of the defensive distillation defence.\n\n :param classifier: A trained classifier.\n :param batch_size: Size of batches.\n :param nb_epochs: Number of epochs to use for training.\n ' super().__init__(classifier=classifier) self._is_fitted = True self.batch_size = batch_size self.nb_epochs = nb_epochs self._check_params()
Create an instance of the defensive distillation defence. :param classifier: A trained classifier. :param batch_size: Size of batches. :param nb_epochs: Number of epochs to use for training.
art/defences/transformer/evasion/defensive_distillation.py
__init__
changx03/adversarial-robustness-toolbox
1,350
python
def __init__(self, classifier: 'CLASSIFIER_TYPE', batch_size: int=128, nb_epochs: int=10) -> None: '\n Create an instance of the defensive distillation defence.\n\n :param classifier: A trained classifier.\n :param batch_size: Size of batches.\n :param nb_epochs: Number of epochs to use for training.\n ' super().__init__(classifier=classifier) self._is_fitted = True self.batch_size = batch_size self.nb_epochs = nb_epochs self._check_params()
def __init__(self, classifier: 'CLASSIFIER_TYPE', batch_size: int=128, nb_epochs: int=10) -> None: '\n Create an instance of the defensive distillation defence.\n\n :param classifier: A trained classifier.\n :param batch_size: Size of batches.\n :param nb_epochs: Number of epochs to use for training.\n ' super().__init__(classifier=classifier) self._is_fitted = True self.batch_size = batch_size self.nb_epochs = nb_epochs self._check_params()<|docstring|>Create an instance of the defensive distillation defence. :param classifier: A trained classifier. :param batch_size: Size of batches. :param nb_epochs: Number of epochs to use for training.<|endoftext|>
b1b0377e5a911711a8b10e5d09bf9f0d338694322f2bb39e58a6c67081d11845
def __call__(self, x: np.ndarray, transformed_classifier: 'CLASSIFIER_TYPE') -> 'CLASSIFIER_TYPE': '\n Perform the defensive distillation defence mechanism and return a robuster classifier.\n\n :param x: Dataset for training the transformed classifier.\n :param transformed_classifier: A classifier to be transformed for increased robustness. Note that, the\n objective loss function used for fitting inside the input transformed_classifier must support soft labels,\n i.e. probability labels.\n :return: The transformed classifier.\n ' preds = self.classifier.predict(x=x, batch_size=self.batch_size) are_probability = [is_probability(y) for y in preds] all_probability = (np.sum(are_probability) == preds.shape[0]) if (not all_probability): raise ValueError('The input trained classifier do not produce probability outputs.') transformed_preds = transformed_classifier.predict(x=x, batch_size=self.batch_size) are_probability = [is_probability(y) for y in transformed_preds] all_probability = (np.sum(are_probability) == transformed_preds.shape[0]) if (not all_probability): raise ValueError('The input transformed classifier do not produce probability outputs.') transformed_classifier.fit(x=x, y=preds, batch_size=self.batch_size, nb_epochs=self.nb_epochs) return transformed_classifier
Perform the defensive distillation defence mechanism and return a robuster classifier. :param x: Dataset for training the transformed classifier. :param transformed_classifier: A classifier to be transformed for increased robustness. Note that, the objective loss function used for fitting inside the input transformed_classifier must support soft labels, i.e. probability labels. :return: The transformed classifier.
art/defences/transformer/evasion/defensive_distillation.py
__call__
changx03/adversarial-robustness-toolbox
1,350
python
def __call__(self, x: np.ndarray, transformed_classifier: 'CLASSIFIER_TYPE') -> 'CLASSIFIER_TYPE': '\n Perform the defensive distillation defence mechanism and return a robuster classifier.\n\n :param x: Dataset for training the transformed classifier.\n :param transformed_classifier: A classifier to be transformed for increased robustness. Note that, the\n objective loss function used for fitting inside the input transformed_classifier must support soft labels,\n i.e. probability labels.\n :return: The transformed classifier.\n ' preds = self.classifier.predict(x=x, batch_size=self.batch_size) are_probability = [is_probability(y) for y in preds] all_probability = (np.sum(are_probability) == preds.shape[0]) if (not all_probability): raise ValueError('The input trained classifier do not produce probability outputs.') transformed_preds = transformed_classifier.predict(x=x, batch_size=self.batch_size) are_probability = [is_probability(y) for y in transformed_preds] all_probability = (np.sum(are_probability) == transformed_preds.shape[0]) if (not all_probability): raise ValueError('The input transformed classifier do not produce probability outputs.') transformed_classifier.fit(x=x, y=preds, batch_size=self.batch_size, nb_epochs=self.nb_epochs) return transformed_classifier
def __call__(self, x: np.ndarray, transformed_classifier: 'CLASSIFIER_TYPE') -> 'CLASSIFIER_TYPE': '\n Perform the defensive distillation defence mechanism and return a robuster classifier.\n\n :param x: Dataset for training the transformed classifier.\n :param transformed_classifier: A classifier to be transformed for increased robustness. Note that, the\n objective loss function used for fitting inside the input transformed_classifier must support soft labels,\n i.e. probability labels.\n :return: The transformed classifier.\n ' preds = self.classifier.predict(x=x, batch_size=self.batch_size) are_probability = [is_probability(y) for y in preds] all_probability = (np.sum(are_probability) == preds.shape[0]) if (not all_probability): raise ValueError('The input trained classifier do not produce probability outputs.') transformed_preds = transformed_classifier.predict(x=x, batch_size=self.batch_size) are_probability = [is_probability(y) for y in transformed_preds] all_probability = (np.sum(are_probability) == transformed_preds.shape[0]) if (not all_probability): raise ValueError('The input transformed classifier do not produce probability outputs.') transformed_classifier.fit(x=x, y=preds, batch_size=self.batch_size, nb_epochs=self.nb_epochs) return transformed_classifier<|docstring|>Perform the defensive distillation defence mechanism and return a robuster classifier. :param x: Dataset for training the transformed classifier. :param transformed_classifier: A classifier to be transformed for increased robustness. Note that, the objective loss function used for fitting inside the input transformed_classifier must support soft labels, i.e. probability labels. :return: The transformed classifier.<|endoftext|>
73073a7ee7b5f9b75dbeb38178cb453421ac75edddf449f7602fd1a487062055
def fit(self, x: np.ndarray, y: Optional[np.ndarray]=None, **kwargs) -> None: '\n No parameters to learn for this method; do nothing.\n ' pass
No parameters to learn for this method; do nothing.
art/defences/transformer/evasion/defensive_distillation.py
fit
changx03/adversarial-robustness-toolbox
1,350
python
def fit(self, x: np.ndarray, y: Optional[np.ndarray]=None, **kwargs) -> None: '\n \n ' pass
def fit(self, x: np.ndarray, y: Optional[np.ndarray]=None, **kwargs) -> None: '\n \n ' pass<|docstring|>No parameters to learn for this method; do nothing.<|endoftext|>
ed576e517f36aa432482389e20c25a5b269ecf097c24e0c7462b02ae1e1562d0
def word_entropy(word, alphabet): '\n word = tuple(), cause it should be produced via itertools.\n alphabet = string(), ideally from the alphabet_menu dict.\n\n H = L(log N / log 2), where L is the number of symbols in the \n word and N is the number of symbols in the alphabet.\n\n Returns the entropy value in bits, or zero case the word has \n lenght 0.\n ' if ((len(word) == 0) or (len(alphabet) == 0)): return 0 else: alphabet_length = len(alphabet) word_symbols = set() for i in word: for j in i: word_symbols.add(j) result = (len(word_symbols) * (math.log(alphabet_length) / math.log(2))) return result
word = tuple(), cause it should be produced via itertools. alphabet = string(), ideally from the alphabet_menu dict. H = L(log N / log 2), where L is the number of symbols in the word and N is the number of symbols in the alphabet. Returns the entropy value in bits, or zero case the word has lenght 0.
run.py
word_entropy
daavelino/pwd-dist
0
python
def word_entropy(word, alphabet): '\n word = tuple(), cause it should be produced via itertools.\n alphabet = string(), ideally from the alphabet_menu dict.\n\n H = L(log N / log 2), where L is the number of symbols in the \n word and N is the number of symbols in the alphabet.\n\n Returns the entropy value in bits, or zero case the word has \n lenght 0.\n ' if ((len(word) == 0) or (len(alphabet) == 0)): return 0 else: alphabet_length = len(alphabet) word_symbols = set() for i in word: for j in i: word_symbols.add(j) result = (len(word_symbols) * (math.log(alphabet_length) / math.log(2))) return result
def word_entropy(word, alphabet): '\n word = tuple(), cause it should be produced via itertools.\n alphabet = string(), ideally from the alphabet_menu dict.\n\n H = L(log N / log 2), where L is the number of symbols in the \n word and N is the number of symbols in the alphabet.\n\n Returns the entropy value in bits, or zero case the word has \n lenght 0.\n ' if ((len(word) == 0) or (len(alphabet) == 0)): return 0 else: alphabet_length = len(alphabet) word_symbols = set() for i in word: for j in i: word_symbols.add(j) result = (len(word_symbols) * (math.log(alphabet_length) / math.log(2))) return result<|docstring|>word = tuple(), cause it should be produced via itertools. alphabet = string(), ideally from the alphabet_menu dict. H = L(log N / log 2), where L is the number of symbols in the word and N is the number of symbols in the alphabet. Returns the entropy value in bits, or zero case the word has lenght 0.<|endoftext|>
a0819ddb077e8b040d3def4bd93adefae0e8507c2bf2670260054cd974cb4885
def entropy_per_symbol(alphabet): '\n alphabet = string()\n\n The entropy per symbol of a given alphabet.\n\n Basically it comes from H = L(log N / log 2) by taking L = N and,\n in the end, taking H / N.\n\n Check here:\n https://en.wikipedia.org/wiki/Password_strength#Random_passwords\n ' symbols_count = len(alphabet) result = (math.log(symbols_count) / math.log(2)) return result
alphabet = string() The entropy per symbol of a given alphabet. Basically it comes from H = L(log N / log 2) by taking L = N and, in the end, taking H / N. Check here: https://en.wikipedia.org/wiki/Password_strength#Random_passwords
run.py
entropy_per_symbol
daavelino/pwd-dist
0
python
def entropy_per_symbol(alphabet): '\n alphabet = string()\n\n The entropy per symbol of a given alphabet.\n\n Basically it comes from H = L(log N / log 2) by taking L = N and,\n in the end, taking H / N.\n\n Check here:\n https://en.wikipedia.org/wiki/Password_strength#Random_passwords\n ' symbols_count = len(alphabet) result = (math.log(symbols_count) / math.log(2)) return result
def entropy_per_symbol(alphabet): '\n alphabet = string()\n\n The entropy per symbol of a given alphabet.\n\n Basically it comes from H = L(log N / log 2) by taking L = N and,\n in the end, taking H / N.\n\n Check here:\n https://en.wikipedia.org/wiki/Password_strength#Random_passwords\n ' symbols_count = len(alphabet) result = (math.log(symbols_count) / math.log(2)) return result<|docstring|>alphabet = string() The entropy per symbol of a given alphabet. Basically it comes from H = L(log N / log 2) by taking L = N and, in the end, taking H / N. Check here: https://en.wikipedia.org/wiki/Password_strength#Random_passwords<|endoftext|>
f0eeb63f366f7bc5aaa9c6616788ccc720becdc116a41d551715c1d4c7a262e9
def entropy_distribution(alphabet, word_size): '\n Calculates the entropy distribution of the words of size word_size \n generated by the symbols on the given alphabet.\n alphabet = string()\n word_size = int()\n ' result = dict() for word in product(list(alphabet), repeat=word_size): entropy = word_entropy(word, alphabet) if (entropy not in result.keys()): result[entropy] = 1 else: result[entropy] += 1 return result
Calculates the entropy distribution of the words of size word_size generated by the symbols on the given alphabet. alphabet = string() word_size = int()
run.py
entropy_distribution
daavelino/pwd-dist
0
python
def entropy_distribution(alphabet, word_size): '\n Calculates the entropy distribution of the words of size word_size \n generated by the symbols on the given alphabet.\n alphabet = string()\n word_size = int()\n ' result = dict() for word in product(list(alphabet), repeat=word_size): entropy = word_entropy(word, alphabet) if (entropy not in result.keys()): result[entropy] = 1 else: result[entropy] += 1 return result
def entropy_distribution(alphabet, word_size): '\n Calculates the entropy distribution of the words of size word_size \n generated by the symbols on the given alphabet.\n alphabet = string()\n word_size = int()\n ' result = dict() for word in product(list(alphabet), repeat=word_size): entropy = word_entropy(word, alphabet) if (entropy not in result.keys()): result[entropy] = 1 else: result[entropy] += 1 return result<|docstring|>Calculates the entropy distribution of the words of size word_size generated by the symbols on the given alphabet. alphabet = string() word_size = int()<|endoftext|>
c0dfca518abff92fd5d700d72ef2a49abf7b4466c4f64984f0a3aafb88128637
def plot_entropy_distribution(distribution): '\n distribution from entropy_distribution()\n ' pass
distribution from entropy_distribution()
run.py
plot_entropy_distribution
daavelino/pwd-dist
0
python
def plot_entropy_distribution(distribution): '\n \n ' pass
def plot_entropy_distribution(distribution): '\n \n ' pass<|docstring|>distribution from entropy_distribution()<|endoftext|>
e9ccd713bfa865260267f4319b2b55c20aa2f027507399f44f689302a5859562
def fetchOfficers(self, page=1): '\n recursively fetch all officers currently active\n ' params = {'active': datetime.date.today(), 'page': page} resp = requests.get((self._api_root + '/officers'), params=params) data = Box(resp.json()) if (data.total > (page * data.perPage)): return (data.data.to_list() + self.fetchOfficers((page + 1))) else: return data.data.to_list()
recursively fetch all officers currently active
src/officers.py
fetchOfficers
rit-sse/qmail-manager
1
python
def fetchOfficers(self, page=1): '\n \n ' params = {'active': datetime.date.today(), 'page': page} resp = requests.get((self._api_root + '/officers'), params=params) data = Box(resp.json()) if (data.total > (page * data.perPage)): return (data.data.to_list() + self.fetchOfficers((page + 1))) else: return data.data.to_list()
def fetchOfficers(self, page=1): '\n \n ' params = {'active': datetime.date.today(), 'page': page} resp = requests.get((self._api_root + '/officers'), params=params) data = Box(resp.json()) if (data.total > (page * data.perPage)): return (data.data.to_list() + self.fetchOfficers((page + 1))) else: return data.data.to_list()<|docstring|>recursively fetch all officers currently active<|endoftext|>
50b5414227cc649368b35f73fd3de3bc69a8285fe9a895c0b52fa38b5861c5d6
def get_page(self, page_num, active=None, blocked=None, org_uid=None, user_uid=None, destination_guid=None, include_backup_usage=None, include_counts=True, page_size=None, q=None): 'Gets a page of devices.\n\n Args:\n page_num (int): The page number to request.\n active (bool, optional): Filters results by device state. When set to True, gets all\n active devices. When set to False, gets all deactivated devices. When set to None\n or excluded, gets all devices regardless of state. Defaults to None.\n blocked (bool, optional): Filters results by blocked status: True or False. Defaults\n to None.\n org_uid (int, optional): The identification number of an Organization. Defaults to None.\n user_uid (int, optional): The identification number of a User. Defaults to None.\n destination_guid (str or int, optional): The globally unique identifier of the storage\n server that the device back up to. Defaults to None.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n include_counts (bool, optional): A flag to denote whether to include total, warning,\n and critical counts. Defaults to True.\n page_size (int, optional): The number of devices to return per page. Defaults to\n `py42.settings.items_per_page`.\n q (str, optional): Searches results flexibly by incomplete GUID, hostname,\n computer name, etc. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/Computer' page_size = (page_size or settings.items_per_page) params = {'active': active, 'blocked': blocked, 'orgUid': org_uid, 'userUid': user_uid, 'targetComputerGuid': destination_guid, 'incBackupUsage': include_backup_usage, 'incCounts': include_counts, 'pgNum': page_num, 'pgSize': page_size, 'q': q} try: return self._connection.get(uri, params=params) except Py42BadRequestError as err: if ('Unable to find org' in str(err.response.text)): raise Py42OrgNotFoundError(err, org_uid) raise
Gets a page of devices. Args: page_num (int): The page number to request. active (bool, optional): Filters results by device state. When set to True, gets all active devices. When set to False, gets all deactivated devices. When set to None or excluded, gets all devices regardless of state. Defaults to None. blocked (bool, optional): Filters results by blocked status: True or False. Defaults to None. org_uid (int, optional): The identification number of an Organization. Defaults to None. user_uid (int, optional): The identification number of a User. Defaults to None. destination_guid (str or int, optional): The globally unique identifier of the storage server that the device back up to. Defaults to None. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. include_counts (bool, optional): A flag to denote whether to include total, warning, and critical counts. Defaults to True. page_size (int, optional): The number of devices to return per page. Defaults to `py42.settings.items_per_page`. q (str, optional): Searches results flexibly by incomplete GUID, hostname, computer name, etc. Defaults to None. Returns: :class:`py42.response.Py42Response`
src/py42/services/devices.py
get_page
code42/py42
21
python
def get_page(self, page_num, active=None, blocked=None, org_uid=None, user_uid=None, destination_guid=None, include_backup_usage=None, include_counts=True, page_size=None, q=None): 'Gets a page of devices.\n\n Args:\n page_num (int): The page number to request.\n active (bool, optional): Filters results by device state. When set to True, gets all\n active devices. When set to False, gets all deactivated devices. When set to None\n or excluded, gets all devices regardless of state. Defaults to None.\n blocked (bool, optional): Filters results by blocked status: True or False. Defaults\n to None.\n org_uid (int, optional): The identification number of an Organization. Defaults to None.\n user_uid (int, optional): The identification number of a User. Defaults to None.\n destination_guid (str or int, optional): The globally unique identifier of the storage\n server that the device back up to. Defaults to None.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n include_counts (bool, optional): A flag to denote whether to include total, warning,\n and critical counts. Defaults to True.\n page_size (int, optional): The number of devices to return per page. Defaults to\n `py42.settings.items_per_page`.\n q (str, optional): Searches results flexibly by incomplete GUID, hostname,\n computer name, etc. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/Computer' page_size = (page_size or settings.items_per_page) params = {'active': active, 'blocked': blocked, 'orgUid': org_uid, 'userUid': user_uid, 'targetComputerGuid': destination_guid, 'incBackupUsage': include_backup_usage, 'incCounts': include_counts, 'pgNum': page_num, 'pgSize': page_size, 'q': q} try: return self._connection.get(uri, params=params) except Py42BadRequestError as err: if ('Unable to find org' in str(err.response.text)): raise Py42OrgNotFoundError(err, org_uid) raise
def get_page(self, page_num, active=None, blocked=None, org_uid=None, user_uid=None, destination_guid=None, include_backup_usage=None, include_counts=True, page_size=None, q=None): 'Gets a page of devices.\n\n Args:\n page_num (int): The page number to request.\n active (bool, optional): Filters results by device state. When set to True, gets all\n active devices. When set to False, gets all deactivated devices. When set to None\n or excluded, gets all devices regardless of state. Defaults to None.\n blocked (bool, optional): Filters results by blocked status: True or False. Defaults\n to None.\n org_uid (int, optional): The identification number of an Organization. Defaults to None.\n user_uid (int, optional): The identification number of a User. Defaults to None.\n destination_guid (str or int, optional): The globally unique identifier of the storage\n server that the device back up to. Defaults to None.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n include_counts (bool, optional): A flag to denote whether to include total, warning,\n and critical counts. Defaults to True.\n page_size (int, optional): The number of devices to return per page. Defaults to\n `py42.settings.items_per_page`.\n q (str, optional): Searches results flexibly by incomplete GUID, hostname,\n computer name, etc. 
Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/Computer' page_size = (page_size or settings.items_per_page) params = {'active': active, 'blocked': blocked, 'orgUid': org_uid, 'userUid': user_uid, 'targetComputerGuid': destination_guid, 'incBackupUsage': include_backup_usage, 'incCounts': include_counts, 'pgNum': page_num, 'pgSize': page_size, 'q': q} try: return self._connection.get(uri, params=params) except Py42BadRequestError as err: if ('Unable to find org' in str(err.response.text)): raise Py42OrgNotFoundError(err, org_uid) raise<|docstring|>Gets a page of devices. Args: page_num (int): The page number to request. active (bool, optional): Filters results by device state. When set to True, gets all active devices. When set to False, gets all deactivated devices. When set to None or excluded, gets all devices regardless of state. Defaults to None. blocked (bool, optional): Filters results by blocked status: True or False. Defaults to None. org_uid (int, optional): The identification number of an Organization. Defaults to None. user_uid (int, optional): The identification number of a User. Defaults to None. destination_guid (str or int, optional): The globally unique identifier of the storage server that the device back up to. Defaults to None. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. include_counts (bool, optional): A flag to denote whether to include total, warning, and critical counts. Defaults to True. page_size (int, optional): The number of devices to return per page. Defaults to `py42.settings.items_per_page`. q (str, optional): Searches results flexibly by incomplete GUID, hostname, computer name, etc. Defaults to None. Returns: :class:`py42.response.Py42Response`<|endoftext|>
1efbf1adce9a948fef65490bc206b4a332ab6e116c594d1d7935a3eaa00580fc
def get_all(self, active=None, blocked=None, org_uid=None, user_uid=None, destination_guid=None, include_backup_usage=None, include_counts=True, q=None, **kwargs): 'Gets all device information.\n\n When no arguments are passed, all records are returned. To filter results, specify\n respective arguments. For example, to retrieve all active and blocked devices, pass\n active=true and blocked=true.\n\n Args:\n active (bool, optional): Filters results by device state. When set to True, gets all\n active devices. When set to False, gets all deactivated devices. When set to None\n or excluded, gets all devices regardless of state. Defaults to None.\n blocked (bool, optional): Filters results by blocked status: True or False. Defaults\n to None.\n org_uid (int, optional): The identification number of an Organization. Defaults to None.\n user_uid (int, optional): The identification number of a User. Defaults to None.\n destination_guid (str or int, optional): The globally unique identifier of the storage\n server that the device back up to. Defaults to None.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n include_counts (bool, optional): A flag to denote whether to include total, warning,\n and critical counts. Defaults to True.\n q (str, optional): Searches results flexibly by incomplete GUID, hostname,\n computer name, etc. Defaults to None.\n\n Returns:\n generator: An object that iterates over :class:`py42.response.Py42Response` objects\n that each contain a page of devices.\n\n The devices returned by `get_all()` are based on the role and permissions of the user\n authenticating the py42 SDK.\n ' return get_all_pages(self.get_page, 'computers', active=active, blocked=blocked, org_uid=org_uid, user_uid=user_uid, destination_guid=destination_guid, include_backup_usage=include_backup_usage, include_counts=include_counts, q=q, **kwargs)
Gets all device information. When no arguments are passed, all records are returned. To filter results, specify respective arguments. For example, to retrieve all active and blocked devices, pass active=true and blocked=true. Args: active (bool, optional): Filters results by device state. When set to True, gets all active devices. When set to False, gets all deactivated devices. When set to None or excluded, gets all devices regardless of state. Defaults to None. blocked (bool, optional): Filters results by blocked status: True or False. Defaults to None. org_uid (int, optional): The identification number of an Organization. Defaults to None. user_uid (int, optional): The identification number of a User. Defaults to None. destination_guid (str or int, optional): The globally unique identifier of the storage server that the device back up to. Defaults to None. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. include_counts (bool, optional): A flag to denote whether to include total, warning, and critical counts. Defaults to True. q (str, optional): Searches results flexibly by incomplete GUID, hostname, computer name, etc. Defaults to None. Returns: generator: An object that iterates over :class:`py42.response.Py42Response` objects that each contain a page of devices. The devices returned by `get_all()` are based on the role and permissions of the user authenticating the py42 SDK.
src/py42/services/devices.py
get_all
code42/py42
21
python
def get_all(self, active=None, blocked=None, org_uid=None, user_uid=None, destination_guid=None, include_backup_usage=None, include_counts=True, q=None, **kwargs): 'Gets all device information.\n\n When no arguments are passed, all records are returned. To filter results, specify\n respective arguments. For example, to retrieve all active and blocked devices, pass\n active=true and blocked=true.\n\n Args:\n active (bool, optional): Filters results by device state. When set to True, gets all\n active devices. When set to False, gets all deactivated devices. When set to None\n or excluded, gets all devices regardless of state. Defaults to None.\n blocked (bool, optional): Filters results by blocked status: True or False. Defaults\n to None.\n org_uid (int, optional): The identification number of an Organization. Defaults to None.\n user_uid (int, optional): The identification number of a User. Defaults to None.\n destination_guid (str or int, optional): The globally unique identifier of the storage\n server that the device back up to. Defaults to None.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n include_counts (bool, optional): A flag to denote whether to include total, warning,\n and critical counts. Defaults to True.\n q (str, optional): Searches results flexibly by incomplete GUID, hostname,\n computer name, etc. Defaults to None.\n\n Returns:\n generator: An object that iterates over :class:`py42.response.Py42Response` objects\n that each contain a page of devices.\n\n The devices returned by `get_all()` are based on the role and permissions of the user\n authenticating the py42 SDK.\n ' return get_all_pages(self.get_page, 'computers', active=active, blocked=blocked, org_uid=org_uid, user_uid=user_uid, destination_guid=destination_guid, include_backup_usage=include_backup_usage, include_counts=include_counts, q=q, **kwargs)
def get_all(self, active=None, blocked=None, org_uid=None, user_uid=None, destination_guid=None, include_backup_usage=None, include_counts=True, q=None, **kwargs): 'Gets all device information.\n\n When no arguments are passed, all records are returned. To filter results, specify\n respective arguments. For example, to retrieve all active and blocked devices, pass\n active=true and blocked=true.\n\n Args:\n active (bool, optional): Filters results by device state. When set to True, gets all\n active devices. When set to False, gets all deactivated devices. When set to None\n or excluded, gets all devices regardless of state. Defaults to None.\n blocked (bool, optional): Filters results by blocked status: True or False. Defaults\n to None.\n org_uid (int, optional): The identification number of an Organization. Defaults to None.\n user_uid (int, optional): The identification number of a User. Defaults to None.\n destination_guid (str or int, optional): The globally unique identifier of the storage\n server that the device back up to. Defaults to None.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n include_counts (bool, optional): A flag to denote whether to include total, warning,\n and critical counts. Defaults to True.\n q (str, optional): Searches results flexibly by incomplete GUID, hostname,\n computer name, etc. Defaults to None.\n\n Returns:\n generator: An object that iterates over :class:`py42.response.Py42Response` objects\n that each contain a page of devices.\n\n The devices returned by `get_all()` are based on the role and permissions of the user\n authenticating the py42 SDK.\n ' return get_all_pages(self.get_page, 'computers', active=active, blocked=blocked, org_uid=org_uid, user_uid=user_uid, destination_guid=destination_guid, include_backup_usage=include_backup_usage, include_counts=include_counts, q=q, **kwargs)<|docstring|>Gets all device information. 
When no arguments are passed, all records are returned. To filter results, specify respective arguments. For example, to retrieve all active and blocked devices, pass active=true and blocked=true. Args: active (bool, optional): Filters results by device state. When set to True, gets all active devices. When set to False, gets all deactivated devices. When set to None or excluded, gets all devices regardless of state. Defaults to None. blocked (bool, optional): Filters results by blocked status: True or False. Defaults to None. org_uid (int, optional): The identification number of an Organization. Defaults to None. user_uid (int, optional): The identification number of a User. Defaults to None. destination_guid (str or int, optional): The globally unique identifier of the storage server that the device back up to. Defaults to None. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. include_counts (bool, optional): A flag to denote whether to include total, warning, and critical counts. Defaults to True. q (str, optional): Searches results flexibly by incomplete GUID, hostname, computer name, etc. Defaults to None. Returns: generator: An object that iterates over :class:`py42.response.Py42Response` objects that each contain a page of devices. The devices returned by `get_all()` are based on the role and permissions of the user authenticating the py42 SDK.<|endoftext|>
70cb36c90125cf26f47f401976834d0cde6106828bc9790cedeb313d98eb5518
def get_by_id(self, device_id, include_backup_usage=None, **kwargs): 'Gets device information by ID.\n\n Args:\n device_id (int): The identification number of the device.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`: A response containing device information.\n ' uri = f'/api/Computer/{device_id}' params = dict(incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params)
Gets device information by ID. Args: device_id (int): The identification number of the device. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. Returns: :class:`py42.response.Py42Response`: A response containing device information.
src/py42/services/devices.py
get_by_id
code42/py42
21
python
def get_by_id(self, device_id, include_backup_usage=None, **kwargs): 'Gets device information by ID.\n\n Args:\n device_id (int): The identification number of the device.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`: A response containing device information.\n ' uri = f'/api/Computer/{device_id}' params = dict(incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params)
def get_by_id(self, device_id, include_backup_usage=None, **kwargs): 'Gets device information by ID.\n\n Args:\n device_id (int): The identification number of the device.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`: A response containing device information.\n ' uri = f'/api/Computer/{device_id}' params = dict(incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params)<|docstring|>Gets device information by ID. Args: device_id (int): The identification number of the device. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. Returns: :class:`py42.response.Py42Response`: A response containing device information.<|endoftext|>
ca30372f6eeb4021f8d056baae83af006f40b93541bad58a4e57f00ef7e16218
def get_by_guid(self, guid, include_backup_usage=None, **kwargs): 'Gets device information by GUID.\n\n Args:\n guid (str): The globally unique identifier of the device.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`: A response containing device information.\n ' uri = f'/api/Computer/{guid}' params = dict(idType='guid', incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params)
Gets device information by GUID. Args: guid (str): The globally unique identifier of the device. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. Returns: :class:`py42.response.Py42Response`: A response containing device information.
src/py42/services/devices.py
get_by_guid
code42/py42
21
python
def get_by_guid(self, guid, include_backup_usage=None, **kwargs): 'Gets device information by GUID.\n\n Args:\n guid (str): The globally unique identifier of the device.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`: A response containing device information.\n ' uri = f'/api/Computer/{guid}' params = dict(idType='guid', incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params)
def get_by_guid(self, guid, include_backup_usage=None, **kwargs): 'Gets device information by GUID.\n\n Args:\n guid (str): The globally unique identifier of the device.\n include_backup_usage (bool, optional): A flag to denote whether to include the\n destination and its backup stats. Defaults to None.\n\n Returns:\n :class:`py42.response.Py42Response`: A response containing device information.\n ' uri = f'/api/Computer/{guid}' params = dict(idType='guid', incBackupUsage=include_backup_usage, **kwargs) return self._connection.get(uri, params=params)<|docstring|>Gets device information by GUID. Args: guid (str): The globally unique identifier of the device. include_backup_usage (bool, optional): A flag to denote whether to include the destination and its backup stats. Defaults to None. Returns: :class:`py42.response.Py42Response`: A response containing device information.<|endoftext|>
73be5acd9e00b3289d9622a097ec7c0fb0024dbdcf4aa33c4f226e1ceb4e3067
def block(self, device_id): 'Blocks a device causing the user not to be able to log in to or restore from Code42 on\n that device.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerBlock/{device_id}' return self._connection.put(uri)
Blocks a device causing the user not to be able to log in to or restore from Code42 on that device. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`
src/py42/services/devices.py
block
code42/py42
21
python
def block(self, device_id): 'Blocks a device causing the user not to be able to log in to or restore from Code42 on\n that device.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerBlock/{device_id}' return self._connection.put(uri)
def block(self, device_id): 'Blocks a device causing the user not to be able to log in to or restore from Code42 on\n that device.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerBlock/{device_id}' return self._connection.put(uri)<|docstring|>Blocks a device causing the user not to be able to log in to or restore from Code42 on that device. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`<|endoftext|>
29584201e07dfa72ba7e6310388000cf81145c9c1682e90e389f7650c19c5ac4
def unblock(self, device_id): 'Unblocks a device, permitting a user to be able to login and restore again.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerBlock/{device_id}' return self._connection.delete(uri)
Unblocks a device, permitting a user to be able to login and restore again. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`
src/py42/services/devices.py
unblock
code42/py42
21
python
def unblock(self, device_id): 'Unblocks a device, permitting a user to be able to login and restore again.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerBlock/{device_id}' return self._connection.delete(uri)
def unblock(self, device_id): 'Unblocks a device, permitting a user to be able to login and restore again.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerBlock/{device_id}' return self._connection.delete(uri)<|docstring|>Unblocks a device, permitting a user to be able to login and restore again. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`<|endoftext|>
d20bdc2c6addc203ae3f0874f8a82f3250ba5aad0e4b82b4fc9a45ef706d4c1b
def deactivate(self, device_id): 'Deactivates a device, causing backups to stop and archives to go to cold storage.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/v4/computer-deactivation/update' data = {'id': device_id} try: return self._connection.post(uri, json=data) except Py42BadRequestError as ex: handle_active_legal_hold_error(ex, 'device', device_id) raise
Deactivates a device, causing backups to stop and archives to go to cold storage. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`
src/py42/services/devices.py
deactivate
code42/py42
21
python
def deactivate(self, device_id): 'Deactivates a device, causing backups to stop and archives to go to cold storage.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/v4/computer-deactivation/update' data = {'id': device_id} try: return self._connection.post(uri, json=data) except Py42BadRequestError as ex: handle_active_legal_hold_error(ex, 'device', device_id) raise
def deactivate(self, device_id): 'Deactivates a device, causing backups to stop and archives to go to cold storage.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/v4/computer-deactivation/update' data = {'id': device_id} try: return self._connection.post(uri, json=data) except Py42BadRequestError as ex: handle_active_legal_hold_error(ex, 'device', device_id) raise<|docstring|>Deactivates a device, causing backups to stop and archives to go to cold storage. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`<|endoftext|>
bf936a3354f1f4df7dcfc308385413ff00c7884f8f87e0b6257b321b5d7cc522
def reactivate(self, device_id): 'Activates a previously deactivated device.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/v4/computer-deactivation/remove' data = {'id': device_id} return self._connection.post(uri, json=data)
Activates a previously deactivated device. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`
src/py42/services/devices.py
reactivate
code42/py42
21
python
def reactivate(self, device_id): 'Activates a previously deactivated device.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/v4/computer-deactivation/remove' data = {'id': device_id} return self._connection.post(uri, json=data)
def reactivate(self, device_id): 'Activates a previously deactivated device.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = '/api/v4/computer-deactivation/remove' data = {'id': device_id} return self._connection.post(uri, json=data)<|docstring|>Activates a previously deactivated device. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`<|endoftext|>
3daa3f9a65e4d0b171ec14f6b725338ef9be0eb3f9f69e3ac5e9881f99a6036c
def deauthorize(self, device_id): 'Deauthorizes the device with the given ID. If used on a cloud connector device, it will\n remove the authorization token for that account.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerDeauthorization/{device_id}' return self._connection.put(uri)
Deauthorizes the device with the given ID. If used on a cloud connector device, it will remove the authorization token for that account. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`
src/py42/services/devices.py
deauthorize
code42/py42
21
python
def deauthorize(self, device_id): 'Deauthorizes the device with the given ID. If used on a cloud connector device, it will\n remove the authorization token for that account.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerDeauthorization/{device_id}' return self._connection.put(uri)
def deauthorize(self, device_id): 'Deauthorizes the device with the given ID. If used on a cloud connector device, it will\n remove the authorization token for that account.\n\n Args:\n device_id (int): The identification number of the device.\n\n Returns:\n :class:`py42.response.Py42Response`\n ' uri = f'/api/ComputerDeauthorization/{device_id}' return self._connection.put(uri)<|docstring|>Deauthorizes the device with the given ID. If used on a cloud connector device, it will remove the authorization token for that account. Args: device_id (int): The identification number of the device. Returns: :class:`py42.response.Py42Response`<|endoftext|>
def get_agent_state(self, guid, property_name):
    """Gets the agent state of the device.

    Args:
        guid (str): The globally unique identifier of the device.
        property_name (str): The name of the property to retrieve (e.g. ``fullDiskAccess``).

    Returns:
        :class:`py42.response.Py42Response`: A response containing settings information.
    """
    query = {'deviceGuid': guid, 'propertyName': property_name}
    return self._connection.get('/api/v14/agent-state/view-by-device-guid', params=query)
def get_agent_full_disk_access_state(self, guid):
    """Gets the full disk access status of a device.

    Thin convenience wrapper around :meth:`get_agent_state` for the
    ``fullDiskAccess`` property.

    Args:
        guid (str): The globally unique identifier of the device.

    Returns:
        :class:`py42.response.Py42Response`: A response containing settings information.
    """
    return self.get_agent_state(guid, 'fullDiskAccess')
def get_settings(self, guid):
    """Gets setting data for a device and returns a `DeviceSettings` object for the target device.

    Args:
        guid (int,str): The globally unique identifier of the device.

    Returns:
        :class:`py42.clients.settings.device_settings.DeviceSettings`: A class to help manage device settings.
    """
    response = self.get_by_guid(guid, incSettings=True)
    return DeviceSettings(response.data)
def update_settings(self, device_settings):
    """Updates a device's settings based on changes to the passed in `DeviceSettings` instance.

    Args:
        device_settings (`DeviceSettings`): An instance of `DeviceSettings` with desired modifications to settings.

    Returns:
        :class:`py42.response.Py42Response`: A response containing the result of the setting change.
    """
    payload = dict(device_settings)
    # Stamp the settings with a fresh config date (ms since epoch) before submitting.
    payload['settings']['configDateMs'] = str(int(time() * 1000))
    return self._connection.put(f"/api/Computer/{payload['computerId']}", json=payload)
def report(self, params=None):
    """Fetch a customized report by passing query parameters to the report endpoint.

    :param params: parameters to send with the GET request
    :type params: dict or None

    See https://developers.kakao.com/docs/latest/ko/keyword-ad/report#ad-account
    for the full parameter reference. The Kakao API uses camelCase parameter
    names, but this wrapper accepts Python-friendly snake_case keys which are
    converted by ``_get_params``.

    Example params:
        - metrics_groups (str[]): metricsGroups parameter (default BASIC)
        - date_preset (str): datePreset parameter (default TODAY)
    """
    # Avoid a shared mutable default argument (params={} would be shared
    # between calls); a fresh dict is created per call instead.
    if params is None:
        params = {}
    r = self.call('GET', self.path, params=self._get_params(params))
    if r.status_code == 401:
        # Token expired: refresh once and retry the same request.
        self._refresh_token()
        r = self.call('GET', self.path, params=self._get_params(params))
    return r
def __call__(self, cube):
    """Return the result of ``self.access`` for the given `cube`.

    Any failure inside the accessor is re-raised as a :class:`ValueError`
    that names the offending cube and the accessor class, with the original
    exception chained as the cause.
    """
    try:
        return self.access(cube)
    except Exception as exc:
        message = "Error with cube '{}' ({})".format(cube.name, self.__class__.__name__)
        raise ValueError(message) from exc
@property
def node(self):
    """The node this entry belongs to.

    Attributes: key, non-creatable, non-modifiable (read-only view of
    the backing ``_node`` field).
    """
    return self._node
@property
def plaintext(self):
    """The plaintext value.

    Attributes: key, non-creatable, non-modifiable (read-only view of
    the backing ``_plaintext`` field).
    """
    return self._plaintext
@property
def hash(self):
    """The hash value.

    Attributes: non-creatable, non-modifiable (read-only view of the
    backing ``_hash`` field).
    """
    return self._hash
@property
def last_used_sequence(self):
    """Sequence number of the last AutoSupport that used this hash.

    Attributes: non-creatable, non-modifiable (read-only view of the
    backing ``_last_used_sequence`` field).
    """
    return self._last_used_sequence
26c23c2829ba7c6a0c83a376304d9b5d0bf76098be73ccf6e3d5fc6b7c0b79b3
def test_basic_plugin_init(self): 'check if a basic plugin intis' self.assertEqual(self.plugin.PLUGIN_NAME, '') self.assertEqual(self.plugin.plugin_name(), '')
check if a basic plugin intis
InvenTree/plugin/test_plugin.py
test_basic_plugin_init
killerfish/InvenTree
2
python
def test_basic_plugin_init(self): self.assertEqual(self.plugin.PLUGIN_NAME, ) self.assertEqual(self.plugin.plugin_name(), )
def test_basic_plugin_init(self): self.assertEqual(self.plugin.PLUGIN_NAME, ) self.assertEqual(self.plugin.plugin_name(), )<|docstring|>check if a basic plugin intis<|endoftext|>
2960f056a0582bf4068c97b45f3f78df46ca99dff55fe04f0d1dbf21cb970292
def test_basic_plugin_name(self): 'check if the name of a basic plugin can be set' self.assertEqual(self.named_plugin.PLUGIN_NAME, 'abc123') self.assertEqual(self.named_plugin.plugin_name(), 'abc123')
check if the name of a basic plugin can be set
InvenTree/plugin/test_plugin.py
test_basic_plugin_name
killerfish/InvenTree
2
python
def test_basic_plugin_name(self): self.assertEqual(self.named_plugin.PLUGIN_NAME, 'abc123') self.assertEqual(self.named_plugin.plugin_name(), 'abc123')
def test_basic_plugin_name(self): self.assertEqual(self.named_plugin.PLUGIN_NAME, 'abc123') self.assertEqual(self.named_plugin.plugin_name(), 'abc123')<|docstring|>check if the name of a basic plugin can be set<|endoftext|>
def test_tag_plugin_list(self):
    """Test that the template tag lists all registered plugins."""
    listed = plugin_tags.plugin_list()
    self.assertEqual(listed, registry.plugins)
def test_tag_incative_plugin_list(self):
    """Test that the template tag lists all inactive plugins."""
    listed = plugin_tags.inactive_plugin_list()
    self.assertEqual(listed, registry.plugins_inactive)
def test_tag_plugin_settings(self):
    """Check that the template tag returns the settings for the sample plugin."""
    expected = registry.mixins_settings.get(self.sample)
    self.assertEqual(plugin_tags.plugin_settings(self.sample), expected)
def test_tag_mixin_enabled(self):
    """Check that the mixin_enabled template tag reports mixin state correctly."""
    key = 'urls'
    cases = (
        (self.sample, True),
        (self.plugin_wrong, False),
        (self.plugin_no, False),
    )
    for plugin, expected in cases:
        self.assertEqual(plugin_tags.mixin_enabled(plugin, key), expected)
def test_tag_safe_url(self):
    """Test that the safe_url tag resolves known URL names and returns None for unknown ones."""
    resolved = plugin_tags.safe_url('api-plugin-install')
    self.assertEqual(resolved, '/api/plugin/install/')
    unresolved = plugin_tags.safe_url('indexas')
    self.assertEqual(unresolved, None)
def test_tag_plugin_errors(self):
    """Test that the template tag exposes all plugin loading errors."""
    errors = plugin_tags.plugin_errors()
    self.assertEqual(errors, registry.errors)
def specifyAllFiles(options):
    """Specify that all files managed by this interface are to be retrieved.

    Returns a dict mapping {local name: destination name} aggregated from
    the geometry, restart, and power file sets, plus the GAMSOR power files
    when the GAMSOR power-calculation step is active.
    """
    filesToRetrieve = dict()
    filesToRetrieve.update(specifyGeomFiles(options))
    filesToRetrieve.update(specifyRestartFiles(options))
    filesToRetrieve.update(specifyPowerFiles(options))
    if options.energyDepoCalcMethodStep and (GAMSOR_POWERCALC in options.energyDepoCalcMethodStep):
        # Bug fix: dict has no ``merge`` method (the original called
        # ``filesToRetrieve.merge(...)``, which raises AttributeError);
        # use ``update`` like the other branches.
        filesToRetrieve.update(specifyGamsorPowerFiles())
    return filesToRetrieve
def specifyRestartFiles(options):
    """
    Specify files necessary for a DIF3D restart calculation are to be retrieved.

    File names are contained in a dictionary with the format {local : destination} names.
    """
    files = {dif3dFile.DIF3D: dif3dFile.DIF3D}
    files.update(specifyFluxFiles(options))
    files.update(specifyGamsorOtherFiles(options))
    return files
def specifyPowerFiles(options):
    """Specify PKEDIT when the real flux calculation is active.

    Note: a previous docstring said PWDINT, but this function returns the
    PKEDIT file; PWDINT is handled by ``specifyRegionTotalFluxFiles``.

    Returns a dict with the format {local : destination} names, empty when
    the real flux calculation is not active.
    """
    if options.real:
        return {pkedit.PKEDIT: pkedit.PKEDIT}
    return dict()
def specifyFluxFiles(options):
    """Specify flux files depending on calculation type and neutronics kernel."""
    files = specifyRegionTotalFluxFiles(options)
    if options.nodal:
        # Nodal solvers produce additional hex-geometry flux files.
        files.update(specifyNodalFluxFiles(options))
    return files
def specifyNodalFluxFiles(options):
    """
    Specify DIF3D-Nodal flux files depending on calculation type.

    File names are contained in a dictionary with the format {local : destination} names.
    """
    files = dict()
    depo = options.energyDepoCalcMethodStep
    isGammaStep = bool(depo and ((GAMMA_ARMI in depo) or (GAMSOR_GAMMA_FLUX in depo)))
    # During a gamma step the NHFLUX output is stored under the gamma name.
    destinationName = GHFLUX if isGammaStep else NHFLUX
    if options.real:
        files[NHFLUX] = destinationName
    if options.adjoint:
        files[NAFLUX] = NAFLUX
    return files
def specifyRegionTotalFluxFiles(options):
    """
    Specify region total flux files depending on calculation type.

    File names are contained in a dictionary with the format {local : destination} names.
    """
    files = dict()
    depo = options.energyDepoCalcMethodStep
    isGammaStep = bool(depo and ((GAMMA_ARMI in depo) or (GAMSOR_GAMMA_FLUX in depo)))
    # During a gamma step the RTFLUX output is stored under the gamma name.
    destinationName = GTFLUX if isGammaStep else RTFLUX
    if options.real:
        files[RZFLUX] = RZFLUX
        files[RTFLUX] = destinationName
        files[PWDINT] = PWDINT
    if options.adjoint:
        files[ATFLUX] = ATFLUX
    return files
def specifyGamsorOtherFiles(options):
    """
    Specify GAMSOR other files to retrieve based on the methodology step.

    File names are contained in a dictionary with the format {local : destination} names.
    """
    depo = options.energyDepoCalcMethodStep
    gamsorFiles = dict()
    if not depo:
        # No energy-deposition step configured: nothing extra to retrieve.
        return gamsorFiles
    if GAMSOR_GAMMA_SRCGEN in depo:
        gamsorFiles.update(specifyFixedSourceFiles(options))
    elif GAMSOR_GAMMA_FLUX in depo:
        gamsorFiles[ANIP3] = ANIP3
    elif GAMSOR_POWERCALC in depo:
        gamsorFiles.update({RTFLUX: RTFLUX, GTFLUX: GTFLUX, ANIP3: ANIP3})
    return gamsorFiles
49c1cf016bd055b883d236af33740bc16bd27ae48048af90639a9933a95b9291
def test_render(self):
    """
    L{ResourceScriptDirectory.render} sets the HTTP response code to I{NOT
    FOUND}.
    """
    resource = ResourceScriptDirectory(self.mktemp())
    request = DummyRequest([''])
    rendering = _render(resource, request)

    def verifyNotFound(ignored):
        self.assertEqual(request.responseCode, NOT_FOUND)

    rendering.addCallback(verifyNotFound)
    return rendering
L{ResourceScriptDirectory.render} sets the HTTP response code to I{NOT FOUND}.
python-modules/twisted/twisted/web/test/test_script.py
test_render
axelsengyung/python-for-android
267
python
def test_render(self): '\n L{ResourceScriptDirectory.render} sets the HTTP response code to I{NOT\n FOUND}.\n ' resource = ResourceScriptDirectory(self.mktemp()) request = DummyRequest([]) d = _render(resource, request) def cbRendered(ignored): self.assertEqual(request.responseCode, NOT_FOUND) d.addCallback(cbRendered) return d
def test_render(self): '\n L{ResourceScriptDirectory.render} sets the HTTP response code to I{NOT\n FOUND}.\n ' resource = ResourceScriptDirectory(self.mktemp()) request = DummyRequest([]) d = _render(resource, request) def cbRendered(ignored): self.assertEqual(request.responseCode, NOT_FOUND) d.addCallback(cbRendered) return d<|docstring|>L{ResourceScriptDirectory.render} sets the HTTP response code to I{NOT FOUND}.<|endoftext|>
d5d2c8788aa9e9c207d319814b1c9c3e4b7dceecd9e109deb0ff61de1ff1b2fb
def test_notFoundChild(self):
    """
    L{ResourceScriptDirectory.getChild} returns a resource which renders an
    response with the HTTP I{NOT FOUND} status code if the indicated child
    does not exist as an entry in the directory used to initialized the
    L{ResourceScriptDirectory}.
    """
    directory = self.mktemp()
    os.makedirs(directory)
    resource = ResourceScriptDirectory(directory)
    request = DummyRequest(['foo'])
    child = resource.getChild('foo', request)
    rendering = _render(child, request)

    def verifyNotFound(ignored):
        self.assertEqual(request.responseCode, NOT_FOUND)

    rendering.addCallback(verifyNotFound)
    return rendering
L{ResourceScriptDirectory.getChild} returns a resource which renders an response with the HTTP I{NOT FOUND} status code if the indicated child does not exist as an entry in the directory used to initialized the L{ResourceScriptDirectory}.
python-modules/twisted/twisted/web/test/test_script.py
test_notFoundChild
axelsengyung/python-for-android
267
python
def test_notFoundChild(self): '\n L{ResourceScriptDirectory.getChild} returns a resource which renders an\n response with the HTTP I{NOT FOUND} status code if the indicated child\n does not exist as an entry in the directory used to initialized the\n L{ResourceScriptDirectory}.\n ' path = self.mktemp() os.makedirs(path) resource = ResourceScriptDirectory(path) request = DummyRequest(['foo']) child = resource.getChild('foo', request) d = _render(child, request) def cbRendered(ignored): self.assertEqual(request.responseCode, NOT_FOUND) d.addCallback(cbRendered) return d
def test_notFoundChild(self): '\n L{ResourceScriptDirectory.getChild} returns a resource which renders an\n response with the HTTP I{NOT FOUND} status code if the indicated child\n does not exist as an entry in the directory used to initialized the\n L{ResourceScriptDirectory}.\n ' path = self.mktemp() os.makedirs(path) resource = ResourceScriptDirectory(path) request = DummyRequest(['foo']) child = resource.getChild('foo', request) d = _render(child, request) def cbRendered(ignored): self.assertEqual(request.responseCode, NOT_FOUND) d.addCallback(cbRendered) return d<|docstring|>L{ResourceScriptDirectory.getChild} returns a resource which renders an response with the HTTP I{NOT FOUND} status code if the indicated child does not exist as an entry in the directory used to initialized the L{ResourceScriptDirectory}.<|endoftext|>
e11541de30e3bdb47cfe37f0b654326afd8361fef81dde71ac89192566faef1a
def test_notFoundRender(self):
    """
    If the source file a L{PythonScript} is initialized with doesn't exist,
    L{PythonScript.render} sets the HTTP response code to I{NOT FOUND}.
    """
    resource = PythonScript(self.mktemp(), None)
    request = DummyRequest([''])
    rendering = _render(resource, request)

    def verifyNotFound(ignored):
        self.assertEqual(request.responseCode, NOT_FOUND)

    rendering.addCallback(verifyNotFound)
    return rendering
If the source file a L{PythonScript} is initialized with doesn't exist, L{PythonScript.render} sets the HTTP response code to I{NOT FOUND}.
python-modules/twisted/twisted/web/test/test_script.py
test_notFoundRender
axelsengyung/python-for-android
267
python
def test_notFoundRender(self): "\n If the source file a L{PythonScript} is initialized with doesn't exist,\n L{PythonScript.render} sets the HTTP response code to I{NOT FOUND}.\n " resource = PythonScript(self.mktemp(), None) request = DummyRequest([]) d = _render(resource, request) def cbRendered(ignored): self.assertEqual(request.responseCode, NOT_FOUND) d.addCallback(cbRendered) return d
def test_notFoundRender(self): "\n If the source file a L{PythonScript} is initialized with doesn't exist,\n L{PythonScript.render} sets the HTTP response code to I{NOT FOUND}.\n " resource = PythonScript(self.mktemp(), None) request = DummyRequest([]) d = _render(resource, request) def cbRendered(ignored): self.assertEqual(request.responseCode, NOT_FOUND) d.addCallback(cbRendered) return d<|docstring|>If the source file a L{PythonScript} is initialized with doesn't exist, L{PythonScript.render} sets the HTTP response code to I{NOT FOUND}.<|endoftext|>
6c7aee756187b0a1025bed4303158959d640d2dbcc5391978e053d70e7cd71ff
def ResNetV1(arch='ResNet18', input_shape=None, num_class=1000, pooling=None,
             normalization='bn', activation='relu', width=1.0, **kwargs):
    """Instantiates the ResNet architecture."""
    del kwargs  # unused, accepted for a uniform constructor signature

    def stack_fn(x, arch, width=1.0):
        block, stack, expansion = BLOCK[arch], STACK[arch], EXPANSION[arch]
        # Stages conv2..conv5 double the filter count each time; only the
        # first stage pins stride1=1 (no downsampling right after the stem).
        for index, blocks in enumerate(block):
            stride_kwargs = {'stride1': 1} if index == 0 else {}
            x = stack(x, int((64 << index) * width), blocks,
                      expansion=expansion,
                      normalization=normalization, activation=activation,
                      name='conv%d' % (index + 2), **stride_kwargs)
        return x

    model_name = '{}_w{:g}_{}_{}'.format(arch, width, normalization, activation)
    return ResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width),
                  preact=False,
                  model_name=model_name,
                  input_shape=input_shape,
                  pooling=pooling,
                  normalization=normalization,
                  activation=activation,
                  num_class=num_class)
Instantiates the ResNet architecture.
model/resnet.py
ResNetV1
harshita1000/crest
50
python
def ResNetV1(arch='ResNet18', input_shape=None, num_class=1000, pooling=None, normalization='bn', activation='relu', width=1.0, **kwargs): del kwargs def stack_fn(x, arch, width=1.0): (block, stack, expansion) = (BLOCK[arch], STACK[arch], EXPANSION[arch]) x = stack(x, int((64 * width)), block[0], expansion=expansion, stride1=1, normalization=normalization, activation=activation, name='conv2') x = stack(x, int((128 * width)), block[1], expansion=expansion, normalization=normalization, activation=activation, name='conv3') x = stack(x, int((256 * width)), block[2], expansion=expansion, normalization=normalization, activation=activation, name='conv4') return stack(x, int((512 * width)), block[3], expansion=expansion, normalization=normalization, activation=activation, name='conv5') return ResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width), preact=False, model_name='{}_w{:g}_{}_{}'.format(arch, width, normalization, activation), input_shape=input_shape, pooling=pooling, normalization=normalization, activation=activation, num_class=num_class)
def ResNetV1(arch='ResNet18', input_shape=None, num_class=1000, pooling=None, normalization='bn', activation='relu', width=1.0, **kwargs): del kwargs def stack_fn(x, arch, width=1.0): (block, stack, expansion) = (BLOCK[arch], STACK[arch], EXPANSION[arch]) x = stack(x, int((64 * width)), block[0], expansion=expansion, stride1=1, normalization=normalization, activation=activation, name='conv2') x = stack(x, int((128 * width)), block[1], expansion=expansion, normalization=normalization, activation=activation, name='conv3') x = stack(x, int((256 * width)), block[2], expansion=expansion, normalization=normalization, activation=activation, name='conv4') return stack(x, int((512 * width)), block[3], expansion=expansion, normalization=normalization, activation=activation, name='conv5') return ResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width), preact=False, model_name='{}_w{:g}_{}_{}'.format(arch, width, normalization, activation), input_shape=input_shape, pooling=pooling, normalization=normalization, activation=activation, num_class=num_class)<|docstring|>Instantiates the ResNet architecture.<|endoftext|>
28381b6a9cf013c07bf1225672974c1443da7e0759e749b5d129daf78f6e1346
def ResNetV2(arch='ResNet18', input_shape=None, num_class=1000, pooling=None,
             normalization='bn', activation='relu', width=1.0, **kwargs):
    """Instantiates the ResNet V2 architecture."""
    del kwargs  # unused, accepted for a uniform constructor signature

    def stack_fn(x, arch, width=1.0):
        block, stack, expansion = BLOCK[arch], STACK[arch], EXPANSION[arch]
        # Stages conv2..conv5 double the filter count each time; in the V2
        # (pre-activation) layout only the last stage pins stride1=1.
        for index, blocks in enumerate(block):
            stride_kwargs = {'stride1': 1} if index == len(block) - 1 else {}
            x = stack(x, int((64 << index) * width), blocks,
                      expansion=expansion,
                      normalization=normalization, activation=activation,
                      name='conv%d' % (index + 2), **stride_kwargs)
        return x

    model_name = '{}_w{:g}_{}_{}'.format(arch, width, normalization, activation)
    return ResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width),
                  preact=True,
                  model_name=model_name,
                  input_shape=input_shape,
                  pooling=pooling,
                  normalization=normalization,
                  activation=activation,
                  num_class=num_class)
Instantiates the ResNet V2 architecture.
model/resnet.py
ResNetV2
harshita1000/crest
50
python
def ResNetV2(arch='ResNet18', input_shape=None, num_class=1000, pooling=None, normalization='bn', activation='relu', width=1.0, **kwargs): del kwargs def stack_fn(x, arch, width=1.0): (block, stack, expansion) = (BLOCK[arch], STACK[arch], EXPANSION[arch]) x = stack(x, int((64 * width)), block[0], expansion=expansion, normalization=normalization, activation=activation, name='conv2') x = stack(x, int((128 * width)), block[1], expansion=expansion, normalization=normalization, activation=activation, name='conv3') x = stack(x, int((256 * width)), block[2], expansion=expansion, normalization=normalization, activation=activation, name='conv4') return stack(x, int((512 * width)), block[3], expansion=expansion, stride1=1, normalization=normalization, activation=activation, name='conv5') return ResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width), preact=True, model_name='{}_w{:g}_{}_{}'.format(arch, width, normalization, activation), input_shape=input_shape, pooling=pooling, normalization=normalization, activation=activation, num_class=num_class)
def ResNetV2(arch='ResNet18', input_shape=None, num_class=1000, pooling=None, normalization='bn', activation='relu', width=1.0, **kwargs): del kwargs def stack_fn(x, arch, width=1.0): (block, stack, expansion) = (BLOCK[arch], STACK[arch], EXPANSION[arch]) x = stack(x, int((64 * width)), block[0], expansion=expansion, normalization=normalization, activation=activation, name='conv2') x = stack(x, int((128 * width)), block[1], expansion=expansion, normalization=normalization, activation=activation, name='conv3') x = stack(x, int((256 * width)), block[2], expansion=expansion, normalization=normalization, activation=activation, name='conv4') return stack(x, int((512 * width)), block[3], expansion=expansion, stride1=1, normalization=normalization, activation=activation, name='conv5') return ResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width), preact=True, model_name='{}_w{:g}_{}_{}'.format(arch, width, normalization, activation), input_shape=input_shape, pooling=pooling, normalization=normalization, activation=activation, num_class=num_class)<|docstring|>Instantiates the ResNet V2 architecture.<|endoftext|>
ce6aae9c357e32434c80a580d55f1808d0b92b25d3d7d76daaef1ac3bf7543c1
def WRN(arch='WRN28', input_shape=None, num_class=1000, pooling=None,
        normalization='bn', activation='leaky_relu', width=1.0, **kwargs):
    """Instantiates the WideResNet architecture."""
    del kwargs  # unused, accepted for a uniform constructor signature

    def stack_fn(x, arch, width=1.0):
        block, stack, expansion = BLOCK[arch], STACK[arch], EXPANSION[arch]
        filters = 16
        # Stage numbering starts at conv2; the first stage keeps stride 1,
        # later stages downsample with stride 2 and double the filters.
        for stage, num_blocks in enumerate(block, start=2):
            x = stack(x, int(filters * width), num_blocks,
                      expansion=expansion,
                      stride1=1 if stage == 2 else 2,
                      normalization=normalization, activation=activation,
                      name='conv%d' % stage)
            filters *= 2
        return x

    model_name = '{}_w{:g}_{}_{}'.format(arch, width, normalization, activation)
    return WideResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width),
                      preact=True,
                      model_name=model_name,
                      input_shape=input_shape,
                      pooling=pooling,
                      normalization=normalization,
                      activation=activation,
                      num_class=num_class)
Instantiates the WideResNet architecture.
model/resnet.py
WRN
harshita1000/crest
50
python
def WRN(arch='WRN28', input_shape=None, num_class=1000, pooling=None, normalization='bn', activation='leaky_relu', width=1.0, **kwargs): del kwargs def stack_fn(x, arch, width=1.0): (block, stack, expansion) = (BLOCK[arch], STACK[arch], EXPANSION[arch]) for (i, b) in enumerate(block): x = stack(x, int(((16 << i) * width)), b, expansion=expansion, stride1=(2 if (i > 0) else 1), normalization=normalization, activation=activation, name=('conv%d' % (i + 2))) return x return WideResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width), preact=True, model_name='{}_w{:g}_{}_{}'.format(arch, width, normalization, activation), input_shape=input_shape, pooling=pooling, normalization=normalization, activation=activation, num_class=num_class)
def WRN(arch='WRN28', input_shape=None, num_class=1000, pooling=None, normalization='bn', activation='leaky_relu', width=1.0, **kwargs): del kwargs def stack_fn(x, arch, width=1.0): (block, stack, expansion) = (BLOCK[arch], STACK[arch], EXPANSION[arch]) for (i, b) in enumerate(block): x = stack(x, int(((16 << i) * width)), b, expansion=expansion, stride1=(2 if (i > 0) else 1), normalization=normalization, activation=activation, name=('conv%d' % (i + 2))) return x return WideResNet(stack_fn=functools.partial(stack_fn, arch=arch, width=width), preact=True, model_name='{}_w{:g}_{}_{}'.format(arch, width, normalization, activation), input_shape=input_shape, pooling=pooling, normalization=normalization, activation=activation, num_class=num_class)<|docstring|>Instantiates the WideResNet architecture.<|endoftext|>
ccff2607bf7e63b84b6b290f631e3a82efe7235607fca05f3e9f0bb6917a9d10
def create(self, validated_data):
    """
    Function is used to create user object with serializer.
    :param validated_data: keep all user related data.
    :return: user object.
    """
    # create_user (not create) so the password is hashed properly.
    return User.objects.create_user(**validated_data)
Function is used to create user object with serializer. :param validated_data: keep all user related data. :return: user object.
users/serializers.py
create
prasenjitaich/CardGameApp
0
python
def create(self, validated_data): '\n Function is used to create user object with serializer.\n :param validated_data: keep all user related data.\n :return: user object.\n ' user = User.objects.create_user(**validated_data) return user
def create(self, validated_data): '\n Function is used to create user object with serializer.\n :param validated_data: keep all user related data.\n :return: user object.\n ' user = User.objects.create_user(**validated_data) return user<|docstring|>Function is used to create user object with serializer. :param validated_data: keep all user related data. :return: user object.<|endoftext|>
ddb116632bc998281a8060ff3238159169ee16786b274cb60fa554e2cbb2e721
def update(self, instance, validated_data):
    """
    Function is used for the update user information.
    :param instance: user object instance
    :param validated_data: new info passed in request.
    :return: user object.
    """
    # Passwords must go through set_password so they are hashed, never
    # written as plain text by the generic field update below.
    if 'password' in validated_data:
        instance.set_password(validated_data.pop('password'))
    return super(UserSerializer, self).update(instance, validated_data)
Function is used for the update user information. :param instance: user object instance :param validated_data: new info passed in request. :return: user object.
users/serializers.py
update
prasenjitaich/CardGameApp
0
python
def update(self, instance, validated_data): '\n Function is used for the update user information.\n :param instance: user object instance\n :param validated_data: new info passed in request.\n :return: user object.\n ' if ('password' in validated_data): password = validated_data.pop('password') instance.set_password(password) return super(UserSerializer, self).update(instance, validated_data)
def update(self, instance, validated_data): '\n Function is used for the update user information.\n :param instance: user object instance\n :param validated_data: new info passed in request.\n :return: user object.\n ' if ('password' in validated_data): password = validated_data.pop('password') instance.set_password(password) return super(UserSerializer, self).update(instance, validated_data)<|docstring|>Function is used for the update user information. :param instance: user object instance :param validated_data: new info passed in request. :return: user object.<|endoftext|>
5c60ea56c355b63b88813ead2313b76f78edbfa7e5aeed10974ff3216f138e83
def validate(self, attrs):
    """
    function is used for validate token and add extra fields.
    :param attrs: get username and password
    :return: dictionary with tokens, user_id and username
    """
    data = super().validate(attrs)
    refresh = self.get_token(self.user)
    # Enrich the default token payload with user identification fields.
    data.update({
        'refresh': str(refresh),
        'access': str(refresh.access_token),
        'user_id': self.user.id,
        'username': self.user.username,
    })
    return data
function is used for validate token and add extra fields. :param attrs: get username and password :return: dictionary with tokens, user_id and username
users/serializers.py
validate
prasenjitaich/CardGameApp
0
python
def validate(self, attrs): '\n function is used for validate token and add extra fields.\n :param attrs: get username and password\n :return: dictionary with tokens, user_id and username\n ' data = super().validate(attrs) refresh = self.get_token(self.user) data['refresh'] = str(refresh) data['access'] = str(refresh.access_token) data['user_id'] = self.user.id data['username'] = self.user.username return data
def validate(self, attrs): '\n function is used for validate token and add extra fields.\n :param attrs: get username and password\n :return: dictionary with tokens, user_id and username\n ' data = super().validate(attrs) refresh = self.get_token(self.user) data['refresh'] = str(refresh) data['access'] = str(refresh.access_token) data['user_id'] = self.user.id data['username'] = self.user.username return data<|docstring|>function is used for validate token and add extra fields. :param attrs: get username and password :return: dictionary with tokens, user_id and username<|endoftext|>
df67689d93efb09bb0f38d858fbb99dca0c0a2cc26ac583b8a3966b09607baed
def submit_job(rjc_api_client, publisher, job_request_body):
    """Submit job to REANA Job Controller.

    :param rjc_api_client: client used to talk to the job controller.
    :param publisher: message publisher notified of the submission.
    :param job_request_body: dict describing the job; must contain
        ``workflow_uuid``.
    :return: the job id returned by the controller, as a string.
    """
    submitted = rjc_api_client.submit(**job_request_body)
    job_id = str(submitted['job_id'])
    log.info(f'submitted job: {job_id}')
    publish_job_submission(
        workflow_uuid=job_request_body['workflow_uuid'],
        publisher=publisher,
        reana_job_id=job_id,
    )
    return job_id
Submit job to REANA Job Controller.
reana_workflow_engine_snakemake/executor.py
submit_job
tiborsimko/reana-workflow-engine-snakemake
0
python
def submit_job(rjc_api_client, publisher, job_request_body): response = rjc_api_client.submit(**job_request_body) job_id = str(response['job_id']) log.info(f'submitted job: {job_id}') publish_job_submission(workflow_uuid=job_request_body['workflow_uuid'], publisher=publisher, reana_job_id=job_id) return job_id
def submit_job(rjc_api_client, publisher, job_request_body): response = rjc_api_client.submit(**job_request_body) job_id = str(response['job_id']) log.info(f'submitted job: {job_id}') publish_job_submission(workflow_uuid=job_request_body['workflow_uuid'], publisher=publisher, reana_job_id=job_id) return job_id<|docstring|>Submit job to REANA Job Controller.<|endoftext|>
c393d51b4b055d642a20c8ddd074afc674c564cc48961621360772a3f3166128
def run_jobs(rjc_api_client, publisher, workflow_workspace, workflow_file,
             workflow_parameters, operational_options=None):
    """Run Snakemake jobs using custom REANA executor.

    :param rjc_api_client: client used to talk to the job controller.
    :param publisher: message publisher for job/workflow status updates.
    :param workflow_workspace: directory the workflow runs in.
    :param workflow_file: Snakefile path relative to the workspace.
    :param workflow_parameters: dict passed to Snakemake as ``config``.
    :param operational_options: optional dict; the ``report`` key overrides
        the default HTML report file name.
    :return: ``True`` if the Snakemake run succeeded, ``False`` otherwise.
    """
    # Mutable-default fix: normalize to a fresh dict per call instead of
    # sharing one dict object across all invocations.
    if operational_options is None:
        operational_options = {}

    def _generate_report(workflow_file_path):
        """Generate HTML report."""
        success = snakemake(
            workflow_file_path,
            config=workflow_parameters,
            workdir=workflow_workspace,
            report=operational_options.get(
                'report', DEFAULT_SNAKEMAKE_REPORT_FILENAME
            ),
            keep_logger=True,
        )
        if not success:
            log.error('Error generating workflow HTML report.')

    # Route Snakemake's cluster submissions through the REANA executor.
    REANAClusterExecutor.rjc_api_client = rjc_api_client
    REANAClusterExecutor.publisher = publisher
    scheduler.GenericClusterExecutor = REANAClusterExecutor

    workflow_file_path = os.path.join(workflow_workspace, workflow_file)
    success = snakemake(
        workflow_file_path,
        printshellcmds=True,
        cluster='reana',
        config=workflow_parameters,
        workdir=workflow_workspace,
        notemp=True,
        nodes=SNAKEMAKE_MAX_PARALLEL_JOBS,
        keep_logger=True,
    )
    _generate_report(workflow_file_path)
    return success
Run Snakemake jobs using custom REANA executor.
reana_workflow_engine_snakemake/executor.py
run_jobs
tiborsimko/reana-workflow-engine-snakemake
0
python
def run_jobs(rjc_api_client, publisher, workflow_workspace, workflow_file, workflow_parameters, operational_options={}): def _generate_report(workflow_file_path): 'Generate HTML report.' success = snakemake(workflow_file_path, config=workflow_parameters, workdir=workflow_workspace, report=operational_options.get('report', DEFAULT_SNAKEMAKE_REPORT_FILENAME), keep_logger=True) if (not success): log.error('Error generating workflow HTML report.') REANAClusterExecutor.rjc_api_client = rjc_api_client REANAClusterExecutor.publisher = publisher scheduler.GenericClusterExecutor = REANAClusterExecutor workflow_file_path = os.path.join(workflow_workspace, workflow_file) success = snakemake(workflow_file_path, printshellcmds=True, cluster='reana', config=workflow_parameters, workdir=workflow_workspace, notemp=True, nodes=SNAKEMAKE_MAX_PARALLEL_JOBS, keep_logger=True) _generate_report(workflow_file_path) return success
def run_jobs(rjc_api_client, publisher, workflow_workspace, workflow_file, workflow_parameters, operational_options={}): def _generate_report(workflow_file_path): 'Generate HTML report.' success = snakemake(workflow_file_path, config=workflow_parameters, workdir=workflow_workspace, report=operational_options.get('report', DEFAULT_SNAKEMAKE_REPORT_FILENAME), keep_logger=True) if (not success): log.error('Error generating workflow HTML report.') REANAClusterExecutor.rjc_api_client = rjc_api_client REANAClusterExecutor.publisher = publisher scheduler.GenericClusterExecutor = REANAClusterExecutor workflow_file_path = os.path.join(workflow_workspace, workflow_file) success = snakemake(workflow_file_path, printshellcmds=True, cluster='reana', config=workflow_parameters, workdir=workflow_workspace, notemp=True, nodes=SNAKEMAKE_MAX_PARALLEL_JOBS, keep_logger=True) _generate_report(workflow_file_path) return success<|docstring|>Run Snakemake jobs using custom REANA executor.<|endoftext|>
f4731fd424d467d8690a642be7ebfef5413208b9aafd3fec474e7bfb9939ea4a
def run(self, job: Job, callback: Callable = None,
        submit_callback: Callable = None, error_callback: Callable = None):
    """Override GenericClusterExecutor run method."""
    super()._run(job)
    workflow_workspace = os.getenv('workflow_workspace', 'default')
    workflow_uuid = os.getenv('workflow_uuid', 'default')
    publish_workflow_start(
        workflow_uuid=workflow_uuid, publisher=self.publisher, job=job
    )
    try:
        job.reana_job_id = None
        log.info(f"Job '{job.name}' received, command: {job.shellcmd}")
        container_image = self._get_container_image(job)
        if job.is_shell:
            resources = job.resources
            job_request_body = {
                'workflow_uuid': workflow_uuid,
                'image': container_image,
                'cmd': f'cd {workflow_workspace} && {job.shellcmd}',
                'prettified_cmd': job.shellcmd,
                'workflow_workspace': workflow_workspace,
                'job_name': job.name,
                'cvmfs_mounts': MOUNT_CVMFS,
                'compute_backend': resources.get('compute_backend', ''),
                'kerberos': resources.get('kerberos', False),
                'unpacked_img': resources.get('unpacked_img', False),
                'kubernetes_uid': resources.get('kubernetes_uid'),
                'kubernetes_memory_limit': resources.get(
                    'kubernetes_memory_limit'
                ),
                'voms_proxy': resources.get('voms_proxy', False),
                'htcondor_max_runtime': resources.get(
                    'htcondor_max_runtime', ''
                ),
                'htcondor_accounting_group': resources.get(
                    'htcondor_accounting_group', ''
                ),
            }
            job.reana_job_id = submit_job(
                self.rjc_api_client, self.publisher, job_request_body
            )
            self.workflow.persistence.started(
                job, external_jobid=job.reana_job_id
            )
        elif job.is_run:
            # Only shell rules are supported by this executor so far.
            log.error('Python code execution is not supported yet.')
    except Exception as excep:
        # Boundary handler: report the failure through the scheduler's
        # error callback instead of crashing the engine.
        log.error(f'Error submitting job {job.name}: {excep}')
        error_callback(job)
        return
    with self.lock:
        self.active_jobs.append(REANAClusterJob(job, callback, error_callback))
Override GenericClusterExecutor run method.
reana_workflow_engine_snakemake/executor.py
run
tiborsimko/reana-workflow-engine-snakemake
0
python
def run(self, job: Job, callback: Callable=None, submit_callback: Callable=None, error_callback: Callable=None): super()._run(job) workflow_workspace = os.getenv('workflow_workspace', 'default') workflow_uuid = os.getenv('workflow_uuid', 'default') publish_workflow_start(workflow_uuid=workflow_uuid, publisher=self.publisher, job=job) try: job.reana_job_id = None log.info(f"Job '{job.name}' received, command: {job.shellcmd}") container_image = self._get_container_image(job) if job.is_shell: job_request_body = {'workflow_uuid': workflow_uuid, 'image': container_image, 'cmd': f'cd {workflow_workspace} && {job.shellcmd}', 'prettified_cmd': job.shellcmd, 'workflow_workspace': workflow_workspace, 'job_name': job.name, 'cvmfs_mounts': MOUNT_CVMFS, 'compute_backend': job.resources.get('compute_backend', ), 'kerberos': job.resources.get('kerberos', False), 'unpacked_img': job.resources.get('unpacked_img', False), 'kubernetes_uid': job.resources.get('kubernetes_uid'), 'kubernetes_memory_limit': job.resources.get('kubernetes_memory_limit'), 'voms_proxy': job.resources.get('voms_proxy', False), 'htcondor_max_runtime': job.resources.get('htcondor_max_runtime', ), 'htcondor_accounting_group': job.resources.get('htcondor_accounting_group', )} job_id = submit_job(self.rjc_api_client, self.publisher, job_request_body) job.reana_job_id = job_id self.workflow.persistence.started(job, external_jobid=job.reana_job_id) elif job.is_run: log.error('Python code execution is not supported yet.') except Exception as excep: log.error(f'Error submitting job {job.name}: {excep}') error_callback(job) return with self.lock: self.active_jobs.append(REANAClusterJob(job, callback, error_callback))
def run(self, job: Job, callback: Callable=None, submit_callback: Callable=None, error_callback: Callable=None): super()._run(job) workflow_workspace = os.getenv('workflow_workspace', 'default') workflow_uuid = os.getenv('workflow_uuid', 'default') publish_workflow_start(workflow_uuid=workflow_uuid, publisher=self.publisher, job=job) try: job.reana_job_id = None log.info(f"Job '{job.name}' received, command: {job.shellcmd}") container_image = self._get_container_image(job) if job.is_shell: job_request_body = {'workflow_uuid': workflow_uuid, 'image': container_image, 'cmd': f'cd {workflow_workspace} && {job.shellcmd}', 'prettified_cmd': job.shellcmd, 'workflow_workspace': workflow_workspace, 'job_name': job.name, 'cvmfs_mounts': MOUNT_CVMFS, 'compute_backend': job.resources.get('compute_backend', ), 'kerberos': job.resources.get('kerberos', False), 'unpacked_img': job.resources.get('unpacked_img', False), 'kubernetes_uid': job.resources.get('kubernetes_uid'), 'kubernetes_memory_limit': job.resources.get('kubernetes_memory_limit'), 'voms_proxy': job.resources.get('voms_proxy', False), 'htcondor_max_runtime': job.resources.get('htcondor_max_runtime', ), 'htcondor_accounting_group': job.resources.get('htcondor_accounting_group', )} job_id = submit_job(self.rjc_api_client, self.publisher, job_request_body) job.reana_job_id = job_id self.workflow.persistence.started(job, external_jobid=job.reana_job_id) elif job.is_run: log.error('Python code execution is not supported yet.') except Exception as excep: log.error(f'Error submitting job {job.name}: {excep}') error_callback(job) return with self.lock: self.active_jobs.append(REANAClusterJob(job, callback, error_callback))<|docstring|>Override GenericClusterExecutor run method.<|endoftext|>
03d21b0f79fecaa4c6a7d251c271d29c8d09fafa66081d0ec69e21c93af06c3b
def handle_job_success(self, job: Job) -> None:
    """Override job success method to publish job status."""
    # Bypass ClusterExecutor's own handler and invoke the next one in the
    # MRO, skipping remote uploads and log handling for REANA jobs.
    super(ClusterExecutor, self).handle_job_success(
        job, upload_remote=False, handle_log=False, handle_touch=True
    )
    self._handle_job_status(
        job, job_status=JobStatus.finished, workflow_status=RunStatus.running
    )
Override job success method to publish job status.
reana_workflow_engine_snakemake/executor.py
handle_job_success
tiborsimko/reana-workflow-engine-snakemake
0
python
def handle_job_success(self, job: Job) -> None: super(ClusterExecutor, self).handle_job_success(job, upload_remote=False, handle_log=False, handle_touch=True) self._handle_job_status(job, job_status=JobStatus.finished, workflow_status=RunStatus.running)
def handle_job_success(self, job: Job) -> None: super(ClusterExecutor, self).handle_job_success(job, upload_remote=False, handle_log=False, handle_touch=True) self._handle_job_status(job, job_status=JobStatus.finished, workflow_status=RunStatus.running)<|docstring|>Override job success method to publish job status.<|endoftext|>
3872e5aff651be6b4fdb1a0804c6dce4751d1740a6fb781c5569c4f361ad4490
def handle_job_error(self, job: Job) -> None:
    """Override job error method to publish job status."""
    super().handle_job_error(job)
    # A failed job marks the whole workflow as failed.
    self._handle_job_status(
        job, job_status=JobStatus.failed, workflow_status=RunStatus.failed
    )
Override job error method to publish job status.
reana_workflow_engine_snakemake/executor.py
handle_job_error
tiborsimko/reana-workflow-engine-snakemake
0
python
def handle_job_error(self, job: Job) -> None: super().handle_job_error(job) self._handle_job_status(job, job_status=JobStatus.failed, workflow_status=RunStatus.failed)
def handle_job_error(self, job: Job) -> None: super().handle_job_error(job) self._handle_job_status(job, job_status=JobStatus.failed, workflow_status=RunStatus.failed)<|docstring|>Override job error method to publish job status.<|endoftext|>
6c30a919358a9736f4983fb5cf4be99c5843385c76b8a3c4efe18a2c86866b0c
def _get_job_status_from_controller(self, job_id: str) -> str: 'Get job status from controller.\n\n If error occurs, return `failed` status.\n ' try: response = self.rjc_api_client.check_status(job_id) except HTTPNotFound: log.error(f'Job {job_id} was not found in job-controller. Return job failed status.') return JobStatus.failed.name except Exception as exception: log.error(f'Error getting status of job with id {job_id}. Return job failed status. Details: {exception}') return JobStatus.failed.name try: return response.status except AttributeError: log.error(f"job-controller response for job {job_id} does not contain 'status' field. Return job failed status.Response: {response}") return JobStatus.failed.name
Get job status from controller. If error occurs, return `failed` status.
reana_workflow_engine_snakemake/executor.py
_get_job_status_from_controller
tiborsimko/reana-workflow-engine-snakemake
0
python
def _get_job_status_from_controller(self, job_id: str) -> str: 'Get job status from controller.\n\n If error occurs, return `failed` status.\n ' try: response = self.rjc_api_client.check_status(job_id) except HTTPNotFound: log.error(f'Job {job_id} was not found in job-controller. Return job failed status.') return JobStatus.failed.name except Exception as exception: log.error(f'Error getting status of job with id {job_id}. Return job failed status. Details: {exception}') return JobStatus.failed.name try: return response.status except AttributeError: log.error(f"job-controller response for job {job_id} does not contain 'status' field. Return job failed status.Response: {response}") return JobStatus.failed.name
def _get_job_status_from_controller(self, job_id: str) -> str: 'Get job status from controller.\n\n If error occurs, return `failed` status.\n ' try: response = self.rjc_api_client.check_status(job_id) except HTTPNotFound: log.error(f'Job {job_id} was not found in job-controller. Return job failed status.') return JobStatus.failed.name except Exception as exception: log.error(f'Error getting status of job with id {job_id}. Return job failed status. Details: {exception}') return JobStatus.failed.name try: return response.status except AttributeError: log.error(f"job-controller response for job {job_id} does not contain 'status' field. Return job failed status.Response: {response}") return JobStatus.failed.name<|docstring|>Get job status from controller. If error occurs, return `failed` status.<|endoftext|>
94bca5e7b2203a485719e4b2d2101fa710c34aa4b901dcded195b70c32c8ed3b
def _wait_for_jobs(self): 'Override _wait_for_jobs method to poll job-controller for job statuses.\n\n Original GenericClusterExecutor._wait_for_jobs method checks success/failure via .jobfinished or .jobfailed files.\n ' while True: with self.lock: if (not self.wait): return active_jobs = self.active_jobs self.active_jobs = [] still_running = [] for active_job in active_jobs: job_id = active_job.job.reana_job_id status = self._get_job_status_from_controller(job_id) if ((status == JobStatus.finished.name) or active_job.job.is_norun): active_job.callback(active_job.job) elif (status == JobStatus.failed.name): active_job.error_callback(active_job.job) else: still_running.append(active_job) with self.lock: self.active_jobs = still_running time.sleep(POLL_JOBS_STATUS_SLEEP_IN_SECONDS)
Override _wait_for_jobs method to poll job-controller for job statuses. Original GenericClusterExecutor._wait_for_jobs method checks success/failure via .jobfinished or .jobfailed files.
reana_workflow_engine_snakemake/executor.py
_wait_for_jobs
tiborsimko/reana-workflow-engine-snakemake
0
python
def _wait_for_jobs(self): 'Override _wait_for_jobs method to poll job-controller for job statuses.\n\n Original GenericClusterExecutor._wait_for_jobs method checks success/failure via .jobfinished or .jobfailed files.\n ' while True: with self.lock: if (not self.wait): return active_jobs = self.active_jobs self.active_jobs = [] still_running = [] for active_job in active_jobs: job_id = active_job.job.reana_job_id status = self._get_job_status_from_controller(job_id) if ((status == JobStatus.finished.name) or active_job.job.is_norun): active_job.callback(active_job.job) elif (status == JobStatus.failed.name): active_job.error_callback(active_job.job) else: still_running.append(active_job) with self.lock: self.active_jobs = still_running time.sleep(POLL_JOBS_STATUS_SLEEP_IN_SECONDS)
def _wait_for_jobs(self): 'Override _wait_for_jobs method to poll job-controller for job statuses.\n\n Original GenericClusterExecutor._wait_for_jobs method checks success/failure via .jobfinished or .jobfailed files.\n ' while True: with self.lock: if (not self.wait): return active_jobs = self.active_jobs self.active_jobs = [] still_running = [] for active_job in active_jobs: job_id = active_job.job.reana_job_id status = self._get_job_status_from_controller(job_id) if ((status == JobStatus.finished.name) or active_job.job.is_norun): active_job.callback(active_job.job) elif (status == JobStatus.failed.name): active_job.error_callback(active_job.job) else: still_running.append(active_job) with self.lock: self.active_jobs = still_running time.sleep(POLL_JOBS_STATUS_SLEEP_IN_SECONDS)<|docstring|>Override _wait_for_jobs method to poll job-controller for job statuses. Original GenericClusterExecutor._wait_for_jobs method checks success/failure via .jobfinished or .jobfailed files.<|endoftext|>
0150e32e8a0093ab5f3ff9cde12e2afb4fb4cb386096bcf870835e93af691bc4
def _generate_report(workflow_file_path): 'Generate HTML report.' success = snakemake(workflow_file_path, config=workflow_parameters, workdir=workflow_workspace, report=operational_options.get('report', DEFAULT_SNAKEMAKE_REPORT_FILENAME), keep_logger=True) if (not success): log.error('Error generating workflow HTML report.')
Generate HTML report.
reana_workflow_engine_snakemake/executor.py
_generate_report
tiborsimko/reana-workflow-engine-snakemake
0
python
def _generate_report(workflow_file_path): success = snakemake(workflow_file_path, config=workflow_parameters, workdir=workflow_workspace, report=operational_options.get('report', DEFAULT_SNAKEMAKE_REPORT_FILENAME), keep_logger=True) if (not success): log.error('Error generating workflow HTML report.')
def _generate_report(workflow_file_path): success = snakemake(workflow_file_path, config=workflow_parameters, workdir=workflow_workspace, report=operational_options.get('report', DEFAULT_SNAKEMAKE_REPORT_FILENAME), keep_logger=True) if (not success): log.error('Error generating workflow HTML report.')<|docstring|>Generate HTML report.<|endoftext|>
27587ef069e22be99fdd8d92a1e461e50c301f2b69255939896f655c58837073
def infinite_vectorizer(vocabulary, fname, batch_size, context_size): ' Returns one batch at a time, data should be one big list. ' X = np.zeros((batch_size, context_size)) Y = np.zeros((batch_size, len(vocabulary))) examples = 0 for (chunk, iteration) in infinite_datareader(fname, 0): step = np.random.randint(4, 11) for i in range(0, (len(chunk) - context_size), step): example = chunk[i:(i + context_size)] label = chunk[(i + context_size)] Xi = [vocabulary.get(c, vocabulary['<UNKNOWN>']) for c in example] X[examples] = Xi Y[(examples, vocabulary.get(label, vocabulary['<UNKNOWN>']))] = 1 examples += 1 if (examples == batch_size): (yield (X, Y, iteration)) X = np.zeros((batch_size, context_size)) Y = np.zeros((batch_size, len(vocabulary))) examples = 0
Returns one batch at a time, data should be one big list.
text-generation.py
infinite_vectorizer
jmnybl/keras-models
0
python
def infinite_vectorizer(vocabulary, fname, batch_size, context_size): ' ' X = np.zeros((batch_size, context_size)) Y = np.zeros((batch_size, len(vocabulary))) examples = 0 for (chunk, iteration) in infinite_datareader(fname, 0): step = np.random.randint(4, 11) for i in range(0, (len(chunk) - context_size), step): example = chunk[i:(i + context_size)] label = chunk[(i + context_size)] Xi = [vocabulary.get(c, vocabulary['<UNKNOWN>']) for c in example] X[examples] = Xi Y[(examples, vocabulary.get(label, vocabulary['<UNKNOWN>']))] = 1 examples += 1 if (examples == batch_size): (yield (X, Y, iteration)) X = np.zeros((batch_size, context_size)) Y = np.zeros((batch_size, len(vocabulary))) examples = 0
def infinite_vectorizer(vocabulary, fname, batch_size, context_size): ' ' X = np.zeros((batch_size, context_size)) Y = np.zeros((batch_size, len(vocabulary))) examples = 0 for (chunk, iteration) in infinite_datareader(fname, 0): step = np.random.randint(4, 11) for i in range(0, (len(chunk) - context_size), step): example = chunk[i:(i + context_size)] label = chunk[(i + context_size)] Xi = [vocabulary.get(c, vocabulary['<UNKNOWN>']) for c in example] X[examples] = Xi Y[(examples, vocabulary.get(label, vocabulary['<UNKNOWN>']))] = 1 examples += 1 if (examples == batch_size): (yield (X, Y, iteration)) X = np.zeros((batch_size, context_size)) Y = np.zeros((batch_size, len(vocabulary))) examples = 0<|docstring|>Returns one batch at a time, data should be one big list.<|endoftext|>
8583a35c61188ff28d085e41351eabb0b0b3dd6df4f5502931c90fc6b65ea993
def httpie(*args, **kwargs) -> StrCLIResponse: '\n Run HTTPie manager command with the given\n args/kwargs, and capture stderr/out and exit\n status.\n ' env = kwargs.setdefault('env', MockEnvironment()) cli_args = ['httpie'] if (not kwargs.pop('no_debug', False)): cli_args.append('--debug') cli_args += normalize_args(args) exit_status = manager.main(args=cli_args, **kwargs) env.stdout.seek(0) env.stderr.seek(0) try: response = StrCLIResponse(env.stdout.read()) response.stderr = env.stderr.read() response.exit_status = exit_status response.args = cli_args finally: env.stdout.truncate(0) env.stderr.truncate(0) env.stdout.seek(0) env.stderr.seek(0) return response
Run HTTPie manager command with the given args/kwargs, and capture stderr/out and exit status.
tests/utils/__init__.py
httpie
nilushancosta/httpie
24,849
python
def httpie(*args, **kwargs) -> StrCLIResponse: '\n Run HTTPie manager command with the given\n args/kwargs, and capture stderr/out and exit\n status.\n ' env = kwargs.setdefault('env', MockEnvironment()) cli_args = ['httpie'] if (not kwargs.pop('no_debug', False)): cli_args.append('--debug') cli_args += normalize_args(args) exit_status = manager.main(args=cli_args, **kwargs) env.stdout.seek(0) env.stderr.seek(0) try: response = StrCLIResponse(env.stdout.read()) response.stderr = env.stderr.read() response.exit_status = exit_status response.args = cli_args finally: env.stdout.truncate(0) env.stderr.truncate(0) env.stdout.seek(0) env.stderr.seek(0) return response
def httpie(*args, **kwargs) -> StrCLIResponse: '\n Run HTTPie manager command with the given\n args/kwargs, and capture stderr/out and exit\n status.\n ' env = kwargs.setdefault('env', MockEnvironment()) cli_args = ['httpie'] if (not kwargs.pop('no_debug', False)): cli_args.append('--debug') cli_args += normalize_args(args) exit_status = manager.main(args=cli_args, **kwargs) env.stdout.seek(0) env.stderr.seek(0) try: response = StrCLIResponse(env.stdout.read()) response.stderr = env.stderr.read() response.exit_status = exit_status response.args = cli_args finally: env.stdout.truncate(0) env.stderr.truncate(0) env.stdout.seek(0) env.stderr.seek(0) return response<|docstring|>Run HTTPie manager command with the given args/kwargs, and capture stderr/out and exit status.<|endoftext|>
f2bf229c98d9c1714703b3fc692e92743c867426988cedf55b71ced7562c3351
def http(*args, program_name='http', tolerate_error_exit_status=False, **kwargs) -> Union[(StrCLIResponse, BytesCLIResponse)]: "\n Run HTTPie and capture stderr/out and exit status.\n Content written to devnull will be captured only if\n env.devnull is set manually.\n\n Invoke `httpie.core.main()` with `args` and `kwargs`,\n and return a `CLIResponse` subclass instance.\n\n The return value is either a `StrCLIResponse`, or `BytesCLIResponse`\n if unable to decode the output. Devnull is string when possible,\n bytes otherwise.\n\n The response has the following attributes:\n\n `stdout` is represented by the instance itself (print r)\n `stderr`: text written to stderr\n `devnull` text written to devnull.\n `exit_status`: the exit status\n `json`: decoded JSON (if possible) or `None`\n\n Exceptions are propagated.\n\n If you pass ``tolerate_error_exit_status=True``, then error exit statuses\n won't result into an exception.\n\n Example:\n\n $ http --auth=user:password GET pie.dev/basic-auth/user/password\n\n >>> httpbin = getfixture('httpbin')\n >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw')\n >>> type(r) == StrCLIResponse\n True\n >>> r.exit_status is ExitStatus.SUCCESS\n True\n >>> r.stderr\n ''\n >>> 'HTTP/1.1 200 OK' in r\n True\n >>> r.json == {'authenticated': True, 'user': 'user'}\n True\n\n " env = kwargs.get('env') if (not env): env = kwargs['env'] = MockEnvironment() stdout = env.stdout stderr = env.stderr devnull = env.devnull args = list(args) args_with_config_defaults = (args + env.config.default_options) add_to_args = [] if ('--debug' not in args_with_config_defaults): if ((not tolerate_error_exit_status) and ('--traceback' not in args_with_config_defaults)): add_to_args.append('--traceback') if (not any((('--timeout' in arg) for arg in args_with_config_defaults))): add_to_args.append('--timeout=3') complete_args = [program_name, *add_to_args, *args] def dump_stderr(): stderr.seek(0) sys.stderr.write(stderr.read()) try: try: 
exit_status = core.main(args=complete_args, **kwargs) if ('--download' in args): time.sleep(0.5) except SystemExit: if tolerate_error_exit_status: exit_status = ExitStatus.ERROR else: dump_stderr() raise except Exception: stderr.seek(0) sys.stderr.write(stderr.read()) raise else: if ((not tolerate_error_exit_status) and (exit_status != ExitStatus.SUCCESS)): dump_stderr() raise ExitStatusError(f'httpie.core.main() unexpectedly returned a non-zero exit status: {exit_status}') stdout.seek(0) stderr.seek(0) devnull.seek(0) output = stdout.read() devnull_output = devnull.read() try: output = output.decode() except UnicodeDecodeError: r = BytesCLIResponse(output) else: r = StrCLIResponse(output) try: devnull_output = devnull_output.decode() except Exception: pass r.devnull = devnull_output r.stderr = stderr.read() r.exit_status = exit_status r.args = args r.complete_args = ' '.join(complete_args) if (r.exit_status != ExitStatus.SUCCESS): sys.stderr.write(r.stderr) return r finally: devnull.close() stdout.close() stderr.close() env.cleanup()
Run HTTPie and capture stderr/out and exit status. Content written to devnull will be captured only if env.devnull is set manually. Invoke `httpie.core.main()` with `args` and `kwargs`, and return a `CLIResponse` subclass instance. The return value is either a `StrCLIResponse`, or `BytesCLIResponse` if unable to decode the output. Devnull is string when possible, bytes otherwise. The response has the following attributes: `stdout` is represented by the instance itself (print r) `stderr`: text written to stderr `devnull` text written to devnull. `exit_status`: the exit status `json`: decoded JSON (if possible) or `None` Exceptions are propagated. If you pass ``tolerate_error_exit_status=True``, then error exit statuses won't result into an exception. Example: $ http --auth=user:password GET pie.dev/basic-auth/user/password >>> httpbin = getfixture('httpbin') >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw') >>> type(r) == StrCLIResponse True >>> r.exit_status is ExitStatus.SUCCESS True >>> r.stderr '' >>> 'HTTP/1.1 200 OK' in r True >>> r.json == {'authenticated': True, 'user': 'user'} True
tests/utils/__init__.py
http
nilushancosta/httpie
24,849
python
def http(*args, program_name='http', tolerate_error_exit_status=False, **kwargs) -> Union[(StrCLIResponse, BytesCLIResponse)]: "\n Run HTTPie and capture stderr/out and exit status.\n Content written to devnull will be captured only if\n env.devnull is set manually.\n\n Invoke `httpie.core.main()` with `args` and `kwargs`,\n and return a `CLIResponse` subclass instance.\n\n The return value is either a `StrCLIResponse`, or `BytesCLIResponse`\n if unable to decode the output. Devnull is string when possible,\n bytes otherwise.\n\n The response has the following attributes:\n\n `stdout` is represented by the instance itself (print r)\n `stderr`: text written to stderr\n `devnull` text written to devnull.\n `exit_status`: the exit status\n `json`: decoded JSON (if possible) or `None`\n\n Exceptions are propagated.\n\n If you pass ``tolerate_error_exit_status=True``, then error exit statuses\n won't result into an exception.\n\n Example:\n\n $ http --auth=user:password GET pie.dev/basic-auth/user/password\n\n >>> httpbin = getfixture('httpbin')\n >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw')\n >>> type(r) == StrCLIResponse\n True\n >>> r.exit_status is ExitStatus.SUCCESS\n True\n >>> r.stderr\n \n >>> 'HTTP/1.1 200 OK' in r\n True\n >>> r.json == {'authenticated': True, 'user': 'user'}\n True\n\n " env = kwargs.get('env') if (not env): env = kwargs['env'] = MockEnvironment() stdout = env.stdout stderr = env.stderr devnull = env.devnull args = list(args) args_with_config_defaults = (args + env.config.default_options) add_to_args = [] if ('--debug' not in args_with_config_defaults): if ((not tolerate_error_exit_status) and ('--traceback' not in args_with_config_defaults)): add_to_args.append('--traceback') if (not any((('--timeout' in arg) for arg in args_with_config_defaults))): add_to_args.append('--timeout=3') complete_args = [program_name, *add_to_args, *args] def dump_stderr(): stderr.seek(0) sys.stderr.write(stderr.read()) try: try: exit_status 
= core.main(args=complete_args, **kwargs) if ('--download' in args): time.sleep(0.5) except SystemExit: if tolerate_error_exit_status: exit_status = ExitStatus.ERROR else: dump_stderr() raise except Exception: stderr.seek(0) sys.stderr.write(stderr.read()) raise else: if ((not tolerate_error_exit_status) and (exit_status != ExitStatus.SUCCESS)): dump_stderr() raise ExitStatusError(f'httpie.core.main() unexpectedly returned a non-zero exit status: {exit_status}') stdout.seek(0) stderr.seek(0) devnull.seek(0) output = stdout.read() devnull_output = devnull.read() try: output = output.decode() except UnicodeDecodeError: r = BytesCLIResponse(output) else: r = StrCLIResponse(output) try: devnull_output = devnull_output.decode() except Exception: pass r.devnull = devnull_output r.stderr = stderr.read() r.exit_status = exit_status r.args = args r.complete_args = ' '.join(complete_args) if (r.exit_status != ExitStatus.SUCCESS): sys.stderr.write(r.stderr) return r finally: devnull.close() stdout.close() stderr.close() env.cleanup()
def http(*args, program_name='http', tolerate_error_exit_status=False, **kwargs) -> Union[(StrCLIResponse, BytesCLIResponse)]: "\n Run HTTPie and capture stderr/out and exit status.\n Content written to devnull will be captured only if\n env.devnull is set manually.\n\n Invoke `httpie.core.main()` with `args` and `kwargs`,\n and return a `CLIResponse` subclass instance.\n\n The return value is either a `StrCLIResponse`, or `BytesCLIResponse`\n if unable to decode the output. Devnull is string when possible,\n bytes otherwise.\n\n The response has the following attributes:\n\n `stdout` is represented by the instance itself (print r)\n `stderr`: text written to stderr\n `devnull` text written to devnull.\n `exit_status`: the exit status\n `json`: decoded JSON (if possible) or `None`\n\n Exceptions are propagated.\n\n If you pass ``tolerate_error_exit_status=True``, then error exit statuses\n won't result into an exception.\n\n Example:\n\n $ http --auth=user:password GET pie.dev/basic-auth/user/password\n\n >>> httpbin = getfixture('httpbin')\n >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw')\n >>> type(r) == StrCLIResponse\n True\n >>> r.exit_status is ExitStatus.SUCCESS\n True\n >>> r.stderr\n \n >>> 'HTTP/1.1 200 OK' in r\n True\n >>> r.json == {'authenticated': True, 'user': 'user'}\n True\n\n " env = kwargs.get('env') if (not env): env = kwargs['env'] = MockEnvironment() stdout = env.stdout stderr = env.stderr devnull = env.devnull args = list(args) args_with_config_defaults = (args + env.config.default_options) add_to_args = [] if ('--debug' not in args_with_config_defaults): if ((not tolerate_error_exit_status) and ('--traceback' not in args_with_config_defaults)): add_to_args.append('--traceback') if (not any((('--timeout' in arg) for arg in args_with_config_defaults))): add_to_args.append('--timeout=3') complete_args = [program_name, *add_to_args, *args] def dump_stderr(): stderr.seek(0) sys.stderr.write(stderr.read()) try: try: exit_status 
= core.main(args=complete_args, **kwargs) if ('--download' in args): time.sleep(0.5) except SystemExit: if tolerate_error_exit_status: exit_status = ExitStatus.ERROR else: dump_stderr() raise except Exception: stderr.seek(0) sys.stderr.write(stderr.read()) raise else: if ((not tolerate_error_exit_status) and (exit_status != ExitStatus.SUCCESS)): dump_stderr() raise ExitStatusError(f'httpie.core.main() unexpectedly returned a non-zero exit status: {exit_status}') stdout.seek(0) stderr.seek(0) devnull.seek(0) output = stdout.read() devnull_output = devnull.read() try: output = output.decode() except UnicodeDecodeError: r = BytesCLIResponse(output) else: r = StrCLIResponse(output) try: devnull_output = devnull_output.decode() except Exception: pass r.devnull = devnull_output r.stderr = stderr.read() r.exit_status = exit_status r.args = args r.complete_args = ' '.join(complete_args) if (r.exit_status != ExitStatus.SUCCESS): sys.stderr.write(r.stderr) return r finally: devnull.close() stdout.close() stderr.close() env.cleanup()<|docstring|>Run HTTPie and capture stderr/out and exit status. Content written to devnull will be captured only if env.devnull is set manually. Invoke `httpie.core.main()` with `args` and `kwargs`, and return a `CLIResponse` subclass instance. The return value is either a `StrCLIResponse`, or `BytesCLIResponse` if unable to decode the output. Devnull is string when possible, bytes otherwise. The response has the following attributes: `stdout` is represented by the instance itself (print r) `stderr`: text written to stderr `devnull` text written to devnull. `exit_status`: the exit status `json`: decoded JSON (if possible) or `None` Exceptions are propagated. If you pass ``tolerate_error_exit_status=True``, then error exit statuses won't result into an exception. 
Example: $ http --auth=user:password GET pie.dev/basic-auth/user/password >>> httpbin = getfixture('httpbin') >>> r = http('-a', 'user:pw', httpbin.url + '/basic-auth/user/pw') >>> type(r) == StrCLIResponse True >>> r.exit_status is ExitStatus.SUCCESS True >>> r.stderr '' >>> 'HTTP/1.1 200 OK' in r True >>> r.json == {'authenticated': True, 'user': 'user'} True<|endoftext|>
3fdf32ee887ab450386b784db9f064acf202aa1547481055169afdd62931e5a3
@property def json(self) -> Optional[dict]: '\n Return deserialized the request or response JSON body,\n if one (and only one) included in the output and is parsable.\n\n ' if (not hasattr(self, '_json')): self._json = None if (COLOR in self): pass elif self.strip().startswith('{'): self._json = json.loads(self) elif (self.count('Content-Type:') == 1): try: j = self.strip()[self.strip().rindex('\r\n\r\n'):] except ValueError: pass else: try: self._json = json.loads(j) except ValueError: pass return self._json
Return deserialized the request or response JSON body, if one (and only one) included in the output and is parsable.
tests/utils/__init__.py
json
nilushancosta/httpie
24,849
python
@property def json(self) -> Optional[dict]: '\n Return deserialized the request or response JSON body,\n if one (and only one) included in the output and is parsable.\n\n ' if (not hasattr(self, '_json')): self._json = None if (COLOR in self): pass elif self.strip().startswith('{'): self._json = json.loads(self) elif (self.count('Content-Type:') == 1): try: j = self.strip()[self.strip().rindex('\r\n\r\n'):] except ValueError: pass else: try: self._json = json.loads(j) except ValueError: pass return self._json
@property def json(self) -> Optional[dict]: '\n Return deserialized the request or response JSON body,\n if one (and only one) included in the output and is parsable.\n\n ' if (not hasattr(self, '_json')): self._json = None if (COLOR in self): pass elif self.strip().startswith('{'): self._json = json.loads(self) elif (self.count('Content-Type:') == 1): try: j = self.strip()[self.strip().rindex('\r\n\r\n'):] except ValueError: pass else: try: self._json = json.loads(j) except ValueError: pass return self._json<|docstring|>Return deserialized the request or response JSON body, if one (and only one) included in the output and is parsable.<|endoftext|>
d6300a37550a64302a30da575a8adaf3bf277e843839c17c78d9fed22666e041
def __init__(self, file, schema=None, headers=None, extrasaction='ignore', write_headers=True): "\n :param file: File path\n :param schema: A file, url or python dictionary with the tab schema\n :param extrasaction: If it's equal to 'ignore', the values with a key not defined as a schema field will be\n ignored. If it's equal to 'append' the values will be added at the end of the line, but without a header.\n " TabWriter.__init__(self, file, schema=schema, headers=headers, write_headers=write_headers) self.extrasaction = extrasaction
:param file: File path :param schema: A file, url or python dictionary with the tab schema :param extrasaction: If it's equal to 'ignore', the values with a key not defined as a schema field will be ignored. If it's equal to 'append' the values will be added at the end of the line, but without a header.
itab/writer.py
__init__
jordeu/itab
0
python
def __init__(self, file, schema=None, headers=None, extrasaction='ignore', write_headers=True): "\n :param file: File path\n :param schema: A file, url or python dictionary with the tab schema\n :param extrasaction: If it's equal to 'ignore', the values with a key not defined as a schema field will be\n ignored. If it's equal to 'append' the values will be added at the end of the line, but without a header.\n " TabWriter.__init__(self, file, schema=schema, headers=headers, write_headers=write_headers) self.extrasaction = extrasaction
def __init__(self, file, schema=None, headers=None, extrasaction='ignore', write_headers=True): "\n :param file: File path\n :param schema: A file, url or python dictionary with the tab schema\n :param extrasaction: If it's equal to 'ignore', the values with a key not defined as a schema field will be\n ignored. If it's equal to 'append' the values will be added at the end of the line, but without a header.\n " TabWriter.__init__(self, file, schema=schema, headers=headers, write_headers=write_headers) self.extrasaction = extrasaction<|docstring|>:param file: File path :param schema: A file, url or python dictionary with the tab schema :param extrasaction: If it's equal to 'ignore', the values with a key not defined as a schema field will be ignored. If it's equal to 'append' the values will be added at the end of the line, but without a header.<|endoftext|>
1a6ddaae8875a9586c623f84232b016fcf891a7c7dc7986ce1323d416524ad2f
def writerow(self, row_dict): '\n\n :param row_dict: A dictionary with the values and the field names as keys.\n :return: A list with the writing or validation errors. An empty list if there is no error.\n ' errors = [] for k in row_dict.keys(): if (k not in self.schema.headers): if (self.extrasaction == 'append'): self.schema.headers.append(k) err_msg = 'You will have some extra values without header.' else: err_msg = 'This values are ignored.' errors += "The key '{}' is not a valid schema field. {}".format(k, err_msg) row_list = [row_dict.get(h, None) for h in self.schema.headers] errors += TabWriter.writerow(self, row_list) return errors
:param row_dict: A dictionary with the values and the field names as keys. :return: A list with the writing or validation errors. An empty list if there is no error.
itab/writer.py
writerow
jordeu/itab
0
python
def writerow(self, row_dict): '\n\n :param row_dict: A dictionary with the values and the field names as keys.\n :return: A list with the writing or validation errors. An empty list if there is no error.\n ' errors = [] for k in row_dict.keys(): if (k not in self.schema.headers): if (self.extrasaction == 'append'): self.schema.headers.append(k) err_msg = 'You will have some extra values without header.' else: err_msg = 'This values are ignored.' errors += "The key '{}' is not a valid schema field. {}".format(k, err_msg) row_list = [row_dict.get(h, None) for h in self.schema.headers] errors += TabWriter.writerow(self, row_list) return errors
def writerow(self, row_dict): '\n\n :param row_dict: A dictionary with the values and the field names as keys.\n :return: A list with the writing or validation errors. An empty list if there is no error.\n ' errors = [] for k in row_dict.keys(): if (k not in self.schema.headers): if (self.extrasaction == 'append'): self.schema.headers.append(k) err_msg = 'You will have some extra values without header.' else: err_msg = 'This values are ignored.' errors += "The key '{}' is not a valid schema field. {}".format(k, err_msg) row_list = [row_dict.get(h, None) for h in self.schema.headers] errors += TabWriter.writerow(self, row_list) return errors<|docstring|>:param row_dict: A dictionary with the values and the field names as keys. :return: A list with the writing or validation errors. An empty list if there is no error.<|endoftext|>
cd7b367e5e776ed6b91bfd9025b3bbb05f0f49c4f48f5a50d5ce84c06827def9
@commands.command(name='results', pass_context=True, no_pm=True) async def _results(self, ctx, pollid): 'Results of a strawpoll are returned' async with aiohttp.request('GET', 'http://strawpoll.me/api/v2/polls/{}'.format(pollid), headers={'content-type': 'application/json'}) as resp: data = (await resp.json()) s = '{}\n\n'.format(html.unescape(data['title'])) for o in range(len(data['options'])): s += '{}: {}\n'.format(html.unescape(data['options'][o]), data['votes'][o]) (await self.bot.say(box(s)))
Results of a strawpoll are returned
strawpoll/strawpoll.py
_results
crossedfall/ax-cogs
0
python
@commands.command(name='results', pass_context=True, no_pm=True) async def _results(self, ctx, pollid): async with aiohttp.request('GET', 'http://strawpoll.me/api/v2/polls/{}'.format(pollid), headers={'content-type': 'application/json'}) as resp: data = (await resp.json()) s = '{}\n\n'.format(html.unescape(data['title'])) for o in range(len(data['options'])): s += '{}: {}\n'.format(html.unescape(data['options'][o]), data['votes'][o]) (await self.bot.say(box(s)))
@commands.command(name='results', pass_context=True, no_pm=True) async def _results(self, ctx, pollid): async with aiohttp.request('GET', 'http://strawpoll.me/api/v2/polls/{}'.format(pollid), headers={'content-type': 'application/json'}) as resp: data = (await resp.json()) s = '{}\n\n'.format(html.unescape(data['title'])) for o in range(len(data['options'])): s += '{}: {}\n'.format(html.unescape(data['options'][o]), data['votes'][o]) (await self.bot.say(box(s)))<|docstring|>Results of a strawpoll are returned<|endoftext|>
a72c055ccd9169d98fa0deb4bb7d04d1977353ae5526399494e6fada96e4565b
@commands.command(name='strawpoll', pass_context=True, no_pm=True) async def _strawpoll(self, ctx, *, question, options=None): 'Makes a poll based on questions and choices or options. must be divided by "; "\n Examples:\n [p]strawpoll What is this person?; Who is this person?; Where is this person?; When is this person coming?\n [p]strawpoll What; Who?; Where?; When?; Why?' options_list = question.split('; ') title = options_list[0] options_list.remove(title) if (len(options_list) < 2): (await self.bot.say('You need to specify 2 or more options')) else: normal = {'title': title, 'options': options_list} request = dict(normal, **self.settings) async with aiohttp.request('POST', 'https://www.strawpoll.me/api/v2/polls', headers={'content-type': 'application/json'}, data=json.dumps(request)) as resp: test = (await resp.content.read()) test = json.loads(test.decode()) sid = test['id'] (await self.bot.say("Here's your strawpoll link: http://strawpoll.me/{}".format(sid)))
Makes a poll based on questions and choices or options. must be divided by "; " Examples: [p]strawpoll What is this person?; Who is this person?; Where is this person?; When is this person coming? [p]strawpoll What; Who?; Where?; When?; Why?
strawpoll/strawpoll.py
_strawpoll
crossedfall/ax-cogs
0
python
@commands.command(name='strawpoll', pass_context=True, no_pm=True) async def _strawpoll(self, ctx, *, question, options=None): 'Makes a poll based on questions and choices or options. must be divided by "; "\n Examples:\n [p]strawpoll What is this person?; Who is this person?; Where is this person?; When is this person coming?\n [p]strawpoll What; Who?; Where?; When?; Why?' options_list = question.split('; ') title = options_list[0] options_list.remove(title) if (len(options_list) < 2): (await self.bot.say('You need to specify 2 or more options')) else: normal = {'title': title, 'options': options_list} request = dict(normal, **self.settings) async with aiohttp.request('POST', 'https://www.strawpoll.me/api/v2/polls', headers={'content-type': 'application/json'}, data=json.dumps(request)) as resp: test = (await resp.content.read()) test = json.loads(test.decode()) sid = test['id'] (await self.bot.say("Here's your strawpoll link: http://strawpoll.me/{}".format(sid)))
@commands.command(name='strawpoll', pass_context=True, no_pm=True) async def _strawpoll(self, ctx, *, question, options=None): 'Makes a poll based on questions and choices or options. must be divided by "; "\n Examples:\n [p]strawpoll What is this person?; Who is this person?; Where is this person?; When is this person coming?\n [p]strawpoll What; Who?; Where?; When?; Why?' options_list = question.split('; ') title = options_list[0] options_list.remove(title) if (len(options_list) < 2): (await self.bot.say('You need to specify 2 or more options')) else: normal = {'title': title, 'options': options_list} request = dict(normal, **self.settings) async with aiohttp.request('POST', 'https://www.strawpoll.me/api/v2/polls', headers={'content-type': 'application/json'}, data=json.dumps(request)) as resp: test = (await resp.content.read()) test = json.loads(test.decode()) sid = test['id'] (await self.bot.say("Here's your strawpoll link: http://strawpoll.me/{}".format(sid)))<|docstring|>Makes a poll based on questions and choices or options. must be divided by "; " Examples: [p]strawpoll What is this person?; Who is this person?; Where is this person?; When is this person coming? [p]strawpoll What; Who?; Where?; When?; Why?<|endoftext|>
ba730e0ab722d8e8a5fd5bf08387869a93c3ddaba0d11c34ceefdc560dca7697
@commands.group(name='strawpollset', pass_context=True, no_pm=True) async def strawpollset(self, ctx): 'Toggle the different options available for polls\n multi - Whether multiple choice is available\n dupcheck - Whether check for duplicate votes is enforced\n captcha - Whether voters will have to verify captcha' if (ctx.invoked_subcommand is None): (await send_cmd_help(ctx)) (await self.bot.say('```current settings for the polls are as follows:\nmulti: {}\ndupcheck: {}\ncaptcha: {}```'.format(self.settings['multi'], self.settings['dupcheck'], self.settings['captcha'])))
Toggle the different options available for polls multi - Whether multiple choice is available dupcheck - Whether check for duplicate votes is enforced captcha - Whether voters will have to verify captcha
strawpoll/strawpoll.py
strawpollset
crossedfall/ax-cogs
0
python
@commands.group(name='strawpollset', pass_context=True, no_pm=True) async def strawpollset(self, ctx): 'Toggle the different options available for polls\n multi - Whether multiple choice is available\n dupcheck - Whether check for duplicate votes is enforced\n captcha - Whether voters will have to verify captcha' if (ctx.invoked_subcommand is None): (await send_cmd_help(ctx)) (await self.bot.say('```current settings for the polls are as follows:\nmulti: {}\ndupcheck: {}\ncaptcha: {}```'.format(self.settings['multi'], self.settings['dupcheck'], self.settings['captcha'])))
@commands.group(name='strawpollset', pass_context=True, no_pm=True) async def strawpollset(self, ctx): 'Toggle the different options available for polls\n multi - Whether multiple choice is available\n dupcheck - Whether check for duplicate votes is enforced\n captcha - Whether voters will have to verify captcha' if (ctx.invoked_subcommand is None): (await send_cmd_help(ctx)) (await self.bot.say('```current settings for the polls are as follows:\nmulti: {}\ndupcheck: {}\ncaptcha: {}```'.format(self.settings['multi'], self.settings['dupcheck'], self.settings['captcha'])))<|docstring|>Toggle the different options available for polls multi - Whether multiple choice is available dupcheck - Whether check for duplicate votes is enforced captcha - Whether voters will have to verify captcha<|endoftext|>
663015ec469d3608b4332a4b72240697673187fc1fbc20ef5cc6c7efa2a2081f
@strawpollset.command(name='multi', pass_context=True, no_pm=True) async def multi(self, ctx): 'Toggles between True and False values\n True - Multiple choice is available\n False - Multiple choice is not available' if (self.settings['multi'] == 'true'): self.settings['multi'] = 'false' (await self.bot.say('Multiple choice no longer available in the poll')) else: self.settings['multi'] = 'true' (await self.bot.say('Multiple choice is now available on the polls.')) dataIO.save_json(self.fp, self.settings)
Toggles between True and False values True - Multiple choice is available False - Multiple choice is not available
strawpoll/strawpoll.py
multi
crossedfall/ax-cogs
0
python
@strawpollset.command(name='multi', pass_context=True, no_pm=True) async def multi(self, ctx): 'Toggles between True and False values\n True - Multiple choice is available\n False - Multiple choice is not available' if (self.settings['multi'] == 'true'): self.settings['multi'] = 'false' (await self.bot.say('Multiple choice no longer available in the poll')) else: self.settings['multi'] = 'true' (await self.bot.say('Multiple choice is now available on the polls.')) dataIO.save_json(self.fp, self.settings)
@strawpollset.command(name='multi', pass_context=True, no_pm=True) async def multi(self, ctx): 'Toggles between True and False values\n True - Multiple choice is available\n False - Multiple choice is not available' if (self.settings['multi'] == 'true'): self.settings['multi'] = 'false' (await self.bot.say('Multiple choice no longer available in the poll')) else: self.settings['multi'] = 'true' (await self.bot.say('Multiple choice is now available on the polls.')) dataIO.save_json(self.fp, self.settings)<|docstring|>Toggles between True and False values True - Multiple choice is available False - Multiple choice is not available<|endoftext|>