body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
3e10350d0beb1b02fe413faff599ae8968edd34fd0881da228b00db0f088db8e
def set_cookie(key, value, days=7): 'Set cookie' _init_cookie_client() run_js('setCookie(key, value, days)', key=key, value=value, days=days)
Set cookie
pywebio_battery/web.py
set_cookie
pywebio/pywebio-battery
2
python
def set_cookie(key, value, days=7): _init_cookie_client() run_js('setCookie(key, value, days)', key=key, value=value, days=days)
def set_cookie(key, value, days=7): _init_cookie_client() run_js('setCookie(key, value, days)', key=key, value=value, days=days)<|docstring|>Set cookie<|endoftext|>
99056e696ef815400390f8c60fb6b7956c2990daf582e3c64224737da7e7c7dd
def get_cookie(key): 'Get cookie' _init_cookie_client() return eval_js('getCookie(key)', key=key)
Get cookie
pywebio_battery/web.py
get_cookie
pywebio/pywebio-battery
2
python
def get_cookie(key): _init_cookie_client() return eval_js('getCookie(key)', key=key)
def get_cookie(key): _init_cookie_client() return eval_js('getCookie(key)', key=key)<|docstring|>Get cookie<|endoftext|>
2552a1eb855e91b6d89a081d5dad2272565e8293cb4f1e4a1247495a311a594b
@router.post('/uploadfile', status_code=status.HTTP_201_CREATED) async def create_upload_file(request: Request, file: UploadFile=File(...)): '\n File upload route\n ' (await authenticate(request)) if (not upload_file(file, os.path.join(settings.MEDIA_UPLOAD_LOCATION, 'file'), settings.ALLOWED_IMAGE_TYPES, settings.FILE_SERVICE)): raise HTTPException(detail='file could not be uploaded', status_code=status.HTTP_500_INTERNAL_SERVER_ERROR) return JSONResponse(content=(('The file ' + file.filename) + ' was uploaded successfully'), status_code=status.HTTP_201_CREATED)
File upload route
routers/media_routes.py
create_upload_file
olubiyiontheweb/malliva
0
python
@router.post('/uploadfile', status_code=status.HTTP_201_CREATED) async def create_upload_file(request: Request, file: UploadFile=File(...)): '\n \n ' (await authenticate(request)) if (not upload_file(file, os.path.join(settings.MEDIA_UPLOAD_LOCATION, 'file'), settings.ALLOWED_IMAGE_TYPES, settings.FILE_SERVICE)): raise HTTPException(detail='file could not be uploaded', status_code=status.HTTP_500_INTERNAL_SERVER_ERROR) return JSONResponse(content=(('The file ' + file.filename) + ' was uploaded successfully'), status_code=status.HTTP_201_CREATED)
@router.post('/uploadfile', status_code=status.HTTP_201_CREATED) async def create_upload_file(request: Request, file: UploadFile=File(...)): '\n \n ' (await authenticate(request)) if (not upload_file(file, os.path.join(settings.MEDIA_UPLOAD_LOCATION, 'file'), settings.ALLOWED_IMAGE_TYPES, settings.FILE_SERVICE)): raise HTTPException(detail='file could not be uploaded', status_code=status.HTTP_500_INTERNAL_SERVER_ERROR) return JSONResponse(content=(('The file ' + file.filename) + ' was uploaded successfully'), status_code=status.HTTP_201_CREATED)<|docstring|>File upload route<|endoftext|>
b45b7257cb347a749c99d49f54805573b2157375d3995c5931451007403639ce
@router.get('/{file_path:path}') async def read_files(request: Request, file_path: str): 'read files ' logging.info('TODO: we may need to allow only authenticated user to access files for security and cost reasons') (await get_db_name(request)) try: with open(os.path.join(settings.MEDIA_UPLOAD_LOCATION, file_path), 'r') as file: return FileResponse(path=os.path.join(settings.MEDIA_UPLOAD_LOCATION, file_path)) except: return JSONResponse(content='File does not exist', status_code=status.HTTP_404_NOT_FOUND)
read files
routers/media_routes.py
read_files
olubiyiontheweb/malliva
0
python
@router.get('/{file_path:path}') async def read_files(request: Request, file_path: str): ' ' logging.info('TODO: we may need to allow only authenticated user to access files for security and cost reasons') (await get_db_name(request)) try: with open(os.path.join(settings.MEDIA_UPLOAD_LOCATION, file_path), 'r') as file: return FileResponse(path=os.path.join(settings.MEDIA_UPLOAD_LOCATION, file_path)) except: return JSONResponse(content='File does not exist', status_code=status.HTTP_404_NOT_FOUND)
@router.get('/{file_path:path}') async def read_files(request: Request, file_path: str): ' ' logging.info('TODO: we may need to allow only authenticated user to access files for security and cost reasons') (await get_db_name(request)) try: with open(os.path.join(settings.MEDIA_UPLOAD_LOCATION, file_path), 'r') as file: return FileResponse(path=os.path.join(settings.MEDIA_UPLOAD_LOCATION, file_path)) except: return JSONResponse(content='File does not exist', status_code=status.HTTP_404_NOT_FOUND)<|docstring|>read files<|endoftext|>
e64d152b6330a9d8a6696130ab4ef4b848f3cd0edd17e1d9c7db40e3a670c67b
def minimum_part_size(size_in_bytes, default_part_size=DEFAULT_PART_SIZE): 'Calculate the minimum part size needed for a multipart upload.\n\n Glacier allows a maximum of 10,000 parts per upload. It also\n states that the maximum archive size is 10,000 * 4 GB, which means\n the part size can range from 1MB to 4GB (provided it is one 1MB\n multiplied by a power of 2).\n\n This function will compute what the minimum part size must be in\n order to upload a file of size ``size_in_bytes``.\n\n It will first check if ``default_part_size`` is sufficient for\n a part size given the ``size_in_bytes``. If this is not the case,\n then the smallest part size than can accomodate a file of size\n ``size_in_bytes`` will be returned.\n\n If the file size is greater than the maximum allowed archive\n size of 10,000 * 4GB, a ``ValueError`` will be raised.\n\n ' part_size = _MEGABYTE if ((default_part_size * MAXIMUM_NUMBER_OF_PARTS) < size_in_bytes): if (size_in_bytes > ((4096 * _MEGABYTE) * 10000)): raise ValueError(('File size too large: %s' % size_in_bytes)) min_part_size = (size_in_bytes / 10000) power = 3 while (part_size < min_part_size): part_size = math.ldexp(_MEGABYTE, power) power += 1 part_size = int(part_size) else: part_size = default_part_size return part_size
Calculate the minimum part size needed for a multipart upload. Glacier allows a maximum of 10,000 parts per upload. It also states that the maximum archive size is 10,000 * 4 GB, which means the part size can range from 1MB to 4GB (provided it is one 1MB multiplied by a power of 2). This function will compute what the minimum part size must be in order to upload a file of size ``size_in_bytes``. It will first check if ``default_part_size`` is sufficient for a part size given the ``size_in_bytes``. If this is not the case, then the smallest part size than can accomodate a file of size ``size_in_bytes`` will be returned. If the file size is greater than the maximum allowed archive size of 10,000 * 4GB, a ``ValueError`` will be raised.
boto/glacier/utils.py
minimum_part_size
khagler/boto
5,079
python
def minimum_part_size(size_in_bytes, default_part_size=DEFAULT_PART_SIZE): 'Calculate the minimum part size needed for a multipart upload.\n\n Glacier allows a maximum of 10,000 parts per upload. It also\n states that the maximum archive size is 10,000 * 4 GB, which means\n the part size can range from 1MB to 4GB (provided it is one 1MB\n multiplied by a power of 2).\n\n This function will compute what the minimum part size must be in\n order to upload a file of size ``size_in_bytes``.\n\n It will first check if ``default_part_size`` is sufficient for\n a part size given the ``size_in_bytes``. If this is not the case,\n then the smallest part size than can accomodate a file of size\n ``size_in_bytes`` will be returned.\n\n If the file size is greater than the maximum allowed archive\n size of 10,000 * 4GB, a ``ValueError`` will be raised.\n\n ' part_size = _MEGABYTE if ((default_part_size * MAXIMUM_NUMBER_OF_PARTS) < size_in_bytes): if (size_in_bytes > ((4096 * _MEGABYTE) * 10000)): raise ValueError(('File size too large: %s' % size_in_bytes)) min_part_size = (size_in_bytes / 10000) power = 3 while (part_size < min_part_size): part_size = math.ldexp(_MEGABYTE, power) power += 1 part_size = int(part_size) else: part_size = default_part_size return part_size
def minimum_part_size(size_in_bytes, default_part_size=DEFAULT_PART_SIZE): 'Calculate the minimum part size needed for a multipart upload.\n\n Glacier allows a maximum of 10,000 parts per upload. It also\n states that the maximum archive size is 10,000 * 4 GB, which means\n the part size can range from 1MB to 4GB (provided it is one 1MB\n multiplied by a power of 2).\n\n This function will compute what the minimum part size must be in\n order to upload a file of size ``size_in_bytes``.\n\n It will first check if ``default_part_size`` is sufficient for\n a part size given the ``size_in_bytes``. If this is not the case,\n then the smallest part size than can accomodate a file of size\n ``size_in_bytes`` will be returned.\n\n If the file size is greater than the maximum allowed archive\n size of 10,000 * 4GB, a ``ValueError`` will be raised.\n\n ' part_size = _MEGABYTE if ((default_part_size * MAXIMUM_NUMBER_OF_PARTS) < size_in_bytes): if (size_in_bytes > ((4096 * _MEGABYTE) * 10000)): raise ValueError(('File size too large: %s' % size_in_bytes)) min_part_size = (size_in_bytes / 10000) power = 3 while (part_size < min_part_size): part_size = math.ldexp(_MEGABYTE, power) power += 1 part_size = int(part_size) else: part_size = default_part_size return part_size<|docstring|>Calculate the minimum part size needed for a multipart upload. Glacier allows a maximum of 10,000 parts per upload. It also states that the maximum archive size is 10,000 * 4 GB, which means the part size can range from 1MB to 4GB (provided it is one 1MB multiplied by a power of 2). This function will compute what the minimum part size must be in order to upload a file of size ``size_in_bytes``. It will first check if ``default_part_size`` is sufficient for a part size given the ``size_in_bytes``. If this is not the case, then the smallest part size than can accomodate a file of size ``size_in_bytes`` will be returned. 
If the file size is greater than the maximum allowed archive size of 10,000 * 4GB, a ``ValueError`` will be raised.<|endoftext|>
b97bb20544e2ad9774129a247398394794d753702558d4ca4de9148ea83d5a10
def tree_hash(fo): '\n Given a hash of each 1MB chunk (from chunk_hashes) this will hash\n together adjacent hashes until it ends up with one big one. So a\n tree of hashes.\n ' hashes = [] hashes.extend(fo) while (len(hashes) > 1): new_hashes = [] while True: if (len(hashes) > 1): first = hashes.pop(0) second = hashes.pop(0) new_hashes.append(hashlib.sha256((first + second)).digest()) elif (len(hashes) == 1): only = hashes.pop(0) new_hashes.append(only) else: break hashes.extend(new_hashes) return hashes[0]
Given a hash of each 1MB chunk (from chunk_hashes) this will hash together adjacent hashes until it ends up with one big one. So a tree of hashes.
boto/glacier/utils.py
tree_hash
khagler/boto
5,079
python
def tree_hash(fo): '\n Given a hash of each 1MB chunk (from chunk_hashes) this will hash\n together adjacent hashes until it ends up with one big one. So a\n tree of hashes.\n ' hashes = [] hashes.extend(fo) while (len(hashes) > 1): new_hashes = [] while True: if (len(hashes) > 1): first = hashes.pop(0) second = hashes.pop(0) new_hashes.append(hashlib.sha256((first + second)).digest()) elif (len(hashes) == 1): only = hashes.pop(0) new_hashes.append(only) else: break hashes.extend(new_hashes) return hashes[0]
def tree_hash(fo): '\n Given a hash of each 1MB chunk (from chunk_hashes) this will hash\n together adjacent hashes until it ends up with one big one. So a\n tree of hashes.\n ' hashes = [] hashes.extend(fo) while (len(hashes) > 1): new_hashes = [] while True: if (len(hashes) > 1): first = hashes.pop(0) second = hashes.pop(0) new_hashes.append(hashlib.sha256((first + second)).digest()) elif (len(hashes) == 1): only = hashes.pop(0) new_hashes.append(only) else: break hashes.extend(new_hashes) return hashes[0]<|docstring|>Given a hash of each 1MB chunk (from chunk_hashes) this will hash together adjacent hashes until it ends up with one big one. So a tree of hashes.<|endoftext|>
2f4e999547b21e4f9477b4a174c7d428b14b42c744c72a95e26bb7c0c48b828d
def compute_hashes_from_fileobj(fileobj, chunk_size=(1024 * 1024)): 'Compute the linear and tree hash from a fileobj.\n\n This function will compute the linear/tree hash of a fileobj\n in a single pass through the fileobj.\n\n :param fileobj: A file like object.\n\n :param chunk_size: The size of the chunks to use for the tree\n hash. This is also the buffer size used to read from\n `fileobj`.\n\n :rtype: tuple\n :return: A tuple of (linear_hash, tree_hash). Both hashes\n are returned in hex.\n\n ' if (six.PY3 and hasattr(fileobj, 'mode') and ('b' not in fileobj.mode)): raise ValueError('File-like object must be opened in binary mode!') linear_hash = hashlib.sha256() chunks = [] chunk = fileobj.read(chunk_size) while chunk: if (not isinstance(chunk, bytes)): chunk = chunk.encode((getattr(fileobj, 'encoding', '') or 'utf-8')) linear_hash.update(chunk) chunks.append(hashlib.sha256(chunk).digest()) chunk = fileobj.read(chunk_size) if (not chunks): chunks = [hashlib.sha256(b'').digest()] return (linear_hash.hexdigest(), bytes_to_hex(tree_hash(chunks)))
Compute the linear and tree hash from a fileobj. This function will compute the linear/tree hash of a fileobj in a single pass through the fileobj. :param fileobj: A file like object. :param chunk_size: The size of the chunks to use for the tree hash. This is also the buffer size used to read from `fileobj`. :rtype: tuple :return: A tuple of (linear_hash, tree_hash). Both hashes are returned in hex.
boto/glacier/utils.py
compute_hashes_from_fileobj
khagler/boto
5,079
python
def compute_hashes_from_fileobj(fileobj, chunk_size=(1024 * 1024)): 'Compute the linear and tree hash from a fileobj.\n\n This function will compute the linear/tree hash of a fileobj\n in a single pass through the fileobj.\n\n :param fileobj: A file like object.\n\n :param chunk_size: The size of the chunks to use for the tree\n hash. This is also the buffer size used to read from\n `fileobj`.\n\n :rtype: tuple\n :return: A tuple of (linear_hash, tree_hash). Both hashes\n are returned in hex.\n\n ' if (six.PY3 and hasattr(fileobj, 'mode') and ('b' not in fileobj.mode)): raise ValueError('File-like object must be opened in binary mode!') linear_hash = hashlib.sha256() chunks = [] chunk = fileobj.read(chunk_size) while chunk: if (not isinstance(chunk, bytes)): chunk = chunk.encode((getattr(fileobj, 'encoding', ) or 'utf-8')) linear_hash.update(chunk) chunks.append(hashlib.sha256(chunk).digest()) chunk = fileobj.read(chunk_size) if (not chunks): chunks = [hashlib.sha256(b).digest()] return (linear_hash.hexdigest(), bytes_to_hex(tree_hash(chunks)))
def compute_hashes_from_fileobj(fileobj, chunk_size=(1024 * 1024)): 'Compute the linear and tree hash from a fileobj.\n\n This function will compute the linear/tree hash of a fileobj\n in a single pass through the fileobj.\n\n :param fileobj: A file like object.\n\n :param chunk_size: The size of the chunks to use for the tree\n hash. This is also the buffer size used to read from\n `fileobj`.\n\n :rtype: tuple\n :return: A tuple of (linear_hash, tree_hash). Both hashes\n are returned in hex.\n\n ' if (six.PY3 and hasattr(fileobj, 'mode') and ('b' not in fileobj.mode)): raise ValueError('File-like object must be opened in binary mode!') linear_hash = hashlib.sha256() chunks = [] chunk = fileobj.read(chunk_size) while chunk: if (not isinstance(chunk, bytes)): chunk = chunk.encode((getattr(fileobj, 'encoding', ) or 'utf-8')) linear_hash.update(chunk) chunks.append(hashlib.sha256(chunk).digest()) chunk = fileobj.read(chunk_size) if (not chunks): chunks = [hashlib.sha256(b).digest()] return (linear_hash.hexdigest(), bytes_to_hex(tree_hash(chunks)))<|docstring|>Compute the linear and tree hash from a fileobj. This function will compute the linear/tree hash of a fileobj in a single pass through the fileobj. :param fileobj: A file like object. :param chunk_size: The size of the chunks to use for the tree hash. This is also the buffer size used to read from `fileobj`. :rtype: tuple :return: A tuple of (linear_hash, tree_hash). Both hashes are returned in hex.<|endoftext|>
5337782b7e965160e9e0ab57e9efc212684954d2b63d329da25c7e495a66295a
def tree_hash_from_str(str_as_bytes): '\n\n :type str_as_bytes: str\n :param str_as_bytes: The string for which to compute the tree hash.\n\n :rtype: str\n :return: The computed tree hash, returned as hex.\n\n ' return bytes_to_hex(tree_hash(chunk_hashes(str_as_bytes)))
:type str_as_bytes: str :param str_as_bytes: The string for which to compute the tree hash. :rtype: str :return: The computed tree hash, returned as hex.
boto/glacier/utils.py
tree_hash_from_str
khagler/boto
5,079
python
def tree_hash_from_str(str_as_bytes): '\n\n :type str_as_bytes: str\n :param str_as_bytes: The string for which to compute the tree hash.\n\n :rtype: str\n :return: The computed tree hash, returned as hex.\n\n ' return bytes_to_hex(tree_hash(chunk_hashes(str_as_bytes)))
def tree_hash_from_str(str_as_bytes): '\n\n :type str_as_bytes: str\n :param str_as_bytes: The string for which to compute the tree hash.\n\n :rtype: str\n :return: The computed tree hash, returned as hex.\n\n ' return bytes_to_hex(tree_hash(chunk_hashes(str_as_bytes)))<|docstring|>:type str_as_bytes: str :param str_as_bytes: The string for which to compute the tree hash. :rtype: str :return: The computed tree hash, returned as hex.<|endoftext|>
9aa49ee0797190c286045c37c3837761d99c90e7b3dfcd1e5b1cfd8cc0f544e7
@pytest.mark.tryfirst def pytest_pyfunc_call(pyfuncitem): '\n Run asyncio marked test functions in an event loop instead of a normal\n function call.\n ' if ('run_loop' in pyfuncitem.keywords): funcargs = pyfuncitem.funcargs loop = funcargs['loop'] testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} loop.run_until_complete(pyfuncitem.obj(**testargs)) return True
Run asyncio marked test functions in an event loop instead of a normal function call.
tests/conftest.py
pytest_pyfunc_call
norbeq/aiomysql
0
python
@pytest.mark.tryfirst def pytest_pyfunc_call(pyfuncitem): '\n Run asyncio marked test functions in an event loop instead of a normal\n function call.\n ' if ('run_loop' in pyfuncitem.keywords): funcargs = pyfuncitem.funcargs loop = funcargs['loop'] testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} loop.run_until_complete(pyfuncitem.obj(**testargs)) return True
@pytest.mark.tryfirst def pytest_pyfunc_call(pyfuncitem): '\n Run asyncio marked test functions in an event loop instead of a normal\n function call.\n ' if ('run_loop' in pyfuncitem.keywords): funcargs = pyfuncitem.funcargs loop = funcargs['loop'] testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} loop.run_until_complete(pyfuncitem.obj(**testargs)) return True<|docstring|>Run asyncio marked test functions in an event loop instead of a normal function call.<|endoftext|>
c728dc5006ec18a173c80e7c93cbf5f540a524672e0b7c2fe713852223d6854d
@pytest.fixture(scope='session') def session_id(): 'Unique session identifier, random string.' return str(uuid.uuid4())
Unique session identifier, random string.
tests/conftest.py
session_id
norbeq/aiomysql
0
python
@pytest.fixture(scope='session') def session_id(): return str(uuid.uuid4())
@pytest.fixture(scope='session') def session_id(): return str(uuid.uuid4())<|docstring|>Unique session identifier, random string.<|endoftext|>
93ac368de973532cc9e04359c1aed7583219f4e9da093402b3c42188641b2888
def is_admin(user): ' Checks the is_superuser fields in user model.\n :param user: User object\n :return: boolean\n ' return user.is_superuser
Checks the is_superuser fields in user model. :param user: User object :return: boolean
accounts/helpers.py
is_admin
tony-joseph/livre
1
python
def is_admin(user): ' Checks the is_superuser fields in user model.\n :param user: User object\n :return: boolean\n ' return user.is_superuser
def is_admin(user): ' Checks the is_superuser fields in user model.\n :param user: User object\n :return: boolean\n ' return user.is_superuser<|docstring|>Checks the is_superuser fields in user model. :param user: User object :return: boolean<|endoftext|>
ac6edda7e7301112431659635ad5650a37af8bd046a4461e278c71455f2e839e
def is_staff(user): ' Checks the is_staff fields in user model.\n :param user: User object\n :return: boolean\n ' return user.is_staff
Checks the is_staff fields in user model. :param user: User object :return: boolean
accounts/helpers.py
is_staff
tony-joseph/livre
1
python
def is_staff(user): ' Checks the is_staff fields in user model.\n :param user: User object\n :return: boolean\n ' return user.is_staff
def is_staff(user): ' Checks the is_staff fields in user model.\n :param user: User object\n :return: boolean\n ' return user.is_staff<|docstring|>Checks the is_staff fields in user model. :param user: User object :return: boolean<|endoftext|>
a6775384825002cabe80961e3a99fd5e59f303f0d3abc7df4308775d3fe672cf
def setUp(self): ' Prepares the test fixture before each test method is called. ' listener.obj = None listener.trait_name = None listener.old = None listener.new = None
Prepares the test fixture before each test method is called.
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
setUp
adamcvj/SatelliteTracker
1
python
def setUp(self): ' ' listener.obj = None listener.trait_name = None listener.old = None listener.new = None
def setUp(self): ' ' listener.obj = None listener.trait_name = None listener.old = None listener.new = None<|docstring|>Prepares the test fixture before each test method is called.<|endoftext|>
0eb682cdfd252e50164085adad1ad26c08114302bad817966aaeee27227020cb
def test_set_extension_point(self): ' set extension point ' a = PluginA() application = TestApplication(plugins=[a]) application.start() with self.assertRaises(SystemError): setattr(a, 'x', [1, 2, 3])
set extension point
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_set_extension_point
adamcvj/SatelliteTracker
1
python
def test_set_extension_point(self): ' ' a = PluginA() application = TestApplication(plugins=[a]) application.start() with self.assertRaises(SystemError): setattr(a, 'x', [1, 2, 3])
def test_set_extension_point(self): ' ' a = PluginA() application = TestApplication(plugins=[a]) application.start() with self.assertRaises(SystemError): setattr(a, 'x', [1, 2, 3])<|docstring|>set extension point<|endoftext|>
433007e5ac36e7e978ff604e676ad894cf7502132f46d2cdb24fb0c7f0639ec3
def test_append(self): ' append ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x.append(4) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([1, 2, 3, 4, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([1, 2, 3, 4, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([4], listener.new.added) self.assertEqual([], listener.new.removed) self.assertEqual(3, listener.new.index)
append
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_append
adamcvj/SatelliteTracker
1
python
def test_(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x.(4) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([1, 2, 3, 4, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([1, 2, 3, 4, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([4], listener.new.added) self.assertEqual([], listener.new.removed) self.assertEqual(3, listener.new.index)
def test_(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x.(4) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([1, 2, 3, 4, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([1, 2, 3, 4, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([4], listener.new.added) self.assertEqual([], listener.new.removed) self.assertEqual(3, listener.new.index)<|docstring|>append<|endoftext|>
06d2411eec50304d4cd26502f4890e1bdedeeb2195a22abb874e3ab7dddaab23
def test_remove(self): ' remove ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x.remove(3) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(5, len(extensions)) self.assertEqual([1, 2, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(5, len(extensions)) self.assertEqual([1, 2, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([3], listener.new.removed) self.assertEqual(2, listener.new.index)
remove
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_remove
adamcvj/SatelliteTracker
1
python
def test_(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x.(3) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(5, len(extensions)) self.assertEqual([1, 2, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(5, len(extensions)) self.assertEqual([1, 2, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([3], listener.new.d) self.assertEqual(2, listener.new.index)
def test_(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x.(3) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(5, len(extensions)) self.assertEqual([1, 2, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(5, len(extensions)) self.assertEqual([1, 2, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([3], listener.new.d) self.assertEqual(2, listener.new.index)<|docstring|>remove<|endoftext|>
78131b4d0e9edde76c71643a3a72f56aa7b4c9d4524ad4c39336b45d6a29b791
def test_assign_empty_list(self): ' assign empty list ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x = [] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index.start) self.assertEqual(3, listener.new.index.stop)
assign empty list
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_assign_empty_list
adamcvj/SatelliteTracker
1
python
def test_assign_empty_list(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x = [] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index.start) self.assertEqual(3, listener.new.index.stop)
def test_assign_empty_list(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x = [] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index.start) self.assertEqual(3, listener.new.index.stop)<|docstring|>assign empty list<|endoftext|>
0e78faac694a86823298b86d74b863942c36e616a595c285961c1eba7922dbad
def test_assign_empty_list_no_event(self): ' assign empty list no event ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() b.x = [] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(None, listener.obj)
assign empty list no event
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_assign_empty_list_no_event
adamcvj/SatelliteTracker
1
python
def test_assign_empty_list_no_event(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() b.x = [] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(None, listener.obj)
def test_assign_empty_list_no_event(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() b.x = [] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(None, listener.obj)<|docstring|>assign empty list no event<|endoftext|>
f775b394ee3438220c87c16bf163b66944eda0dd2869b71e11ee76630b33425e
def test_assign_non_empty_list(self): ' assign non-empty list ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x = [2, 4, 6, 8] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([2, 4, 6, 8, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([2, 4, 6, 8, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([2, 4, 6, 8], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index.start) self.assertEqual(4, listener.new.index.stop)
assign non-empty list
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_assign_non_empty_list
adamcvj/SatelliteTracker
1
python
def test_assign_non_empty_list(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x = [2, 4, 6, 8] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([2, 4, 6, 8, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([2, 4, 6, 8, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([2, 4, 6, 8], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index.start) self.assertEqual(4, listener.new.index.stop)
def test_assign_non_empty_list(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() self.assertEqual([1, 2, 3, 98, 99, 100], a.x) b.x = [2, 4, 6, 8] extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([2, 4, 6, 8, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(7, len(extensions)) self.assertEqual([2, 4, 6, 8, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([2, 4, 6, 8], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index.start) self.assertEqual(4, listener.new.index.stop)<|docstring|>assign non-empty list<|endoftext|>
8ad4a1cccf132ce78dd05be944e6d1fabffe758e43bd27cae8bf92fe95291130
def test_add_plugin(self): ' add plugin ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b]) application.start() extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([1, 2, 3], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([1, 2, 3], extensions) application.add_plugin(c) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([98, 99, 100], listener.new.added) self.assertEqual([], listener.new.removed) self.assertEqual(3, listener.new.index)
add plugin
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_add_plugin
adamcvj/SatelliteTracker
1
python
def test_add_plugin(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b]) application.start() extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([1, 2, 3], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([1, 2, 3], extensions) application.add_plugin(c) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([98, 99, 100], listener.new.added) self.assertEqual([], listener.new.removed) self.assertEqual(3, listener.new.index)
def test_add_plugin(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b]) application.start() extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([1, 2, 3], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([1, 2, 3], extensions) application.add_plugin(c) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([98, 99, 100], listener.new.added) self.assertEqual([], listener.new.removed) self.assertEqual(3, listener.new.index)<|docstring|>add plugin<|endoftext|>
a42703ede643c40a8a5bda94fbe0f032cc114cb82c9c8739ec69796664123564
def test_remove_plugin(self): ' remove plugin ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) application.remove_plugin(b) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index)
remove plugin
Latest/venv/Lib/site-packages/envisage/tests/test_extension_point_changed.py
test_remove_plugin
adamcvj/SatelliteTracker
1
python
def test_remove_plugin(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) application.remove_plugin(b) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index)
def test_remove_plugin(self): ' ' a = PluginA() a.on_trait_change(listener, 'x_items') b = PluginB() c = PluginC() application = TestApplication(plugins=[a, b, c]) application.start() extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(6, len(extensions)) self.assertEqual([1, 2, 3, 98, 99, 100], extensions) application.remove_plugin(b) extensions = application.get_extensions('a.x') extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) extensions = a.x[:] extensions.sort() self.assertEqual(3, len(extensions)) self.assertEqual([98, 99, 100], extensions) self.assertEqual(a, listener.obj) self.assertEqual('x_items', listener.trait_name) self.assertEqual([], listener.new.added) self.assertEqual([1, 2, 3], listener.new.removed) self.assertEqual(0, listener.new.index)<|docstring|>remove plugin<|endoftext|>
5d8616a2b05fb15fdf9be8ba358839be3284332021544653cc62fd913d0c02b5
def step(self, indx_block, closure=None): 'Performs a single optimization step.\n\n Arguments:\n closure (callable, optional): A closure that reevaluates the model\n and returns the loss.\n ' loss = None if (closure is not None): loss = closure() if (indx_block >= len(self.param_groups)): raise ValueError('Block index exceeds the total number of blocks') group = self.param_groups[indx_block] weight_decay = group['weight_decay'] momentum = group['momentum'] dampening = group['dampening'] nesterov = group['nesterov'] for p in group['params']: if (p.grad is None): continue d_p = p.grad.data if (weight_decay != 0): d_p.add_(weight_decay, p.data) if (momentum != 0): param_state = self.state[p] if ('momentum_buffer' not in param_state): buf = param_state['momentum_buffer'] = torch.clone(d_p).detach() else: buf = param_state['momentum_buffer'] buf.mul_(momentum).add_((1 - dampening), d_p) if nesterov: d_p = d_p.add(momentum, buf) else: d_p = buf p.data.add_((- group['lr']), d_p) return loss
Performs a single optimization step. Arguments: closure (callable, optional): A closure that reevaluates the model and returns the loss.
trim/convolutional_sparse_coding/matfac.py
step
csinva/transformation-importance
6
python
def step(self, indx_block, closure=None): 'Performs a single optimization step.\n\n Arguments:\n closure (callable, optional): A closure that reevaluates the model\n and returns the loss.\n ' loss = None if (closure is not None): loss = closure() if (indx_block >= len(self.param_groups)): raise ValueError('Block index exceeds the total number of blocks') group = self.param_groups[indx_block] weight_decay = group['weight_decay'] momentum = group['momentum'] dampening = group['dampening'] nesterov = group['nesterov'] for p in group['params']: if (p.grad is None): continue d_p = p.grad.data if (weight_decay != 0): d_p.add_(weight_decay, p.data) if (momentum != 0): param_state = self.state[p] if ('momentum_buffer' not in param_state): buf = param_state['momentum_buffer'] = torch.clone(d_p).detach() else: buf = param_state['momentum_buffer'] buf.mul_(momentum).add_((1 - dampening), d_p) if nesterov: d_p = d_p.add(momentum, buf) else: d_p = buf p.data.add_((- group['lr']), d_p) return loss
def step(self, indx_block, closure=None): 'Performs a single optimization step.\n\n Arguments:\n closure (callable, optional): A closure that reevaluates the model\n and returns the loss.\n ' loss = None if (closure is not None): loss = closure() if (indx_block >= len(self.param_groups)): raise ValueError('Block index exceeds the total number of blocks') group = self.param_groups[indx_block] weight_decay = group['weight_decay'] momentum = group['momentum'] dampening = group['dampening'] nesterov = group['nesterov'] for p in group['params']: if (p.grad is None): continue d_p = p.grad.data if (weight_decay != 0): d_p.add_(weight_decay, p.data) if (momentum != 0): param_state = self.state[p] if ('momentum_buffer' not in param_state): buf = param_state['momentum_buffer'] = torch.clone(d_p).detach() else: buf = param_state['momentum_buffer'] buf.mul_(momentum).add_((1 - dampening), d_p) if nesterov: d_p = d_p.add(momentum, buf) else: d_p = buf p.data.add_((- group['lr']), d_p) return loss<|docstring|>Performs a single optimization step. Arguments: closure (callable, optional): A closure that reevaluates the model and returns the loss.<|endoftext|>
d457cbded2c36440d185072286ea615ecb9046ae6bc754f8375caf1690bc70d4
def test_basic(): ' Tests that basic example works ' big_field = (1, 1, 4, 4) inner_field = (2, 2, 3, 3) assert (overlap_area(big_field, inner_field) == 1)
Tests that basic example works
test_overlap.py
test_basic
samcornish/git_lesson
0
python
def test_basic(): ' ' big_field = (1, 1, 4, 4) inner_field = (2, 2, 3, 3) assert (overlap_area(big_field, inner_field) == 1)
def test_basic(): ' ' big_field = (1, 1, 4, 4) inner_field = (2, 2, 3, 3) assert (overlap_area(big_field, inner_field) == 1)<|docstring|>Tests that basic example works<|endoftext|>
36c7a5ead925fae2725422cca441e8aff2e93f4007d6b9815e8632df7e7987a0
def test_partial_overlap(): " Tests when there's a partial overlap" base_field = (1, 1, 4, 3) over_field = (2, 2, 3, 4) assert (overlap_area(base_field, over_field) == 1)
Tests when there's a partial overlap
test_overlap.py
test_partial_overlap
samcornish/git_lesson
0
python
def test_partial_overlap(): " " base_field = (1, 1, 4, 3) over_field = (2, 2, 3, 4) assert (overlap_area(base_field, over_field) == 1)
def test_partial_overlap(): " " base_field = (1, 1, 4, 3) over_field = (2, 2, 3, 4) assert (overlap_area(base_field, over_field) == 1)<|docstring|>Tests when there's a partial overlap<|endoftext|>
c5574108e0cb27ff4eb4a98fa95e656610678b2e029352eb3e6abfa442b81564
def test_corner_overlap(): " Tests when there's a partial overlap" base_field = (1, 0, 3, 5) over_field = (2, 4, 4, 6) assert (overlap_area(base_field, over_field) == 1)
Tests when there's a partial overlap
test_overlap.py
test_corner_overlap
samcornish/git_lesson
0
python
def test_corner_overlap(): " " base_field = (1, 0, 3, 5) over_field = (2, 4, 4, 6) assert (overlap_area(base_field, over_field) == 1)
def test_corner_overlap(): " " base_field = (1, 0, 3, 5) over_field = (2, 4, 4, 6) assert (overlap_area(base_field, over_field) == 1)<|docstring|>Tests when there's a partial overlap<|endoftext|>
2c40120a07591768472f9048ff35ff25e48f86170972d1bc5e0fef37bb4d4693
def test_edge_touching(): ' Test when there is an edge ' base_field = (1, 1, 4, 4) over_field = (2, 2, 3, 4) assert (overlap_area(base_field, over_field) == 2)
Test when there is an edge
test_overlap.py
test_edge_touching
samcornish/git_lesson
0
python
def test_edge_touching(): ' ' base_field = (1, 1, 4, 4) over_field = (2, 2, 3, 4) assert (overlap_area(base_field, over_field) == 2)
def test_edge_touching(): ' ' base_field = (1, 1, 4, 4) over_field = (2, 2, 3, 4) assert (overlap_area(base_field, over_field) == 2)<|docstring|>Test when there is an edge<|endoftext|>
14abf13e789db15bed99296649a1b612d39db139fd6b0baadefbacd0950b29ca
def test_edge_touching(): ' Test when there is an edge ' base_field = (1, 1, 4, 4) over_field = (2, 1, 3, 4) assert (overlap_area(base_field, over_field) == 3)
Test when there is an edge
test_overlap.py
test_edge_touching
samcornish/git_lesson
0
python
def test_edge_touching(): ' ' base_field = (1, 1, 4, 4) over_field = (2, 1, 3, 4) assert (overlap_area(base_field, over_field) == 3)
def test_edge_touching(): ' ' base_field = (1, 1, 4, 4) over_field = (2, 1, 3, 4) assert (overlap_area(base_field, over_field) == 3)<|docstring|>Test when there is an edge<|endoftext|>
000e65452a13fc14c9ee0adcac05239b8c8469c37d38c522dda61a60cff561a5
def test_outside_edge_touching(): ' Test when they are touching on the outside ' base_field = (1, 1, 4, 4) over_field = (2, 4, 3, 5) assert (overlap_area(base_field, over_field) == 0)
Test when they are touching on the outside
test_overlap.py
test_outside_edge_touching
samcornish/git_lesson
0
python
def test_outside_edge_touching(): ' ' base_field = (1, 1, 4, 4) over_field = (2, 4, 3, 5) assert (overlap_area(base_field, over_field) == 0)
def test_outside_edge_touching(): ' ' base_field = (1, 1, 4, 4) over_field = (2, 4, 3, 5) assert (overlap_area(base_field, over_field) == 0)<|docstring|>Test when they are touching on the outside<|endoftext|>
850fd77c0ab281dfdd94ff4d49b677f9b0efa5d8a3f120190be70a3bec00a077
def test_no_overlap(): ' Test when they are not touching each other ' base_field = (0, 0, 3, 3) over_field = (4, 4, 5, 5) assert (overlap_area(base_field, over_field) == 0)
Test when they are not touching each other
test_overlap.py
test_no_overlap
samcornish/git_lesson
0
python
def test_no_overlap(): ' ' base_field = (0, 0, 3, 3) over_field = (4, 4, 5, 5) assert (overlap_area(base_field, over_field) == 0)
def test_no_overlap(): ' ' base_field = (0, 0, 3, 3) over_field = (4, 4, 5, 5) assert (overlap_area(base_field, over_field) == 0)<|docstring|>Test when they are not touching each other<|endoftext|>
d94e4191e679ed44c29d31f0f649566fe494193340b64eb556f3194cafab2bf8
def test_floats(): ' Test that still works when using floats ' base_field = (1, 1.0, 3.5, 3.5) over_field = (3, 3, 5, 5) assert (overlap_area(base_field, over_field) == (0.5 * 0.5))
Test that still works when using floats
test_overlap.py
test_floats
samcornish/git_lesson
0
python
def test_floats(): ' ' base_field = (1, 1.0, 3.5, 3.5) over_field = (3, 3, 5, 5) assert (overlap_area(base_field, over_field) == (0.5 * 0.5))
def test_floats(): ' ' base_field = (1, 1.0, 3.5, 3.5) over_field = (3, 3, 5, 5) assert (overlap_area(base_field, over_field) == (0.5 * 0.5))<|docstring|>Test that still works when using floats<|endoftext|>
806018e156a0347adb5fcfb11780fb5f9c1d5d9aeb9603e955154fca96e65acc
def test_floats(): ' Test that still works when using floats ' base_field = (1, 1.0, 3.3, 3.1) over_field = (3, 3, 5, 5) assert (overlap_area(base_field, over_field) == approx((0.3 * 0.1), rel=0.001))
Test that still works when using floats
test_overlap.py
test_floats
samcornish/git_lesson
0
python
def test_floats(): ' ' base_field = (1, 1.0, 3.3, 3.1) over_field = (3, 3, 5, 5) assert (overlap_area(base_field, over_field) == approx((0.3 * 0.1), rel=0.001))
def test_floats(): ' ' base_field = (1, 1.0, 3.3, 3.1) over_field = (3, 3, 5, 5) assert (overlap_area(base_field, over_field) == approx((0.3 * 0.1), rel=0.001))<|docstring|>Test that still works when using floats<|endoftext|>
86c9c46ed5d5befe58bd61d98621bea50b25d6e8a42a3ff01e89bc627afb36c7
@property def dataset_paths(self) -> List[str]: 'path that the data should be loaded from in the child class' return self._data_paths
path that the data should be loaded from in the child class
disent/data/groundtruth/base.py
dataset_paths
neonkitchen/disent
0
python
@property def dataset_paths(self) -> List[str]: return self._data_paths
@property def dataset_paths(self) -> List[str]: return self._data_paths<|docstring|>path that the data should be loaded from in the child class<|endoftext|>
39ce1f3a24213c4b4b1e6d1c5f776a2431e590118bb10bd5300ee354f5fa78dd
@property def dataset_path(self): 'path that the dataset should be loaded from in the child class' return self._proc_path
path that the dataset should be loaded from in the child class
disent/data/groundtruth/base.py
dataset_path
neonkitchen/disent
0
python
@property def dataset_path(self): return self._proc_path
@property def dataset_path(self): return self._proc_path<|docstring|>path that the dataset should be loaded from in the child class<|endoftext|>
26fb7f03d3b03bcd03d3d3f8a6f2c6e822f8a71b7f7b6413a1bfabfc888f39d1
def get_version(): 'Reads the version from the package' with open(VERSION_FILE) as handle: lines = handle.read() result = VERSION_REGEX.search(lines) if result: return result.groupdict()['version'] else: raise ValueError('Unable to determine __version__')
Reads the version from the package
setup.py
get_version
jordan-wright/rapportive
42
python
def get_version(): with open(VERSION_FILE) as handle: lines = handle.read() result = VERSION_REGEX.search(lines) if result: return result.groupdict()['version'] else: raise ValueError('Unable to determine __version__')
def get_version(): with open(VERSION_FILE) as handle: lines = handle.read() result = VERSION_REGEX.search(lines) if result: return result.groupdict()['version'] else: raise ValueError('Unable to determine __version__')<|docstring|>Reads the version from the package<|endoftext|>
15cd208ebe9374712a526c2485e0ea97ce5d137d6686df24038cc9255b720b14
def get_requirements(): 'Reads the installation requirements from requirements.pip' with open('requirements.pip') as f: return [line.rstrip() for line in f if (not line.startswith('#'))]
Reads the installation requirements from requirements.pip
setup.py
get_requirements
jordan-wright/rapportive
42
python
def get_requirements(): with open('requirements.pip') as f: return [line.rstrip() for line in f if (not line.startswith('#'))]
def get_requirements(): with open('requirements.pip') as f: return [line.rstrip() for line in f if (not line.startswith('#'))]<|docstring|>Reads the installation requirements from requirements.pip<|endoftext|>
351162ecfff45f6bbdee443dca8a014aeffaebca31cb76c0dfc9ab577b73f2f8
def getdata(cube, ecube, w=None): '\n Input\n -----\n cube: astropy.io.fits object\n data cube\n ecube: astropy.io.fits object\n variance cube\n w: astropy.wcs.WCS object\n wcs object\n Output\n ------\n images: ndarray\n data array\n weights: ndarray\n weights array\n ifu_wl: array\n wl array\n w: stropy.wcs.WCS object\n wcs object\n ' chdu = fits.open(cube) ehdu = fits.open(ecube) if (w == None): w = wcs.WCS(chdu[0].header, chdu) else: chdu[0].header.update(w.to_header()) w = wcs.WCS(chdu[0].header, chdu) assert (w.axis_type_names == ['RA', 'DEC', 'pixel']) wlstart = chdu[0].header['CRVAL3'] wldelta = chdu[0].header['CDELT3'] wlend = (wlstart + (wldelta * chdu[0].header['NAXIS3'])) ifu_wl = np.arange(wlstart, wlend, wldelta) images = chdu[0].data weights = (1 / ehdu[0].data) weights[np.isnan(weights)] = 0.0 weights[np.isinf(weights)] = 0.0 weights[np.isinf(images)] = 0.0 wmask = ((weights != 0).sum((1, 2)) != 0) images = images[wmask] weights = weights[wmask] ifu_wl = ifu_wl[wmask] return (images, weights, ifu_wl, w)
Input ----- cube: astropy.io.fits object data cube ecube: astropy.io.fits object variance cube w: astropy.wcs.WCS object wcs object Output ------ images: ndarray data array weights: ndarray weights array ifu_wl: array wl array w: stropy.wcs.WCS object wcs object
junk/spaxlet.py
getdata
Majoburo/spaxlet
0
python
def getdata(cube, ecube, w=None): '\n Input\n -----\n cube: astropy.io.fits object\n data cube\n ecube: astropy.io.fits object\n variance cube\n w: astropy.wcs.WCS object\n wcs object\n Output\n ------\n images: ndarray\n data array\n weights: ndarray\n weights array\n ifu_wl: array\n wl array\n w: stropy.wcs.WCS object\n wcs object\n ' chdu = fits.open(cube) ehdu = fits.open(ecube) if (w == None): w = wcs.WCS(chdu[0].header, chdu) else: chdu[0].header.update(w.to_header()) w = wcs.WCS(chdu[0].header, chdu) assert (w.axis_type_names == ['RA', 'DEC', 'pixel']) wlstart = chdu[0].header['CRVAL3'] wldelta = chdu[0].header['CDELT3'] wlend = (wlstart + (wldelta * chdu[0].header['NAXIS3'])) ifu_wl = np.arange(wlstart, wlend, wldelta) images = chdu[0].data weights = (1 / ehdu[0].data) weights[np.isnan(weights)] = 0.0 weights[np.isinf(weights)] = 0.0 weights[np.isinf(images)] = 0.0 wmask = ((weights != 0).sum((1, 2)) != 0) images = images[wmask] weights = weights[wmask] ifu_wl = ifu_wl[wmask] return (images, weights, ifu_wl, w)
def getdata(cube, ecube, w=None): '\n Input\n -----\n cube: astropy.io.fits object\n data cube\n ecube: astropy.io.fits object\n variance cube\n w: astropy.wcs.WCS object\n wcs object\n Output\n ------\n images: ndarray\n data array\n weights: ndarray\n weights array\n ifu_wl: array\n wl array\n w: stropy.wcs.WCS object\n wcs object\n ' chdu = fits.open(cube) ehdu = fits.open(ecube) if (w == None): w = wcs.WCS(chdu[0].header, chdu) else: chdu[0].header.update(w.to_header()) w = wcs.WCS(chdu[0].header, chdu) assert (w.axis_type_names == ['RA', 'DEC', 'pixel']) wlstart = chdu[0].header['CRVAL3'] wldelta = chdu[0].header['CDELT3'] wlend = (wlstart + (wldelta * chdu[0].header['NAXIS3'])) ifu_wl = np.arange(wlstart, wlend, wldelta) images = chdu[0].data weights = (1 / ehdu[0].data) weights[np.isnan(weights)] = 0.0 weights[np.isinf(weights)] = 0.0 weights[np.isinf(images)] = 0.0 wmask = ((weights != 0).sum((1, 2)) != 0) images = images[wmask] weights = weights[wmask] ifu_wl = ifu_wl[wmask] return (images, weights, ifu_wl, w)<|docstring|>Input ----- cube: astropy.io.fits object data cube ecube: astropy.io.fits object variance cube w: astropy.wcs.WCS object wcs object Output ------ images: ndarray data array weights: ndarray weights array ifu_wl: array wl array w: stropy.wcs.WCS object wcs object<|endoftext|>
4f1b6a16b915d5001511ee790bcae48c598ead71355df6b44b04fdbbf2131996
def query_ps_from_wcs(w): 'Query PanStarrs for a wcs.\n ' (nra, ndec) = w.array_shape[1:] (dra, ddec) = w.wcs.cdelt[:2] c = wcs.utils.pixel_to_skycoord((nra / 2.0), (ndec / 2.0), w) ddeg = np.linalg.norm([((dra * nra) / 2), ((ddec * ndec) / 2)]) pd_table = query(c.ra.value, c.dec.value, ddeg) scat = wcs.utils.skycoord_to_pixel(SkyCoord(pd_table['raMean'], pd_table['decMean'], unit='deg'), w, origin=0, mode='all') mask = ((((scat[0] < nra) * (scat[1] < ndec)) * (scat[0] > 0)) * (scat[1] > 0)) pd_table = pd_table[mask] pd_table['x'] = scat[0][mask] pd_table['y'] = scat[1][mask] return pd_table
Query PanStarrs for a wcs.
junk/spaxlet.py
query_ps_from_wcs
Majoburo/spaxlet
0
python
def query_ps_from_wcs(w): '\n ' (nra, ndec) = w.array_shape[1:] (dra, ddec) = w.wcs.cdelt[:2] c = wcs.utils.pixel_to_skycoord((nra / 2.0), (ndec / 2.0), w) ddeg = np.linalg.norm([((dra * nra) / 2), ((ddec * ndec) / 2)]) pd_table = query(c.ra.value, c.dec.value, ddeg) scat = wcs.utils.skycoord_to_pixel(SkyCoord(pd_table['raMean'], pd_table['decMean'], unit='deg'), w, origin=0, mode='all') mask = ((((scat[0] < nra) * (scat[1] < ndec)) * (scat[0] > 0)) * (scat[1] > 0)) pd_table = pd_table[mask] pd_table['x'] = scat[0][mask] pd_table['y'] = scat[1][mask] return pd_table
def query_ps_from_wcs(w): '\n ' (nra, ndec) = w.array_shape[1:] (dra, ddec) = w.wcs.cdelt[:2] c = wcs.utils.pixel_to_skycoord((nra / 2.0), (ndec / 2.0), w) ddeg = np.linalg.norm([((dra * nra) / 2), ((ddec * ndec) / 2)]) pd_table = query(c.ra.value, c.dec.value, ddeg) scat = wcs.utils.skycoord_to_pixel(SkyCoord(pd_table['raMean'], pd_table['decMean'], unit='deg'), w, origin=0, mode='all') mask = ((((scat[0] < nra) * (scat[1] < ndec)) * (scat[0] > 0)) * (scat[1] > 0)) pd_table = pd_table[mask] pd_table['x'] = scat[0][mask] pd_table['y'] = scat[1][mask] return pd_table<|docstring|>Query PanStarrs for a wcs.<|endoftext|>
012ed5469d3e422fcdb5c655e53570d73ef16b1e838cb8e7536605dbe31b4c28
def define_model(images, weights, psf='startpsf.npy'): ' Create model psf and obsevation\n ' start_psf = np.load(psf) out = np.outer(np.ones(len(images)), start_psf) out.shape = (len(images), start_psf.shape[0], start_psf.shape[1]) psfs = scarlet.PSF(out) model_psf = scarlet.PSF(partial(scarlet.psf.gaussian, sigma=0.8), shape=(None, 8, 8)) model_frame = scarlet.Frame(images.shape, psfs=model_psf) observation = scarlet.Observation(images, weights=weights, psfs=psfs).match(model_frame) return (model_frame, observation)
Create model psf and obsevation
junk/spaxlet.py
define_model
Majoburo/spaxlet
0
python
def define_model(images, weights, psf='startpsf.npy'): ' \n ' start_psf = np.load(psf) out = np.outer(np.ones(len(images)), start_psf) out.shape = (len(images), start_psf.shape[0], start_psf.shape[1]) psfs = scarlet.PSF(out) model_psf = scarlet.PSF(partial(scarlet.psf.gaussian, sigma=0.8), shape=(None, 8, 8)) model_frame = scarlet.Frame(images.shape, psfs=model_psf) observation = scarlet.Observation(images, weights=weights, psfs=psfs).match(model_frame) return (model_frame, observation)
def define_model(images, weights, psf='startpsf.npy'): ' \n ' start_psf = np.load(psf) out = np.outer(np.ones(len(images)), start_psf) out.shape = (len(images), start_psf.shape[0], start_psf.shape[1]) psfs = scarlet.PSF(out) model_psf = scarlet.PSF(partial(scarlet.psf.gaussian, sigma=0.8), shape=(None, 8, 8)) model_frame = scarlet.Frame(images.shape, psfs=model_psf) observation = scarlet.Observation(images, weights=weights, psfs=psfs).match(model_frame) return (model_frame, observation)<|docstring|>Create model psf and obsevation<|endoftext|>
8b7fcbfb4c089506ed25f05d0eb05a8bcb5c206ac97c11bab4d566c96a65171a
def new_handle(self, context=None): '\n The new_handle function returns a Handle object with a url and a\n node id. The new_handle function invokes the localize_handle\n method first to set the url and then invokes the initialize_handle\n function to get an ID.\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.new_handle', [], self._service_ver, context)
The new_handle function returns a Handle object with a url and a node id. The new_handle function invokes the localize_handle method first to set the url and then invokes the initialize_handle function to get an ID. :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
new_handle
ialarmedalien/kb_blast
1
python
def new_handle(self, context=None): '\n The new_handle function returns a Handle object with a url and a\n node id. The new_handle function invokes the localize_handle\n method first to set the url and then invokes the initialize_handle\n function to get an ID.\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.new_handle', [], self._service_ver, context)
def new_handle(self, context=None): '\n The new_handle function returns a Handle object with a url and a\n node id. The new_handle function invokes the localize_handle\n method first to set the url and then invokes the initialize_handle\n function to get an ID.\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.new_handle', [], self._service_ver, context)<|docstring|>The new_handle function returns a Handle object with a url and a node id. The new_handle function invokes the localize_handle method first to set the url and then invokes the initialize_handle function to get an ID. :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. 
The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
96cfd83257d7248566fd12fd4c709075fde5047723f01e31ece1b10961ab9bc4
def localize_handle(self, h1, service_name, context=None): '\n The localize_handle function attempts to locate a shock server near\n the service. The localize_handle function must be called before the\n Handle is initialized becuase when the handle is initialized, it is\n given a node id that maps to the shock server where the node was\n created. This function should not be called directly.\n :param h1: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param service_name: instance of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. 
These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.localize_handle', [h1, service_name], self._service_ver, context)
The localize_handle function attempts to locate a shock server near the service. The localize_handle function must be called before the Handle is initialized becuase when the handle is initialized, it is given a node id that maps to the shock server where the node was created. This function should not be called directly. :param h1: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param service_name: instance of String :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. 
These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
localize_handle
ialarmedalien/kb_blast
1
python
def localize_handle(self, h1, service_name, context=None): '\n The localize_handle function attempts to locate a shock server near\n the service. The localize_handle function must be called before the\n Handle is initialized becuase when the handle is initialized, it is\n given a node id that maps to the shock server where the node was\n created. This function should not be called directly.\n :param h1: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param service_name: instance of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. 
These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.localize_handle', [h1, service_name], self._service_ver, context)
def localize_handle(self, h1, service_name, context=None): '\n The localize_handle function attempts to locate a shock server near\n the service. The localize_handle function must be called before the\n Handle is initialized becuase when the handle is initialized, it is\n given a node id that maps to the shock server where the node was\n created. This function should not be called directly.\n :param h1: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param service_name: instance of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. 
These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.localize_handle', [h1, service_name], self._service_ver, context)<|docstring|>The localize_handle function attempts to locate a shock server near the service. The localize_handle function must be called before the Handle is initialized becuase when the handle is initialized, it is given a node id that maps to the shock server where the node was created. This function should not be called directly. :param h1: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param service_name: instance of String :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. 
The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
2403f66b876c293b4eba38c5ff956d8362ddbdecc08e9e41579b1a69d9223ee3
def initialize_handle(self, h1, context=None): '\n The initialize_handle returns a Handle object with an ID. This\n function should not be called directly\n :param h1: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.initialize_handle', [h1], self._service_ver, context)
The initialize_handle returns a Handle object with an ID. This function should not be called directly :param h1: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
initialize_handle
ialarmedalien/kb_blast
1
python
def initialize_handle(self, h1, context=None): '\n The initialize_handle returns a Handle object with an ID. This\n function should not be called directly\n :param h1: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.initialize_handle', [h1], self._service_ver, context)
def initialize_handle(self, h1, context=None): '\n The initialize_handle returns a Handle object with an ID. This\n function should not be called directly\n :param h1: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. 
These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.initialize_handle', [h1], self._service_ver, context)<|docstring|>The initialize_handle returns a Handle object with an ID. This function should not be called directly :param h1: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. 
These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
6958a66414a86e2d6c9a056c65540eaabb1a80b3001ea890e5ec07ac57c5e5e8
def persist_handle(self, h, context=None): '\n The persist_handle writes the handle to a persistent store\n that can be later retrieved using the list_handles\n function.\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :returns: instance of String\n ' return self._client.call_method('AbstractHandle.persist_handle', [h], self._service_ver, context)
The persist_handle writes the handle to a persistent store that can be later retrieved using the list_handles function. :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :returns: instance of String
lib/installed_clients/AbstractHandleClient.py
persist_handle
ialarmedalien/kb_blast
1
python
def persist_handle(self, h, context=None): '\n The persist_handle writes the handle to a persistent store\n that can be later retrieved using the list_handles\n function.\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :returns: instance of String\n ' return self._client.call_method('AbstractHandle.persist_handle', [h], self._service_ver, context)
def persist_handle(self, h, context=None): '\n The persist_handle writes the handle to a persistent store\n that can be later retrieved using the list_handles\n function.\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :returns: instance of String\n ' return self._client.call_method('AbstractHandle.persist_handle', [h], self._service_ver, context)<|docstring|>The persist_handle writes the handle to a persistent store that can be later retrieved using the list_handles function. :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. 
These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :returns: instance of String<|endoftext|>
ff8ef4bcef93245b5860c996b8900f23c8388d0c394bcaebc1f0792b77bc4087
def upload(self, infile, context=None): '\n The upload and download functions provide an empty\n implementation that must be provided in a client. If a concrete\n implementation is not provided an error is thrown. These are\n the equivelant of abstract methods, with runtime rather than\n compile time inforcement.\n \n [client_implemented]\n :param infile: instance of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.upload', [infile], self._service_ver, context)
The upload and download functions provide an empty implementation that must be provided in a client. If a concrete implementation is not provided an error is thrown. These are the equivelant of abstract methods, with runtime rather than compile time inforcement. [client_implemented] :param infile: instance of String :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
upload
ialarmedalien/kb_blast
1
python
def upload(self, infile, context=None): '\n The upload and download functions provide an empty\n implementation that must be provided in a client. If a concrete\n implementation is not provided an error is thrown. These are\n the equivelant of abstract methods, with runtime rather than\n compile time inforcement.\n \n [client_implemented]\n :param infile: instance of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.upload', [infile], self._service_ver, context)
def upload(self, infile, context=None): '\n The upload and download functions provide an empty\n implementation that must be provided in a client. If a concrete\n implementation is not provided an error is thrown. These are\n the equivelant of abstract methods, with runtime rather than\n compile time inforcement.\n \n [client_implemented]\n :param infile: instance of String\n :returns: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.upload', [infile], self._service_ver, context)<|docstring|>The upload and download functions provide an empty implementation that must be provided in a client. If a concrete implementation is not provided an error is thrown. These are the equivelant of abstract methods, with runtime rather than compile time inforcement. [client_implemented] :param infile: instance of String :returns: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. 
In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
fd29ffd2c8d1caad2fe143800655081aac9878d8c3897aeb50349089088c7bd7
def download(self, h, outfile, context=None):
    """Download the data referenced by handle *h* into *outfile*.

    The upload/download pair is declared with an empty server-side
    implementation that a concrete client must supply; invoking an
    unimplemented one raises an error (runtime rather than compile-time
    "abstract method" enforcement).
    [client_implemented]
    :param h: a "Handle" structure with parameters "hid" (HandleId),
        "file_name" (String), "id" (NodeId), "type", "url",
        "remote_md5" and "remote_sha1" (all String). For shock-backed
        handles the id is the shock node id and url is the shock server
        address; the remote checksums can verify the transfer.
    :param outfile: instance of String (destination path)
    """
    params = [h, outfile]
    return self._client.call_method(
        'AbstractHandle.download', params, self._service_ver, context)
The upload and download functions provide an empty implementation that must be provided in a client. If a concrete implementation is not provided an error is thrown. These are the equivelant of abstract methods, with runtime rather than compile time inforcement. [client_implemented] :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param outfile: instance of String
lib/installed_clients/AbstractHandleClient.py
download
ialarmedalien/kb_blast
1
python
def download(self, h, outfile, context=None): '\n The upload and download functions provide an empty\n implementation that must be provided in a client. If a concrete\n implementation is not provided an error is thrown. These are\n the equivelant of abstract methods, with runtime rather than\n compile time inforcement.\n [client_implemented]\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param outfile: instance of String\n ' return self._client.call_method('AbstractHandle.download', [h, outfile], self._service_ver, context)
def download(self, h, outfile, context=None): '\n The upload and download functions provide an empty\n implementation that must be provided in a client. If a concrete\n implementation is not provided an error is thrown. These are\n the equivelant of abstract methods, with runtime rather than\n compile time inforcement.\n [client_implemented]\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param outfile: instance of String\n ' return self._client.call_method('AbstractHandle.download', [h, outfile], self._service_ver, context)<|docstring|>The upload and download functions provide an empty implementation that must be provided in a client. If a concrete implementation is not provided an error is thrown. These are the equivelant of abstract methods, with runtime rather than compile time inforcement. [client_implemented] :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. 
In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param outfile: instance of String<|endoftext|>
8dbb7c9832f05c3a18ce31c1b2446ebcdd10a15af8fbe2d669d1a7b49884c31a
def upload_metadata(self, h, infile, context=None):
    """Attach metadata from *infile* to an existing handle *h*.

    The data the handle represents must already have been uploaded;
    uploading metadata before the data is not currently supported.
    [client_implemented]
    :param h: a "Handle" structure with parameters "hid" (HandleId),
        "file_name" (String), "id" (NodeId), "type", "url",
        "remote_md5" and "remote_sha1" (all String). For shock-backed
        handles the id is the shock node id and url is the shock server
        address.
    :param infile: instance of String (path of the metadata file)
    """
    params = [h, infile]
    return self._client.call_method(
        'AbstractHandle.upload_metadata', params, self._service_ver, context)
The upload_metadata function uploads metadata to an existing handle. This means that the data that the handle represents has already been uploaded. Uploading meta data before the data has been uploaded is not currently supported. [client_implemented] :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param infile: instance of String
lib/installed_clients/AbstractHandleClient.py
upload_metadata
ialarmedalien/kb_blast
1
python
def upload_metadata(self, h, infile, context=None): '\n The upload_metadata function uploads metadata to an existing\n handle. This means that the data that the handle represents\n has already been uploaded. Uploading meta data before the data\n has been uploaded is not currently supported.\n [client_implemented]\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param infile: instance of String\n ' return self._client.call_method('AbstractHandle.upload_metadata', [h, infile], self._service_ver, context)
def upload_metadata(self, h, infile, context=None): '\n The upload_metadata function uploads metadata to an existing\n handle. This means that the data that the handle represents\n has already been uploaded. Uploading meta data before the data\n has been uploaded is not currently supported.\n [client_implemented]\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param infile: instance of String\n ' return self._client.call_method('AbstractHandle.upload_metadata', [h, infile], self._service_ver, context)<|docstring|>The upload_metadata function uploads metadata to an existing handle. This means that the data that the handle represents has already been uploaded. Uploading meta data before the data has been uploaded is not currently supported. [client_implemented] :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. 
The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param infile: instance of String<|endoftext|>
144d1096533e5d4df07082f9d77ffb0e34c2b9103a350ed0743181bf0a264862
def download_metadata(self, h, outfile, context=None):
    """Fetch the metadata associated with handle *h* and write it to *outfile*.

    [client_implemented]
    :param h: a "Handle" structure with parameters "hid" (HandleId),
        "file_name" (String), "id" (NodeId), "type", "url",
        "remote_md5" and "remote_sha1" (all String). For shock-backed
        handles the id is the shock node id and url is the shock server
        address.
    :param outfile: instance of String (destination path)
    """
    params = [h, outfile]
    return self._client.call_method(
        'AbstractHandle.download_metadata', params, self._service_ver, context)
The download_metadata function downloads metadata associated with the data handle and writes it to a file. [client_implemented] :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param outfile: instance of String
lib/installed_clients/AbstractHandleClient.py
download_metadata
ialarmedalien/kb_blast
1
python
def download_metadata(self, h, outfile, context=None): '\n The download_metadata function downloads metadata associated\n with the data handle and writes it to a file.\n [client_implemented]\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param outfile: instance of String\n ' return self._client.call_method('AbstractHandle.download_metadata', [h, outfile], self._service_ver, context)
def download_metadata(self, h, outfile, context=None): '\n The download_metadata function downloads metadata associated\n with the data handle and writes it to a file.\n [client_implemented]\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n :param outfile: instance of String\n ' return self._client.call_method('AbstractHandle.download_metadata', [h, outfile], self._service_ver, context)<|docstring|>The download_metadata function downloads metadata associated with the data handle and writes it to a file. [client_implemented] :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. 
These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String :param outfile: instance of String<|endoftext|>
499f596cdbdaac1d90362f6c0b7feba97b96028859726cae691c2c5925612146
def hids_to_handles(self, hids, context=None):
    """Resolve a list of handle ids into full Handle structures.

    :param hids: list of "HandleId" (String). A HandleId is the unique
        reference through which the HandleService exposes a data file;
        for shock-backed storage the underlying node id, type "shock",
        server url, and remote md5/sha1 checksums are recorded.
    :returns: list of "Handle" structures, each with parameters "hid"
        (HandleId), "file_name" (String), "id" (NodeId), "type", "url",
        "remote_md5" and "remote_sha1" (all String).
    """
    params = [hids]
    return self._client.call_method(
        'AbstractHandle.hids_to_handles', params, self._service_ver, context)
Given a list of handle ids, this function returns a list of handles. :param hids: instance of list of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.) :returns: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
hids_to_handles
ialarmedalien/kb_blast
1
python
def hids_to_handles(self, hids, context=None): '\n Given a list of handle ids, this function returns\n a list of handles.\n :param hids: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.hids_to_handles', [hids], self._service_ver, context)
def hids_to_handles(self, hids, context=None): '\n Given a list of handle ids, this function returns\n a list of handles.\n :param hids: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.hids_to_handles', [hids], self._service_ver, context)<|docstring|>Given a list of handle ids, this function returns a list of handles. :param hids: instance of list of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. 
In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.) :returns: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
1f98e1d0ff4012b56d4fcf7a51d2e3813e84bc4fe3853b39f4d581121833b618
def are_readable(self, arg_1, context=None):
    """Check whether the caller can read the data behind every handle id.

    Returns false (0) if any one of the ids references data the caller
    cannot read.
    :param arg_1: list of "HandleId" (String) identifying the data
        files to check; for shock-backed storage each id maps to a
        shock node on the recorded server url.
    :returns: instance of Long (truthy only if all ids are readable)
    """
    params = [arg_1]
    return self._client.call_method(
        'AbstractHandle.are_readable', params, self._service_ver, context)
Given a list of handle ids, this function determines if the underlying data is readable by the caller. If any one of the handle ids reference unreadable data this function returns false. :param arg_1: instance of list of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.) :returns: instance of Long
lib/installed_clients/AbstractHandleClient.py
are_readable
ialarmedalien/kb_blast
1
python
def are_readable(self, arg_1, context=None): '\n Given a list of handle ids, this function determines if\n the underlying data is readable by the caller. If any\n one of the handle ids reference unreadable data this\n function returns false.\n :param arg_1: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.are_readable', [arg_1], self._service_ver, context)
def are_readable(self, arg_1, context=None): '\n Given a list of handle ids, this function determines if\n the underlying data is readable by the caller. If any\n one of the handle ids reference unreadable data this\n function returns false.\n :param arg_1: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.are_readable', [arg_1], self._service_ver, context)<|docstring|>Given a list of handle ids, this function determines if the underlying data is readable by the caller. If any one of the handle ids reference unreadable data this function returns false. :param arg_1: instance of list of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.) :returns: instance of Long<|endoftext|>
4c3f0c54bbc3138a6733c1b37c55219c317b8949362ee13566bb505ff0649193
def is_owner(self, arg_1, context=None): '\n Given a list of handle ids, this function determines if the underlying\n data is owned by the caller. If any one of the handle ids reference\n unreadable data this function returns false.\n :param arg_1: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.is_owner', [arg_1], self._service_ver, context)
Given a list of handle ids, this function determines if the underlying data is owned by the caller. If any one of the handle ids reference unreadable data this function returns false. :param arg_1: instance of list of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.) :returns: instance of Long
lib/installed_clients/AbstractHandleClient.py
is_owner
ialarmedalien/kb_blast
1
python
def is_owner(self, arg_1, context=None): '\n Given a list of handle ids, this function determines if the underlying\n data is owned by the caller. If any one of the handle ids reference\n unreadable data this function returns false.\n :param arg_1: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.is_owner', [arg_1], self._service_ver, context)
def is_owner(self, arg_1, context=None): '\n Given a list of handle ids, this function determines if the underlying\n data is owned by the caller. If any one of the handle ids reference\n unreadable data this function returns false.\n :param arg_1: instance of list of type "HandleId" (Handle provides a\n unique reference that enables access to the data files through\n functions provided as part of the HandleService. In the case of\n using shock, the id is the node id. In the case of using shock the\n value of type is shock. In the future these values should\n enumerated. The value of url is the http address of the shock\n server, including the protocol (http or https) and if necessary\n the port. The values of remote_md5 and remote_sha1 are those\n computed on the file in the remote data store. These can be used\n to verify uploads and downloads.)\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.is_owner', [arg_1], self._service_ver, context)<|docstring|>Given a list of handle ids, this function determines if the underlying data is owned by the caller. If any one of the handle ids reference unreadable data this function returns false. :param arg_1: instance of list of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.) :returns: instance of Long<|endoftext|>
95c645e8a900164f78a68a8e5fb295b6753e85277bd5f5fd563cd70440ce21fd
def is_readable(self, id, context=None): '\n Given a handle id, this function queries the underlying\n data store to see if the data being referred to is\n readable to by the caller.\n :param id: instance of String\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.is_readable', [id], self._service_ver, context)
Given a handle id, this function queries the underlying data store to see if the data being referred to is readable to by the caller. :param id: instance of String :returns: instance of Long
lib/installed_clients/AbstractHandleClient.py
is_readable
ialarmedalien/kb_blast
1
python
def is_readable(self, id, context=None): '\n Given a handle id, this function queries the underlying\n data store to see if the data being referred to is\n readable to by the caller.\n :param id: instance of String\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.is_readable', [id], self._service_ver, context)
def is_readable(self, id, context=None): '\n Given a handle id, this function queries the underlying\n data store to see if the data being referred to is\n readable to by the caller.\n :param id: instance of String\n :returns: instance of Long\n ' return self._client.call_method('AbstractHandle.is_readable', [id], self._service_ver, context)<|docstring|>Given a handle id, this function queries the underlying data store to see if the data being referred to is readable to by the caller. :param id: instance of String :returns: instance of Long<|endoftext|>
0cc64fa6f8a42dfddfb2080c00c3d341e1a2fd1276c05e47ae72430325c2c360
def list_handles(self, context=None): '\n The list function returns the set of handles that were\n created by the user.\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.list_handles', [], self._service_ver, context)
The list function returns the set of handles that were created by the user. :returns: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
list_handles
ialarmedalien/kb_blast
1
python
def list_handles(self, context=None): '\n The list function returns the set of handles that were\n created by the user.\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.list_handles', [], self._service_ver, context)
def list_handles(self, context=None): '\n The list function returns the set of handles that were\n created by the user.\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.list_handles', [], self._service_ver, context)<|docstring|>The list function returns the set of handles that were created by the user. :returns: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. 
These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
56adfb5b196eaa69f589cdd59b8b0a37976ae7cc7f153f7c5b3cf266f7570616
def delete_handles(self, l, context=None): '\n The delete_handles function takes a list of handles\n and deletes them on the handle service server.\n :param l: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.delete_handles', [l], self._service_ver, context)
The delete_handles function takes a list of handles and deletes them on the handle service server. :param l: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
delete_handles
ialarmedalien/kb_blast
1
python
def delete_handles(self, l, context=None): '\n The delete_handles function takes a list of handles\n and deletes them on the handle service server.\n :param l: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.delete_handles', [l], self._service_ver, context)
def delete_handles(self, l, context=None): '\n The delete_handles function takes a list of handles\n and deletes them on the handle service server.\n :param l: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.delete_handles', [l], self._service_ver, context)<|docstring|>The delete_handles function takes a list of handles and deletes them on the handle service server. :param l: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. 
These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
0b3d1d83568d6dd306509904350d736c887826844bf77c4b0e666dc493b76a37
def give(self, user, perm, h, context=None): '\n :param user: instance of String\n :param perm: instance of String\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.give', [user, perm, h], self._service_ver, context)
:param user: instance of String :param perm: instance of String :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
give
ialarmedalien/kb_blast
1
python
def give(self, user, perm, h, context=None): '\n :param user: instance of String\n :param perm: instance of String\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.give', [user, perm, h], self._service_ver, context)
def give(self, user, perm, h, context=None): '\n :param user: instance of String\n :param perm: instance of String\n :param h: instance of type "Handle" -> structure: parameter "hid" of\n type "HandleId" (Handle provides a unique reference that enables\n access to the data files through functions provided as part of the\n HandleService. In the case of using shock, the id is the node id.\n In the case of using shock the value of type is shock. In the\n future these values should enumerated. The value of url is the\n http address of the shock server, including the protocol (http or\n https) and if necessary the port. The values of remote_md5 and\n remote_sha1 are those computed on the file in the remote data\n store. These can be used to verify uploads and downloads.),\n parameter "file_name" of String, parameter "id" of type "NodeId",\n parameter "type" of String, parameter "url" of String, parameter\n "remote_md5" of String, parameter "remote_sha1" of String\n ' return self._client.call_method('AbstractHandle.give', [user, perm, h], self._service_ver, context)<|docstring|>:param user: instance of String :param perm: instance of String :param h: instance of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
e44747f2a37946c602725858bdcc5eef6c0904b02ced80f20c779e16adfcf238
def ids_to_handles(self, ids, context=None): '\n Given a list of ids, this function returns\n a list of handles. In case of Shock, the list of ids\n are shock node ids and this function the handles, which\n contains Shock url and related information.\n :param ids: instance of list of type "NodeId"\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.ids_to_handles', [ids], self._service_ver, context)
Given a list of ids, this function returns a list of handles. In case of Shock, the list of ids are shock node ids and this function the handles, which contains Shock url and related information. :param ids: instance of list of type "NodeId" :returns: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String
lib/installed_clients/AbstractHandleClient.py
ids_to_handles
ialarmedalien/kb_blast
1
python
def ids_to_handles(self, ids, context=None): '\n Given a list of ids, this function returns\n a list of handles. In case of Shock, the list of ids\n are shock node ids and this function the handles, which\n contains Shock url and related information.\n :param ids: instance of list of type "NodeId"\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.ids_to_handles', [ids], self._service_ver, context)
def ids_to_handles(self, ids, context=None): '\n Given a list of ids, this function returns\n a list of handles. In case of Shock, the list of ids\n are shock node ids and this function the handles, which\n contains Shock url and related information.\n :param ids: instance of list of type "NodeId"\n :returns: instance of list of type "Handle" -> structure: parameter\n "hid" of type "HandleId" (Handle provides a unique reference that\n enables access to the data files through functions provided as\n part of the HandleService. In the case of using shock, the id is\n the node id. In the case of using shock the value of type is\n shock. In the future these values should enumerated. The value of\n url is the http address of the shock server, including the\n protocol (http or https) and if necessary the port. The values of\n remote_md5 and remote_sha1 are those computed on the file in the\n remote data store. These can be used to verify uploads and\n downloads.), parameter "file_name" of String, parameter "id" of\n type "NodeId", parameter "type" of String, parameter "url" of\n String, parameter "remote_md5" of String, parameter "remote_sha1"\n of String\n ' return self._client.call_method('AbstractHandle.ids_to_handles', [ids], self._service_ver, context)<|docstring|>Given a list of ids, this function returns a list of handles. In case of Shock, the list of ids are shock node ids and this function the handles, which contains Shock url and related information. :param ids: instance of list of type "NodeId" :returns: instance of list of type "Handle" -> structure: parameter "hid" of type "HandleId" (Handle provides a unique reference that enables access to the data files through functions provided as part of the HandleService. In the case of using shock, the id is the node id. In the case of using shock the value of type is shock. In the future these values should enumerated. 
The value of url is the http address of the shock server, including the protocol (http or https) and if necessary the port. The values of remote_md5 and remote_sha1 are those computed on the file in the remote data store. These can be used to verify uploads and downloads.), parameter "file_name" of String, parameter "id" of type "NodeId", parameter "type" of String, parameter "url" of String, parameter "remote_md5" of String, parameter "remote_sha1" of String<|endoftext|>
a1a2d3d83c391f5ae30600cc8c3b3dcb64719db6108db1d144c77978349bf51d
def setUp(self): '\n \n TEST_ASSETS_DIR contains external required content for test. \n \n Test func naming: test_<name of url>_url_is_resolved\n for ex if url is:path("<str:username>/",UserProfileView.as_view(),name="profile"),,\n then: test_profile_url_is_resolved\n \n ' self.TEST_ASSETS_DIR = os.path.join(settings.BASE_DIR, 'test_assets') self.user_dummy_username = 'testuser' self.user_dummy_password = dummy_password() self.client = Client() self.user = User.objects.create_user(username=self.user_dummy_username, password=self.user_dummy_password) self.client.login(username=self.user_dummy_username, password=self.user_dummy_password)
TEST_ASSETS_DIR contains external required content for test. Test func naming: test_<name of url>_url_is_resolved for ex if url is:path("<str:username>/",UserProfileView.as_view(),name="profile"),, then: test_profile_url_is_resolved
users/tests/test_urls.py
setUp
alexdeathway/Gecom
7
python
def setUp(self): '\n \n TEST_ASSETS_DIR contains external required content for test. \n \n Test func naming: test_<name of url>_url_is_resolved\n for ex if url is:path("<str:username>/",UserProfileView.as_view(),name="profile"),,\n then: test_profile_url_is_resolved\n \n ' self.TEST_ASSETS_DIR = os.path.join(settings.BASE_DIR, 'test_assets') self.user_dummy_username = 'testuser' self.user_dummy_password = dummy_password() self.client = Client() self.user = User.objects.create_user(username=self.user_dummy_username, password=self.user_dummy_password) self.client.login(username=self.user_dummy_username, password=self.user_dummy_password)
def setUp(self): '\n \n TEST_ASSETS_DIR contains external required content for test. \n \n Test func naming: test_<name of url>_url_is_resolved\n for ex if url is:path("<str:username>/",UserProfileView.as_view(),name="profile"),,\n then: test_profile_url_is_resolved\n \n ' self.TEST_ASSETS_DIR = os.path.join(settings.BASE_DIR, 'test_assets') self.user_dummy_username = 'testuser' self.user_dummy_password = dummy_password() self.client = Client() self.user = User.objects.create_user(username=self.user_dummy_username, password=self.user_dummy_password) self.client.login(username=self.user_dummy_username, password=self.user_dummy_password)<|docstring|>TEST_ASSETS_DIR contains external required content for test. Test func naming: test_<name of url>_url_is_resolved for ex if url is:path("<str:username>/",UserProfileView.as_view(),name="profile"),, then: test_profile_url_is_resolved<|endoftext|>
e2fa460302e49825479fc9705d13cfb89b4e35102564756e0e68a2d41474417d
def get_spar_components(self, document='Client:Foo', **kwargs) -> ComponentSummaryRoot: 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a synchronous HTTP request. Returns the http data only\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ComponentSummaryRoot\n Response Object\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a synchronous HTTP request. Returns the http data only Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. Returns: ComponentSummaryRoot Response Object
code/python/SPAREngine/v3/fds/sdk/SPAREngine/api/components_api.py
get_spar_components
factset/enterprise-sdk
6
python
def get_spar_components(self, document='Client:Foo', **kwargs) -> ComponentSummaryRoot: 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a synchronous HTTP request. Returns the http data only\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ComponentSummaryRoot\n Response Object\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
def get_spar_components(self, document='Client:Foo', **kwargs) -> ComponentSummaryRoot: 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a synchronous HTTP request. Returns the http data only\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ComponentSummaryRoot\n Response Object\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)<|docstring|>Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a synchronous HTTP request. 
Returns the http data only Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. Returns: ComponentSummaryRoot Response Object<|endoftext|>
39399eeb1b4484c1a4b367fd700ef904a23fbe7c7776df6954d5b94112377748
def get_spar_components_with_http_info(self, document='Client:Foo', **kwargs) -> typing.Tuple[(ComponentSummaryRoot, int, typing.MutableMapping)]: 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a synchronous HTTP request. Returns http data, http status and headers\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ComponentSummaryRoot\n Response Object\n int\n Http Status Code\n dict\n Dictionary of the response headers\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a synchronous HTTP request. Returns http data, http status and headers Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. Returns: ComponentSummaryRoot Response Object int Http Status Code dict Dictionary of the response headers
code/python/SPAREngine/v3/fds/sdk/SPAREngine/api/components_api.py
get_spar_components_with_http_info
factset/enterprise-sdk
6
python
def get_spar_components_with_http_info(self, document='Client:Foo', **kwargs) -> typing.Tuple[(ComponentSummaryRoot, int, typing.MutableMapping)]: 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a synchronous HTTP request. Returns http data, http status and headers\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ComponentSummaryRoot\n Response Object\n int\n Http Status Code\n dict\n Dictionary of the response headers\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
def get_spar_components_with_http_info(self, document='Client:Foo', **kwargs) -> typing.Tuple[(ComponentSummaryRoot, int, typing.MutableMapping)]: 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a synchronous HTTP request. Returns http data, http status and headers\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. 
snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ComponentSummaryRoot\n Response Object\n int\n Http Status Code\n dict\n Dictionary of the response headers\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)<|docstring|>Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a synchronous HTTP request. Returns http data, http status and headers Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. 
Default is read from the configuration. Returns: ComponentSummaryRoot Response Object int Http Status Code dict Dictionary of the response headers<|endoftext|>
5aa11e1ee17d4574e9609763c1ed6876fa21c459cb8d824df3fb18548d81fdc4
def get_spar_components_async(self, document='Client:Foo', **kwargs) -> 'ApplyResult[ComponentSummaryRoot]': 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ApplyResult[ComponentSummaryRoot]\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. Returns: ApplyResult[ComponentSummaryRoot]
code/python/SPAREngine/v3/fds/sdk/SPAREngine/api/components_api.py
get_spar_components_async
factset/enterprise-sdk
6
python
def get_spar_components_async(self, document='Client:Foo', **kwargs) -> 'ApplyResult[ComponentSummaryRoot]': 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ApplyResult[ComponentSummaryRoot]\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
def get_spar_components_async(self, document='Client:Foo', **kwargs) -> 'ApplyResult[ComponentSummaryRoot]': 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a asynchronous HTTP request. Returns the http data, wrapped in ApplyResult\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ApplyResult[ComponentSummaryRoot]\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)<|docstring|>Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a asynchronous HTTP request. 
Returns the http data, wrapped in ApplyResult Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. Returns: ApplyResult[ComponentSummaryRoot]<|endoftext|>
8ad6fb8adeef973b2f56940256309ed251c520736cf79d9a44922d7c5f83b500
def get_spar_components_with_http_info_async(self, document='Client:Foo', **kwargs) -> 'ApplyResult[typing.Tuple[ComponentSummaryRoot, int, typing.MutableMapping]]': 'Get SPAR components # noqa: E501\n\n This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501\n This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult\n\n Args:\n document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"]\n\n Keyword Args:\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _spec_property_naming (bool): True if the variable names in the input data\n are serialized names, as specified in the OpenAPI document.\n False if the variable names in the input data\n are pythonic names, e.g. snake case (default)\n _content_type (str/None): force body content-type.\n Default is None and content-type will be predicted by allowed\n content-types and body.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n Returns:\n ApplyResult[(ComponentSummaryRoot, int, typing.Dict)]\n ' self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True) kwargs['document'] = document return self.get_spar_components_endpoint.call_with_http_info(**kwargs)
Get SPAR components # noqa: E501 This endpoint returns the list of SPAR components in a given SPAR document. # noqa: E501 This method makes a asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult Args: document (str): Document Name. defaults to "Client:Foo", must be one of ["Client:Foo"] Keyword Args: _preload_content (bool): if False, the urllib3.HTTPResponse object will be returned without reading/decoding response data. Default is True. _request_timeout (int/float/tuple): timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of (connection, read) timeouts. Default is None. _check_input_type (bool): specifies if type checking should be done one the data sent to the server. Default is True. _check_return_type (bool): specifies if type checking should be done one the data received from the server. Default is True. _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _content_type (str/None): force body content-type. Default is None and content-type will be predicted by allowed content-types and body. _host_index (int/None): specifies the index of the server that we want to use. Default is read from the configuration. Returns: ApplyResult[(ComponentSummaryRoot, int, typing.Dict)]
def get_spar_components_with_http_info_async(self, document='Client:Foo', **kwargs) -> 'ApplyResult[typing.Tuple[ComponentSummaryRoot, int, typing.MutableMapping]]':
    """Get SPAR components asynchronously, returning data, status and headers.  # noqa: E501

    Returns the list of SPAR components in a given SPAR document. The HTTP
    request is made asynchronously; http data, http status code and headers
    are wrapped in an ApplyResult.

    Args:
        document (str): Document Name. Defaults to "Client:Foo", must be one of ["Client:Foo"].

    Keyword Args:
        _preload_content (bool): if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        _request_timeout (int/float/tuple): total request timeout, or a pair
            (connection, read) of timeouts. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _spec_property_naming (bool): True if the variable names in the input
            data are serialized names as specified in the OpenAPI document;
            False for pythonic names, e.g. snake case (default).
        _content_type (str/None): force body content-type. Default is None and
            content-type will be predicted by allowed content-types and body.
        _host_index (int/None): index of the server to use. Default is read
            from the configuration.
    Returns:
        ApplyResult[(ComponentSummaryRoot, int, typing.Dict)]
    """
    # Async variant with full HTTP info: keep status/headers and run in the pool.
    self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
    kwargs['document'] = document
    endpoint = self.get_spar_components_endpoint
    return endpoint.call_with_http_info(**kwargs)
@contextmanager
def key(mesg=None):
    """Context manager for acquiring a single keypress from the participant.

    Wraps the curses module to simplify getting a single keypress from the
    terminal (default), a buttonbox, or a figure. Set
    slab.psychoacoustics.input_method = 'buttonbox' to use a custom USB
    buttonbox, or to 'figure' to open a figure called 'stairs' (if not already
    opened by the `slab.Staircase.plot` method). Optionally takes a string
    argument which is printed in the terminal for conveying instructions to
    the participant.

    Example::

        with slab.key('Waiting for buttons 1 (yes) or 2 (no).') as key:
            response = key.getch()
    """
    if input_method == 'keyboard':
        if curses is None:
            raise ImportError('You need curses to use the keypress class (pip install curses (or windows-curses))')
        curses.filter()
        stdscr = curses.initscr()
        curses.noecho()
        curses.cbreak()
        stdscr.clear()
        stdscr.refresh()
        if mesg is not None:
            stdscr.addstr(str(mesg))
        # BUGFIX: restore the terminal even if the caller's `with` body raises.
        # Previously an exception inside the block skipped nocbreak/echo/endwin,
        # leaving the terminal in raw, no-echo curses mode.
        try:
            yield stdscr
        finally:
            curses.nocbreak()
            curses.echo()
            curses.endwin()
    elif input_method == 'buttonbox':
        if mesg is not None:
            print(mesg)
        yield _Buttonbox
    elif input_method == 'figure':
        if mesg is not None:
            print(mesg)
        yield _FigChar
    else:
        raise ValueError('Unknown input method!')
def load_config(filename):
    """Read a text file with variable assignments into a namedtuple.

    This is a simple convenience method that allows easy writing and loading of
    configuration text files. Experiments sometimes use configuration files when
    experimenters (who might not be Python programmers) need to set parameters
    without changing the code. The format is a plain text file with a variable
    assignment on each line, because it is meant to be written and changed by
    humans. These variables and their values are then accessible as a namedtuple.

    Arguments:
        filename (str | pathlib.Path): path to the file to be read.
    Returns:
        (collections.namedtuple): a tuple containing the variables and values
            defined in the text file, or None if the file is empty.
    Example::

        # assuming there is a file named 'example.txt' with the following content:
        samplerate = 32000
        pause_duration = 30
        speeds = [60,120,180]
        # call load_config to parse the file into a named tuple:
        conf = load_config('example.txt')
        conf.speeds
        # Out: [60, 120, 180]
    """
    from collections import namedtuple
    with open(filename, 'r') as f:
        lines = f.readlines()
    if lines:
        var_names = []
        values = []
        for line in lines:
            line = line.strip()
            # ROBUSTNESS: tolerate blank lines and '#' comment lines, which
            # previously crashed the unconditional split/eval.
            if not line or line.startswith('#'):
                continue
            # BUGFIX: maxsplit=1 so values containing '=' (e.g. "flag = 1 == 1")
            # no longer raise "too many values to unpack".
            var, val = line.split('=', 1)
            var_names.append(var.strip())
            # SECURITY NOTE: eval executes arbitrary expressions; only load
            # configuration files from trusted sources.
            values.append(eval(val.strip()))
        config_tuple = namedtuple('config', var_names)
        return config_tuple(*values)
def save_pickle(self, file_name, clobber=False):
    """Serialize the object's __dict__ to a pickle file.

    Arguments:
        file_name (str | pathlib.Path): name of the file to create.
        clobber (bool): overwrite an existing file with the same name; defaults to False.
    Returns:
        (bool): True if writing was successful.
    Raises:
        FileExistsError: if the file already exists and `clobber` is False.
    """
    if isinstance(file_name, pathlib.Path):
        file_name = str(file_name)
    if not clobber and pathlib.Path(file_name).exists():
        raise FileExistsError('Select clobber=True to overwrite.')
    with open(file_name, 'wb') as f:
        pickle.dump(self.__dict__, f, protocol=pickle.HIGHEST_PROTOCOL)
    return True
def load_pickle(self, file_name):
    """Deserialize a pickle file into `self.__dict__`, replacing the object's
    current state (counterpart of `save_pickle`).

    Attributes:
        file_name (str | pathlib.Path): name of the file to read.
    """
    path = str(file_name) if isinstance(file_name, pathlib.Path) else file_name
    with open(path, 'rb') as f:
        self.__dict__ = pickle.load(f)
def save_json(self, file_name=None, clobber=False):
    """Save the object as a JSON file.

    The object's __dict__ is serialized and saved in standard JSON format, so
    that it can be easily reconstituted (see `load_json` method). Raises
    FileExistsError if the file exists, unless `clobber` is True. When
    `file_name` is None (default), the method returns the JSON string, in case
    you want to inspect it. Note that Numpy arrays are not serializable; numpy
    integers are converted to Python int. This works because the Trialsequence
    and Staircase classes use arrays of indices. If your instances of these
    classes contain arrays of float, use `save_pickle` instead.

    Arguments:
        file_name (str | pathlib.Path): name of the file to create. If None or
            'stdout', return a JSON string instead of writing a file.
        clobber (bool): overwrite an existing file with the same name; defaults to False.
    Returns:
        (str | bool | None): the JSON string when no file name is given, True
            if writing was successful, None if serialization failed.
    """
    def default(i):
        # BUGFIX: accept any numpy integer (int32 is the default on Windows),
        # not just numpy.int64, so sequences saved on Windows serialize too.
        return int(i) if isinstance(i, numpy.integer) else i
    if isinstance(file_name, pathlib.Path):
        file_name = str(file_name)
    if file_name is None or file_name == 'stdout':
        return json.dumps(self.__dict__, indent=2, default=default)
    if pathlib.Path(file_name).exists() and not clobber:
        raise FileExistsError('Select clobber=True to overwrite.')
    try:
        with open(file_name, 'w') as f:
            json.dump(self.__dict__, f, indent=2, default=default)
        return True
    except (TypeError, ValueError):
        print('Your sequence contains data which is not JSON serializable, use the save_pickle method instead.')
def load_json(self, file_name):
    """Deserialize a JSON file into `self.__dict__`, replacing the object's
    current state (counterpart of `save_json`).

    Attributes:
        file_name (str | pathlib.Path): name of the file to read.
    """
    path = str(file_name) if isinstance(file_name, pathlib.Path) else file_name
    with open(path, 'r') as fp:
        self.__dict__ = json.load(fp)
def present_afc_trial(self, target, distractors, key_codes=range(49, 58), isi=0.25, print_info=True):
    """Run one alternative-forced-choice trial: play the target and the
    distractor sound(s) in random order and acquire a response keypress.

    The subject has to identify at which position the target was played. The
    result (True if the response was correct, False if it was wrong) is stored
    in the sequence via the `add_response` method.

    Arguments:
        target (instance of slab.Sound): sound that ought to be identified in the trial.
        distractors (instance or list of slab.Sound): distractor sound(s).
        key_codes (list of int): ascii codes for the response keys (get the code
            for button '1' with ord('1') --> 49); pressing the second button in
            the list is equivalent to the response "the target was the second
            sound played in this trial". Defaults to the key codes for buttons
            '1' to '9'.
        isi (int or float): inter-stimulus interval, the pause between the end
            of one sound and the start of the next one.
        print_info (bool): if True, call the `print_trial_info` method afterwards.
    """
    if isinstance(distractors, list):
        stims = [target] + distractors
    else:
        stims = [target, distractors]
    presentation_order = numpy.random.permutation(len(stims))
    for stim_idx in presentation_order:
        stims[stim_idx].play()
        plt.pause(isi)
    with key() as k:
        pressed = k.getch()
    # index 0 in `stims` is the target; find the interval at which it was played
    target_interval = numpy.where(presentation_order == 0)[0][0]
    self.add_response(pressed == key_codes[target_interval])
    if print_info:
        self.print_trial_info()
def present_tone_trial(self, stimulus, correct_key_idx=0, key_codes=range(49, 58), print_info=True):
    """Present a sound and acquire a response keypress.

    The result (True if the response was correct, False if it was wrong) is
    stored in the sequence via the `add_response` method.

    Arguments:
        stimulus (slab.Sound): sound played in the trial.
        correct_key_idx (int | list of int): index of the key in `key_codes`
            that represents a correct response. Response is correct if
            `response == key_codes[correct_key_idx]`. Can be a list of ints if
            several keys are counted as correct response.
        key_codes (list of int): ascii codes for the response keys (get the
            code for button '1' with ord('1') --> 49).
        print_info (bool): if True, call the `print_trial_info` method afterwards.
    """
    stimulus.play()
    with slab.key() as k:
        response = k.getch()
    # BUGFIX: the documented default correct_key_idx=0 is a bare int, but the
    # original iterated it directly ("for i in correct_key_idx"), raising
    # TypeError on every call with the default; wrap single indices in a list.
    if isinstance(correct_key_idx, int):
        correct_key_idx = [correct_key_idx]
    response = response in [key_codes[i] for i in correct_key_idx]
    self.add_response(response)
    if print_info:
        self.print_trial_info()
def present_tone_trial(self, stimulus, correct_key_idx=0, key_codes=range(49, 58), print_info=True): "\n Present the reference and distractor sounds in random order and acquire a response keypress.\n The result (True if response was correct or False if response was wrong) is stored in the sequence via the\n `add_response` method.\n\n Arguments:\n stimulus (slab.Sound): sound played in the trial.\n correct_key_idx (int | list of int): index of the key in `key_codes` that represents a correct response.\n Response is correct if `response == key_codes[correct_key_idx]`. Can be a list of ints if several keys\n are counted as correct response.\n key_codes (list of int): ascii codes for the response keys (get code for button '1': ord('1') --> 49).\n print_info (bool): If true, call the `print_trial_info` method afterwards.\n " stimulus.play() with slab.key() as k: response = k.getch() response = (response in [key_codes[i] for i in correct_key_idx]) self.add_response(response) if print_info: self.print_trial_info()<|docstring|>Present the reference and distractor sounds in random order and acquire a response keypress. The result (True if response was correct or False if response was wrong) is stored in the sequence via the `add_response` method. Arguments: stimulus (slab.Sound): sound played in the trial. correct_key_idx (int | list of int): index of the key in `key_codes` that represents a correct response. Response is correct if `response == key_codes[correct_key_idx]`. Can be a list of ints if several keys are counted as correct response. key_codes (list of int): ascii codes for the response keys (get code for button '1': ord('1') --> 49). print_info (bool): If true, call the `print_trial_info` method afterwards.<|endoftext|>
7abec54fd80fa595ef9ca680a60f4575d6dd97685058f9c9bac722fb19939dcc
def simulate_response(self, threshold=None, transition_width=2, intervals=1, hitrates=None): '\n Return a simulated response to the current condition index value by calculating the hitrate from a\n psychometric (logistic) function. This is only sensible if trials is numeric and an interval scale representing\n a continuous stimulus value.\n\n Arguments:\n threshold(None | int | float): Midpoint of the psychometric function for adaptive testing. When the\n intensity of the current trial is equal to the `threshold` the hitrate is 50 percent.\n transition_width (int | float): range of stimulus intensities over which the hitrate increases\n from 0.25 to 0.75.\n intervals (int): use 1 (default) to indicate a yes/no trial, 2 or more to indicate an alternative forced\n choice trial. The number of choices determines the probability for a correct response by chance.\n hitrates (None | list | numpy.ndarray): list or numpy array of hitrates for the different conditions,\n to allow custom rates instead of simulation. If given, `threshold` and `transition_width` are not used.\n If a single value is given, this value is used.\n ' slope = (0.5 / transition_width) if isinstance(self, slab.psychoacoustics.Trialsequence): current_condition = self.trials[self.this_n] elif isinstance(self, slab.psychoacoustics.Staircase): current_condition = self._next_intensity else: return None if (hitrates is None): if (threshold is None): raise ValueError("threshold can't be None if hitrates is None!") hitrate = (1 / (1 + numpy.exp(((4 * slope) * (threshold - current_condition))))) elif isinstance(hitrates, (list, numpy.ndarray)): hitrate = hitrates[current_condition] else: hitrate = hitrates hit = (numpy.random.rand() < hitrate) if (hit or (intervals == 1)): return hit return (numpy.random.rand() < (1 / intervals))
Return a simulated response to the current condition index value by calculating the hitrate from a psychometric (logistic) function. This is only sensible if trials is numeric and an interval scale representing a continuous stimulus value. Arguments: threshold(None | int | float): Midpoint of the psychometric function for adaptive testing. When the intensity of the current trial is equal to the `threshold` the hitrate is 50 percent. transition_width (int | float): range of stimulus intensities over which the hitrate increases from 0.25 to 0.75. intervals (int): use 1 (default) to indicate a yes/no trial, 2 or more to indicate an alternative forced choice trial. The number of choices determines the probability for a correct response by chance. hitrates (None | list | numpy.ndarray): list or numpy array of hitrates for the different conditions, to allow custom rates instead of simulation. If given, `threshold` and `transition_width` are not used. If a single value is given, this value is used.
slab/psychoacoustics.py
simulate_response
jakab13/slab
7
python
def simulate_response(self, threshold=None, transition_width=2, intervals=1, hitrates=None): '\n Return a simulated response to the current condition index value by calculating the hitrate from a\n psychometric (logistic) function. This is only sensible if trials is numeric and an interval scale representing\n a continuous stimulus value.\n\n Arguments:\n threshold(None | int | float): Midpoint of the psychometric function for adaptive testing. When the\n intensity of the current trial is equal to the `threshold` the hitrate is 50 percent.\n transition_width (int | float): range of stimulus intensities over which the hitrate increases\n from 0.25 to 0.75.\n intervals (int): use 1 (default) to indicate a yes/no trial, 2 or more to indicate an alternative forced\n choice trial. The number of choices determines the probability for a correct response by chance.\n hitrates (None | list | numpy.ndarray): list or numpy array of hitrates for the different conditions,\n to allow custom rates instead of simulation. If given, `threshold` and `transition_width` are not used.\n If a single value is given, this value is used.\n ' slope = (0.5 / transition_width) if isinstance(self, slab.psychoacoustics.Trialsequence): current_condition = self.trials[self.this_n] elif isinstance(self, slab.psychoacoustics.Staircase): current_condition = self._next_intensity else: return None if (hitrates is None): if (threshold is None): raise ValueError("threshold can't be None if hitrates is None!") hitrate = (1 / (1 + numpy.exp(((4 * slope) * (threshold - current_condition))))) elif isinstance(hitrates, (list, numpy.ndarray)): hitrate = hitrates[current_condition] else: hitrate = hitrates hit = (numpy.random.rand() < hitrate) if (hit or (intervals == 1)): return hit return (numpy.random.rand() < (1 / intervals))
def simulate_response(self, threshold=None, transition_width=2, intervals=1, hitrates=None): '\n Return a simulated response to the current condition index value by calculating the hitrate from a\n psychometric (logistic) function. This is only sensible if trials is numeric and an interval scale representing\n a continuous stimulus value.\n\n Arguments:\n threshold(None | int | float): Midpoint of the psychometric function for adaptive testing. When the\n intensity of the current trial is equal to the `threshold` the hitrate is 50 percent.\n transition_width (int | float): range of stimulus intensities over which the hitrate increases\n from 0.25 to 0.75.\n intervals (int): use 1 (default) to indicate a yes/no trial, 2 or more to indicate an alternative forced\n choice trial. The number of choices determines the probability for a correct response by chance.\n hitrates (None | list | numpy.ndarray): list or numpy array of hitrates for the different conditions,\n to allow custom rates instead of simulation. If given, `threshold` and `transition_width` are not used.\n If a single value is given, this value is used.\n ' slope = (0.5 / transition_width) if isinstance(self, slab.psychoacoustics.Trialsequence): current_condition = self.trials[self.this_n] elif isinstance(self, slab.psychoacoustics.Staircase): current_condition = self._next_intensity else: return None if (hitrates is None): if (threshold is None): raise ValueError("threshold can't be None if hitrates is None!") hitrate = (1 / (1 + numpy.exp(((4 * slope) * (threshold - current_condition))))) elif isinstance(hitrates, (list, numpy.ndarray)): hitrate = hitrates[current_condition] else: hitrate = hitrates hit = (numpy.random.rand() < hitrate) if (hit or (intervals == 1)): return hit return (numpy.random.rand() < (1 / intervals))<|docstring|>Return a simulated response to the current condition index value by calculating the hitrate from a psychometric (logistic) function. 
This is only sensible if trials is numeric and an interval scale representing a continuous stimulus value. Arguments: threshold(None | int | float): Midpoint of the psychometric function for adaptive testing. When the intensity of the current trial is equal to the `threshold` the hitrate is 50 percent. transition_width (int | float): range of stimulus intensities over which the hitrate increases from 0.25 to 0.75. intervals (int): use 1 (default) to indicate a yes/no trial, 2 or more to indicate an alternative forced choice trial. The number of choices determines the probability for a correct response by chance. hitrates (None | list | numpy.ndarray): list or numpy array of hitrates for the different conditions, to allow custom rates instead of simulation. If given, `threshold` and `transition_width` are not used. If a single value is given, this value is used.<|endoftext|>
c187e80dcfa73f9a71b79fc904a2fb9f99be45664f93cef7fcdfe2c7caaad128
def __next__(self): '\n Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes\n `this_trial` and `this_n`. If the trials have ended this method will raise a StopIteration.\n Returns:\n (int): current element of the list in `trials`\n ' self.this_n += 1 self.n_remaining -= 1 if (self.n_remaining < 0): if (self.kind == 'infinite'): self.trials = self._create_simple_sequence(len(self.conditions), self.n_reps, dont_start_with=self.trials[(- 1)]) self.this_n = 0 self.n_remaining = (self.n_trials - 1) else: self.this_trial = [] self.finished = True if self.finished: raise StopIteration if (self.trials[self.this_n] == 0): self.this_trial = 0 else: self.this_trial = self.conditions[(self.trials[self.this_n] - 1)] return self.this_trial
Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes `this_trial` and `this_n`. If the trials have ended this method will raise a StopIteration. Returns: (int): current element of the list in `trials`
slab/psychoacoustics.py
__next__
jakab13/slab
7
python
def __next__(self): '\n Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes\n `this_trial` and `this_n`. If the trials have ended this method will raise a StopIteration.\n Returns:\n (int): current element of the list in `trials`\n ' self.this_n += 1 self.n_remaining -= 1 if (self.n_remaining < 0): if (self.kind == 'infinite'): self.trials = self._create_simple_sequence(len(self.conditions), self.n_reps, dont_start_with=self.trials[(- 1)]) self.this_n = 0 self.n_remaining = (self.n_trials - 1) else: self.this_trial = [] self.finished = True if self.finished: raise StopIteration if (self.trials[self.this_n] == 0): self.this_trial = 0 else: self.this_trial = self.conditions[(self.trials[self.this_n] - 1)] return self.this_trial
def __next__(self): '\n Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes\n `this_trial` and `this_n`. If the trials have ended this method will raise a StopIteration.\n Returns:\n (int): current element of the list in `trials`\n ' self.this_n += 1 self.n_remaining -= 1 if (self.n_remaining < 0): if (self.kind == 'infinite'): self.trials = self._create_simple_sequence(len(self.conditions), self.n_reps, dont_start_with=self.trials[(- 1)]) self.this_n = 0 self.n_remaining = (self.n_trials - 1) else: self.this_trial = [] self.finished = True if self.finished: raise StopIteration if (self.trials[self.this_n] == 0): self.this_trial = 0 else: self.this_trial = self.conditions[(self.trials[self.this_n] - 1)] return self.this_trial<|docstring|>Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes `this_trial` and `this_n`. If the trials have ended this method will raise a StopIteration. Returns: (int): current element of the list in `trials`<|endoftext|>
faad33f44aac5bbb8fa7693e6630e5334595436eb1a2ad79de814637960c9cef
def add_response(self, response): "\n Append response to the list in the `data` attribute belonging to the current trial (see Trialsequence doc).\n\n Attributes:\n response (any): data to append to the list. Can be anything but save_json method won't be available if\n the content of `response` is not JSON serializable (if it's an object for example).\n " if (self.this_n < 0): print("Can't add response because trial hasn't started yet!") else: self.data[self.this_n].append(response)
Append response to the list in the `data` attribute belonging to the current trial (see Trialsequence doc). Attributes: response (any): data to append to the list. Can be anything but save_json method won't be available if the content of `response` is not JSON serializable (if it's an object for example).
slab/psychoacoustics.py
add_response
jakab13/slab
7
python
def add_response(self, response): "\n Append response to the list in the `data` attribute belonging to the current trial (see Trialsequence doc).\n\n Attributes:\n response (any): data to append to the list. Can be anything but save_json method won't be available if\n the content of `response` is not JSON serializable (if it's an object for example).\n " if (self.this_n < 0): print("Can't add response because trial hasn't started yet!") else: self.data[self.this_n].append(response)
def add_response(self, response): "\n Append response to the list in the `data` attribute belonging to the current trial (see Trialsequence doc).\n\n Attributes:\n response (any): data to append to the list. Can be anything but save_json method won't be available if\n the content of `response` is not JSON serializable (if it's an object for example).\n " if (self.this_n < 0): print("Can't add response because trial hasn't started yet!") else: self.data[self.this_n].append(response)<|docstring|>Append response to the list in the `data` attribute belonging to the current trial (see Trialsequence doc). Attributes: response (any): data to append to the list. Can be anything but save_json method won't be available if the content of `response` is not JSON serializable (if it's an object for example).<|endoftext|>
e23b4e023ca79b2e16608b62b4f2a5a3a6597715536d09403e77db30cb232f56
def print_trial_info(self): ' Convenience method for printing current trial information. ' print(f"{self.label} | trial # {self.this_n} of {('inf' if (self.kind == 'infinite') else self.n_trials)} ({('inf' if (self.kind == 'infinite') else self.n_remaining)} remaining): condition {self.this_trial}, last response: {self.data[(self.this_n - 1)]}")
Convenience method for printing current trial information.
slab/psychoacoustics.py
print_trial_info
jakab13/slab
7
python
def print_trial_info(self): ' ' print(f"{self.label} | trial # {self.this_n} of {('inf' if (self.kind == 'infinite') else self.n_trials)} ({('inf' if (self.kind == 'infinite') else self.n_remaining)} remaining): condition {self.this_trial}, last response: {self.data[(self.this_n - 1)]}")
def print_trial_info(self): ' ' print(f"{self.label} | trial # {self.this_n} of {('inf' if (self.kind == 'infinite') else self.n_trials)} ({('inf' if (self.kind == 'infinite') else self.n_remaining)} remaining): condition {self.this_trial}, last response: {self.data[(self.this_n - 1)]}")<|docstring|>Convenience method for printing current trial information.<|endoftext|>
eacbb060c1b19f334854dfb0bf2c349cd3582146bb3a52ce3a63cf44632fae33
@staticmethod def _create_simple_sequence(n_conditions, n_reps, dont_start_with=None): '\n Create a randomized sequence of integers without direct repetitions of any element.\n\n Arguments:\n n_conditions (int): the number of conditions in the list. The array returned contains integers from 1\n to the value of `n_conditions`.\n n_reps (int): number that each element is repeated. Length of the returned array is `n_conditions * n_reps`\n dont_start_with (int): if not None, dont start the sequence with this integer. Can be useful if several\n sequences are used and the final trial of the last sequence should not be the same as the first\n element of the next sequence.\n Returns:\n (numpy.ndarray): randomized sequence of length n_conditions * n_reps without direct repetitions of any\n element.\n ' permute = list(range(1, (n_conditions + 1))) if (dont_start_with is not None): trials = [dont_start_with] else: trials = [] for _ in range(n_reps): numpy.random.shuffle(permute) if (len(trials) > 0): while (trials[(- 1)] == permute[0]): numpy.random.shuffle(permute) trials += permute if (dont_start_with is not None): trials = trials[1:] return numpy.array(trials)
Create a randomized sequence of integers without direct repetitions of any element. Arguments: n_conditions (int): the number of conditions in the list. The array returned contains integers from 1 to the value of `n_conditions`. n_reps (int): number that each element is repeated. Length of the returned array is `n_conditions * n_reps` dont_start_with (int): if not None, dont start the sequence with this integer. Can be useful if several sequences are used and the final trial of the last sequence should not be the same as the first element of the next sequence. Returns: (numpy.ndarray): randomized sequence of length n_conditions * n_reps without direct repetitions of any element.
slab/psychoacoustics.py
_create_simple_sequence
jakab13/slab
7
python
@staticmethod def _create_simple_sequence(n_conditions, n_reps, dont_start_with=None): '\n Create a randomized sequence of integers without direct repetitions of any element.\n\n Arguments:\n n_conditions (int): the number of conditions in the list. The array returned contains integers from 1\n to the value of `n_conditions`.\n n_reps (int): number that each element is repeated. Length of the returned array is `n_conditions * n_reps`\n dont_start_with (int): if not None, dont start the sequence with this integer. Can be useful if several\n sequences are used and the final trial of the last sequence should not be the same as the first\n element of the next sequence.\n Returns:\n (numpy.ndarray): randomized sequence of length n_conditions * n_reps without direct repetitions of any\n element.\n ' permute = list(range(1, (n_conditions + 1))) if (dont_start_with is not None): trials = [dont_start_with] else: trials = [] for _ in range(n_reps): numpy.random.shuffle(permute) if (len(trials) > 0): while (trials[(- 1)] == permute[0]): numpy.random.shuffle(permute) trials += permute if (dont_start_with is not None): trials = trials[1:] return numpy.array(trials)
@staticmethod def _create_simple_sequence(n_conditions, n_reps, dont_start_with=None): '\n Create a randomized sequence of integers without direct repetitions of any element.\n\n Arguments:\n n_conditions (int): the number of conditions in the list. The array returned contains integers from 1\n to the value of `n_conditions`.\n n_reps (int): number that each element is repeated. Length of the returned array is `n_conditions * n_reps`\n dont_start_with (int): if not None, dont start the sequence with this integer. Can be useful if several\n sequences are used and the final trial of the last sequence should not be the same as the first\n element of the next sequence.\n Returns:\n (numpy.ndarray): randomized sequence of length n_conditions * n_reps without direct repetitions of any\n element.\n ' permute = list(range(1, (n_conditions + 1))) if (dont_start_with is not None): trials = [dont_start_with] else: trials = [] for _ in range(n_reps): numpy.random.shuffle(permute) if (len(trials) > 0): while (trials[(- 1)] == permute[0]): numpy.random.shuffle(permute) trials += permute if (dont_start_with is not None): trials = trials[1:] return numpy.array(trials)<|docstring|>Create a randomized sequence of integers without direct repetitions of any element. Arguments: n_conditions (int): the number of conditions in the list. The array returned contains integers from 1 to the value of `n_conditions`. n_reps (int): number that each element is repeated. Length of the returned array is `n_conditions * n_reps` dont_start_with (int): if not None, dont start the sequence with this integer. Can be useful if several sequences are used and the final trial of the last sequence should not be the same as the first element of the next sequence. Returns: (numpy.ndarray): randomized sequence of length n_conditions * n_reps without direct repetitions of any element.<|endoftext|>
e5c7eb77c5eb7f6559abc306e0c7e29d960d25f6c3e1588c1f2e608d4335e3f7
@staticmethod def _deviant_indices(n_standard, deviant_freq=0.1): '\n Create sequence for an oddball experiment which contains two conditions: standards (1) and deviants (0).\n\n Arguments:\n n_standard (int): number of standard trials, encoded as 1, in the sequence.\n deviant_freq (float): frequency of deviants, encoded as 0, in the sequence. Also determines the minimum\n number of standards between two deviants which is 3 if deviant_freq <= .1, 2 if deviant_freq <= .2 and\n 1 if deviant_freq <= .3. A deviant frequency > .3 is not supported.\n Returns:\n (numpy.ndarray): sequence of length n_standard+(n_standard*deviant_freq) with deviants.\n ' if (deviant_freq <= 0.1): min_dist = 3 elif (deviant_freq <= 0.2): min_dist = 2 elif (deviant_freq <= 0.3): min_dist = 1 else: raise ValueError("Deviant frequency can't be greater than 0.3!") n_deviants = int((n_standard * deviant_freq)) indices = range(n_standard) deviant_indices = numpy.random.choice(indices, n_deviants, replace=False) deviant_indices.sort() dist = numpy.diff(deviant_indices) while (numpy.min(dist) < min_dist): deviant_indices = numpy.random.choice(indices, n_deviants, replace=False) deviant_indices.sort() dist = numpy.diff(deviant_indices) return deviant_indices
Create sequence for an oddball experiment which contains two conditions: standards (1) and deviants (0). Arguments: n_standard (int): number of standard trials, encoded as 1, in the sequence. deviant_freq (float): frequency of deviants, encoded as 0, in the sequence. Also determines the minimum number of standards between two deviants which is 3 if deviant_freq <= .1, 2 if deviant_freq <= .2 and 1 if deviant_freq <= .3. A deviant frequency > .3 is not supported. Returns: (numpy.ndarray): sequence of length n_standard+(n_standard*deviant_freq) with deviants.
slab/psychoacoustics.py
_deviant_indices
jakab13/slab
7
python
@staticmethod def _deviant_indices(n_standard, deviant_freq=0.1): '\n Create sequence for an oddball experiment which contains two conditions: standards (1) and deviants (0).\n\n Arguments:\n n_standard (int): number of standard trials, encoded as 1, in the sequence.\n deviant_freq (float): frequency of deviants, encoded as 0, in the sequence. Also determines the minimum\n number of standards between two deviants which is 3 if deviant_freq <= .1, 2 if deviant_freq <= .2 and\n 1 if deviant_freq <= .3. A deviant frequency > .3 is not supported.\n Returns:\n (numpy.ndarray): sequence of length n_standard+(n_standard*deviant_freq) with deviants.\n ' if (deviant_freq <= 0.1): min_dist = 3 elif (deviant_freq <= 0.2): min_dist = 2 elif (deviant_freq <= 0.3): min_dist = 1 else: raise ValueError("Deviant frequency can't be greater than 0.3!") n_deviants = int((n_standard * deviant_freq)) indices = range(n_standard) deviant_indices = numpy.random.choice(indices, n_deviants, replace=False) deviant_indices.sort() dist = numpy.diff(deviant_indices) while (numpy.min(dist) < min_dist): deviant_indices = numpy.random.choice(indices, n_deviants, replace=False) deviant_indices.sort() dist = numpy.diff(deviant_indices) return deviant_indices
@staticmethod def _deviant_indices(n_standard, deviant_freq=0.1): '\n Create sequence for an oddball experiment which contains two conditions: standards (1) and deviants (0).\n\n Arguments:\n n_standard (int): number of standard trials, encoded as 1, in the sequence.\n deviant_freq (float): frequency of deviants, encoded as 0, in the sequence. Also determines the minimum\n number of standards between two deviants which is 3 if deviant_freq <= .1, 2 if deviant_freq <= .2 and\n 1 if deviant_freq <= .3. A deviant frequency > .3 is not supported.\n Returns:\n (numpy.ndarray): sequence of length n_standard+(n_standard*deviant_freq) with deviants.\n ' if (deviant_freq <= 0.1): min_dist = 3 elif (deviant_freq <= 0.2): min_dist = 2 elif (deviant_freq <= 0.3): min_dist = 1 else: raise ValueError("Deviant frequency can't be greater than 0.3!") n_deviants = int((n_standard * deviant_freq)) indices = range(n_standard) deviant_indices = numpy.random.choice(indices, n_deviants, replace=False) deviant_indices.sort() dist = numpy.diff(deviant_indices) while (numpy.min(dist) < min_dist): deviant_indices = numpy.random.choice(indices, n_deviants, replace=False) deviant_indices.sort() dist = numpy.diff(deviant_indices) return deviant_indices<|docstring|>Create sequence for an oddball experiment which contains two conditions: standards (1) and deviants (0). Arguments: n_standard (int): number of standard trials, encoded as 1, in the sequence. deviant_freq (float): frequency of deviants, encoded as 0, in the sequence. Also determines the minimum number of standards between two deviants which is 3 if deviant_freq <= .1, 2 if deviant_freq <= .2 and 1 if deviant_freq <= .3. A deviant frequency > .3 is not supported. Returns: (numpy.ndarray): sequence of length n_standard+(n_standard*deviant_freq) with deviants.<|endoftext|>
6213ab943d17296c4614764e76919a25549d35ea0a262434f63a0bc548fc5bb7
@staticmethod def _create_random_permutation(n_conditions, n_reps): '\n Create a completely random sequence of integers.\n\n Arguments:\n n_conditions (int): the number of conditions in the list. The array returned contains integers from 1\n to the value of `n_conditions`.\n n_reps (int): number that each element is repeated. Length of the returned array is n_conditions * n_reps.\n Returns:\n (numpy.ndarray): randomized sequence.\n ' return numpy.random.permutation(numpy.tile(list(range(1, (n_conditions + 1))), n_reps))
Create a completely random sequence of integers. Arguments: n_conditions (int): the number of conditions in the list. The array returned contains integers from 1 to the value of `n_conditions`. n_reps (int): number that each element is repeated. Length of the returned array is n_conditions * n_reps. Returns: (numpy.ndarray): randomized sequence.
slab/psychoacoustics.py
_create_random_permutation
jakab13/slab
7
python
@staticmethod def _create_random_permutation(n_conditions, n_reps): '\n Create a completely random sequence of integers.\n\n Arguments:\n n_conditions (int): the number of conditions in the list. The array returned contains integers from 1\n to the value of `n_conditions`.\n n_reps (int): number that each element is repeated. Length of the returned array is n_conditions * n_reps.\n Returns:\n (numpy.ndarray): randomized sequence.\n ' return numpy.random.permutation(numpy.tile(list(range(1, (n_conditions + 1))), n_reps))
@staticmethod def _create_random_permutation(n_conditions, n_reps): '\n Create a completely random sequence of integers.\n\n Arguments:\n n_conditions (int): the number of conditions in the list. The array returned contains integers from 1\n to the value of `n_conditions`.\n n_reps (int): number that each element is repeated. Length of the returned array is n_conditions * n_reps.\n Returns:\n (numpy.ndarray): randomized sequence.\n ' return numpy.random.permutation(numpy.tile(list(range(1, (n_conditions + 1))), n_reps))<|docstring|>Create a completely random sequence of integers. Arguments: n_conditions (int): the number of conditions in the list. The array returned contains integers from 1 to the value of `n_conditions`. n_reps (int): number that each element is repeated. Length of the returned array is n_conditions * n_reps. Returns: (numpy.ndarray): randomized sequence.<|endoftext|>
12cc35da06d73cd06e11ab63a323562201d539f74af65f9c3e4d5a634ad482a4
def get_future_trial(self, n=1): '\n Returns the condition of a trial n iterations into the future or past, without advancing the trials.\n\n Arguments:\n n (int): number of iterations into the future or past (negative numbers).\n Returns:\n (any): element of the list stored in the `conditions` attribute belonging to the trial n\n iterations into the past/future. Returns None if attempting to go beyond the first/last trial\n ' if ((n > self.n_remaining) or ((self.this_n + n) < 0)): return None return self.conditions[(self.trials[(self.this_n + n)] - 1)]
Returns the condition of a trial n iterations into the future or past, without advancing the trials. Arguments: n (int): number of iterations into the future or past (negative numbers). Returns: (any): element of the list stored in the `conditions` attribute belonging to the trial n iterations into the past/future. Returns None if attempting to go beyond the first/last trial
slab/psychoacoustics.py
get_future_trial
jakab13/slab
7
python
def get_future_trial(self, n=1): '\n Returns the condition of a trial n iterations into the future or past, without advancing the trials.\n\n Arguments:\n n (int): number of iterations into the future or past (negative numbers).\n Returns:\n (any): element of the list stored in the `conditions` attribute belonging to the trial n\n iterations into the past/future. Returns None if attempting to go beyond the first/last trial\n ' if ((n > self.n_remaining) or ((self.this_n + n) < 0)): return None return self.conditions[(self.trials[(self.this_n + n)] - 1)]
def get_future_trial(self, n=1): '\n Returns the condition of a trial n iterations into the future or past, without advancing the trials.\n\n Arguments:\n n (int): number of iterations into the future or past (negative numbers).\n Returns:\n (any): element of the list stored in the `conditions` attribute belonging to the trial n\n iterations into the past/future. Returns None if attempting to go beyond the first/last trial\n ' if ((n > self.n_remaining) or ((self.this_n + n) < 0)): return None return self.conditions[(self.trials[(self.this_n + n)] - 1)]<|docstring|>Returns the condition of a trial n iterations into the future or past, without advancing the trials. Arguments: n (int): number of iterations into the future or past (negative numbers). Returns: (any): element of the list stored in the `conditions` attribute belonging to the trial n iterations into the past/future. Returns None if attempting to go beyond the first/last trial<|endoftext|>
72d1d8066b07f1efa0e88de0212c6d69b5d68ea00b2193330cd9897ee8bf50f0
def transitions(self): '\n Count the number of transitions between conditions.\n\n Returns:\n (numpy.ndarray): table of shape `n_conditions` x `n_conditions` where the rows represent the condition\n transitioning from and the columns represent the condition transitioning to. For example [0, 2] shows the\n number of transitions from condition 1 to condition 3. If the `kind` of the sequence is "non_repeating",\n the diagonal is 0 because no condition transitions into itself.\n ' transitions = numpy.zeros((self.n_conditions, self.n_conditions)) for (i, j) in zip(self.trials, self.trials[1:]): transitions[((i - 1), (j - 1))] += 1 return transitions
Count the number of transitions between conditions. Returns: (numpy.ndarray): table of shape `n_conditions` x `n_conditions` where the rows represent the condition transitioning from and the columns represent the condition transitioning to. For example [0, 2] shows the number of transitions from condition 1 to condition 3. If the `kind` of the sequence is "non_repeating", the diagonal is 0 because no condition transitions into itself.
slab/psychoacoustics.py
transitions
jakab13/slab
7
python
def transitions(self): '\n Count the number of transitions between conditions.\n\n Returns:\n (numpy.ndarray): table of shape `n_conditions` x `n_conditions` where the rows represent the condition\n transitioning from and the columns represent the condition transitioning to. For example [0, 2] shows the\n number of transitions from condition 1 to condition 3. If the `kind` of the sequence is "non_repeating",\n the diagonal is 0 because no condition transitions into itself.\n ' transitions = numpy.zeros((self.n_conditions, self.n_conditions)) for (i, j) in zip(self.trials, self.trials[1:]): transitions[((i - 1), (j - 1))] += 1 return transitions
def transitions(self): '\n Count the number of transitions between conditions.\n\n Returns:\n (numpy.ndarray): table of shape `n_conditions` x `n_conditions` where the rows represent the condition\n transitioning from and the columns represent the condition transitioning to. For example [0, 2] shows the\n number of transitions from condition 1 to condition 3. If the `kind` of the sequence is "non_repeating",\n the diagonal is 0 because no condition transitions into itself.\n ' transitions = numpy.zeros((self.n_conditions, self.n_conditions)) for (i, j) in zip(self.trials, self.trials[1:]): transitions[((i - 1), (j - 1))] += 1 return transitions<|docstring|>Count the number of transitions between conditions. Returns: (numpy.ndarray): table of shape `n_conditions` x `n_conditions` where the rows represent the condition transitioning from and the columns represent the condition transitioning to. For example [0, 2] shows the number of transitions from condition 1 to condition 3. If the `kind` of the sequence is "non_repeating", the diagonal is 0 because no condition transitions into itself.<|endoftext|>
2ed2d265378da4e994911f8dfaa0b6a65cb38260e2e57d7126f0f02d3f2c4c9b
def condition_probabilities(self): '\n Return the frequency with which each condition appears in the sequence.\n\n Returns:\n (list): list of floats floats, where every element represents the frequency of one condition.\n The fist element is the frequency of the first condition and so on.\n ' probabilities = [] for i in range(self.n_conditions): num = self.trials.count(i) num /= self.n_trials probabilities.append(num) return probabilities
Return the frequency with which each condition appears in the sequence. Returns: (list): list of floats floats, where every element represents the frequency of one condition. The fist element is the frequency of the first condition and so on.
slab/psychoacoustics.py
condition_probabilities
jakab13/slab
7
python
def condition_probabilities(self): '\n Return the frequency with which each condition appears in the sequence.\n\n Returns:\n (list): list of floats floats, where every element represents the frequency of one condition.\n The fist element is the frequency of the first condition and so on.\n ' probabilities = [] for i in range(self.n_conditions): num = self.trials.count(i) num /= self.n_trials probabilities.append(num) return probabilities
def condition_probabilities(self): '\n Return the frequency with which each condition appears in the sequence.\n\n Returns:\n (list): list of floats floats, where every element represents the frequency of one condition.\n The fist element is the frequency of the first condition and so on.\n ' probabilities = [] for i in range(self.n_conditions): num = self.trials.count(i) num /= self.n_trials probabilities.append(num) return probabilities<|docstring|>Return the frequency with which each condition appears in the sequence. Returns: (list): list of floats floats, where every element represents the frequency of one condition. The fist element is the frequency of the first condition and so on.<|endoftext|>
b62f21276619ed5d1b495cb8d44dca24a70f6e56e8b5a91c62c9aedc6dc2d726
def response_summary(self): '\n Generate a summary of the responses for each condition. The function counts how often a specific response\n was given to a condition for all conditions and each possible response (including None).\n\n Returns:\n (list of lists | None): indices of the outer list represent the conditions in the sequence. Each inner\n list contains the number of responses per response key, with the response keys sorted in ascending order,\n the last element always represents None. If the sequence is not finished yet, None is returned.\n Examples::\n\n import slab\n import random\n sequence = slab.Trialsequence(conditions=3, n_reps=10) # a sequence with three conditions\n # iterate trough the list and generate a random response. The response can be either yes (1), no (0) or\n # there can be no response at all (None)\n for trial in sequence:\n response = random.choice([0, 1, None])\n sequence.add_response(response)\n sequence.response_summary()\n # Out: [[1, 1, 7], [2, 5, 3], [4, 4, 2]]\n # The first sublist shows that the subject responded to the first condition once with no (0),\n # once with yes (1) and did not give a response seven times, the second and third list show\n # prevalence of the same response keys for conditions two and three.\n ' if self.finished: response_keys = [item for sublist in self.data for item in sublist] response_keys = list(set((response_keys + [None]))) response_keys = sorted(response_keys, key=(lambda x: ((x is None), x))) responses = [] for condition in self.conditions: idx = [i for (i, cond) in enumerate(self.trials) if (cond == condition)] condition_data = [self.data[i] for i in idx] count = collections.Counter([item for sublist in condition_data for item in sublist]) resp_1cond = [] for r in response_keys: resp_1cond.append(count[r]) responses.append(resp_1cond) return responses else: return None
Generate a summary of the responses for each condition. The function counts how often a specific response was given to a condition for all conditions and each possible response (including None). Returns: (list of lists | None): indices of the outer list represent the conditions in the sequence. Each inner list contains the number of responses per response key, with the response keys sorted in ascending order, the last element always represents None. If the sequence is not finished yet, None is returned. Examples:: import slab import random sequence = slab.Trialsequence(conditions=3, n_reps=10) # a sequence with three conditions # iterate trough the list and generate a random response. The response can be either yes (1), no (0) or # there can be no response at all (None) for trial in sequence: response = random.choice([0, 1, None]) sequence.add_response(response) sequence.response_summary() # Out: [[1, 1, 7], [2, 5, 3], [4, 4, 2]] # The first sublist shows that the subject responded to the first condition once with no (0), # once with yes (1) and did not give a response seven times, the second and third list show # prevalence of the same response keys for conditions two and three.
slab/psychoacoustics.py
response_summary
jakab13/slab
7
python
def response_summary(self): '\n Generate a summary of the responses for each condition. The function counts how often a specific response\n was given to a condition for all conditions and each possible response (including None).\n\n Returns:\n (list of lists | None): indices of the outer list represent the conditions in the sequence. Each inner\n list contains the number of responses per response key, with the response keys sorted in ascending order,\n the last element always represents None. If the sequence is not finished yet, None is returned.\n Examples::\n\n import slab\n import random\n sequence = slab.Trialsequence(conditions=3, n_reps=10) # a sequence with three conditions\n # iterate trough the list and generate a random response. The response can be either yes (1), no (0) or\n # there can be no response at all (None)\n for trial in sequence:\n response = random.choice([0, 1, None])\n sequence.add_response(response)\n sequence.response_summary()\n # Out: [[1, 1, 7], [2, 5, 3], [4, 4, 2]]\n # The first sublist shows that the subject responded to the first condition once with no (0),\n # once with yes (1) and did not give a response seven times, the second and third list show\n # prevalence of the same response keys for conditions two and three.\n ' if self.finished: response_keys = [item for sublist in self.data for item in sublist] response_keys = list(set((response_keys + [None]))) response_keys = sorted(response_keys, key=(lambda x: ((x is None), x))) responses = [] for condition in self.conditions: idx = [i for (i, cond) in enumerate(self.trials) if (cond == condition)] condition_data = [self.data[i] for i in idx] count = collections.Counter([item for sublist in condition_data for item in sublist]) resp_1cond = [] for r in response_keys: resp_1cond.append(count[r]) responses.append(resp_1cond) return responses else: return None
def response_summary(self): '\n Generate a summary of the responses for each condition. The function counts how often a specific response\n was given to a condition for all conditions and each possible response (including None).\n\n Returns:\n (list of lists | None): indices of the outer list represent the conditions in the sequence. Each inner\n list contains the number of responses per response key, with the response keys sorted in ascending order,\n the last element always represents None. If the sequence is not finished yet, None is returned.\n Examples::\n\n import slab\n import random\n sequence = slab.Trialsequence(conditions=3, n_reps=10) # a sequence with three conditions\n # iterate trough the list and generate a random response. The response can be either yes (1), no (0) or\n # there can be no response at all (None)\n for trial in sequence:\n response = random.choice([0, 1, None])\n sequence.add_response(response)\n sequence.response_summary()\n # Out: [[1, 1, 7], [2, 5, 3], [4, 4, 2]]\n # The first sublist shows that the subject responded to the first condition once with no (0),\n # once with yes (1) and did not give a response seven times, the second and third list show\n # prevalence of the same response keys for conditions two and three.\n ' if self.finished: response_keys = [item for sublist in self.data for item in sublist] response_keys = list(set((response_keys + [None]))) response_keys = sorted(response_keys, key=(lambda x: ((x is None), x))) responses = [] for condition in self.conditions: idx = [i for (i, cond) in enumerate(self.trials) if (cond == condition)] condition_data = [self.data[i] for i in idx] count = collections.Counter([item for sublist in condition_data for item in sublist]) resp_1cond = [] for r in response_keys: resp_1cond.append(count[r]) responses.append(resp_1cond) return responses else: return None<|docstring|>Generate a summary of the responses for each condition. 
The function counts how often a specific response was given to a condition for all conditions and each possible response (including None). Returns: (list of lists | None): indices of the outer list represent the conditions in the sequence. Each inner list contains the number of responses per response key, with the response keys sorted in ascending order, the last element always represents None. If the sequence is not finished yet, None is returned. Examples:: import slab import random sequence = slab.Trialsequence(conditions=3, n_reps=10) # a sequence with three conditions # iterate trough the list and generate a random response. The response can be either yes (1), no (0) or # there can be no response at all (None) for trial in sequence: response = random.choice([0, 1, None]) sequence.add_response(response) sequence.response_summary() # Out: [[1, 1, 7], [2, 5, 3], [4, 4, 2]] # The first sublist shows that the subject responded to the first condition once with no (0), # once with yes (1) and did not give a response seven times, the second and third list show # prevalence of the same response keys for conditions two and three.<|endoftext|>
49825a038d936f19f3b384dc62547f4e30459949d93f04c8ecd428b667ce0580
def plot(self, axis=None, show=True): '\n Plot the trial sequence as scatter plot.\n\n Arguments:\n axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated\n show (bool): show the plot immediately, defaults to True\n ' if (plt is None): raise ImportError('Plotting requires matplotlib!') if (axis is None): axis = plt.subplot() axis.scatter(range(self.n_trials), self.trials) axis.set(title='Trial sequence', xlabel='Trials', ylabel='Condition index') if show: plt.show()
Plot the trial sequence as scatter plot. Arguments: axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated show (bool): show the plot immediately, defaults to True
slab/psychoacoustics.py
plot
jakab13/slab
7
python
def plot(self, axis=None, show=True): '\n Plot the trial sequence as scatter plot.\n\n Arguments:\n axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated\n show (bool): show the plot immediately, defaults to True\n ' if (plt is None): raise ImportError('Plotting requires matplotlib!') if (axis is None): axis = plt.subplot() axis.scatter(range(self.n_trials), self.trials) axis.set(title='Trial sequence', xlabel='Trials', ylabel='Condition index') if show: plt.show()
def plot(self, axis=None, show=True): '\n Plot the trial sequence as scatter plot.\n\n Arguments:\n axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated\n show (bool): show the plot immediately, defaults to True\n ' if (plt is None): raise ImportError('Plotting requires matplotlib!') if (axis is None): axis = plt.subplot() axis.scatter(range(self.n_trials), self.trials) axis.set(title='Trial sequence', xlabel='Trials', ylabel='Condition index') if show: plt.show()<|docstring|>Plot the trial sequence as scatter plot. Arguments: axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated show (bool): show the plot immediately, defaults to True<|endoftext|>
d441562c60a4634b07021cdfaf57f1c6b357399a1f2d8f3dc6f97220ac3cd69c
def __next__(self): '\n Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes\n this_trial, this_n, and this_index. If the trials have ended this method will raise a StopIteration.\n\n Returns:\n (int | float | StopIteration): the intensity for the next trial which is calculated by the\n `_next_intensity` method. If the sequence is finished a StopIteration is returned instead.\n ' if (not self.finished): self.this_trial_n += 1 self.intensities.append(self._next_intensity) return self._next_intensity else: self._psychometric_function() raise StopIteration
Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes this_trial, this_n, and this_index. If the trials have ended this method will raise a StopIteration. Returns: (int | float | StopIteration): the intensity for the next trial which is calculated by the `_next_intensity` method. If the sequence is finished a StopIteration is returned instead.
slab/psychoacoustics.py
__next__
jakab13/slab
7
python
def __next__(self): '\n Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes\n this_trial, this_n, and this_index. If the trials have ended this method will raise a StopIteration.\n\n Returns:\n (int | float | StopIteration): the intensity for the next trial which is calculated by the\n `_next_intensity` method. If the sequence is finished a StopIteration is returned instead.\n ' if (not self.finished): self.this_trial_n += 1 self.intensities.append(self._next_intensity) return self._next_intensity else: self._psychometric_function() raise StopIteration
def __next__(self): '\n Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes\n this_trial, this_n, and this_index. If the trials have ended this method will raise a StopIteration.\n\n Returns:\n (int | float | StopIteration): the intensity for the next trial which is calculated by the\n `_next_intensity` method. If the sequence is finished a StopIteration is returned instead.\n ' if (not self.finished): self.this_trial_n += 1 self.intensities.append(self._next_intensity) return self._next_intensity else: self._psychometric_function() raise StopIteration<|docstring|>Is called when iterating trough a sequenceAdvances to next trial and returns it. Updates attributes this_trial, this_n, and this_index. If the trials have ended this method will raise a StopIteration. Returns: (int | float | StopIteration): the intensity for the next trial which is calculated by the `_next_intensity` method. If the sequence is finished a StopIteration is returned instead.<|endoftext|>
bf08868ab56455efa3800e30fa6d3de2b216bab1a3f4b6b927510eb668ee506d
def add_response(self, result, intensity=None): '\n Add a True or 1 to indicate a correct/detected trial\n or False or 0 to indicate an incorrect/missed trial.\n This is essential to advance the staircase to a new intensity level.\n Supplying an `intensity` value indicates that you did not use\n the recommended intensity in your last trial and the staircase will\n replace its recorded value with the one supplied.\n ' if (self._next_intensity <= self.min_val): result = False else: result = bool(result) self.data.append(result) if (intensity is not None): self.intensities.pop() self.intensities.append(intensity) if (self.this_trial_n > 0): if result: if ((len(self.data) > 1) and (self.data[(- 2)] == result)): self.correct_counter += 1 else: self.correct_counter = 1 elif ((len(self.data) > 1) and (self.data[(- 2)] == result)): self.correct_counter -= 1 else: self.correct_counter = (- 1) self.calculate_next_intensity()
Add a True or 1 to indicate a correct/detected trial or False or 0 to indicate an incorrect/missed trial. This is essential to advance the staircase to a new intensity level. Supplying an `intensity` value indicates that you did not use the recommended intensity in your last trial and the staircase will replace its recorded value with the one supplied.
slab/psychoacoustics.py
add_response
jakab13/slab
7
python
def add_response(self, result, intensity=None): '\n Add a True or 1 to indicate a correct/detected trial\n or False or 0 to indicate an incorrect/missed trial.\n This is essential to advance the staircase to a new intensity level.\n Supplying an `intensity` value indicates that you did not use\n the recommended intensity in your last trial and the staircase will\n replace its recorded value with the one supplied.\n ' if (self._next_intensity <= self.min_val): result = False else: result = bool(result) self.data.append(result) if (intensity is not None): self.intensities.pop() self.intensities.append(intensity) if (self.this_trial_n > 0): if result: if ((len(self.data) > 1) and (self.data[(- 2)] == result)): self.correct_counter += 1 else: self.correct_counter = 1 elif ((len(self.data) > 1) and (self.data[(- 2)] == result)): self.correct_counter -= 1 else: self.correct_counter = (- 1) self.calculate_next_intensity()
def add_response(self, result, intensity=None): '\n Add a True or 1 to indicate a correct/detected trial\n or False or 0 to indicate an incorrect/missed trial.\n This is essential to advance the staircase to a new intensity level.\n Supplying an `intensity` value indicates that you did not use\n the recommended intensity in your last trial and the staircase will\n replace its recorded value with the one supplied.\n ' if (self._next_intensity <= self.min_val): result = False else: result = bool(result) self.data.append(result) if (intensity is not None): self.intensities.pop() self.intensities.append(intensity) if (self.this_trial_n > 0): if result: if ((len(self.data) > 1) and (self.data[(- 2)] == result)): self.correct_counter += 1 else: self.correct_counter = 1 elif ((len(self.data) > 1) and (self.data[(- 2)] == result)): self.correct_counter -= 1 else: self.correct_counter = (- 1) self.calculate_next_intensity()<|docstring|>Add a True or 1 to indicate a correct/detected trial or False or 0 to indicate an incorrect/missed trial. This is essential to advance the staircase to a new intensity level. Supplying an `intensity` value indicates that you did not use the recommended intensity in your last trial and the staircase will replace its recorded value with the one supplied.<|endoftext|>
619292b15f4a4fa34a281706f30b92c4f6f7607831163252adde975d6251ef35
def calculate_next_intensity(self): ' Based on current intensity, counter of correct responses, and current direction. ' if (not self.reversal_intensities): if (self.data[(- 1)] is True): reversal = bool((self.current_direction == 'up')) self.current_direction = 'down' else: reversal = bool((self.current_direction == 'down')) self.current_direction = 'up' elif (self.correct_counter >= self.n_down): reversal = bool((self.current_direction != 'down')) self.current_direction = 'down' elif (self.correct_counter <= (- self.n_up)): reversal = bool((self.current_direction != 'up')) self.current_direction = 'up' else: reversal = False if reversal: self.reversal_points.append(self.this_trial_n) self.reversal_intensities.append(self.intensities[(- 1)]) if (len(self.reversal_intensities) >= self.n_reversals): self.finished = True if (len(self.reversal_intensities) >= len(self.step_sizes)): self.step_size_current = self.step_sizes[(- 1)] else: _sz = len(self.reversal_intensities) self.step_size_current = self.step_sizes[_sz] if (self.current_direction == 'up'): self.step_size_current *= self.step_up_factor if (not self.reversal_intensities): if (self.data[(- 1)] == 1): self._intensity_dec() else: self._intensity_inc() elif (self.correct_counter >= self.n_down): self._intensity_dec() elif (self.correct_counter <= (- self.n_up)): self._intensity_inc()
Based on current intensity, counter of correct responses, and current direction.
slab/psychoacoustics.py
calculate_next_intensity
jakab13/slab
7
python
def calculate_next_intensity(self): ' ' if (not self.reversal_intensities): if (self.data[(- 1)] is True): reversal = bool((self.current_direction == 'up')) self.current_direction = 'down' else: reversal = bool((self.current_direction == 'down')) self.current_direction = 'up' elif (self.correct_counter >= self.n_down): reversal = bool((self.current_direction != 'down')) self.current_direction = 'down' elif (self.correct_counter <= (- self.n_up)): reversal = bool((self.current_direction != 'up')) self.current_direction = 'up' else: reversal = False if reversal: self.reversal_points.append(self.this_trial_n) self.reversal_intensities.append(self.intensities[(- 1)]) if (len(self.reversal_intensities) >= self.n_reversals): self.finished = True if (len(self.reversal_intensities) >= len(self.step_sizes)): self.step_size_current = self.step_sizes[(- 1)] else: _sz = len(self.reversal_intensities) self.step_size_current = self.step_sizes[_sz] if (self.current_direction == 'up'): self.step_size_current *= self.step_up_factor if (not self.reversal_intensities): if (self.data[(- 1)] == 1): self._intensity_dec() else: self._intensity_inc() elif (self.correct_counter >= self.n_down): self._intensity_dec() elif (self.correct_counter <= (- self.n_up)): self._intensity_inc()
def calculate_next_intensity(self): ' ' if (not self.reversal_intensities): if (self.data[(- 1)] is True): reversal = bool((self.current_direction == 'up')) self.current_direction = 'down' else: reversal = bool((self.current_direction == 'down')) self.current_direction = 'up' elif (self.correct_counter >= self.n_down): reversal = bool((self.current_direction != 'down')) self.current_direction = 'down' elif (self.correct_counter <= (- self.n_up)): reversal = bool((self.current_direction != 'up')) self.current_direction = 'up' else: reversal = False if reversal: self.reversal_points.append(self.this_trial_n) self.reversal_intensities.append(self.intensities[(- 1)]) if (len(self.reversal_intensities) >= self.n_reversals): self.finished = True if (len(self.reversal_intensities) >= len(self.step_sizes)): self.step_size_current = self.step_sizes[(- 1)] else: _sz = len(self.reversal_intensities) self.step_size_current = self.step_sizes[_sz] if (self.current_direction == 'up'): self.step_size_current *= self.step_up_factor if (not self.reversal_intensities): if (self.data[(- 1)] == 1): self._intensity_dec() else: self._intensity_inc() elif (self.correct_counter >= self.n_down): self._intensity_dec() elif (self.correct_counter <= (- self.n_up)): self._intensity_inc()<|docstring|>Based on current intensity, counter of correct responses, and current direction.<|endoftext|>
2eb18c3ff34cde4a368e498f59001a48d68873fdc7dc310b4ebc9061ed4ba0d2
def _intensity_inc(self): ' increment the current intensity and reset counter. ' if (self.step_type == 'db'): self._next_intensity *= (10.0 ** (self.step_size_current / 20.0)) elif (self.step_type == 'log'): self._next_intensity *= (10.0 ** self.step_size_current) elif (self.step_type == 'lin'): self._next_intensity += self.step_size_current if ((self.max_val is not None) and (self._next_intensity > self.max_val)): self._next_intensity = self.max_val self.correct_counter = 0
increment the current intensity and reset counter.
slab/psychoacoustics.py
_intensity_inc
jakab13/slab
7
python
def _intensity_inc(self): ' ' if (self.step_type == 'db'): self._next_intensity *= (10.0 ** (self.step_size_current / 20.0)) elif (self.step_type == 'log'): self._next_intensity *= (10.0 ** self.step_size_current) elif (self.step_type == 'lin'): self._next_intensity += self.step_size_current if ((self.max_val is not None) and (self._next_intensity > self.max_val)): self._next_intensity = self.max_val self.correct_counter = 0
def _intensity_inc(self): ' ' if (self.step_type == 'db'): self._next_intensity *= (10.0 ** (self.step_size_current / 20.0)) elif (self.step_type == 'log'): self._next_intensity *= (10.0 ** self.step_size_current) elif (self.step_type == 'lin'): self._next_intensity += self.step_size_current if ((self.max_val is not None) and (self._next_intensity > self.max_val)): self._next_intensity = self.max_val self.correct_counter = 0<|docstring|>increment the current intensity and reset counter.<|endoftext|>
322bcee652197f6b2808900d74dd5c84335bd593e680038a638eb8179b467d06
def _intensity_dec(self): ' decrement the current intensity and reset counter. ' if (self.step_type == 'db'): self._next_intensity /= (10.0 ** (self.step_size_current / 20.0)) if (self.step_type == 'log'): self._next_intensity /= (10.0 ** self.step_size_current) elif (self.step_type == 'lin'): self._next_intensity -= self.step_size_current self.correct_counter = 0 if ((self.min_val is not None) and (self._next_intensity < self.min_val)): self._next_intensity = self.min_val
decrement the current intensity and reset counter.
slab/psychoacoustics.py
_intensity_dec
jakab13/slab
7
python
def _intensity_dec(self): ' ' if (self.step_type == 'db'): self._next_intensity /= (10.0 ** (self.step_size_current / 20.0)) if (self.step_type == 'log'): self._next_intensity /= (10.0 ** self.step_size_current) elif (self.step_type == 'lin'): self._next_intensity -= self.step_size_current self.correct_counter = 0 if ((self.min_val is not None) and (self._next_intensity < self.min_val)): self._next_intensity = self.min_val
def _intensity_dec(self): ' ' if (self.step_type == 'db'): self._next_intensity /= (10.0 ** (self.step_size_current / 20.0)) if (self.step_type == 'log'): self._next_intensity /= (10.0 ** self.step_size_current) elif (self.step_type == 'lin'): self._next_intensity -= self.step_size_current self.correct_counter = 0 if ((self.min_val is not None) and (self._next_intensity < self.min_val)): self._next_intensity = self.min_val<|docstring|>decrement the current intensity and reset counter.<|endoftext|>
24a22da12d87f5a2ce20389c92160c2400d1728ba1ee52057b1d0582d934144a
def threshold(self, n=0): "\n Returns the average of the last n reversals.\n\n Arguments:\n n (int): number of reversals to average over, if 0 use `n_reversals` - 1.\n Returns:\n the arithmetic (if `step_type`==='lin') or geometric mean of the `reversal_intensities`.\n " if self.finished: if ((n == 0) or (n > self.n_reversals)): n = (int(self.n_reversals) - 1) if (self.step_type == 'lin'): return numpy.mean(self.reversal_intensities[(- n):]) return numpy.exp(numpy.mean(numpy.log(self.reversal_intensities[(- n):]))) return None
Returns the average of the last n reversals. Arguments: n (int): number of reversals to average over, if 0 use `n_reversals` - 1. Returns: the arithmetic (if `step_type`==='lin') or geometric mean of the `reversal_intensities`.
slab/psychoacoustics.py
threshold
jakab13/slab
7
python
def threshold(self, n=0): "\n Returns the average of the last n reversals.\n\n Arguments:\n n (int): number of reversals to average over, if 0 use `n_reversals` - 1.\n Returns:\n the arithmetic (if `step_type`==='lin') or geometric mean of the `reversal_intensities`.\n " if self.finished: if ((n == 0) or (n > self.n_reversals)): n = (int(self.n_reversals) - 1) if (self.step_type == 'lin'): return numpy.mean(self.reversal_intensities[(- n):]) return numpy.exp(numpy.mean(numpy.log(self.reversal_intensities[(- n):]))) return None
def threshold(self, n=0): "\n Returns the average of the last n reversals.\n\n Arguments:\n n (int): number of reversals to average over, if 0 use `n_reversals` - 1.\n Returns:\n the arithmetic (if `step_type`==='lin') or geometric mean of the `reversal_intensities`.\n " if self.finished: if ((n == 0) or (n > self.n_reversals)): n = (int(self.n_reversals) - 1) if (self.step_type == 'lin'): return numpy.mean(self.reversal_intensities[(- n):]) return numpy.exp(numpy.mean(numpy.log(self.reversal_intensities[(- n):]))) return None<|docstring|>Returns the average of the last n reversals. Arguments: n (int): number of reversals to average over, if 0 use `n_reversals` - 1. Returns: the arithmetic (if `step_type`==='lin') or geometric mean of the `reversal_intensities`.<|endoftext|>
7a66828dbafb7827fc73113ab3bc3efb84c1c83f1be76e078b63fd85a7fa103d
def print_trial_info(self): ' Convenience method for printing current trial information. ' print(f'{self.label} | trial # {self.this_trial_n}: reversals: {len(self.reversal_points)}/{self.n_reversals}, intensity {(round(self.intensities[(- 1)], 2) if self.intensities else round(self._next_intensity, 2))}, going {self.current_direction}, response {(self.data[(- 1)] if self.data else None)}')
Convenience method for printing current trial information.
slab/psychoacoustics.py
print_trial_info
jakab13/slab
7
python
def print_trial_info(self): ' ' print(f'{self.label} | trial # {self.this_trial_n}: reversals: {len(self.reversal_points)}/{self.n_reversals}, intensity {(round(self.intensities[(- 1)], 2) if self.intensities else round(self._next_intensity, 2))}, going {self.current_direction}, response {(self.data[(- 1)] if self.data else None)}')
def print_trial_info(self): ' ' print(f'{self.label} | trial # {self.this_trial_n}: reversals: {len(self.reversal_points)}/{self.n_reversals}, intensity {(round(self.intensities[(- 1)], 2) if self.intensities else round(self._next_intensity, 2))}, going {self.current_direction}, response {(self.data[(- 1)] if self.data else None)}')<|docstring|>Convenience method for printing current trial information.<|endoftext|>
fa90fcde3513eca73a56e4486c9be50e4f15fe7d6b38d1455f100acc765b7bd7
def save_csv(self, filename): '\n Write a csv text file with the stimulus values in the 1st line and the corresponding responses in the 2nd.\n\n Arguments:\n filename (str): the name under which the csv file is saved.\n Returns:\n (bool): True if saving was successful, False if there are no trials to save.\n ' if (self.this_trial_n < 1): return False with open(filename, 'w') as f: raw_intensities = str(self.intensities) raw_intensities = raw_intensities.replace('[', '').replace(']', '') f.write(raw_intensities) f.write('\n') responses = str(numpy.multiply(self.data, 1)) responses = responses.replace('[', '').replace(']', '') responses = responses.replace(' ', ', ') f.write(responses) return True
Write a csv text file with the stimulus values in the 1st line and the corresponding responses in the 2nd. Arguments: filename (str): the name under which the csv file is saved. Returns: (bool): True if saving was successful, False if there are no trials to save.
slab/psychoacoustics.py
save_csv
jakab13/slab
7
python
def save_csv(self, filename): '\n Write a csv text file with the stimulus values in the 1st line and the corresponding responses in the 2nd.\n\n Arguments:\n filename (str): the name under which the csv file is saved.\n Returns:\n (bool): True if saving was successful, False if there are no trials to save.\n ' if (self.this_trial_n < 1): return False with open(filename, 'w') as f: raw_intensities = str(self.intensities) raw_intensities = raw_intensities.replace('[', ).replace(']', ) f.write(raw_intensities) f.write('\n') responses = str(numpy.multiply(self.data, 1)) responses = responses.replace('[', ).replace(']', ) responses = responses.replace(' ', ', ') f.write(responses) return True
def save_csv(self, filename): '\n Write a csv text file with the stimulus values in the 1st line and the corresponding responses in the 2nd.\n\n Arguments:\n filename (str): the name under which the csv file is saved.\n Returns:\n (bool): True if saving was successful, False if there are no trials to save.\n ' if (self.this_trial_n < 1): return False with open(filename, 'w') as f: raw_intensities = str(self.intensities) raw_intensities = raw_intensities.replace('[', ).replace(']', ) f.write(raw_intensities) f.write('\n') responses = str(numpy.multiply(self.data, 1)) responses = responses.replace('[', ).replace(']', ) responses = responses.replace(' ', ', ') f.write(responses) return True<|docstring|>Write a csv text file with the stimulus values in the 1st line and the corresponding responses in the 2nd. Arguments: filename (str): the name under which the csv file is saved. Returns: (bool): True if saving was successful, False if there are no trials to save.<|endoftext|>
fbb8725e8f03a568a40d7826875e2c868e4c7803a4b2fdd8326938480339126b
def plot(self, axis=None, show=True): '\n Plot the staircase. If called after each trial, one plot is created and updated.\n\n Arguments:\n axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated\n show (bool): whether to show the plot right after drawing.\n ' if (plt is None): raise ImportError('Plotting requires matplotlib!') if self.intensities: x = numpy.arange((- self.n_pretrials), (len(self.intensities) - self.n_pretrials)) y = numpy.array(self.intensities) responses = numpy.array(self.data) if (axis is None): fig = plt.figure('stairs') axis = fig.gca() axis.clear() axis.plot(x, y) axis.set_xlim((- self.n_pretrials), max(20, (((self.this_trial_n + 15) // 10) * 10))) axis.set_ylim((min(0, min(y)) if (self.min_val == (- numpy.Inf)) else self.min_val), (max(y) if (self.max_val == numpy.Inf) else self.max_val)) axis.scatter(x[responses], y[responses], color='green') axis.scatter(x[(~ responses)], y[(~ responses)], color='red') axis.scatter(((len(self.intensities) - self.n_pretrials) + 1), self._next_intensity, color='grey') axis.set_ylabel('Dependent variable') axis.set_xlabel('Trial') axis.set_title('Staircase') if self.finished: axis.hlines(self.threshold(), min(x), max(x), 'r') plt.draw() if show: plt.pause(0.01)
Plot the staircase. If called after each trial, one plot is created and updated. Arguments: axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated show (bool): whether to show the plot right after drawing.
slab/psychoacoustics.py
plot
jakab13/slab
7
python
def plot(self, axis=None, show=True): '\n Plot the staircase. If called after each trial, one plot is created and updated.\n\n Arguments:\n axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated\n show (bool): whether to show the plot right after drawing.\n ' if (plt is None): raise ImportError('Plotting requires matplotlib!') if self.intensities: x = numpy.arange((- self.n_pretrials), (len(self.intensities) - self.n_pretrials)) y = numpy.array(self.intensities) responses = numpy.array(self.data) if (axis is None): fig = plt.figure('stairs') axis = fig.gca() axis.clear() axis.plot(x, y) axis.set_xlim((- self.n_pretrials), max(20, (((self.this_trial_n + 15) // 10) * 10))) axis.set_ylim((min(0, min(y)) if (self.min_val == (- numpy.Inf)) else self.min_val), (max(y) if (self.max_val == numpy.Inf) else self.max_val)) axis.scatter(x[responses], y[responses], color='green') axis.scatter(x[(~ responses)], y[(~ responses)], color='red') axis.scatter(((len(self.intensities) - self.n_pretrials) + 1), self._next_intensity, color='grey') axis.set_ylabel('Dependent variable') axis.set_xlabel('Trial') axis.set_title('Staircase') if self.finished: axis.hlines(self.threshold(), min(x), max(x), 'r') plt.draw() if show: plt.pause(0.01)
def plot(self, axis=None, show=True): '\n Plot the staircase. If called after each trial, one plot is created and updated.\n\n Arguments:\n axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated\n show (bool): whether to show the plot right after drawing.\n ' if (plt is None): raise ImportError('Plotting requires matplotlib!') if self.intensities: x = numpy.arange((- self.n_pretrials), (len(self.intensities) - self.n_pretrials)) y = numpy.array(self.intensities) responses = numpy.array(self.data) if (axis is None): fig = plt.figure('stairs') axis = fig.gca() axis.clear() axis.plot(x, y) axis.set_xlim((- self.n_pretrials), max(20, (((self.this_trial_n + 15) // 10) * 10))) axis.set_ylim((min(0, min(y)) if (self.min_val == (- numpy.Inf)) else self.min_val), (max(y) if (self.max_val == numpy.Inf) else self.max_val)) axis.scatter(x[responses], y[responses], color='green') axis.scatter(x[(~ responses)], y[(~ responses)], color='red') axis.scatter(((len(self.intensities) - self.n_pretrials) + 1), self._next_intensity, color='grey') axis.set_ylabel('Dependent variable') axis.set_xlabel('Trial') axis.set_title('Staircase') if self.finished: axis.hlines(self.threshold(), min(x), max(x), 'r') plt.draw() if show: plt.pause(0.01)<|docstring|>Plot the staircase. If called after each trial, one plot is created and updated. Arguments: axis (matplotlib.pyplot.Axes): plot axis to draw on, if none a new plot is generated show (bool): whether to show the plot right after drawing.<|endoftext|>
7aeca0c9dd1aecf2e340cb01bab147331d6d64113da4141e89397cbfd4f12fa3
@staticmethod def close_plot(): ' Closes a staircase plot (if not drawn into a specified axis) - used for plotting after each trial. ' plt.close('stairs')
Closes a staircase plot (if not drawn into a specified axis) - used for plotting after each trial.
slab/psychoacoustics.py
close_plot
jakab13/slab
7
python
@staticmethod def close_plot(): ' ' plt.close('stairs')
@staticmethod def close_plot(): ' ' plt.close('stairs')<|docstring|>Closes a staircase plot (if not drawn into a specified axis) - used for plotting after each trial.<|endoftext|>
217c7b0050b15d57e85176d6f22d664cc67c25cbaed5ae139708f623064a64e4
def _psychometric_function(self): '\n Create a psychometric function by binning data from a staircase procedure.\n Called automatically when staircase is finished. Sets attributes `pf_intensites` (array of intensity values\n where each is the center of an intensity bin), `pf_percent_correct` (array of mean percent correct in each bin),\n `pf_responses_per_intensity` (array of number of responses contributing to each mean).\n ' intensities = numpy.array(self.intensities) responses = numpy.array(self.data) binned_resp = [] binned_intensities = [] n_points = [] intensities = numpy.round(intensities, decimals=8) unique_intensities = numpy.unique(intensities) for this_intensity in unique_intensities: these_responses = responses[(intensities == this_intensity)] binned_intensities.append(this_intensity) binned_resp.append(numpy.mean(these_responses)) n_points.append(len(these_responses)) self.pf_intensities = binned_intensities self.pf_percent_correct = binned_resp self.pf_responses_per_intensity = n_points
Create a psychometric function by binning data from a staircase procedure. Called automatically when staircase is finished. Sets attributes `pf_intensites` (array of intensity values where each is the center of an intensity bin), `pf_percent_correct` (array of mean percent correct in each bin), `pf_responses_per_intensity` (array of number of responses contributing to each mean).
slab/psychoacoustics.py
_psychometric_function
jakab13/slab
7
python
def _psychometric_function(self): '\n Create a psychometric function by binning data from a staircase procedure.\n Called automatically when staircase is finished. Sets attributes `pf_intensites` (array of intensity values\n where each is the center of an intensity bin), `pf_percent_correct` (array of mean percent correct in each bin),\n `pf_responses_per_intensity` (array of number of responses contributing to each mean).\n ' intensities = numpy.array(self.intensities) responses = numpy.array(self.data) binned_resp = [] binned_intensities = [] n_points = [] intensities = numpy.round(intensities, decimals=8) unique_intensities = numpy.unique(intensities) for this_intensity in unique_intensities: these_responses = responses[(intensities == this_intensity)] binned_intensities.append(this_intensity) binned_resp.append(numpy.mean(these_responses)) n_points.append(len(these_responses)) self.pf_intensities = binned_intensities self.pf_percent_correct = binned_resp self.pf_responses_per_intensity = n_points
def _psychometric_function(self): '\n Create a psychometric function by binning data from a staircase procedure.\n Called automatically when staircase is finished. Sets attributes `pf_intensites` (array of intensity values\n where each is the center of an intensity bin), `pf_percent_correct` (array of mean percent correct in each bin),\n `pf_responses_per_intensity` (array of number of responses contributing to each mean).\n ' intensities = numpy.array(self.intensities) responses = numpy.array(self.data) binned_resp = [] binned_intensities = [] n_points = [] intensities = numpy.round(intensities, decimals=8) unique_intensities = numpy.unique(intensities) for this_intensity in unique_intensities: these_responses = responses[(intensities == this_intensity)] binned_intensities.append(this_intensity) binned_resp.append(numpy.mean(these_responses)) n_points.append(len(these_responses)) self.pf_intensities = binned_intensities self.pf_percent_correct = binned_resp self.pf_responses_per_intensity = n_points<|docstring|>Create a psychometric function by binning data from a staircase procedure. Called automatically when staircase is finished. Sets attributes `pf_intensites` (array of intensity values where each is the center of an intensity bin), `pf_percent_correct` (array of mean percent correct in each bin), `pf_responses_per_intensity` (array of number of responses contributing to each mean).<|endoftext|>
e56af3b1f5878be291c7e0674e997efbcd96a14184f4fc5ba93c4170da25f211
def write(self, data, tag=None): '\n Safely write data to the file which is opened just before writing and closed immediately after to avoid\n data loss. Call this method at the end of each trial to save the response and trial state.\n\n Arguments:\n data (any): data to save must be JSON serializable [string, list, dict, ...]). If data is an object,\n the __dict__ is extracted and saved.\n tag (str): The tag is prepended as a key. If None is provided, the current time is used.\n ' if hasattr(data, '__dict__'): data = data.__dict__ try: data = json.loads(data) except (json.JSONDecodeError, TypeError): pass if ((tag is None) or (tag == 'time')): tag = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') with open(self.path, 'a') as file: file.write(json.dumps({tag: data})) file.write('\n')
Safely write data to the file which is opened just before writing and closed immediately after to avoid data loss. Call this method at the end of each trial to save the response and trial state. Arguments: data (any): data to save must be JSON serializable [string, list, dict, ...]). If data is an object, the __dict__ is extracted and saved. tag (str): The tag is prepended as a key. If None is provided, the current time is used.
slab/psychoacoustics.py
write
jakab13/slab
7
python
def write(self, data, tag=None): '\n Safely write data to the file which is opened just before writing and closed immediately after to avoid\n data loss. Call this method at the end of each trial to save the response and trial state.\n\n Arguments:\n data (any): data to save must be JSON serializable [string, list, dict, ...]). If data is an object,\n the __dict__ is extracted and saved.\n tag (str): The tag is prepended as a key. If None is provided, the current time is used.\n ' if hasattr(data, '__dict__'): data = data.__dict__ try: data = json.loads(data) except (json.JSONDecodeError, TypeError): pass if ((tag is None) or (tag == 'time')): tag = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') with open(self.path, 'a') as file: file.write(json.dumps({tag: data})) file.write('\n')
def write(self, data, tag=None): '\n Safely write data to the file which is opened just before writing and closed immediately after to avoid\n data loss. Call this method at the end of each trial to save the response and trial state.\n\n Arguments:\n data (any): data to save must be JSON serializable [string, list, dict, ...]). If data is an object,\n the __dict__ is extracted and saved.\n tag (str): The tag is prepended as a key. If None is provided, the current time is used.\n ' if hasattr(data, '__dict__'): data = data.__dict__ try: data = json.loads(data) except (json.JSONDecodeError, TypeError): pass if ((tag is None) or (tag == 'time')): tag = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S') with open(self.path, 'a') as file: file.write(json.dumps({tag: data})) file.write('\n')<|docstring|>Safely write data to the file which is opened just before writing and closed immediately after to avoid data loss. Call this method at the end of each trial to save the response and trial state. Arguments: data (any): data to save must be JSON serializable [string, list, dict, ...]). If data is an object, the __dict__ is extracted and saved. tag (str): The tag is prepended as a key. If None is provided, the current time is used.<|endoftext|>
cbda71c049b4fb5779f651af78d0f189f5d2c71d06323e87639275f8a0a673ad
@staticmethod def read_file(filename, tag=None): '\n Read a results file and return the content.\n\n Arguments:\n filename (str | pathlib.Path):\n tag (None | str):\n Returns:\n (list | dict): The content of the file. If tag is None, the whole file is returned, else only the\n dictionaries with that tag as a key are returned. The content will be a list of dictionaries or a\n dictionary if there is only a single element.\n ' content = [] with open(filename) as file: if (tag is None): for line in file: content.append(json.loads(line)) else: for line in file: jwd = json.loads(line) if (tag in jwd): content.append(jwd[tag]) if (len(content) == 1): content = content[0] return content
Read a results file and return the content. Arguments: filename (str | pathlib.Path): tag (None | str): Returns: (list | dict): The content of the file. If tag is None, the whole file is returned, else only the dictionaries with that tag as a key are returned. The content will be a list of dictionaries or a dictionary if there is only a single element.
slab/psychoacoustics.py
read_file
jakab13/slab
7
python
@staticmethod def read_file(filename, tag=None): '\n Read a results file and return the content.\n\n Arguments:\n filename (str | pathlib.Path):\n tag (None | str):\n Returns:\n (list | dict): The content of the file. If tag is None, the whole file is returned, else only the\n dictionaries with that tag as a key are returned. The content will be a list of dictionaries or a\n dictionary if there is only a single element.\n ' content = [] with open(filename) as file: if (tag is None): for line in file: content.append(json.loads(line)) else: for line in file: jwd = json.loads(line) if (tag in jwd): content.append(jwd[tag]) if (len(content) == 1): content = content[0] return content
@staticmethod def read_file(filename, tag=None): '\n Read a results file and return the content.\n\n Arguments:\n filename (str | pathlib.Path):\n tag (None | str):\n Returns:\n (list | dict): The content of the file. If tag is None, the whole file is returned, else only the\n dictionaries with that tag as a key are returned. The content will be a list of dictionaries or a\n dictionary if there is only a single element.\n ' content = [] with open(filename) as file: if (tag is None): for line in file: content.append(json.loads(line)) else: for line in file: jwd = json.loads(line) if (tag in jwd): content.append(jwd[tag]) if (len(content) == 1): content = content[0] return content<|docstring|>Read a results file and return the content. Arguments: filename (str | pathlib.Path): tag (None | str): Returns: (list | dict): The content of the file. If tag is None, the whole file is returned, else only the dictionaries with that tag as a key are returned. The content will be a list of dictionaries or a dictionary if there is only a single element.<|endoftext|>
a051ef2bb82185cbb53f3a49fdef5281a0077fd1f3b9646edd7d0f4e6c96d02d
def read(self, tag=None): ' Wrapper for the read_file method. ' return ResultsFile.read_file(self.path, tag)
Wrapper for the read_file method.
slab/psychoacoustics.py
read
jakab13/slab
7
python
def read(self, tag=None): ' ' return ResultsFile.read_file(self.path, tag)
def read(self, tag=None): ' ' return ResultsFile.read_file(self.path, tag)<|docstring|>Wrapper for the read_file method.<|endoftext|>
8969d64c1a29a0f11bcaa43fc9711ab0bb71927637d87fbc488f52505c3b3b3d
@staticmethod def previous_file(subject=None): '\n Returns the name of the most recently used results file for a given subject.\n Intended for extracting information from a previous file when running partial experiments.\n\n Arguments:\n subject (str): the subject name name under which the file is stored.\n Returns:\n (pathlib.Path): full path to the most recent results file.\n ' path = (pathlib.Path(results_folder) / pathlib.Path(subject)) files = [f for f in path.glob((subject + '*')) if f.is_file()] files.sort() return files[(- 1)]
Returns the name of the most recently used results file for a given subject. Intended for extracting information from a previous file when running partial experiments. Arguments: subject (str): the subject name name under which the file is stored. Returns: (pathlib.Path): full path to the most recent results file.
slab/psychoacoustics.py
previous_file
jakab13/slab
7
python
@staticmethod def previous_file(subject=None): '\n Returns the name of the most recently used results file for a given subject.\n Intended for extracting information from a previous file when running partial experiments.\n\n Arguments:\n subject (str): the subject name name under which the file is stored.\n Returns:\n (pathlib.Path): full path to the most recent results file.\n ' path = (pathlib.Path(results_folder) / pathlib.Path(subject)) files = [f for f in path.glob((subject + '*')) if f.is_file()] files.sort() return files[(- 1)]
@staticmethod def previous_file(subject=None): '\n Returns the name of the most recently used results file for a given subject.\n Intended for extracting information from a previous file when running partial experiments.\n\n Arguments:\n subject (str): the subject name name under which the file is stored.\n Returns:\n (pathlib.Path): full path to the most recent results file.\n ' path = (pathlib.Path(results_folder) / pathlib.Path(subject)) files = [f for f in path.glob((subject + '*')) if f.is_file()] files.sort() return files[(- 1)]<|docstring|>Returns the name of the most recently used results file for a given subject. Intended for extracting information from a previous file when running partial experiments. Arguments: subject (str): the subject name name under which the file is stored. Returns: (pathlib.Path): full path to the most recent results file.<|endoftext|>
231e209878d15a81d28dbc7762733302f61f2f410f8e6af50803d8b37c0c4039
def clear(self): ' Clears the file by erasing all content. ' with open(self.path, 'w') as file: file.write('')
Clears the file by erasing all content.
slab/psychoacoustics.py
clear
jakab13/slab
7
python
def clear(self): ' ' with open(self.path, 'w') as file: file.write()
def clear(self): ' ' with open(self.path, 'w') as file: file.write()<|docstring|>Clears the file by erasing all content.<|endoftext|>
edeab087a02c2953273f7a8cee25074961bf00d33ee8b84b54754992831ca1d2
def play(self): ' Play a random, but never the previous, stimulus from the list. ' if self.sequence: previous = self.sequence[(- 1)] else: previous = None idx = previous while (idx == previous): idx = numpy.random.randint(len(self)) self.sequence.append(idx) self[idx].play()
Play a random, but never the previous, stimulus from the list.
slab/psychoacoustics.py
play
jakab13/slab
7
python
def play(self): ' ' if self.sequence: previous = self.sequence[(- 1)] else: previous = None idx = previous while (idx == previous): idx = numpy.random.randint(len(self)) self.sequence.append(idx) self[idx].play()
def play(self): ' ' if self.sequence: previous = self.sequence[(- 1)] else: previous = None idx = previous while (idx == previous): idx = numpy.random.randint(len(self)) self.sequence.append(idx) self[idx].play()<|docstring|>Play a random, but never the previous, stimulus from the list.<|endoftext|>
b265408b06f6e11dec9603a6720398d5c45be51f83611135d723d36a5e232583
def random_choice(self, n=1): '\n Pick (without replacement) random elements from the list.\n\n Arguments:\n n (int): number of elements to pick.\n Returns:\n (list): list of n random elements.\n ' idxs = numpy.random.randint(0, len(self), size=n) return [self[i] for i in idxs]
Pick (without replacement) random elements from the list. Arguments: n (int): number of elements to pick. Returns: (list): list of n random elements.
slab/psychoacoustics.py
random_choice
jakab13/slab
7
python
def random_choice(self, n=1): '\n Pick (without replacement) random elements from the list.\n\n Arguments:\n n (int): number of elements to pick.\n Returns:\n (list): list of n random elements.\n ' idxs = numpy.random.randint(0, len(self), size=n) return [self[i] for i in idxs]
def random_choice(self, n=1): '\n Pick (without replacement) random elements from the list.\n\n Arguments:\n n (int): number of elements to pick.\n Returns:\n (list): list of n random elements.\n ' idxs = numpy.random.randint(0, len(self), size=n) return [self[i] for i in idxs]<|docstring|>Pick (without replacement) random elements from the list. Arguments: n (int): number of elements to pick. Returns: (list): list of n random elements.<|endoftext|>
eb5bc8c8563aec0dae2e016ec94aa82bc37fdc3867c1c931415bf98a33e26d4e
def write(self, filename): '\n Save the Precomputed object as a zip file containing all sounds as wav files.\n\n Arguments:\n filename (str | pathlib.Path): full path to under which the file is saved.\n ' with zipfile.ZipFile(filename, mode='a') as zip_file: for (idx, sound) in enumerate(self): f = io.BytesIO() sound.write(f) f.seek(0) zip_file.writestr(f's_{idx}.wav', f.read()) f.close()
Save the Precomputed object as a zip file containing all sounds as wav files. Arguments: filename (str | pathlib.Path): full path to under which the file is saved.
slab/psychoacoustics.py
write
jakab13/slab
7
python
def write(self, filename): '\n Save the Precomputed object as a zip file containing all sounds as wav files.\n\n Arguments:\n filename (str | pathlib.Path): full path to under which the file is saved.\n ' with zipfile.ZipFile(filename, mode='a') as zip_file: for (idx, sound) in enumerate(self): f = io.BytesIO() sound.write(f) f.seek(0) zip_file.writestr(f's_{idx}.wav', f.read()) f.close()
def write(self, filename): '\n Save the Precomputed object as a zip file containing all sounds as wav files.\n\n Arguments:\n filename (str | pathlib.Path): full path to under which the file is saved.\n ' with zipfile.ZipFile(filename, mode='a') as zip_file: for (idx, sound) in enumerate(self): f = io.BytesIO() sound.write(f) f.seek(0) zip_file.writestr(f's_{idx}.wav', f.read()) f.close()<|docstring|>Save the Precomputed object as a zip file containing all sounds as wav files. Arguments: filename (str | pathlib.Path): full path to under which the file is saved.<|endoftext|>
799570b77c2d18ec63b9e30917b1a3efd79663e92ba4b8754a11bd1087e18051
@staticmethod def read(filename): '\n Read a zip file containing wav files.\n\n Arguments:\n filename (str | pathlib.Path): full path to the file to be read.\n Returns:\n (slab.Precomputed): the file content.\n ' stims = Precomputed([]) with zipfile.ZipFile(filename, 'r') as zipped: files = zipped.namelist() for file in files: wav_bytes = zipped.read(file) stims.append(slab.Sound.read(io.BytesIO(wav_bytes))) return stims
Read a zip file containing wav files. Arguments: filename (str | pathlib.Path): full path to the file to be read. Returns: (slab.Precomputed): the file content.
slab/psychoacoustics.py
read
jakab13/slab
7
python
@staticmethod def read(filename): '\n Read a zip file containing wav files.\n\n Arguments:\n filename (str | pathlib.Path): full path to the file to be read.\n Returns:\n (slab.Precomputed): the file content.\n ' stims = Precomputed([]) with zipfile.ZipFile(filename, 'r') as zipped: files = zipped.namelist() for file in files: wav_bytes = zipped.read(file) stims.append(slab.Sound.read(io.BytesIO(wav_bytes))) return stims
@staticmethod def read(filename): '\n Read a zip file containing wav files.\n\n Arguments:\n filename (str | pathlib.Path): full path to the file to be read.\n Returns:\n (slab.Precomputed): the file content.\n ' stims = Precomputed([]) with zipfile.ZipFile(filename, 'r') as zipped: files = zipped.namelist() for file in files: wav_bytes = zipped.read(file) stims.append(slab.Sound.read(io.BytesIO(wav_bytes))) return stims<|docstring|>Read a zip file containing wav files. Arguments: filename (str | pathlib.Path): full path to the file to be read. Returns: (slab.Precomputed): the file content.<|endoftext|>
105c3528ed18eb0840d2f59d49c672bb6efe6a4ef6e47d69462f8e3282228587
def t_unk() -> TestShaped: '\n Creates an object with an unknown shape, for testing.\n ' return TestShaped(None)
Creates an object with an unknown shape, for testing.
tests/gpflow/experimental/check_shapes/utils.py
t_unk
joelberkeley/GPflow
0
python
def t_unk() -> TestShaped: '\n \n ' return TestShaped(None)
def t_unk() -> TestShaped: '\n \n ' return TestShaped(None)<|docstring|>Creates an object with an unknown shape, for testing.<|endoftext|>