body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
ea60013c7b755352106a9e077d92dd5bb6058f20c9efb74e275435defcfc55b6
def GetNShortestLeases(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
GetNShortestLeases
hhio618/bluezelle-py
3
python
def GetNShortestLeases(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def GetNShortestLeases(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
4054b60e13f3f5d1b2c69233d11239e96b2c724cf3f73494b558c4913b90cdb2
def Keys(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Keys
hhio618/bluezelle-py
3
python
def Keys(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Keys(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
3247a68e132c65a63f165a959ebbe71760060304f38fb54ee05995ec6a2efc83
def Rename(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Rename
hhio618/bluezelle-py
3
python
def Rename(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Rename(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
d16fb6840dfe535dee155b2012540373942640ffff6c1a5af4a50c0342215d0e
def MultiUpdate(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
MultiUpdate
hhio618/bluezelle-py
3
python
def MultiUpdate(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def MultiUpdate(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
858be8bc50d441b7623904f4cd46ae09725374b5e53b1c9a58a8ec3ae875c1d6
def DeleteAll(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
DeleteAll
hhio618/bluezelle-py
3
python
def DeleteAll(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def DeleteAll(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
7ca24f652d178fdf4c28035c02fc186461b8f1923cc6436afa404ae2b5c16c21
def KeyValues(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
KeyValues
hhio618/bluezelle-py
3
python
def KeyValues(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def KeyValues(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
e28c520506eb6a256b9c630f0252cb22aa1d1a704de585575b97de40d0a5b668
def Has(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Has
hhio618/bluezelle-py
3
python
def Has(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Has(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
9e70d92d49b9a21c614c136809aa3da38247fbe38403c973d492a9cddaf1fd32
def GetLease(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
GetLease
hhio618/bluezelle-py
3
python
def GetLease(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def GetLease(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
3764ebe6633c5e906b54eaf37debaa3e5b3f744d8e896c7ff64db17b73751ee2
def Read(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Read
hhio618/bluezelle-py
3
python
def Read(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Read(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
2d46e9c475ada9fc2889c292053a0cf453cd3c31c291583d37d2014a88dad334
def Upsert(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Upsert
hhio618/bluezelle-py
3
python
def Upsert(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Upsert(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
6c7969e831e90ee079d0466f425a66437c4e2a3898c0945e366e919393297007
def Create(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Create
hhio618/bluezelle-py
3
python
def Create(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Create(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
00f0e0ea37338e2a1b4c04cceeebb14ea9fe9a8ad6708d54602334f231b084bf
def Update(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Update
hhio618/bluezelle-py
3
python
def Update(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Update(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
2ade8e4307c9fb2598b1dd7f197970c1573dcdd054acfe4c4dcdf50283a83b15
def Delete(self, request, context): 'Missing associated documentation comment in .proto file.' context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
bluzelle/codec/crud/tx_pb2_grpc.py
Delete
hhio618/bluezelle-py
3
python
def Delete(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
def Delete(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')<|docstring|>Missing associated documentation comment in .proto file.<|endoftext|>
b38b2d60bd1120c273f33383aec02bd185e0e2659aa9d375645d53d4f46747e4
def py_func_import(py_func, kwargs): 'Imports and executes the function py_func.' (path, create_function) = py_func.rsplit('.', 1) logging.info('Importing path %s', path) mod = importlib.import_module(path) met = getattr(mod, create_function) return met(**kwargs)
Imports and executes the function py_func.
py/kubeflow/testing/run_e2e_workflow.py
py_func_import
fediazgon/testing
0
python
def py_func_import(py_func, kwargs): (path, create_function) = py_func.rsplit('.', 1) logging.info('Importing path %s', path) mod = importlib.import_module(path) met = getattr(mod, create_function) return met(**kwargs)
def py_func_import(py_func, kwargs): (path, create_function) = py_func.rsplit('.', 1) logging.info('Importing path %s', path) mod = importlib.import_module(path) met = getattr(mod, create_function) return met(**kwargs)<|docstring|>Imports and executes the function py_func.<|endoftext|>
7a68b469a20e351badd17e3c9727b7b90576e3ce1e641c14f21d5955860d4fa1
def create_started_file(bucket, ui_urls): 'Create the started file in gcs for gubernator.' contents = prow_artifacts.create_started(ui_urls) target = os.path.join(prow_artifacts.get_gcs_dir(bucket), 'started.json') util.upload_to_gcs(contents, target)
Create the started file in gcs for gubernator.
py/kubeflow/testing/run_e2e_workflow.py
create_started_file
fediazgon/testing
0
python
def create_started_file(bucket, ui_urls): contents = prow_artifacts.create_started(ui_urls) target = os.path.join(prow_artifacts.get_gcs_dir(bucket), 'started.json') util.upload_to_gcs(contents, target)
def create_started_file(bucket, ui_urls): contents = prow_artifacts.create_started(ui_urls) target = os.path.join(prow_artifacts.get_gcs_dir(bucket), 'started.json') util.upload_to_gcs(contents, target)<|docstring|>Create the started file in gcs for gubernator.<|endoftext|>
95003aa53b9eaab13d2ec8249ea9b1a5984d8382403ce46c77440a2f442f5b47
def request(url, payload=None, method='get'): 'Simple request wrapper\n\n Takes a couple of variables and wraps around the requests\n module\n\n Args:\n url: API URL\n method: Query method (default: {"get"})\n payload: JSON payload (default: {None})\n\n Returns:\n Dataset as result from query\n JSON Object\n ' r = None if (payload is not None): r = requests.post((base_api + url), headers=ctrlr['headers'], json=payload) elif (method == 'get'): r = requests.get((base_api + url), headers=ctrlr['headers']) elif (method == 'delete'): r = requests.delete((base_api + url), headers=ctrlr['headers']) return r.json()
Simple request wrapper Takes a couple of variables and wraps around the requests module Args: url: API URL method: Query method (default: {"get"}) payload: JSON payload (default: {None}) Returns: Dataset as result from query JSON Object
security/zerotier-nc.py
request
Ismail774403783/-
1
python
def request(url, payload=None, method='get'): 'Simple request wrapper\n\n Takes a couple of variables and wraps around the requests\n module\n\n Args:\n url: API URL\n method: Query method (default: {"get"})\n payload: JSON payload (default: {None})\n\n Returns:\n Dataset as result from query\n JSON Object\n ' r = None if (payload is not None): r = requests.post((base_api + url), headers=ctrlr['headers'], json=payload) elif (method == 'get'): r = requests.get((base_api + url), headers=ctrlr['headers']) elif (method == 'delete'): r = requests.delete((base_api + url), headers=ctrlr['headers']) return r.json()
def request(url, payload=None, method='get'): 'Simple request wrapper\n\n Takes a couple of variables and wraps around the requests\n module\n\n Args:\n url: API URL\n method: Query method (default: {"get"})\n payload: JSON payload (default: {None})\n\n Returns:\n Dataset as result from query\n JSON Object\n ' r = None if (payload is not None): r = requests.post((base_api + url), headers=ctrlr['headers'], json=payload) elif (method == 'get'): r = requests.get((base_api + url), headers=ctrlr['headers']) elif (method == 'delete'): r = requests.delete((base_api + url), headers=ctrlr['headers']) return r.json()<|docstring|>Simple request wrapper Takes a couple of variables and wraps around the requests module Args: url: API URL method: Query method (default: {"get"}) payload: JSON payload (default: {None}) Returns: Dataset as result from query JSON Object<|endoftext|>
7024d94fc5246f312e45d25e1fab7ecb452c6ea58ab01deccf325b4e3b56e568
def get_filepath(): 'Get filepath according to OS' if (platform.system() == 'Linux'): return '/var/lib/zerotier-one' elif (platform.system() == 'Darwin'): return '/Library/Application Support/ZeroTier/One' elif ((platform.system() == 'FreeBSD') or (platform.system() == 'OpenBSD')): return '/var/db/zerotier-one' elif (platform.system() == 'Windows'): return 'C:\\ProgramData\\ZeroTier\\One'
Get filepath according to OS
security/zerotier-nc.py
get_filepath
Ismail774403783/-
1
python
def get_filepath(): if (platform.system() == 'Linux'): return '/var/lib/zerotier-one' elif (platform.system() == 'Darwin'): return '/Library/Application Support/ZeroTier/One' elif ((platform.system() == 'FreeBSD') or (platform.system() == 'OpenBSD')): return '/var/db/zerotier-one' elif (platform.system() == 'Windows'): return 'C:\\ProgramData\\ZeroTier\\One'
def get_filepath(): if (platform.system() == 'Linux'): return '/var/lib/zerotier-one' elif (platform.system() == 'Darwin'): return '/Library/Application Support/ZeroTier/One' elif ((platform.system() == 'FreeBSD') or (platform.system() == 'OpenBSD')): return '/var/db/zerotier-one' elif (platform.system() == 'Windows'): return 'C:\\ProgramData\\ZeroTier\\One'<|docstring|>Get filepath according to OS<|endoftext|>
5b063b0cbf35e2cd855d94865811fc6cc1229b7c455808456db5bbd0cbe29459
def set_headers(): 'Sets authentication headers globally\n\n Automatically detect system and reads authtoken.secret\n to set authenticaiton headers used in request method\n globally.\n ' with open((get_filepath() + '/authtoken.secret')) as file: ctrlr['headers'] = {'X-ZT1-Auth': file.read()}
Sets authentication headers globally Automatically detect system and reads authtoken.secret to set authenticaiton headers used in request method globally.
security/zerotier-nc.py
set_headers
Ismail774403783/-
1
python
def set_headers(): 'Sets authentication headers globally\n\n Automatically detect system and reads authtoken.secret\n to set authenticaiton headers used in request method\n globally.\n ' with open((get_filepath() + '/authtoken.secret')) as file: ctrlr['headers'] = {'X-ZT1-Auth': file.read()}
def set_headers(): 'Sets authentication headers globally\n\n Automatically detect system and reads authtoken.secret\n to set authenticaiton headers used in request method\n globally.\n ' with open((get_filepath() + '/authtoken.secret')) as file: ctrlr['headers'] = {'X-ZT1-Auth': file.read()}<|docstring|>Sets authentication headers globally Automatically detect system and reads authtoken.secret to set authenticaiton headers used in request method globally.<|endoftext|>
7b54341c2874ecdef703da2477611fb9bd658afdbc9cc3b968dffae8306c1636
def rotation_matrix(u, theta): 'Return matrix that implements the rotation around the vector :math:`u`\n by the angle :math:`\\theta`, cf.\n https://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle.\n\n :param u: rotation vector\n :param theta: rotation angle\n ' cpm = numpy.array([[0.0, (- u[2]), u[1]], [u[2], 0.0, (- u[0])], [(- u[1]), u[0], 0.0]]) c = numpy.cos(theta) s = numpy.sin(theta) R = (((numpy.eye(3) * c) + (s * cpm)) + ((1.0 - c) * numpy.outer(u, u))) return R
Return matrix that implements the rotation around the vector :math:`u` by the angle :math:`\theta`, cf. https://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle. :param u: rotation vector :param theta: rotation angle
nanopores/py4gmsh/extra.py
rotation_matrix
mitschabaude/nanopores
8
python
def rotation_matrix(u, theta): 'Return matrix that implements the rotation around the vector :math:`u`\n by the angle :math:`\\theta`, cf.\n https://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle.\n\n :param u: rotation vector\n :param theta: rotation angle\n ' cpm = numpy.array([[0.0, (- u[2]), u[1]], [u[2], 0.0, (- u[0])], [(- u[1]), u[0], 0.0]]) c = numpy.cos(theta) s = numpy.sin(theta) R = (((numpy.eye(3) * c) + (s * cpm)) + ((1.0 - c) * numpy.outer(u, u))) return R
def rotation_matrix(u, theta): 'Return matrix that implements the rotation around the vector :math:`u`\n by the angle :math:`\\theta`, cf.\n https://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle.\n\n :param u: rotation vector\n :param theta: rotation angle\n ' cpm = numpy.array([[0.0, (- u[2]), u[1]], [u[2], 0.0, (- u[0])], [(- u[1]), u[0], 0.0]]) c = numpy.cos(theta) s = numpy.sin(theta) R = (((numpy.eye(3) * c) + (s * cpm)) + ((1.0 - c) * numpy.outer(u, u))) return R<|docstring|>Return matrix that implements the rotation around the vector :math:`u` by the angle :math:`\theta`, cf. https://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle. :param u: rotation vector :param theta: rotation angle<|endoftext|>
f2dc4d03b1814418ecc4fa6d912a66233c8024b75d6c68b959a49f2a4c75e521
def add_circle(radius, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), compound=False, num_sections=3): 'Add circle in the :math:`y`-:math:`z`-plane.\n ' X = [[0.0, 0.0, 0.0]] if (num_sections == 4): X = [[0.0, 0.0, 0.0], [0.0, radius, 0.0], [0.0, 0.0, radius], [0.0, (- radius), 0.0], [0.0, 0.0, (- radius)]] else: for k in range(num_sections): alpha = (((2 * numpy.pi) * k) / num_sections) X.append([0.0, (radius * numpy.cos(alpha)), (radius * numpy.sin(alpha))]) X = [(numpy.dot(R, x) + x0) for x in X] Comment('Points') p = [Point(x, lcar) for x in X] Comment('Circle arcs') c = [] for k in range(1, (len(p) - 1)): c.append(Circle([p[k], p[0], p[(k + 1)]])) c.append(Circle([p[(- 1)], p[0], p[1]])) if compound: c = [CompoundLine(c)] return c
Add circle in the :math:`y`-:math:`z`-plane.
nanopores/py4gmsh/extra.py
add_circle
mitschabaude/nanopores
8
python
def add_circle(radius, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), compound=False, num_sections=3): '\n ' X = [[0.0, 0.0, 0.0]] if (num_sections == 4): X = [[0.0, 0.0, 0.0], [0.0, radius, 0.0], [0.0, 0.0, radius], [0.0, (- radius), 0.0], [0.0, 0.0, (- radius)]] else: for k in range(num_sections): alpha = (((2 * numpy.pi) * k) / num_sections) X.append([0.0, (radius * numpy.cos(alpha)), (radius * numpy.sin(alpha))]) X = [(numpy.dot(R, x) + x0) for x in X] Comment('Points') p = [Point(x, lcar) for x in X] Comment('Circle arcs') c = [] for k in range(1, (len(p) - 1)): c.append(Circle([p[k], p[0], p[(k + 1)]])) c.append(Circle([p[(- 1)], p[0], p[1]])) if compound: c = [CompoundLine(c)] return c
def add_circle(radius, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), compound=False, num_sections=3): '\n ' X = [[0.0, 0.0, 0.0]] if (num_sections == 4): X = [[0.0, 0.0, 0.0], [0.0, radius, 0.0], [0.0, 0.0, radius], [0.0, (- radius), 0.0], [0.0, 0.0, (- radius)]] else: for k in range(num_sections): alpha = (((2 * numpy.pi) * k) / num_sections) X.append([0.0, (radius * numpy.cos(alpha)), (radius * numpy.sin(alpha))]) X = [(numpy.dot(R, x) + x0) for x in X] Comment('Points') p = [Point(x, lcar) for x in X] Comment('Circle arcs') c = [] for k in range(1, (len(p) - 1)): c.append(Circle([p[k], p[0], p[(k + 1)]])) c.append(Circle([p[(- 1)], p[0], p[1]])) if compound: c = [CompoundLine(c)] return c<|docstring|>Add circle in the :math:`y`-:math:`z`-plane.<|endoftext|>
9a5341a780bb29af76bedc07f17e750ccd2873f560a95ebf2de35d4cd295cee7
def add_ball(x0, radius, lcar, with_volume=True, holes=None, label=None): 'Creates a ball with a given radius around a given midpoint :math:`x_0`.\n ' if (holes is None): holes = [] p = [Point(x0, lcar=lcar), Point([(x0[0] + radius), x0[1], x0[2]], lcar=lcar), Point([x0[0], (x0[1] + radius), x0[2]], lcar=lcar), Point([x0[0], x0[1], (x0[2] + radius)], lcar=lcar), Point([(x0[0] - radius), x0[1], x0[2]], lcar=lcar), Point([x0[0], (x0[1] - radius), x0[2]], lcar=lcar), Point([x0[0], x0[1], (x0[2] - radius)], lcar=lcar)] c = [Circle([p[1], p[0], p[6]]), Circle([p[6], p[0], p[4]]), Circle([p[4], p[0], p[3]]), Circle([p[3], p[0], p[1]]), Circle([p[1], p[0], p[2]]), Circle([p[2], p[0], p[4]]), Circle([p[4], p[0], p[5]]), Circle([p[5], p[0], p[1]]), Circle([p[6], p[0], p[2]]), Circle([p[2], p[0], p[3]]), Circle([p[3], p[0], p[5]]), Circle([p[5], p[0], p[6]])] ll = [LineLoop([c[4], c[9], c[3]]), LineLoop([c[8], ('-' + c[4]), c[0]]), LineLoop([c[11], ('-' + c[7]), ('-' + c[0])]), LineLoop([c[7], ('-' + c[3]), c[10]]), LineLoop([('-' + c[9]), c[5], c[2]]), LineLoop([('-' + c[10]), ('-' + c[2]), c[6]]), LineLoop([('-' + c[1]), ('-' + c[6]), ('-' + c[11])]), LineLoop([('-' + c[5]), ('-' + c[8]), c[1]])] s = [RuledSurface(l) for l in ll] surface_loop = SurfaceLoop(s) if holes: surface_loop = Array(([surface_loop] + holes)) if with_volume: volume = Volume(surface_loop) if label: PhysicalVolume(volume, label) else: volume = None return (volume, surface_loop, s)
Creates a ball with a given radius around a given midpoint :math:`x_0`.
nanopores/py4gmsh/extra.py
add_ball
mitschabaude/nanopores
8
python
def add_ball(x0, radius, lcar, with_volume=True, holes=None, label=None): '\n ' if (holes is None): holes = [] p = [Point(x0, lcar=lcar), Point([(x0[0] + radius), x0[1], x0[2]], lcar=lcar), Point([x0[0], (x0[1] + radius), x0[2]], lcar=lcar), Point([x0[0], x0[1], (x0[2] + radius)], lcar=lcar), Point([(x0[0] - radius), x0[1], x0[2]], lcar=lcar), Point([x0[0], (x0[1] - radius), x0[2]], lcar=lcar), Point([x0[0], x0[1], (x0[2] - radius)], lcar=lcar)] c = [Circle([p[1], p[0], p[6]]), Circle([p[6], p[0], p[4]]), Circle([p[4], p[0], p[3]]), Circle([p[3], p[0], p[1]]), Circle([p[1], p[0], p[2]]), Circle([p[2], p[0], p[4]]), Circle([p[4], p[0], p[5]]), Circle([p[5], p[0], p[1]]), Circle([p[6], p[0], p[2]]), Circle([p[2], p[0], p[3]]), Circle([p[3], p[0], p[5]]), Circle([p[5], p[0], p[6]])] ll = [LineLoop([c[4], c[9], c[3]]), LineLoop([c[8], ('-' + c[4]), c[0]]), LineLoop([c[11], ('-' + c[7]), ('-' + c[0])]), LineLoop([c[7], ('-' + c[3]), c[10]]), LineLoop([('-' + c[9]), c[5], c[2]]), LineLoop([('-' + c[10]), ('-' + c[2]), c[6]]), LineLoop([('-' + c[1]), ('-' + c[6]), ('-' + c[11])]), LineLoop([('-' + c[5]), ('-' + c[8]), c[1]])] s = [RuledSurface(l) for l in ll] surface_loop = SurfaceLoop(s) if holes: surface_loop = Array(([surface_loop] + holes)) if with_volume: volume = Volume(surface_loop) if label: PhysicalVolume(volume, label) else: volume = None return (volume, surface_loop, s)
def add_ball(x0, radius, lcar, with_volume=True, holes=None, label=None): '\n ' if (holes is None): holes = [] p = [Point(x0, lcar=lcar), Point([(x0[0] + radius), x0[1], x0[2]], lcar=lcar), Point([x0[0], (x0[1] + radius), x0[2]], lcar=lcar), Point([x0[0], x0[1], (x0[2] + radius)], lcar=lcar), Point([(x0[0] - radius), x0[1], x0[2]], lcar=lcar), Point([x0[0], (x0[1] - radius), x0[2]], lcar=lcar), Point([x0[0], x0[1], (x0[2] - radius)], lcar=lcar)] c = [Circle([p[1], p[0], p[6]]), Circle([p[6], p[0], p[4]]), Circle([p[4], p[0], p[3]]), Circle([p[3], p[0], p[1]]), Circle([p[1], p[0], p[2]]), Circle([p[2], p[0], p[4]]), Circle([p[4], p[0], p[5]]), Circle([p[5], p[0], p[1]]), Circle([p[6], p[0], p[2]]), Circle([p[2], p[0], p[3]]), Circle([p[3], p[0], p[5]]), Circle([p[5], p[0], p[6]])] ll = [LineLoop([c[4], c[9], c[3]]), LineLoop([c[8], ('-' + c[4]), c[0]]), LineLoop([c[11], ('-' + c[7]), ('-' + c[0])]), LineLoop([c[7], ('-' + c[3]), c[10]]), LineLoop([('-' + c[9]), c[5], c[2]]), LineLoop([('-' + c[10]), ('-' + c[2]), c[6]]), LineLoop([('-' + c[1]), ('-' + c[6]), ('-' + c[11])]), LineLoop([('-' + c[5]), ('-' + c[8]), c[1]])] s = [RuledSurface(l) for l in ll] surface_loop = SurfaceLoop(s) if holes: surface_loop = Array(([surface_loop] + holes)) if with_volume: volume = Volume(surface_loop) if label: PhysicalVolume(volume, label) else: volume = None return (volume, surface_loop, s)<|docstring|>Creates a ball with a given radius around a given midpoint :math:`x_0`.<|endoftext|>
1b765b8e9737af58039ba1cdac2777a140f042f7905c33cd848fef90ba1ed0fe
def add_torus(irad, orad, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None): 'Create Gmsh code for the torus under the coordinate transformation\n\n .. math::\n \\hat{x} = R x + x_0.\n\n :param irad: inner radius of the torus\n :param orad: outer radius of the torus\n ' Comment((76 * '-')) Comment('Torus') x0t = numpy.dot(R, numpy.array([0.0, orad, 0.0])) c = add_circle(irad, lcar, R=R, x0=(x0 + x0t)) rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = c angle = '2*Pi/3' all_names = [] for i in range(3): Comment(('Round no. %s' % (i + 1))) for k in range(len(previous)): tmp_name = Extrude(('Line{%s}' % previous[k]), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle) all_names.append(tmp_name) previous[k] = (tmp_name + '[0]') all_surfaces = [(name + '[1]') for name in all_names] surface_loop = SurfaceLoop(all_surfaces) vol = Volume(surface_loop) if label: PhysicalVolume(vol, label) Comment((76 * '-')) return
Create Gmsh code for the torus under the coordinate transformation .. math:: \hat{x} = R x + x_0. :param irad: inner radius of the torus :param orad: outer radius of the torus
nanopores/py4gmsh/extra.py
add_torus
mitschabaude/nanopores
8
python
def add_torus(irad, orad, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None): 'Create Gmsh code for the torus under the coordinate transformation\n\n .. math::\n \\hat{x} = R x + x_0.\n\n :param irad: inner radius of the torus\n :param orad: outer radius of the torus\n ' Comment((76 * '-')) Comment('Torus') x0t = numpy.dot(R, numpy.array([0.0, orad, 0.0])) c = add_circle(irad, lcar, R=R, x0=(x0 + x0t)) rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = c angle = '2*Pi/3' all_names = [] for i in range(3): Comment(('Round no. %s' % (i + 1))) for k in range(len(previous)): tmp_name = Extrude(('Line{%s}' % previous[k]), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle) all_names.append(tmp_name) previous[k] = (tmp_name + '[0]') all_surfaces = [(name + '[1]') for name in all_names] surface_loop = SurfaceLoop(all_surfaces) vol = Volume(surface_loop) if label: PhysicalVolume(vol, label) Comment((76 * '-')) return
def add_torus(irad, orad, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None): 'Create Gmsh code for the torus under the coordinate transformation\n\n .. math::\n \\hat{x} = R x + x_0.\n\n :param irad: inner radius of the torus\n :param orad: outer radius of the torus\n ' Comment((76 * '-')) Comment('Torus') x0t = numpy.dot(R, numpy.array([0.0, orad, 0.0])) c = add_circle(irad, lcar, R=R, x0=(x0 + x0t)) rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = c angle = '2*Pi/3' all_names = [] for i in range(3): Comment(('Round no. %s' % (i + 1))) for k in range(len(previous)): tmp_name = Extrude(('Line{%s}' % previous[k]), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle) all_names.append(tmp_name) previous[k] = (tmp_name + '[0]') all_surfaces = [(name + '[1]') for name in all_names] surface_loop = SurfaceLoop(all_surfaces) vol = Volume(surface_loop) if label: PhysicalVolume(vol, label) Comment((76 * '-')) return<|docstring|>Create Gmsh code for the torus under the coordinate transformation .. math:: \hat{x} = R x + x_0. :param irad: inner radius of the torus :param orad: outer radius of the torus<|endoftext|>
a08aa5d50990d9e126b01de4d3ae18079b736567a66802753cbfc88962ec9625
def add_torus2(irad, orad, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None): 'Create Gmsh code for the torus under the coordinate transformation\n\n .. math::\n \\hat{x} = R x + x_0.\n\n :param irad: inner radius of the torus\n :param orad: outer radius of the torus\n ' Comment((76 * '-')) Comment('Torus') x0t = numpy.dot(R, numpy.array([0.0, orad, 0.0])) c = add_circle(irad, lcar, R=R, x0=(x0 + x0t)) ll = LineLoop(c) s = PlaneSurface(ll) rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = s all_names = [] num_steps = 3 for _ in range(num_steps): tmp_name = Extrude(('Surface{%s}' % previous), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=('2*Pi/%d' % num_steps)) previous = (tmp_name + '[0]') all_names.append(tmp_name) all_volumes = [(name + '[1]') for name in all_names] vol = CompoundVolume(all_volumes) if label: PhysicalVolume(vol, label) Comment((76 * '-')) return
Create Gmsh code for the torus under the coordinate transformation .. math:: \hat{x} = R x + x_0. :param irad: inner radius of the torus :param orad: outer radius of the torus
nanopores/py4gmsh/extra.py
add_torus2
mitschabaude/nanopores
8
python
def add_torus2(irad, orad, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None): 'Create Gmsh code for the torus under the coordinate transformation\n\n .. math::\n \\hat{x} = R x + x_0.\n\n :param irad: inner radius of the torus\n :param orad: outer radius of the torus\n ' Comment((76 * '-')) Comment('Torus') x0t = numpy.dot(R, numpy.array([0.0, orad, 0.0])) c = add_circle(irad, lcar, R=R, x0=(x0 + x0t)) ll = LineLoop(c) s = PlaneSurface(ll) rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = s all_names = [] num_steps = 3 for _ in range(num_steps): tmp_name = Extrude(('Surface{%s}' % previous), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=('2*Pi/%d' % num_steps)) previous = (tmp_name + '[0]') all_names.append(tmp_name) all_volumes = [(name + '[1]') for name in all_names] vol = CompoundVolume(all_volumes) if label: PhysicalVolume(vol, label) Comment((76 * '-')) return
def add_torus2(irad, orad, lcar, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None): 'Create Gmsh code for the torus under the coordinate transformation\n\n .. math::\n \\hat{x} = R x + x_0.\n\n :param irad: inner radius of the torus\n :param orad: outer radius of the torus\n ' Comment((76 * '-')) Comment('Torus') x0t = numpy.dot(R, numpy.array([0.0, orad, 0.0])) c = add_circle(irad, lcar, R=R, x0=(x0 + x0t)) ll = LineLoop(c) s = PlaneSurface(ll) rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = s all_names = [] num_steps = 3 for _ in range(num_steps): tmp_name = Extrude(('Surface{%s}' % previous), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=('2*Pi/%d' % num_steps)) previous = (tmp_name + '[0]') all_names.append(tmp_name) all_volumes = [(name + '[1]') for name in all_names] vol = CompoundVolume(all_volumes) if label: PhysicalVolume(vol, label) Comment((76 * '-')) return<|docstring|>Create Gmsh code for the torus under the coordinate transformation .. math:: \hat{x} = R x + x_0. :param irad: inner radius of the torus :param orad: outer radius of the torus<|endoftext|>
863fe965d77b3928c013135d64b61eee6a05401024d4c17485cefac7e2173510
def add_pipe(outer_radius, inner_radius, length, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None, lcar=0.1): 'Hollow cylinder.\n Define a rectangle, extrude it by rotation.\n ' Comment('Define rectangle.') X = numpy.array([[0.0, outer_radius, ((- 0.5) * length)], [0.0, outer_radius, (0.5 * length)], [0.0, inner_radius, (0.5 * length)], [0.0, inner_radius, ((- 0.5) * length)]]) X = [(numpy.dot(R, x) + x0) for x in X] p = [Point(x, lcar) for x in X] e = [Line(p[0], p[1]), Line(p[1], p[2]), Line(p[2], p[3]), Line(p[3], p[0])] rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = e angle = '2*Pi/3' all_names = [] Comment('Extrude in 3 steps.') for i in range(3): Comment(('Step %s' % (i + 1))) for k in range(len(previous)): name = Extrude(('Line{%s}' % previous[k]), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle) all_names.append((name + '[1]')) previous[k] = (name + '[0]') all_surfaces = all_names surface_loop = SurfaceLoop(all_surfaces) vol = Volume(surface_loop) if label: PhysicalVolume(vol, label) return
Hollow cylinder. Define a rectangle, extrude it by rotation.
nanopores/py4gmsh/extra.py
add_pipe
mitschabaude/nanopores
8
python
def add_pipe(outer_radius, inner_radius, length, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None, lcar=0.1): 'Hollow cylinder.\n Define a rectangle, extrude it by rotation.\n ' Comment('Define rectangle.') X = numpy.array([[0.0, outer_radius, ((- 0.5) * length)], [0.0, outer_radius, (0.5 * length)], [0.0, inner_radius, (0.5 * length)], [0.0, inner_radius, ((- 0.5) * length)]]) X = [(numpy.dot(R, x) + x0) for x in X] p = [Point(x, lcar) for x in X] e = [Line(p[0], p[1]), Line(p[1], p[2]), Line(p[2], p[3]), Line(p[3], p[0])] rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = e angle = '2*Pi/3' all_names = [] Comment('Extrude in 3 steps.') for i in range(3): Comment(('Step %s' % (i + 1))) for k in range(len(previous)): name = Extrude(('Line{%s}' % previous[k]), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle) all_names.append((name + '[1]')) previous[k] = (name + '[0]') all_surfaces = all_names surface_loop = SurfaceLoop(all_surfaces) vol = Volume(surface_loop) if label: PhysicalVolume(vol, label) return
def add_pipe(outer_radius, inner_radius, length, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None, lcar=0.1): 'Hollow cylinder.\n Define a rectangle, extrude it by rotation.\n ' Comment('Define rectangle.') X = numpy.array([[0.0, outer_radius, ((- 0.5) * length)], [0.0, outer_radius, (0.5 * length)], [0.0, inner_radius, (0.5 * length)], [0.0, inner_radius, ((- 0.5) * length)]]) X = [(numpy.dot(R, x) + x0) for x in X] p = [Point(x, lcar) for x in X] e = [Line(p[0], p[1]), Line(p[1], p[2]), Line(p[2], p[3]), Line(p[3], p[0])] rot_axis = [0.0, 0.0, 1.0] rot_axis = numpy.dot(R, rot_axis) point_on_rot_axis = [0.0, 0.0, 0.0] point_on_rot_axis = (numpy.dot(R, point_on_rot_axis) + x0) previous = e angle = '2*Pi/3' all_names = [] Comment('Extrude in 3 steps.') for i in range(3): Comment(('Step %s' % (i + 1))) for k in range(len(previous)): name = Extrude(('Line{%s}' % previous[k]), rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle) all_names.append((name + '[1]')) previous[k] = (name + '[0]') all_surfaces = all_names surface_loop = SurfaceLoop(all_surfaces) vol = Volume(surface_loop) if label: PhysicalVolume(vol, label) return<|docstring|>Hollow cylinder. Define a rectangle, extrude it by rotation.<|endoftext|>
34250d826df39346ee48e4d07bf373d22f55d90edc673022ae965a453449fa81
def add_pipe2(outer_radius, inner_radius, length, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None, lcar=0.1): 'Hollow cylinder.\n Define a ring, extrude it by translation.\n ' c_inner = add_circle(inner_radius, lcar, R=R, x0=x0) ll_inner = LineLoop(c_inner) c_outer = add_circle(outer_radius, lcar, R=R, x0=x0) ll_outer = LineLoop(c_outer) surf = PlaneSurface(','.join([ll_outer, ll_inner])) name = Extrude(('Surface{%s}' % surf), translation_axis=[length, 0, 0]) vol = (name + '[0]') if label: PhysicalVolume(vol, label) return vol
Hollow cylinder. Define a ring, extrude it by translation.
nanopores/py4gmsh/extra.py
add_pipe2
mitschabaude/nanopores
8
python
def add_pipe2(outer_radius, inner_radius, length, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None, lcar=0.1): 'Hollow cylinder.\n Define a ring, extrude it by translation.\n ' c_inner = add_circle(inner_radius, lcar, R=R, x0=x0) ll_inner = LineLoop(c_inner) c_outer = add_circle(outer_radius, lcar, R=R, x0=x0) ll_outer = LineLoop(c_outer) surf = PlaneSurface(','.join([ll_outer, ll_inner])) name = Extrude(('Surface{%s}' % surf), translation_axis=[length, 0, 0]) vol = (name + '[0]') if label: PhysicalVolume(vol, label) return vol
def add_pipe2(outer_radius, inner_radius, length, R=numpy.eye(3), x0=numpy.array([0.0, 0.0, 0.0]), label=None, lcar=0.1): 'Hollow cylinder.\n Define a ring, extrude it by translation.\n ' c_inner = add_circle(inner_radius, lcar, R=R, x0=x0) ll_inner = LineLoop(c_inner) c_outer = add_circle(outer_radius, lcar, R=R, x0=x0) ll_outer = LineLoop(c_outer) surf = PlaneSurface(','.join([ll_outer, ll_inner])) name = Extrude(('Surface{%s}' % surf), translation_axis=[length, 0, 0]) vol = (name + '[0]') if label: PhysicalVolume(vol, label) return vol<|docstring|>Hollow cylinder. Define a ring, extrude it by translation.<|endoftext|>
a2681365f1172a14496b9c25b85fa3b03cab5eba870c3c8a13af41fd77061930
def strStr(self, haystack, needle): '\n :type haystack: str\n :type needle: str\n :rtype: int\n ' if (not needle): return 0 for i in range(len(haystack)): if ((haystack[i] == needle[0]) and (haystack[i:(i + len(needle))] == needle) and ((i + len(needle)) < len(haystack))): return i return (- 1)
:type haystack: str :type needle: str :rtype: int
DS-400/Easy/28-Implement strStr()/LinearTimeSlice.py
strStr
ericchen12377/Leetcode-Algorithm-Python
2
python
def strStr(self, haystack, needle): '\n :type haystack: str\n :type needle: str\n :rtype: int\n ' if (not needle): return 0 for i in range(len(haystack)): if ((haystack[i] == needle[0]) and (haystack[i:(i + len(needle))] == needle) and ((i + len(needle)) < len(haystack))): return i return (- 1)
def strStr(self, haystack, needle): '\n :type haystack: str\n :type needle: str\n :rtype: int\n ' if (not needle): return 0 for i in range(len(haystack)): if ((haystack[i] == needle[0]) and (haystack[i:(i + len(needle))] == needle) and ((i + len(needle)) < len(haystack))): return i return (- 1)<|docstring|>:type haystack: str :type needle: str :rtype: int<|endoftext|>
85f55e1aa6af7a83ef95b0e5029609cf620a322bdf43362324090603955b9277
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None) -> None: 'Initialize climate.group platform.' async_add_entities([ClimateGroup(config.get(CONF_NAME), config[CONF_ENTITIES], config.get(CONF_EXCLUDE), config.get(CONF_TEMPERATURE_UNIT))])
Initialize climate.group platform.
custom_components/climate_group/climate.py
async_setup_platform
bvweerd/climate_group
60
python
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None) -> None: async_add_entities([ClimateGroup(config.get(CONF_NAME), config[CONF_ENTITIES], config.get(CONF_EXCLUDE), config.get(CONF_TEMPERATURE_UNIT))])
async def async_setup_platform(hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None) -> None: async_add_entities([ClimateGroup(config.get(CONF_NAME), config[CONF_ENTITIES], config.get(CONF_EXCLUDE), config.get(CONF_TEMPERATURE_UNIT))])<|docstring|>Initialize climate.group platform.<|endoftext|>
e06d1505fecbc891687701d1a6dff57903ee7dc99112539aa448de684f33c438
def _find_state_attributes(states: List[State], key: str) -> Iterator[Any]: 'Find attributes with matching key from states.' for state in states: value = state.attributes.get(key) if (value is not None): (yield value)
Find attributes with matching key from states.
custom_components/climate_group/climate.py
_find_state_attributes
bvweerd/climate_group
60
python
def _find_state_attributes(states: List[State], key: str) -> Iterator[Any]: for state in states: value = state.attributes.get(key) if (value is not None): (yield value)
def _find_state_attributes(states: List[State], key: str) -> Iterator[Any]: for state in states: value = state.attributes.get(key) if (value is not None): (yield value)<|docstring|>Find attributes with matching key from states.<|endoftext|>
dd8d1a773680af56dad82a8ec5153e0808fc42daa6549bbed21c8e454a130091
def _mean(*args): 'Return the mean of the supplied values.' return (sum(args) / len(args))
Return the mean of the supplied values.
custom_components/climate_group/climate.py
_mean
bvweerd/climate_group
60
python
def _mean(*args): return (sum(args) / len(args))
def _mean(*args): return (sum(args) / len(args))<|docstring|>Return the mean of the supplied values.<|endoftext|>
9af21f98edd4f611fc181b958432234eb787e2473b4d0727abbf8a644c73303c
def _reduce_attribute(states: List[State], key: str, default: Optional[Any]=None, reduce: Callable[(..., Any)]=_mean) -> Any: 'Find the first attribute matching key from states.\n If none are found, return default.\n ' attrs = list(_find_state_attributes(states, key)) if (not attrs): return default if (len(attrs) == 1): return attrs[0] return reduce(*attrs)
Find the first attribute matching key from states. If none are found, return default.
custom_components/climate_group/climate.py
_reduce_attribute
bvweerd/climate_group
60
python
def _reduce_attribute(states: List[State], key: str, default: Optional[Any]=None, reduce: Callable[(..., Any)]=_mean) -> Any: 'Find the first attribute matching key from states.\n If none are found, return default.\n ' attrs = list(_find_state_attributes(states, key)) if (not attrs): return default if (len(attrs) == 1): return attrs[0] return reduce(*attrs)
def _reduce_attribute(states: List[State], key: str, default: Optional[Any]=None, reduce: Callable[(..., Any)]=_mean) -> Any: 'Find the first attribute matching key from states.\n If none are found, return default.\n ' attrs = list(_find_state_attributes(states, key)) if (not attrs): return default if (len(attrs) == 1): return attrs[0] return reduce(*attrs)<|docstring|>Find the first attribute matching key from states. If none are found, return default.<|endoftext|>
e487e642a4fe0244ef7b5aeaf9284f87ce7766231f56e23d29da05398b7382fb
def __init__(self, name: str, entity_ids: List[str], excluded: List[str], unit: str) -> None: 'Initialize a climate group.' self._name = name self._entity_ids = entity_ids if ('c' in unit.lower()): self._unit = TEMP_CELSIUS else: self._unit = TEMP_FAHRENHEIT self._min_temp = 0 self._max_temp = 0 self._current_temp = 0 self._target_temp = 0 self._target_temp_high = None self._target_temp_low = None self._mode = None self._action = None self._mode_list = None self._available = True self._supported_features = 0 self._async_unsub_state_changed = None self._fan_modes = None self._fan_mode = None self._swing_modes = None self._swing_mode = None self._preset_modes = None self._preset = None self._excluded = excluded
Initialize a climate group.
custom_components/climate_group/climate.py
__init__
bvweerd/climate_group
60
python
def __init__(self, name: str, entity_ids: List[str], excluded: List[str], unit: str) -> None: self._name = name self._entity_ids = entity_ids if ('c' in unit.lower()): self._unit = TEMP_CELSIUS else: self._unit = TEMP_FAHRENHEIT self._min_temp = 0 self._max_temp = 0 self._current_temp = 0 self._target_temp = 0 self._target_temp_high = None self._target_temp_low = None self._mode = None self._action = None self._mode_list = None self._available = True self._supported_features = 0 self._async_unsub_state_changed = None self._fan_modes = None self._fan_mode = None self._swing_modes = None self._swing_mode = None self._preset_modes = None self._preset = None self._excluded = excluded
def __init__(self, name: str, entity_ids: List[str], excluded: List[str], unit: str) -> None: self._name = name self._entity_ids = entity_ids if ('c' in unit.lower()): self._unit = TEMP_CELSIUS else: self._unit = TEMP_FAHRENHEIT self._min_temp = 0 self._max_temp = 0 self._current_temp = 0 self._target_temp = 0 self._target_temp_high = None self._target_temp_low = None self._mode = None self._action = None self._mode_list = None self._available = True self._supported_features = 0 self._async_unsub_state_changed = None self._fan_modes = None self._fan_mode = None self._swing_modes = None self._swing_mode = None self._preset_modes = None self._preset = None self._excluded = excluded<|docstring|>Initialize a climate group.<|endoftext|>
ef7f73e08111ae1091011d84cf9988e822555085a0708a72ba26608a71eed535
async def async_added_to_hass(self) -> None: 'Register callbacks.' @callback def async_state_changed_listener(entity_id: str, old_state: State, new_state: State): 'Handle child updates.' self.async_schedule_update_ha_state(True) self._async_unsub_state_changed = async_track_state_change(self.hass, self._entity_ids, async_state_changed_listener) (await self.async_update())
Register callbacks.
custom_components/climate_group/climate.py
async_added_to_hass
bvweerd/climate_group
60
python
async def async_added_to_hass(self) -> None: @callback def async_state_changed_listener(entity_id: str, old_state: State, new_state: State): 'Handle child updates.' self.async_schedule_update_ha_state(True) self._async_unsub_state_changed = async_track_state_change(self.hass, self._entity_ids, async_state_changed_listener) (await self.async_update())
async def async_added_to_hass(self) -> None: @callback def async_state_changed_listener(entity_id: str, old_state: State, new_state: State): 'Handle child updates.' self.async_schedule_update_ha_state(True) self._async_unsub_state_changed = async_track_state_change(self.hass, self._entity_ids, async_state_changed_listener) (await self.async_update())<|docstring|>Register callbacks.<|endoftext|>
4ff7a9825c1d387d2f7a0dd21421d12651ebc5503164b9f8e55973a339d12d3d
async def async_will_remove_from_hass(self): 'Handle removal from HASS.' if (self._async_unsub_state_changed is not None): self._async_unsub_state_changed() self._async_unsub_state_changed = None
Handle removal from HASS.
custom_components/climate_group/climate.py
async_will_remove_from_hass
bvweerd/climate_group
60
python
async def async_will_remove_from_hass(self): if (self._async_unsub_state_changed is not None): self._async_unsub_state_changed() self._async_unsub_state_changed = None
async def async_will_remove_from_hass(self): if (self._async_unsub_state_changed is not None): self._async_unsub_state_changed() self._async_unsub_state_changed = None<|docstring|>Handle removal from HASS.<|endoftext|>
65675120159e174c0364afd0352982491c288339694b6b7114dfe2c923334056
@property def name(self) -> str: 'Return the name of the entity.' return self._name
Return the name of the entity.
custom_components/climate_group/climate.py
name
bvweerd/climate_group
60
python
@property def name(self) -> str: return self._name
@property def name(self) -> str: return self._name<|docstring|>Return the name of the entity.<|endoftext|>
3b47cc5bdb017f78c62fe7c06fffc1403b9a192bd989d166ba1c00ee9852a140
@property def available(self) -> bool: 'Return whether the climate group is available.' return self._available
Return whether the climate group is available.
custom_components/climate_group/climate.py
available
bvweerd/climate_group
60
python
@property def available(self) -> bool: return self._available
@property def available(self) -> bool: return self._available<|docstring|>Return whether the climate group is available.<|endoftext|>
8b3e22dc9366f309fc9ff66587460aaa216de4f40ddb68d54a5b2c75cd7220f0
@property def supported_features(self) -> int: 'Flag supported features.' return self._supported_features
Flag supported features.
custom_components/climate_group/climate.py
supported_features
bvweerd/climate_group
60
python
@property def supported_features(self) -> int: return self._supported_features
@property def supported_features(self) -> int: return self._supported_features<|docstring|>Flag supported features.<|endoftext|>
73cba7b3de315dc181486cb5da53e2d1dcd10e30b3c5709ce7e83ae9689191db
@property def hvac_mode(self): 'What is the thermostat intending to do' return self._mode
What is the thermostat intending to do
custom_components/climate_group/climate.py
hvac_mode
bvweerd/climate_group
60
python
@property def hvac_mode(self): return self._mode
@property def hvac_mode(self): return self._mode<|docstring|>What is the thermostat intending to do<|endoftext|>
ffaa61e3dd5881a9c4fc2fc6fc364a4ead2d825c7b7e863b25be11f397ce2fb6
@property def hvac_action(self): 'What is the thermostat _actually_ doing right now' return self._action
What is the thermostat _actually_ doing right now
custom_components/climate_group/climate.py
hvac_action
bvweerd/climate_group
60
python
@property def hvac_action(self): return self._action
@property def hvac_action(self): return self._action<|docstring|>What is the thermostat _actually_ doing right now<|endoftext|>
dff241d37818e8e685d63ce316bac2fb71dd0c0f44f78b23f72d4abb25e9f090
@property def temperature_unit(self): 'Return the unit of measurement that is used.' return self._unit
Return the unit of measurement that is used.
custom_components/climate_group/climate.py
temperature_unit
bvweerd/climate_group
60
python
@property def temperature_unit(self): return self._unit
@property def temperature_unit(self): return self._unit<|docstring|>Return the unit of measurement that is used.<|endoftext|>
ccabbf0cd6486506b0efba11c3a0f3965025ca23df48109d9be7421e61cea69c
@property def should_poll(self) -> bool: 'No polling needed for a climate group.' return False
No polling needed for a climate group.
custom_components/climate_group/climate.py
should_poll
bvweerd/climate_group
60
python
@property def should_poll(self) -> bool: return False
@property def should_poll(self) -> bool: return False<|docstring|>No polling needed for a climate group.<|endoftext|>
62e18a61969f45bd1ccdbf12b2ea18d6a5af1b5641742bce9d2c7921807a2611
@property def device_state_attributes(self): 'Return the state attributes for the climate group.' return {ATTR_ENTITY_ID: self._entity_ids}
Return the state attributes for the climate group.
custom_components/climate_group/climate.py
device_state_attributes
bvweerd/climate_group
60
python
@property def device_state_attributes(self): return {ATTR_ENTITY_ID: self._entity_ids}
@property def device_state_attributes(self): return {ATTR_ENTITY_ID: self._entity_ids}<|docstring|>Return the state attributes for the climate group.<|endoftext|>
0e9c6cf9dab3d5ca40bf44456e3d7c6e410f997932c7b0beddefc4a605f8f4ac
async def async_set_temperature(self, **kwargs): 'Forward the turn_on command to all climate in the climate group.' data = {ATTR_ENTITY_ID: self._entity_ids} if (ATTR_HVAC_MODE in kwargs): hvac_mode = kwargs.get(ATTR_HVAC_MODE) (await self.async_set_hvac_mode(hvac_mode)) elif ((ATTR_TEMPERATURE in kwargs) or (ATTR_TARGET_TEMP_LOW in kwargs) or (ATTR_TARGET_TEMP_HIGH in kwargs)): if (ATTR_TEMPERATURE in kwargs): temperature = kwargs.get(ATTR_TEMPERATURE) data[ATTR_TEMPERATURE] = temperature elif ((ATTR_TARGET_TEMP_LOW in kwargs) or (ATTR_TARGET_TEMP_HIGH in kwargs)): temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW) temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) data[climate.ATTR_TARGET_TEMP_LOW] = temperature_low data[climate.ATTR_TARGET_TEMP_HIGH] = temperature_high (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, data, blocking=True))
Forward the turn_on command to all climate in the climate group.
custom_components/climate_group/climate.py
async_set_temperature
bvweerd/climate_group
60
python
async def async_set_temperature(self, **kwargs): data = {ATTR_ENTITY_ID: self._entity_ids} if (ATTR_HVAC_MODE in kwargs): hvac_mode = kwargs.get(ATTR_HVAC_MODE) (await self.async_set_hvac_mode(hvac_mode)) elif ((ATTR_TEMPERATURE in kwargs) or (ATTR_TARGET_TEMP_LOW in kwargs) or (ATTR_TARGET_TEMP_HIGH in kwargs)): if (ATTR_TEMPERATURE in kwargs): temperature = kwargs.get(ATTR_TEMPERATURE) data[ATTR_TEMPERATURE] = temperature elif ((ATTR_TARGET_TEMP_LOW in kwargs) or (ATTR_TARGET_TEMP_HIGH in kwargs)): temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW) temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) data[climate.ATTR_TARGET_TEMP_LOW] = temperature_low data[climate.ATTR_TARGET_TEMP_HIGH] = temperature_high (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, data, blocking=True))
async def async_set_temperature(self, **kwargs): data = {ATTR_ENTITY_ID: self._entity_ids} if (ATTR_HVAC_MODE in kwargs): hvac_mode = kwargs.get(ATTR_HVAC_MODE) (await self.async_set_hvac_mode(hvac_mode)) elif ((ATTR_TEMPERATURE in kwargs) or (ATTR_TARGET_TEMP_LOW in kwargs) or (ATTR_TARGET_TEMP_HIGH in kwargs)): if (ATTR_TEMPERATURE in kwargs): temperature = kwargs.get(ATTR_TEMPERATURE) data[ATTR_TEMPERATURE] = temperature elif ((ATTR_TARGET_TEMP_LOW in kwargs) or (ATTR_TARGET_TEMP_HIGH in kwargs)): temperature_low = kwargs.get(ATTR_TARGET_TEMP_LOW) temperature_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) data[climate.ATTR_TARGET_TEMP_LOW] = temperature_low data[climate.ATTR_TARGET_TEMP_HIGH] = temperature_high (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE, data, blocking=True))<|docstring|>Forward the turn_on command to all climate in the climate group.<|endoftext|>
2f9d61a7192826c3a8ab17e5b6e317b29f0fc992d55ab0d39ecc42c102f49c53
async def async_set_operation_mode(self, operation_mode): 'Forward the turn_on command to all climate in the climate group. LEGACY CALL.\n This will be used only if the hass version is old.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_HVAC_MODE: operation_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, data, blocking=True))
Forward the turn_on command to all climate in the climate group. LEGACY CALL. This will be used only if the hass version is old.
custom_components/climate_group/climate.py
async_set_operation_mode
bvweerd/climate_group
60
python
async def async_set_operation_mode(self, operation_mode): 'Forward the turn_on command to all climate in the climate group. LEGACY CALL.\n This will be used only if the hass version is old.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_HVAC_MODE: operation_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, data, blocking=True))
async def async_set_operation_mode(self, operation_mode): 'Forward the turn_on command to all climate in the climate group. LEGACY CALL.\n This will be used only if the hass version is old.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_HVAC_MODE: operation_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, data, blocking=True))<|docstring|>Forward the turn_on command to all climate in the climate group. LEGACY CALL. This will be used only if the hass version is old.<|endoftext|>
0d802f5675c474222dea0a68fd12c988bc8920e13351d0068b0cb7cfb698afc1
@property def fan_mode(self): 'Return the current fan mode.' return self._fan_mode
Return the current fan mode.
custom_components/climate_group/climate.py
fan_mode
bvweerd/climate_group
60
python
@property def fan_mode(self): return self._fan_mode
@property def fan_mode(self): return self._fan_mode<|docstring|>Return the current fan mode.<|endoftext|>
2da6ba793b2c7f0467ab94a3da126c4a1bf98e0f10fccdd5d706879319d9baef
@property def fan_modes(self): 'Return a list of available fan modes.' return self._fan_modes
Return a list of available fan modes.
custom_components/climate_group/climate.py
fan_modes
bvweerd/climate_group
60
python
@property def fan_modes(self): return self._fan_modes
@property def fan_modes(self): return self._fan_modes<|docstring|>Return a list of available fan modes.<|endoftext|>
3ac30cfbc0d5ce7c9478b95f54e14cdc711540f2c769474116d21492d19a0667
@property def swing_mode(self): 'Return the current swing mode.' return self._swing_mode
Return the current swing mode.
custom_components/climate_group/climate.py
swing_mode
bvweerd/climate_group
60
python
@property def swing_mode(self): return self._swing_mode
@property def swing_mode(self): return self._swing_mode<|docstring|>Return the current swing mode.<|endoftext|>
9d717f46eef511cc04ecd444636f67f899a1979308f3d1c97c816bfce238042f
@property def swing_modes(self): 'Return a list of available swing modes.' return self._swing_modes
Return a list of available swing modes.
custom_components/climate_group/climate.py
swing_modes
bvweerd/climate_group
60
python
@property def swing_modes(self): return self._swing_modes
@property def swing_modes(self): return self._swing_modes<|docstring|>Return a list of available swing modes.<|endoftext|>
0d6e8e091acb07fb8df5eadbf85254e243d942d4246ec38bb56657bac0f7abab
@property def preset_mode(self): 'Return the current preset mode, e.g., home, away, temp.' return self._preset
Return the current preset mode, e.g., home, away, temp.
custom_components/climate_group/climate.py
preset_mode
bvweerd/climate_group
60
python
@property def preset_mode(self): return self._preset
@property def preset_mode(self): return self._preset<|docstring|>Return the current preset mode, e.g., home, away, temp.<|endoftext|>
0c9db8c6de6a0b593c5d512858eae0d7886d489a5c952b33d7051656c1f3f4c4
@property def preset_modes(self): 'Return a list of available preset modes.' return self._preset_modes
Return a list of available preset modes.
custom_components/climate_group/climate.py
preset_modes
bvweerd/climate_group
60
python
@property def preset_modes(self): return self._preset_modes
@property def preset_modes(self): return self._preset_modes<|docstring|>Return a list of available preset modes.<|endoftext|>
f584c2f498cf92cd13bc3ba290893651445195c33fb5a91b615daf9fb1b33309
async def async_set_hvac_mode(self, hvac_mode): 'Forward the turn_on command to all climate in the climate group.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_HVAC_MODE: hvac_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, data, blocking=True))
Forward the turn_on command to all climate in the climate group.
custom_components/climate_group/climate.py
async_set_hvac_mode
bvweerd/climate_group
60
python
async def async_set_hvac_mode(self, hvac_mode): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_HVAC_MODE: hvac_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, data, blocking=True))
async def async_set_hvac_mode(self, hvac_mode): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_HVAC_MODE: hvac_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_HVAC_MODE, data, blocking=True))<|docstring|>Forward the turn_on command to all climate in the climate group.<|endoftext|>
3ea1702cfa8c193dc33c33230209bf2b72366088224680bc4febba7cff5f8b30
async def async_update(self): 'Query all members and determine the climate group state.' raw_states = [self.hass.states.get(x) for x in self._entity_ids] states = list(filter(None, raw_states)) filtered_states = list(filter((lambda x: (x.attributes.get(ATTR_PRESET_MODE, None) not in self._excluded)), states)) if (not filtered_states): filtered_states = states _LOGGER.debug(f'Excluded by config: {self._excluded}') _LOGGER.debug(f'Resulting filtered states: {filtered_states}') all_modes = [x.state for x in filtered_states] self._mode = None for hvac_mode in (HVAC_MODES[1:] + [HVAC_MODE_OFF]): if any([(mode == hvac_mode) for mode in all_modes]): self._mode = hvac_mode break all_actions = [state.attributes.get(ATTR_HVAC_ACTION, None) for state in filtered_states] for hvac_action in HVAC_ACTIONS: if any([(action == hvac_action) for action in all_actions]): self._action = hvac_action break all_fan_modes = [state.attributes.get(ATTR_FAN_MODE, None) for state in filtered_states] self._fan_mode = None if all_fan_modes: self._fan_mode = Counter(itertools.chain(all_fan_modes)).most_common(1)[0][0] all_swing_modes = [state.attributes.get(ATTR_SWING_MODE, None) for state in filtered_states] self._swing_mode = None if all_swing_modes: self._swing_mode = Counter(itertools.chain(all_swing_modes)).most_common(1)[0][0] all_presets = [state.attributes.get(ATTR_PRESET_MODE, None) for state in filtered_states] self._preset = None if all_presets: self._preset = Counter(itertools.chain(all_presets)).most_common(1)[0][0] self._target_temp = _reduce_attribute(filtered_states, ATTR_TEMPERATURE) self._target_temp_low = _reduce_attribute(filtered_states, ATTR_TARGET_TEMP_LOW) self._target_temp_high = _reduce_attribute(filtered_states, ATTR_TARGET_TEMP_HIGH) self._current_temp = _reduce_attribute(filtered_states, ATTR_CURRENT_TEMPERATURE) _LOGGER.debug(f'Target temp: {self._target_temp}; Target temp low: {self._target_temp_low}; Target temp high: {self._target_temp_high}; Current temp: 
{self._current_temp}') self._min_temp = _reduce_attribute(states, ATTR_MIN_TEMP, reduce=max) self._max_temp = _reduce_attribute(states, ATTR_MAX_TEMP, reduce=min) self._mode_list = None all_mode_lists = list(_find_state_attributes(states, ATTR_HVAC_MODES)) if all_mode_lists: self._mode_list = list(set().union(*all_mode_lists)) self._supported_features = 0 for support in _find_state_attributes(states, ATTR_SUPPORTED_FEATURES): self._supported_features |= support self._supported_features &= SUPPORT_FLAGS self._fan_modes = None fan_modes = [] for fan_mode in _find_state_attributes(states, ATTR_FAN_MODES): fan_modes.extend(fan_mode) if len(fan_modes): self._fan_modes = set(fan_modes) self._swing_modes = None swing_modes = [] for swing_mode in _find_state_attributes(states, ATTR_SWING_MODES): swing_modes.extend(swing_mode) if len(swing_modes): self._swing_modes = set(swing_modes) self._preset_modes = None presets = [] for preset in _find_state_attributes(states, ATTR_PRESET_MODES): presets.extend(preset) if len(presets): self._preset_modes = set(presets) _LOGGER.debug(f'State update complete. Supported: {self._supported_features}, mode: {self._mode}')
Query all members and determine the climate group state.
custom_components/climate_group/climate.py
async_update
bvweerd/climate_group
60
python
async def async_update(self): raw_states = [self.hass.states.get(x) for x in self._entity_ids] states = list(filter(None, raw_states)) filtered_states = list(filter((lambda x: (x.attributes.get(ATTR_PRESET_MODE, None) not in self._excluded)), states)) if (not filtered_states): filtered_states = states _LOGGER.debug(f'Excluded by config: {self._excluded}') _LOGGER.debug(f'Resulting filtered states: {filtered_states}') all_modes = [x.state for x in filtered_states] self._mode = None for hvac_mode in (HVAC_MODES[1:] + [HVAC_MODE_OFF]): if any([(mode == hvac_mode) for mode in all_modes]): self._mode = hvac_mode break all_actions = [state.attributes.get(ATTR_HVAC_ACTION, None) for state in filtered_states] for hvac_action in HVAC_ACTIONS: if any([(action == hvac_action) for action in all_actions]): self._action = hvac_action break all_fan_modes = [state.attributes.get(ATTR_FAN_MODE, None) for state in filtered_states] self._fan_mode = None if all_fan_modes: self._fan_mode = Counter(itertools.chain(all_fan_modes)).most_common(1)[0][0] all_swing_modes = [state.attributes.get(ATTR_SWING_MODE, None) for state in filtered_states] self._swing_mode = None if all_swing_modes: self._swing_mode = Counter(itertools.chain(all_swing_modes)).most_common(1)[0][0] all_presets = [state.attributes.get(ATTR_PRESET_MODE, None) for state in filtered_states] self._preset = None if all_presets: self._preset = Counter(itertools.chain(all_presets)).most_common(1)[0][0] self._target_temp = _reduce_attribute(filtered_states, ATTR_TEMPERATURE) self._target_temp_low = _reduce_attribute(filtered_states, ATTR_TARGET_TEMP_LOW) self._target_temp_high = _reduce_attribute(filtered_states, ATTR_TARGET_TEMP_HIGH) self._current_temp = _reduce_attribute(filtered_states, ATTR_CURRENT_TEMPERATURE) _LOGGER.debug(f'Target temp: {self._target_temp}; Target temp low: {self._target_temp_low}; Target temp high: {self._target_temp_high}; Current temp: {self._current_temp}') self._min_temp = _reduce_attribute(states, 
ATTR_MIN_TEMP, reduce=max) self._max_temp = _reduce_attribute(states, ATTR_MAX_TEMP, reduce=min) self._mode_list = None all_mode_lists = list(_find_state_attributes(states, ATTR_HVAC_MODES)) if all_mode_lists: self._mode_list = list(set().union(*all_mode_lists)) self._supported_features = 0 for support in _find_state_attributes(states, ATTR_SUPPORTED_FEATURES): self._supported_features |= support self._supported_features &= SUPPORT_FLAGS self._fan_modes = None fan_modes = [] for fan_mode in _find_state_attributes(states, ATTR_FAN_MODES): fan_modes.extend(fan_mode) if len(fan_modes): self._fan_modes = set(fan_modes) self._swing_modes = None swing_modes = [] for swing_mode in _find_state_attributes(states, ATTR_SWING_MODES): swing_modes.extend(swing_mode) if len(swing_modes): self._swing_modes = set(swing_modes) self._preset_modes = None presets = [] for preset in _find_state_attributes(states, ATTR_PRESET_MODES): presets.extend(preset) if len(presets): self._preset_modes = set(presets) _LOGGER.debug(f'State update complete. Supported: {self._supported_features}, mode: {self._mode}')
async def async_update(self): raw_states = [self.hass.states.get(x) for x in self._entity_ids] states = list(filter(None, raw_states)) filtered_states = list(filter((lambda x: (x.attributes.get(ATTR_PRESET_MODE, None) not in self._excluded)), states)) if (not filtered_states): filtered_states = states _LOGGER.debug(f'Excluded by config: {self._excluded}') _LOGGER.debug(f'Resulting filtered states: {filtered_states}') all_modes = [x.state for x in filtered_states] self._mode = None for hvac_mode in (HVAC_MODES[1:] + [HVAC_MODE_OFF]): if any([(mode == hvac_mode) for mode in all_modes]): self._mode = hvac_mode break all_actions = [state.attributes.get(ATTR_HVAC_ACTION, None) for state in filtered_states] for hvac_action in HVAC_ACTIONS: if any([(action == hvac_action) for action in all_actions]): self._action = hvac_action break all_fan_modes = [state.attributes.get(ATTR_FAN_MODE, None) for state in filtered_states] self._fan_mode = None if all_fan_modes: self._fan_mode = Counter(itertools.chain(all_fan_modes)).most_common(1)[0][0] all_swing_modes = [state.attributes.get(ATTR_SWING_MODE, None) for state in filtered_states] self._swing_mode = None if all_swing_modes: self._swing_mode = Counter(itertools.chain(all_swing_modes)).most_common(1)[0][0] all_presets = [state.attributes.get(ATTR_PRESET_MODE, None) for state in filtered_states] self._preset = None if all_presets: self._preset = Counter(itertools.chain(all_presets)).most_common(1)[0][0] self._target_temp = _reduce_attribute(filtered_states, ATTR_TEMPERATURE) self._target_temp_low = _reduce_attribute(filtered_states, ATTR_TARGET_TEMP_LOW) self._target_temp_high = _reduce_attribute(filtered_states, ATTR_TARGET_TEMP_HIGH) self._current_temp = _reduce_attribute(filtered_states, ATTR_CURRENT_TEMPERATURE) _LOGGER.debug(f'Target temp: {self._target_temp}; Target temp low: {self._target_temp_low}; Target temp high: {self._target_temp_high}; Current temp: {self._current_temp}') self._min_temp = _reduce_attribute(states, 
ATTR_MIN_TEMP, reduce=max) self._max_temp = _reduce_attribute(states, ATTR_MAX_TEMP, reduce=min) self._mode_list = None all_mode_lists = list(_find_state_attributes(states, ATTR_HVAC_MODES)) if all_mode_lists: self._mode_list = list(set().union(*all_mode_lists)) self._supported_features = 0 for support in _find_state_attributes(states, ATTR_SUPPORTED_FEATURES): self._supported_features |= support self._supported_features &= SUPPORT_FLAGS self._fan_modes = None fan_modes = [] for fan_mode in _find_state_attributes(states, ATTR_FAN_MODES): fan_modes.extend(fan_mode) if len(fan_modes): self._fan_modes = set(fan_modes) self._swing_modes = None swing_modes = [] for swing_mode in _find_state_attributes(states, ATTR_SWING_MODES): swing_modes.extend(swing_mode) if len(swing_modes): self._swing_modes = set(swing_modes) self._preset_modes = None presets = [] for preset in _find_state_attributes(states, ATTR_PRESET_MODES): presets.extend(preset) if len(presets): self._preset_modes = set(presets) _LOGGER.debug(f'State update complete. Supported: {self._supported_features}, mode: {self._mode}')<|docstring|>Query all members and determine the climate group state.<|endoftext|>
8955900ff33aea96343796fbfde78152f28f3b5ff9994350624c4686afa1eb8a
async def async_set_fan_mode(self, fan_mode: str): 'Forward the fan_mode to all climate in the climate group.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_FAN_MODE: fan_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_FAN_MODE, data, blocking=True))
Forward the fan_mode to all climate in the climate group.
custom_components/climate_group/climate.py
async_set_fan_mode
bvweerd/climate_group
60
python
async def async_set_fan_mode(self, fan_mode: str): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_FAN_MODE: fan_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_FAN_MODE, data, blocking=True))
async def async_set_fan_mode(self, fan_mode: str): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_FAN_MODE: fan_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_FAN_MODE, data, blocking=True))<|docstring|>Forward the fan_mode to all climate in the climate group.<|endoftext|>
0e7bb510df7538f7bde7d0bcc95ba9f3e98664bd736e2d6fbe97819976ddb518
async def async_set_swing_mode(self, swing_mode: str): 'Forward the swing_mode to all climate in the climate group.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_SWING_MODE: swing_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_SWING_MODE, data, blocking=True))
Forward the swing_mode to all climate in the climate group.
custom_components/climate_group/climate.py
async_set_swing_mode
bvweerd/climate_group
60
python
async def async_set_swing_mode(self, swing_mode: str): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_SWING_MODE: swing_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_SWING_MODE, data, blocking=True))
async def async_set_swing_mode(self, swing_mode: str): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_SWING_MODE: swing_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_SWING_MODE, data, blocking=True))<|docstring|>Forward the swing_mode to all climate in the climate group.<|endoftext|>
914e54ddbf9c00f5c5cb0198c2209fbfe591bd24b464fd04b6f8267a6c06e659
async def async_set_preset_mode(self, preset_mode: str): 'Forward the preset_mode to all climate in the climate group.' data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_PRESET_MODE: preset_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_PRESET_MODE, data, blocking=True))
Forward the preset_mode to all climate in the climate group.
custom_components/climate_group/climate.py
async_set_preset_mode
bvweerd/climate_group
60
python
async def async_set_preset_mode(self, preset_mode: str): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_PRESET_MODE: preset_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_PRESET_MODE, data, blocking=True))
async def async_set_preset_mode(self, preset_mode: str): data = {ATTR_ENTITY_ID: self._entity_ids, ATTR_PRESET_MODE: preset_mode} (await self.hass.services.async_call(climate.DOMAIN, climate.SERVICE_SET_PRESET_MODE, data, blocking=True))<|docstring|>Forward the preset_mode to all climate in the climate group.<|endoftext|>
3c29fcc4163bee2effe08da33b71c8cab63f336189a32f6051c9f2911ae2258f
@callback def async_state_changed_listener(entity_id: str, old_state: State, new_state: State): 'Handle child updates.' self.async_schedule_update_ha_state(True)
Handle child updates.
custom_components/climate_group/climate.py
async_state_changed_listener
bvweerd/climate_group
60
python
@callback def async_state_changed_listener(entity_id: str, old_state: State, new_state: State): self.async_schedule_update_ha_state(True)
@callback def async_state_changed_listener(entity_id: str, old_state: State, new_state: State): self.async_schedule_update_ha_state(True)<|docstring|>Handle child updates.<|endoftext|>
c19a18bbeb205d256d1164d6a2915940f28d6972f3a65f1ffabfed7aff003761
def test_global_setup_hooks(self): 'Test setup_hooks.\n\n Test that setup_hooks listed in the [global] section of setup.cfg are\n executed in order.\n ' (stdout, _, return_code) = self.run_setup('egg_info') assert ('test_hook_1\ntest_hook_2' in stdout) assert (return_code == 0)
Test setup_hooks. Test that setup_hooks listed in the [global] section of setup.cfg are executed in order.
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pbr/tests/test_hooks.py
test_global_setup_hooks
ShadowLordAlpha/lumberyard
1,738
python
def test_global_setup_hooks(self): 'Test setup_hooks.\n\n Test that setup_hooks listed in the [global] section of setup.cfg are\n executed in order.\n ' (stdout, _, return_code) = self.run_setup('egg_info') assert ('test_hook_1\ntest_hook_2' in stdout) assert (return_code == 0)
def test_global_setup_hooks(self): 'Test setup_hooks.\n\n Test that setup_hooks listed in the [global] section of setup.cfg are\n executed in order.\n ' (stdout, _, return_code) = self.run_setup('egg_info') assert ('test_hook_1\ntest_hook_2' in stdout) assert (return_code == 0)<|docstring|>Test setup_hooks. Test that setup_hooks listed in the [global] section of setup.cfg are executed in order.<|endoftext|>
f891bd3610b661b2c3283de5c9b86eaa00122e1acb421b2f384d2e5eb8a93c81
def load_from_np(filename, arr_idx_der): '\n arr_idx_der 1 for rho and 2 for p\n ' arr = np.load(filename) arr_t = arr[(:, 0)] arr_der = arr[(:, arr_idx_der)] return (arr_t, arr_der)
arr_idx_der 1 for rho and 2 for p
sandbox/gas/tube/compare-fixed-time.py
load_from_np
j8xixo12/solvcon
16
python
def load_from_np(filename, arr_idx_der): '\n \n ' arr = np.load(filename) arr_t = arr[(:, 0)] arr_der = arr[(:, arr_idx_der)] return (arr_t, arr_der)
def load_from_np(filename, arr_idx_der): '\n \n ' arr = np.load(filename) arr_t = arr[(:, 0)] arr_der = arr[(:, arr_idx_der)] return (arr_t, arr_der)<|docstring|>arr_idx_der 1 for rho and 2 for p<|endoftext|>
b23ee2b9449d8749e2088d9ffb106f49f1aa29632d30d666e8dcc134302a75dc
def der_plots(step): '\n :param step: integeter, nth step\n ' (f, ((ax1, ax2, ax3), (ax4, ax5, ax6), (ax7, ax8, ax9))) = plt.subplots(3, 3, sharex='col', sharey='row') t = der_plot(step, 1, f, ax1, ax4, ax7) der_plot(step, 2, f, ax2, ax5, ax8) der_plot(step, 3, f, ax3, ax6, ax9) f.suptitle((('SOLVCON 3D CESE vs. 1D ANALYTICAL - ' + t) + ' sec.'))
:param step: integeter, nth step
sandbox/gas/tube/compare-fixed-time.py
der_plots
j8xixo12/solvcon
16
python
def der_plots(step): '\n \n ' (f, ((ax1, ax2, ax3), (ax4, ax5, ax6), (ax7, ax8, ax9))) = plt.subplots(3, 3, sharex='col', sharey='row') t = der_plot(step, 1, f, ax1, ax4, ax7) der_plot(step, 2, f, ax2, ax5, ax8) der_plot(step, 3, f, ax3, ax6, ax9) f.suptitle((('SOLVCON 3D CESE vs. 1D ANALYTICAL - ' + t) + ' sec.'))
def der_plots(step): '\n \n ' (f, ((ax1, ax2, ax3), (ax4, ax5, ax6), (ax7, ax8, ax9))) = plt.subplots(3, 3, sharex='col', sharey='row') t = der_plot(step, 1, f, ax1, ax4, ax7) der_plot(step, 2, f, ax2, ax5, ax8) der_plot(step, 3, f, ax3, ax6, ax9) f.suptitle((('SOLVCON 3D CESE vs. 1D ANALYTICAL - ' + t) + ' sec.'))<|docstring|>:param step: integeter, nth step<|endoftext|>
1a1423ff94b5ffff7ac097d5ca185c56418f7156d87f36a91d003bc199900bdf
def open(self, configuration): 'Open the SDL2 Window\n\n *Parameters:*\n\n - `configuration`: Configurations parameters from Application\n ' if (sdl2.SDL_InitSubSystem(sdl2.SDL_INIT_VIDEO) != 0): msg = ("Can't open window: %s" % sdl2.SDL_GetError()) logger.critical(msg) raise SDL2Error(msg) flags = 0 if (configuration.fullscreen and configuration.width and configuration.height): flags |= sdl2.SDL_WINDOW_FULLSCREEN elif configuration.fullscreen: flags |= sdl2.SDL_WINDOW_FULLSCREEN_DESKTOP if (not configuration.decorated): flags |= sdl2.SDL_WINDOW_BORDERLESS if configuration.resizable: flags |= sdl2.SDL_WINDOW_RESIZABLE if configuration.highdpi: flags |= sdl2.SDL_WINDOW_ALLOW_HIGHDPI self.window = sdl2.SDL_CreateWindow(configuration.name.encode('ascii'), configuration.x, configuration.y, configuration.width, configuration.height, flags) if (not self.window): msg = ("Can't open window: %s" % sdl2.SDL_GetError()) logger.critical(msg) raise SDL2Error(msg) logger.debug('SDL2 window opened with configuration: %s', (configuration,)) self.info = sdl2.SDL_SysWMinfo() sdl2.SDL_VERSION(self.info.version) sdl2.SDL_GetWindowWMInfo(self.window, ctypes.byref(self.info))
Open the SDL2 Window *Parameters:* - `configuration`: Configurations parameters from Application
vulk/context.py
open
js78/vulk
35
python
def open(self, configuration): 'Open the SDL2 Window\n\n *Parameters:*\n\n - `configuration`: Configurations parameters from Application\n ' if (sdl2.SDL_InitSubSystem(sdl2.SDL_INIT_VIDEO) != 0): msg = ("Can't open window: %s" % sdl2.SDL_GetError()) logger.critical(msg) raise SDL2Error(msg) flags = 0 if (configuration.fullscreen and configuration.width and configuration.height): flags |= sdl2.SDL_WINDOW_FULLSCREEN elif configuration.fullscreen: flags |= sdl2.SDL_WINDOW_FULLSCREEN_DESKTOP if (not configuration.decorated): flags |= sdl2.SDL_WINDOW_BORDERLESS if configuration.resizable: flags |= sdl2.SDL_WINDOW_RESIZABLE if configuration.highdpi: flags |= sdl2.SDL_WINDOW_ALLOW_HIGHDPI self.window = sdl2.SDL_CreateWindow(configuration.name.encode('ascii'), configuration.x, configuration.y, configuration.width, configuration.height, flags) if (not self.window): msg = ("Can't open window: %s" % sdl2.SDL_GetError()) logger.critical(msg) raise SDL2Error(msg) logger.debug('SDL2 window opened with configuration: %s', (configuration,)) self.info = sdl2.SDL_SysWMinfo() sdl2.SDL_VERSION(self.info.version) sdl2.SDL_GetWindowWMInfo(self.window, ctypes.byref(self.info))
def open(self, configuration): 'Open the SDL2 Window\n\n *Parameters:*\n\n - `configuration`: Configurations parameters from Application\n ' if (sdl2.SDL_InitSubSystem(sdl2.SDL_INIT_VIDEO) != 0): msg = ("Can't open window: %s" % sdl2.SDL_GetError()) logger.critical(msg) raise SDL2Error(msg) flags = 0 if (configuration.fullscreen and configuration.width and configuration.height): flags |= sdl2.SDL_WINDOW_FULLSCREEN elif configuration.fullscreen: flags |= sdl2.SDL_WINDOW_FULLSCREEN_DESKTOP if (not configuration.decorated): flags |= sdl2.SDL_WINDOW_BORDERLESS if configuration.resizable: flags |= sdl2.SDL_WINDOW_RESIZABLE if configuration.highdpi: flags |= sdl2.SDL_WINDOW_ALLOW_HIGHDPI self.window = sdl2.SDL_CreateWindow(configuration.name.encode('ascii'), configuration.x, configuration.y, configuration.width, configuration.height, flags) if (not self.window): msg = ("Can't open window: %s" % sdl2.SDL_GetError()) logger.critical(msg) raise SDL2Error(msg) logger.debug('SDL2 window opened with configuration: %s', (configuration,)) self.info = sdl2.SDL_SysWMinfo() sdl2.SDL_VERSION(self.info.version) sdl2.SDL_GetWindowWMInfo(self.window, ctypes.byref(self.info))<|docstring|>Open the SDL2 Window *Parameters:* - `configuration`: Configurations parameters from Application<|endoftext|>
7bc15dc6bc00da5b827bc8a02304ede6a6a2b0fbfcace9b1c25cfd9b8dce1d9a
def __init__(self, window, debug=False, extra_layers=None): 'Create context\n\n Args:\n window (VulkWindow): SDL2 window\n debug (bool): Enable debug\n extra_layers (list[str]): List of Vulkan layers\n ' self.window = window self.debug_enabled = debug self.extra_layers = (extra_layers or []) self.instance = None self.pfn = {} self.debug_callback = None self.surface = None self.physical_device = None self.physical_device_properties = None self.physical_device_features = None self.device = None self.graphic_queue = None self.present_queue = None self.queue_family_indices = None self.swapchain = None self.swapchain_images = None self.swapchain_format = None self.width = 0 self.height = 0 self.final_image = None self.final_image_view = None self._semaphore_available = None self._semaphore_copied = None self._direct_semaphores = [] self.commandpool = None self.commandbuffers = None self.vma_allocator = None self.reload_count = 0
Create context Args: window (VulkWindow): SDL2 window debug (bool): Enable debug extra_layers (list[str]): List of Vulkan layers
vulk/context.py
__init__
js78/vulk
35
python
def __init__(self, window, debug=False, extra_layers=None): 'Create context\n\n Args:\n window (VulkWindow): SDL2 window\n debug (bool): Enable debug\n extra_layers (list[str]): List of Vulkan layers\n ' self.window = window self.debug_enabled = debug self.extra_layers = (extra_layers or []) self.instance = None self.pfn = {} self.debug_callback = None self.surface = None self.physical_device = None self.physical_device_properties = None self.physical_device_features = None self.device = None self.graphic_queue = None self.present_queue = None self.queue_family_indices = None self.swapchain = None self.swapchain_images = None self.swapchain_format = None self.width = 0 self.height = 0 self.final_image = None self.final_image_view = None self._semaphore_available = None self._semaphore_copied = None self._direct_semaphores = [] self.commandpool = None self.commandbuffers = None self.vma_allocator = None self.reload_count = 0
def __init__(self, window, debug=False, extra_layers=None): 'Create context\n\n Args:\n window (VulkWindow): SDL2 window\n debug (bool): Enable debug\n extra_layers (list[str]): List of Vulkan layers\n ' self.window = window self.debug_enabled = debug self.extra_layers = (extra_layers or []) self.instance = None self.pfn = {} self.debug_callback = None self.surface = None self.physical_device = None self.physical_device_properties = None self.physical_device_features = None self.device = None self.graphic_queue = None self.present_queue = None self.queue_family_indices = None self.swapchain = None self.swapchain_images = None self.swapchain_format = None self.width = 0 self.height = 0 self.final_image = None self.final_image_view = None self._semaphore_available = None self._semaphore_copied = None self._direct_semaphores = [] self.commandpool = None self.commandbuffers = None self.vma_allocator = None self.reload_count = 0<|docstring|>Create context Args: window (VulkWindow): SDL2 window debug (bool): Enable debug extra_layers (list[str]): List of Vulkan layers<|endoftext|>
3c5d71377b8134abedf3be00ed6136b3b0702d7a8be48a1e7bd4cfa32bbe31fb
def _get_instance_extensions(self): 'Get extensions which depend on the window\n\n Returns:\n Extensions list (list[str])\n ' available_extensions = [e.extensionName for e in vk.vkEnumerateInstanceExtensionProperties(None)] logger.debug('Available instance extensions: %s', available_extensions) extension_mapping = {sdl2.SDL_SYSWM_X11: vk.VK_KHR_XLIB_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_WINDOWS: vk.VK_KHR_WIN32_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_WAYLAND: vk.VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_MIR: vk.VK_KHR_MIR_SURFACE_EXTENSION_NAME} sdl_subsystem = self.window.info.subsystem if (sdl_subsystem not in extension_mapping): msg = ('Vulkan not supported on this plateform: %s' % sdl_subsystem) logger.critical(msg) raise VulkError(msg) enabled_extensions = [] enabled_extensions.append(vk.VK_KHR_SURFACE_EXTENSION_NAME) enabled_extensions.append(extension_mapping[sdl_subsystem]) if self.debug_enabled: if (vk.VK_EXT_DEBUG_REPORT_EXTENSION_NAME in available_extensions): enabled_extensions.append(vk.VK_EXT_DEBUG_REPORT_EXTENSION_NAME) else: self.debug_enabled = False logger.warning('Vulkan debug extension not present and debugmode asked, disabling Vulkan debug mode') if (not all(((e in available_extensions) for e in enabled_extensions))): msg = 'Vulkan extensions are not all available' logger.critical(msg) raise VulkError(msg) return enabled_extensions
Get extensions which depend on the window Returns: Extensions list (list[str])
vulk/context.py
_get_instance_extensions
js78/vulk
35
python
def _get_instance_extensions(self): 'Get extensions which depend on the window\n\n Returns:\n Extensions list (list[str])\n ' available_extensions = [e.extensionName for e in vk.vkEnumerateInstanceExtensionProperties(None)] logger.debug('Available instance extensions: %s', available_extensions) extension_mapping = {sdl2.SDL_SYSWM_X11: vk.VK_KHR_XLIB_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_WINDOWS: vk.VK_KHR_WIN32_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_WAYLAND: vk.VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_MIR: vk.VK_KHR_MIR_SURFACE_EXTENSION_NAME} sdl_subsystem = self.window.info.subsystem if (sdl_subsystem not in extension_mapping): msg = ('Vulkan not supported on this plateform: %s' % sdl_subsystem) logger.critical(msg) raise VulkError(msg) enabled_extensions = [] enabled_extensions.append(vk.VK_KHR_SURFACE_EXTENSION_NAME) enabled_extensions.append(extension_mapping[sdl_subsystem]) if self.debug_enabled: if (vk.VK_EXT_DEBUG_REPORT_EXTENSION_NAME in available_extensions): enabled_extensions.append(vk.VK_EXT_DEBUG_REPORT_EXTENSION_NAME) else: self.debug_enabled = False logger.warning('Vulkan debug extension not present and debugmode asked, disabling Vulkan debug mode') if (not all(((e in available_extensions) for e in enabled_extensions))): msg = 'Vulkan extensions are not all available' logger.critical(msg) raise VulkError(msg) return enabled_extensions
def _get_instance_extensions(self): 'Get extensions which depend on the window\n\n Returns:\n Extensions list (list[str])\n ' available_extensions = [e.extensionName for e in vk.vkEnumerateInstanceExtensionProperties(None)] logger.debug('Available instance extensions: %s', available_extensions) extension_mapping = {sdl2.SDL_SYSWM_X11: vk.VK_KHR_XLIB_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_WINDOWS: vk.VK_KHR_WIN32_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_WAYLAND: vk.VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME, sdl2.SDL_SYSWM_MIR: vk.VK_KHR_MIR_SURFACE_EXTENSION_NAME} sdl_subsystem = self.window.info.subsystem if (sdl_subsystem not in extension_mapping): msg = ('Vulkan not supported on this plateform: %s' % sdl_subsystem) logger.critical(msg) raise VulkError(msg) enabled_extensions = [] enabled_extensions.append(vk.VK_KHR_SURFACE_EXTENSION_NAME) enabled_extensions.append(extension_mapping[sdl_subsystem]) if self.debug_enabled: if (vk.VK_EXT_DEBUG_REPORT_EXTENSION_NAME in available_extensions): enabled_extensions.append(vk.VK_EXT_DEBUG_REPORT_EXTENSION_NAME) else: self.debug_enabled = False logger.warning('Vulkan debug extension not present and debugmode asked, disabling Vulkan debug mode') if (not all(((e in available_extensions) for e in enabled_extensions))): msg = 'Vulkan extensions are not all available' logger.critical(msg) raise VulkError(msg) return enabled_extensions<|docstring|>Get extensions which depend on the window Returns: Extensions list (list[str])<|endoftext|>
2a8174db4cf3fa709747b6d2e4190d285a738efb318b0793e49bb1b697c0457f
@staticmethod def _get_device_extensions(physical_device): 'Get device extensions\n\n *Parameters:*\n\n - `physical_device`: The VkPhysicalDevice to check\n\n *Returns:*\n\n Extension list\n ' available_extensions = [e.extensionName for e in vk.vkEnumerateDeviceExtensionProperties(physical_device, None)] logger.debug('Available device extensions: %s', available_extensions) enabled_extensions = [] enabled_extensions.append(vk.VK_KHR_SWAPCHAIN_EXTENSION_NAME) if (not all(((e in available_extensions) for e in enabled_extensions))): msg = 'Vulkan extensions are not all available' logger.critical(msg) raise VulkError(msg) return enabled_extensions
Get device extensions *Parameters:* - `physical_device`: The VkPhysicalDevice to check *Returns:* Extension list
vulk/context.py
_get_device_extensions
js78/vulk
35
python
@staticmethod def _get_device_extensions(physical_device): 'Get device extensions\n\n *Parameters:*\n\n - `physical_device`: The VkPhysicalDevice to check\n\n *Returns:*\n\n Extension list\n ' available_extensions = [e.extensionName for e in vk.vkEnumerateDeviceExtensionProperties(physical_device, None)] logger.debug('Available device extensions: %s', available_extensions) enabled_extensions = [] enabled_extensions.append(vk.VK_KHR_SWAPCHAIN_EXTENSION_NAME) if (not all(((e in available_extensions) for e in enabled_extensions))): msg = 'Vulkan extensions are not all available' logger.critical(msg) raise VulkError(msg) return enabled_extensions
@staticmethod def _get_device_extensions(physical_device): 'Get device extensions\n\n *Parameters:*\n\n - `physical_device`: The VkPhysicalDevice to check\n\n *Returns:*\n\n Extension list\n ' available_extensions = [e.extensionName for e in vk.vkEnumerateDeviceExtensionProperties(physical_device, None)] logger.debug('Available device extensions: %s', available_extensions) enabled_extensions = [] enabled_extensions.append(vk.VK_KHR_SWAPCHAIN_EXTENSION_NAME) if (not all(((e in available_extensions) for e in enabled_extensions))): msg = 'Vulkan extensions are not all available' logger.critical(msg) raise VulkError(msg) return enabled_extensions<|docstring|>Get device extensions *Parameters:* - `physical_device`: The VkPhysicalDevice to check *Returns:* Extension list<|endoftext|>
d7b6d6a40d263137e7773a1452d410450953fde19055b974a94ed311fbb5958a
def _get_layers(self): 'Get all enabled layers\n\n Simple algorythm: return everything in debug mode else nothing\n\n *Returns:*\n\n List of all enabled layers\n ' if (not self.debug_enabled): return [] layers = [l.layerName for l in vk.vkEnumerateInstanceLayerProperties()] logger.debug('Available layers: %s', layers) standard = 'VK_LAYER_LUNARG_standard_validation' if (standard in layers): logger.debug('Selecting only %s', standard) layers = [standard] layers.extend(self.extra_layers) return layers
Get all enabled layers Simple algorythm: return everything in debug mode else nothing *Returns:* List of all enabled layers
vulk/context.py
_get_layers
js78/vulk
35
python
def _get_layers(self): 'Get all enabled layers\n\n Simple algorythm: return everything in debug mode else nothing\n\n *Returns:*\n\n List of all enabled layers\n ' if (not self.debug_enabled): return [] layers = [l.layerName for l in vk.vkEnumerateInstanceLayerProperties()] logger.debug('Available layers: %s', layers) standard = 'VK_LAYER_LUNARG_standard_validation' if (standard in layers): logger.debug('Selecting only %s', standard) layers = [standard] layers.extend(self.extra_layers) return layers
def _get_layers(self): 'Get all enabled layers\n\n Simple algorythm: return everything in debug mode else nothing\n\n *Returns:*\n\n List of all enabled layers\n ' if (not self.debug_enabled): return [] layers = [l.layerName for l in vk.vkEnumerateInstanceLayerProperties()] logger.debug('Available layers: %s', layers) standard = 'VK_LAYER_LUNARG_standard_validation' if (standard in layers): logger.debug('Selecting only %s', standard) layers = [standard] layers.extend(self.extra_layers) return layers<|docstring|>Get all enabled layers Simple algorythm: return everything in debug mode else nothing *Returns:* List of all enabled layers<|endoftext|>
b02fdb7c7bc7c65248e377ecbf4cd077f464f90a437e4424e2cc5421b6292faa
@staticmethod
def _get_queue_families(physical_device, surface, pfn):
    '''Get graphic and present queue families

    Check for graphic and presentation queue families.

    *Parameters:*

    - `physical_device`: The `VkPhysicalDevice` to check for
    - `surface`: The `VkSurfaceKHR` to present
    - `pfn`: Function `vkGetPhysicalDeviceSurfaceSupportKHR` callable

    *Returns:*

    A tuple with graphic index and present index or None
    '''
    families = vk.vkGetPhysicalDeviceQueueFamilyProperties(physical_device)

    graphic = None
    present = None
    for index, family in enumerate(families):
        if family.queueCount <= 0:
            continue
        # The same family may serve both graphics and presentation
        if family.queueFlags & vk.VK_QUEUE_GRAPHICS_BIT:
            graphic = index
        if pfn(physical_device, index, surface):
            present = index

    if graphic is None or present is None:
        return None

    return (graphic, present)
Get graphic and present queue families Check for graphic and presentation queue families. *Parameters:* - `physical_device`: The `VkPhysicalDevice` to check for - `surface`: The `VkSurfaceKHR` to present - `pfn`: Function `vkGetPhysicalDeviceSurfaceSupportKHR` callable *Returns:* A tuple with graphic index and present index or None
vulk/context.py
_get_queue_families
js78/vulk
35
python
@staticmethod def _get_queue_families(physical_device, surface, pfn): 'Get graphic and present queue families\n\n Check for graphic and presentation queue families.\n\n *Parameters:*\n\n - `physical_device`: The `VkPhysicalDevice` to check for\n - `surface`: The `VkSurfaceKHR` to present\n - `pfn`: Function `vkGetPhysicalDeviceSurfaceSupportKHR` callable\n\n *Returns:*\n\n A tuple with graphic index and present index or None\n ' queue_families = vk.vkGetPhysicalDeviceQueueFamilyProperties(physical_device) graphic_index = (- 1) present_index = (- 1) for (i, queue_family) in enumerate(queue_families): if (queue_family.queueCount <= 0): continue present_available = pfn(physical_device, i, surface) if (queue_family.queueFlags & vk.VK_QUEUE_GRAPHICS_BIT): graphic_index = i if present_available: present_index = i if ((graphic_index == (- 1)) or (present_index == (- 1))): return None return (graphic_index, present_index)
@staticmethod def _get_queue_families(physical_device, surface, pfn): 'Get graphic and present queue families\n\n Check for graphic and presentation queue families.\n\n *Parameters:*\n\n - `physical_device`: The `VkPhysicalDevice` to check for\n - `surface`: The `VkSurfaceKHR` to present\n - `pfn`: Function `vkGetPhysicalDeviceSurfaceSupportKHR` callable\n\n *Returns:*\n\n A tuple with graphic index and present index or None\n ' queue_families = vk.vkGetPhysicalDeviceQueueFamilyProperties(physical_device) graphic_index = (- 1) present_index = (- 1) for (i, queue_family) in enumerate(queue_families): if (queue_family.queueCount <= 0): continue present_available = pfn(physical_device, i, surface) if (queue_family.queueFlags & vk.VK_QUEUE_GRAPHICS_BIT): graphic_index = i if present_available: present_index = i if ((graphic_index == (- 1)) or (present_index == (- 1))): return None return (graphic_index, present_index)<|docstring|>Get graphic and present queue families Check for graphic and presentation queue families. *Parameters:* - `physical_device`: The `VkPhysicalDevice` to check for - `surface`: The `VkSurfaceKHR` to present - `pfn`: Function `vkGetPhysicalDeviceSurfaceSupportKHR` callable *Returns:* A tuple with graphic index and present index or None<|endoftext|>
7eeb069d171a77e4d9b162de7fb4cea2bdae1972a1bc57658e15ad1f8ad6f7bf
def _get_pfn(self):
    '''Get extension function pointers

    Resolve only the functions used in `VulkContext`.
    The Vulkan instance must have been created beforehand.

    *Raises:*

    `VulkError` if the instance is missing or a function is unresolvable
    '''
    if not self.instance:
        msg = '_create_instance must be called before _get_pfn'
        logger.critical(msg)
        raise VulkError(msg)

    names = {
        'vkDestroySurfaceKHR',
        'vkGetPhysicalDeviceSurfaceSupportKHR',
        'vkGetPhysicalDeviceSurfaceCapabilitiesKHR',
        'vkGetPhysicalDeviceSurfaceFormatsKHR',
        'vkGetPhysicalDeviceSurfacePresentModesKHR',
        'vkCreateSwapchainKHR',
        'vkDestroySwapchainKHR',
        'vkGetSwapchainImagesKHR',
        'vkAcquireNextImageKHR',
        'vkQueuePresentKHR',
    }
    # Debug-report functions only exist when the debug extension is enabled
    if self.debug_enabled:
        names |= {'vkCreateDebugReportCallbackEXT',
                  'vkDestroyDebugReportCallbackEXT'}

    for name in names:
        try:
            self.pfn[name] = vk.vkGetInstanceProcAddr(self.instance, name)
        except ImportError:
            msg = "Can't get address of %s extension function" % name
            logger.critical(msg)
            raise VulkError(msg)
Get extension function pointers Get only functions used in `VulkContext`, vulkan instance must exist
vulk/context.py
_get_pfn
js78/vulk
35
python
def _get_pfn(self): 'Get extension function pointers\n\n Get only functions used in `VulkContext`, vulkan instance must exist\n ' if (not self.instance): msg = '_create_instance must be called before _get_pfn' logger.critical(msg) raise VulkError(msg) def add_pfn(name): try: self.pfn[name] = vk.vkGetInstanceProcAddr(self.instance, name) except ImportError: msg = ("Can't get address of %s extension function" % name) logger.critical(msg) raise VulkError(msg) extension_functions = {'vkDestroySurfaceKHR', 'vkGetPhysicalDeviceSurfaceSupportKHR', 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR', 'vkGetPhysicalDeviceSurfaceFormatsKHR', 'vkGetPhysicalDeviceSurfacePresentModesKHR', 'vkCreateSwapchainKHR', 'vkDestroySwapchainKHR', 'vkGetSwapchainImagesKHR', 'vkAcquireNextImageKHR', 'vkQueuePresentKHR'} debug_extension_functions = {'vkCreateDebugReportCallbackEXT', 'vkDestroyDebugReportCallbackEXT'} if self.debug_enabled: extension_functions.update(debug_extension_functions) for name in extension_functions: add_pfn(name)
def _get_pfn(self): 'Get extension function pointers\n\n Get only functions used in `VulkContext`, vulkan instance must exist\n ' if (not self.instance): msg = '_create_instance must be called before _get_pfn' logger.critical(msg) raise VulkError(msg) def add_pfn(name): try: self.pfn[name] = vk.vkGetInstanceProcAddr(self.instance, name) except ImportError: msg = ("Can't get address of %s extension function" % name) logger.critical(msg) raise VulkError(msg) extension_functions = {'vkDestroySurfaceKHR', 'vkGetPhysicalDeviceSurfaceSupportKHR', 'vkGetPhysicalDeviceSurfaceCapabilitiesKHR', 'vkGetPhysicalDeviceSurfaceFormatsKHR', 'vkGetPhysicalDeviceSurfacePresentModesKHR', 'vkCreateSwapchainKHR', 'vkDestroySwapchainKHR', 'vkGetSwapchainImagesKHR', 'vkAcquireNextImageKHR', 'vkQueuePresentKHR'} debug_extension_functions = {'vkCreateDebugReportCallbackEXT', 'vkDestroyDebugReportCallbackEXT'} if self.debug_enabled: extension_functions.update(debug_extension_functions) for name in extension_functions: add_pfn(name)<|docstring|>Get extension function pointers Get only functions used in `VulkContext`, vulkan instance must exist<|endoftext|>
7b5107afda4f464cc480a66acb2d40d0eee3b2935d117d78f2e70ca38f847368
def _create_instance(self):
    '''Create the Vulkan instance

    Enables the instance extensions and layers returned by
    `_get_instance_extensions` and `_get_layers` and stores the
    resulting `VkInstance` in `self.instance`.
    '''
    enabled_extensions = self._get_instance_extensions()
    enabled_layers = self._get_layers()

    # Application metadata reported to the driver
    app_info = vk.VkApplicationInfo(
        sType=vk.VK_STRUCTURE_TYPE_APPLICATION_INFO,
        pApplicationName='Vulk-app',
        applicationVersion=vk.VK_MAKE_VERSION(1, 0, 0),
        pEngineName=ENGINE_NAME,
        engineVersion=vk.VK_MAKE_VERSION(1, 0, 0),
        apiVersion=vk.VK_API_VERSION_1_0)

    create_info = vk.VkInstanceCreateInfo(
        sType=vk.VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        flags=0,
        pApplicationInfo=app_info,
        enabledExtensionCount=len(enabled_extensions),
        ppEnabledExtensionNames=enabled_extensions,
        enabledLayerCount=len(enabled_layers),
        ppEnabledLayerNames=enabled_layers)

    self.instance = vk.vkCreateInstance(create_info, None)
Create Vulkan instance
vulk/context.py
_create_instance
js78/vulk
35
python
def _create_instance(self): extensions = self._get_instance_extensions() layers = self._get_layers() app_info = vk.VkApplicationInfo(sType=vk.VK_STRUCTURE_TYPE_APPLICATION_INFO, pApplicationName='Vulk-app', applicationVersion=vk.VK_MAKE_VERSION(1, 0, 0), pEngineName=ENGINE_NAME, engineVersion=vk.VK_MAKE_VERSION(1, 0, 0), apiVersion=vk.VK_API_VERSION_1_0) instance_create_info = vk.VkInstanceCreateInfo(sType=vk.VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, flags=0, pApplicationInfo=app_info, enabledExtensionCount=len(extensions), ppEnabledExtensionNames=extensions, enabledLayerCount=len(layers), ppEnabledLayerNames=layers) self.instance = vk.vkCreateInstance(instance_create_info, None)
def _create_instance(self): extensions = self._get_instance_extensions() layers = self._get_layers() app_info = vk.VkApplicationInfo(sType=vk.VK_STRUCTURE_TYPE_APPLICATION_INFO, pApplicationName='Vulk-app', applicationVersion=vk.VK_MAKE_VERSION(1, 0, 0), pEngineName=ENGINE_NAME, engineVersion=vk.VK_MAKE_VERSION(1, 0, 0), apiVersion=vk.VK_API_VERSION_1_0) instance_create_info = vk.VkInstanceCreateInfo(sType=vk.VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, flags=0, pApplicationInfo=app_info, enabledExtensionCount=len(extensions), ppEnabledExtensionNames=extensions, enabledLayerCount=len(layers), ppEnabledLayerNames=layers) self.instance = vk.vkCreateInstance(instance_create_info, None)<|docstring|>Create Vulkan instance<|endoftext|>
120f606067bd19eabe85805c27e5646451ecd604e77bb4975fcf942e4ed58baa
def _create_debug_callback(self):
    '''Create debug callback

    Registers a Vulkan debug-report callback that forwards driver
    messages to the Python logger. It works only in debug mode.
    '''
    if not self.debug_enabled:
        return

    # Map Vulkan report flags to Python logging levels
    level_by_flag = {
        vk.VK_DEBUG_REPORT_DEBUG_BIT_EXT: logging.DEBUG,
        vk.VK_DEBUG_REPORT_INFORMATION_BIT_EXT: logging.INFO,
        vk.VK_DEBUG_REPORT_WARNING_BIT_EXT: logging.WARNING,
        vk.VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: logging.WARNING,
        vk.VK_DEBUG_REPORT_ERROR_BIT_EXT: logging.ERROR,
    }

    def report(*args):
        # args[0] is the report flag, args[6] the message text
        logger.log(level_by_flag[args[0]], 'VULKAN: %s', args[6])

    # Only report errors and (performance) warnings
    flags = (vk.VK_DEBUG_REPORT_ERROR_BIT_EXT
             | vk.VK_DEBUG_REPORT_WARNING_BIT_EXT
             | vk.VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
    create_info = vk.VkDebugReportCallbackCreateInfoEXT(
        sType=vk.VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
        flags=flags,
        pfnCallback=report)

    self.debug_callback = self.pfn['vkCreateDebugReportCallbackEXT'](
        self.instance, create_info, None)
Create debug callback It works only on debug mode
vulk/context.py
_create_debug_callback
js78/vulk
35
python
def _create_debug_callback(self): 'Create debug callback\n\n It works only on debug mode\n ' if (not self.debug_enabled): return vulkan_debug_mapping = {vk.VK_DEBUG_REPORT_DEBUG_BIT_EXT: logging.DEBUG, vk.VK_DEBUG_REPORT_WARNING_BIT_EXT: logging.WARNING, vk.VK_DEBUG_REPORT_ERROR_BIT_EXT: logging.ERROR, vk.VK_DEBUG_REPORT_INFORMATION_BIT_EXT: logging.INFO, vk.VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: logging.WARNING} def debug_function(*args): logger.log(vulkan_debug_mapping[args[0]], 'VULKAN: %s', args[6]) flags = ((vk.VK_DEBUG_REPORT_ERROR_BIT_EXT | vk.VK_DEBUG_REPORT_WARNING_BIT_EXT) | vk.VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) debug_create_info = vk.VkDebugReportCallbackCreateInfoEXT(sType=vk.VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, flags=flags, pfnCallback=debug_function) self.debug_callback = self.pfn['vkCreateDebugReportCallbackEXT'](self.instance, debug_create_info, None)
def _create_debug_callback(self): 'Create debug callback\n\n It works only on debug mode\n ' if (not self.debug_enabled): return vulkan_debug_mapping = {vk.VK_DEBUG_REPORT_DEBUG_BIT_EXT: logging.DEBUG, vk.VK_DEBUG_REPORT_WARNING_BIT_EXT: logging.WARNING, vk.VK_DEBUG_REPORT_ERROR_BIT_EXT: logging.ERROR, vk.VK_DEBUG_REPORT_INFORMATION_BIT_EXT: logging.INFO, vk.VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT: logging.WARNING} def debug_function(*args): logger.log(vulkan_debug_mapping[args[0]], 'VULKAN: %s', args[6]) flags = ((vk.VK_DEBUG_REPORT_ERROR_BIT_EXT | vk.VK_DEBUG_REPORT_WARNING_BIT_EXT) | vk.VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) debug_create_info = vk.VkDebugReportCallbackCreateInfoEXT(sType=vk.VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, flags=flags, pfnCallback=debug_function) self.debug_callback = self.pfn['vkCreateDebugReportCallbackEXT'](self.instance, debug_create_info, None)<|docstring|>Create debug callback It works only on debug mode<|endoftext|>
5636cb4f868a996ff7b80b52aad489e346906c8e3053894699a7e3306b3e52f9
def _create_surface(self):
    '''Create the Vulkan presentation surface

    The windowing subsystem is detected from the SDL2 window manager
    info and the matching platform `vkCreate*SurfaceKHR` function is
    called with the native handles.

    *Raises:*

    `VulkError` if the windowing subsystem is unsupported
    '''
    info = self.window.info

    def call_platform(name, surface_create):
        # Platform surface functions are extension functions and must
        # be resolved through vkGetInstanceProcAddr
        f = vk.vkGetInstanceProcAddr(self.instance, name)
        return f(self.instance, surface_create, None)

    def xlib():
        logger.info('Create XLIB surface')
        surface_create = vk.VkXlibSurfaceCreateInfoKHR(
            sType=vk.VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
            dpy=info.info.x11.display,
            window=info.info.x11.window,
            flags=0)
        return call_platform('vkCreateXlibSurfaceKHR', surface_create)

    def mir():
        logger.info('Create MIR surface')
        surface_create = vk.VkMirSurfaceCreateInfoKHR(
            sType=vk.VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR,
            connection=info.info.mir.connection,
            mirSurface=info.info.mir.surface,
            flags=0)
        return call_platform('vkCreateMirSurfaceKHR', surface_create)

    def wayland():
        logger.info('Create WAYLAND surface')
        surface_create = vk.VkWaylandSurfaceCreateInfoKHR(
            sType=vk.VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
            display=info.info.wl.display,
            # Bug fix: the Wayland surface handle lives in the `wl`
            # union member of SDL_SysWMinfo (info.info.wl.surface),
            # not directly on info.info
            surface=info.info.wl.surface,
            flags=0)
        return call_platform('vkCreateWaylandSurfaceKHR', surface_create)

    def windows():
        logger.info('Create WINDOWS surface')
        surface_create = vk.VkWin32SurfaceCreateInfoKHR(
            sType=vk.VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
            hinstance=info.info.win.hinstance,
            hwnd=info.info.win.window,
            flags=0)
        return call_platform('vkCreateWin32SurfaceKHR', surface_create)

    def android():
        raise VulkError('Android not supported for now')

    surface_mapping = {
        sdl2.SDL_SYSWM_X11: xlib,
        sdl2.SDL_SYSWM_MIR: mir,
        sdl2.SDL_SYSWM_WAYLAND: wayland,
        sdl2.SDL_SYSWM_WINDOWS: windows,
        sdl2.SDL_SYSWM_ANDROID: android,
    }

    # Raise a domain error instead of a bare KeyError on unknown platforms
    try:
        create = surface_mapping[info.subsystem]
    except KeyError:
        raise VulkError(
            'Unsupported window subsystem: %s' % info.subsystem) from None

    self.surface = create()
Create Vulkan surface
vulk/context.py
_create_surface
js78/vulk
35
python
def _create_surface(self): info = self.window.info def call_platform(name, surface_create): f = vk.vkGetInstanceProcAddr(self.instance, name) return f(self.instance, surface_create, None) def xlib(): logger.info('Create XLIB surface') surface_create = vk.VkXlibSurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, dpy=info.info.x11.display, window=info.info.x11.window, flags=0) return call_platform('vkCreateXlibSurfaceKHR', surface_create) def mir(): logger.info('Create MIR surface') surface_create = vk.VkMirSurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR, connection=info.info.mir.connection, mirSurface=info.info.mir.surface, flags=0) return call_platform('vkCreateMirSurfaceKHR', surface_create) def wayland(): logger.info('Create WAYLAND surface') surface_create = vk.VkWaylandSurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, display=info.info.wl.display, surface=info.info.surface, flags=0) return call_platform('vkCreateWaylandSurfaceKHR', surface_create) def windows(): logger.info('Create WINDOWS surface') surface_create = vk.VkWin32SurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, hinstance=info.info.win.hinstance, hwnd=info.info.win.window, flags=0) return call_platform('vkCreateWin32SurfaceKHR', surface_create) def android(): raise VulkError('Android not supported for now') surface_mapping = {sdl2.SDL_SYSWM_X11: xlib, sdl2.SDL_SYSWM_MIR: mir, sdl2.SDL_SYSWM_WAYLAND: wayland, sdl2.SDL_SYSWM_WINDOWS: windows, sdl2.SDL_SYSWM_ANDROID: android} self.surface = surface_mapping[info.subsystem]()
def _create_surface(self): info = self.window.info def call_platform(name, surface_create): f = vk.vkGetInstanceProcAddr(self.instance, name) return f(self.instance, surface_create, None) def xlib(): logger.info('Create XLIB surface') surface_create = vk.VkXlibSurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, dpy=info.info.x11.display, window=info.info.x11.window, flags=0) return call_platform('vkCreateXlibSurfaceKHR', surface_create) def mir(): logger.info('Create MIR surface') surface_create = vk.VkMirSurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR, connection=info.info.mir.connection, mirSurface=info.info.mir.surface, flags=0) return call_platform('vkCreateMirSurfaceKHR', surface_create) def wayland(): logger.info('Create WAYLAND surface') surface_create = vk.VkWaylandSurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, display=info.info.wl.display, surface=info.info.surface, flags=0) return call_platform('vkCreateWaylandSurfaceKHR', surface_create) def windows(): logger.info('Create WINDOWS surface') surface_create = vk.VkWin32SurfaceCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, hinstance=info.info.win.hinstance, hwnd=info.info.win.window, flags=0) return call_platform('vkCreateWin32SurfaceKHR', surface_create) def android(): raise VulkError('Android not supported for now') surface_mapping = {sdl2.SDL_SYSWM_X11: xlib, sdl2.SDL_SYSWM_MIR: mir, sdl2.SDL_SYSWM_WAYLAND: wayland, sdl2.SDL_SYSWM_WINDOWS: windows, sdl2.SDL_SYSWM_ANDROID: android} self.surface = surface_mapping[info.subsystem]()<|docstring|>Create Vulkan surface<|endoftext|>
7a0e3be5eeb6c46220ccd6b73d08c18342e42d53d519099eea2d9902dc2dceee
def _create_physical_device(self):
    '''Create Vulkan physical device

    The best physical device is selected through criteria: discrete
    GPUs and larger 2D image limits score higher, and devices without
    suitable graphic/present queue families are discarded.

    *Raises:*

    `VulkError` if no usable physical device is found
    '''
    physical_devices = vk.vkEnumeratePhysicalDevices(self.instance)
    if not physical_devices:
        msg = 'No physical device found'
        logger.critical(msg)
        raise VulkError(msg)

    features = [vk.vkGetPhysicalDeviceFeatures(p)
                for p in physical_devices]
    properties = [vk.vkGetPhysicalDeviceProperties(p)
                  for p in physical_devices]
    logger.debug('Available physical devices: %s',
                 [p.deviceName for p in properties])

    support_pfn = self.pfn['vkGetPhysicalDeviceSurfaceSupportKHR']

    def rate(index, device):
        # A device unable to both draw and present is unusable
        if not VulkContext._get_queue_families(device, self.surface,
                                               support_pfn):
            return 0
        score = properties[index].limits.maxImageDimension2D
        if (properties[index].deviceType
                == vk.VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU):
            score += 1000
        return score

    best_score = 0
    selected_index = 0
    for index, device in enumerate(physical_devices):
        score = rate(index, device)
        if score > best_score:
            best_score, selected_index = score, index

    if best_score == 0:
        msg = 'No available physical device'
        logger.critical(msg)
        raise VulkError(msg)

    self.physical_device = physical_devices[selected_index]
    self.physical_device_properties = properties[selected_index]
    self.physical_device_features = features[selected_index]
    logger.debug('%s device selected',
                 self.physical_device_properties.deviceName)
Create Vulkan physical device The best physical device is selected through criteria.
vulk/context.py
_create_physical_device
js78/vulk
35
python
def _create_physical_device(self): 'Create Vulkan physical device\n\n The best physical device is selected through criteria.\n ' physical_devices = vk.vkEnumeratePhysicalDevices(self.instance) if (not physical_devices): msg = 'No physical device found' logger.critical(msg) raise VulkError(msg) features = [vk.vkGetPhysicalDeviceFeatures(p) for p in physical_devices] properties = [vk.vkGetPhysicalDeviceProperties(p) for p in physical_devices] logger.debug('Available physical devices: %s', [p.deviceName for p in properties]) selected_index = 0 best_score = 0 for (i, d) in enumerate(physical_devices): score = 0 if (properties[i].deviceType == vk.VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU): score += 1000 score += properties[i].limits.maxImageDimension2D if (not VulkContext._get_queue_families(d, self.surface, self.pfn['vkGetPhysicalDeviceSurfaceSupportKHR'])): score = 0 if (score > best_score): best_score = score selected_index = i if (best_score == 0): msg = 'No available physical device' logger.critical(msg) raise VulkError(msg) self.physical_device = physical_devices[selected_index] self.physical_device_properties = properties[selected_index] self.physical_device_features = features[selected_index] logger.debug('%s device selected', self.physical_device_properties.deviceName)
def _create_physical_device(self): 'Create Vulkan physical device\n\n The best physical device is selected through criteria.\n ' physical_devices = vk.vkEnumeratePhysicalDevices(self.instance) if (not physical_devices): msg = 'No physical device found' logger.critical(msg) raise VulkError(msg) features = [vk.vkGetPhysicalDeviceFeatures(p) for p in physical_devices] properties = [vk.vkGetPhysicalDeviceProperties(p) for p in physical_devices] logger.debug('Available physical devices: %s', [p.deviceName for p in properties]) selected_index = 0 best_score = 0 for (i, d) in enumerate(physical_devices): score = 0 if (properties[i].deviceType == vk.VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU): score += 1000 score += properties[i].limits.maxImageDimension2D if (not VulkContext._get_queue_families(d, self.surface, self.pfn['vkGetPhysicalDeviceSurfaceSupportKHR'])): score = 0 if (score > best_score): best_score = score selected_index = i if (best_score == 0): msg = 'No available physical device' logger.critical(msg) raise VulkError(msg) self.physical_device = physical_devices[selected_index] self.physical_device_properties = properties[selected_index] self.physical_device_features = features[selected_index] logger.debug('%s device selected', self.physical_device_properties.deviceName)<|docstring|>Create Vulkan physical device The best physical device is selected through criteria.<|endoftext|>
1c85089c8f48a14df439f991c73791e739bca80cb121af20727ccf0d527b78c9
def _create_device(self):
    '''Create the Vulkan logical device

    Also retrieves the graphic and present queues and records the
    queue family indices in `self.queue_family_indices`.
    '''
    extensions = VulkContext._get_device_extensions(self.physical_device)
    layers = self._get_layers()
    graphic_index, present_index = VulkContext._get_queue_families(
        self.physical_device, self.surface,
        self.pfn['vkGetPhysicalDeviceSurfaceSupportKHR'])

    # One queue create-info per distinct family
    # (graphic and present may be the same family)
    queues_create = []
    for family_index in {graphic_index, present_index}:
        queues_create.append(vk.VkDeviceQueueCreateInfo(
            sType=vk.VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
            flags=0,
            queueFamilyIndex=family_index,
            queueCount=1,
            pQueuePriorities=[1]))

    device_create = vk.VkDeviceCreateInfo(
        sType=vk.VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        flags=0,
        queueCreateInfoCount=len(queues_create),
        pQueueCreateInfos=queues_create,
        enabledLayerCount=len(layers),
        ppEnabledLayerNames=layers,
        enabledExtensionCount=len(extensions),
        ppEnabledExtensionNames=extensions,
        pEnabledFeatures=self.physical_device_features)

    self.device = vk.vkCreateDevice(self.physical_device,
                                    device_create, None)
    self.graphic_queue = vk.vkGetDeviceQueue(self.device, graphic_index, 0)
    self.present_queue = vk.vkGetDeviceQueue(self.device, present_index, 0)
    self.queue_family_indices = {'graphic': graphic_index,
                                 'present': present_index}
Create Vulkan logical device
vulk/context.py
_create_device
js78/vulk
35
python
def _create_device(self): extensions = VulkContext._get_device_extensions(self.physical_device) layers = self._get_layers() (graphic_index, present_index) = VulkContext._get_queue_families(self.physical_device, self.surface, self.pfn['vkGetPhysicalDeviceSurfaceSupportKHR']) queues_create = [vk.VkDeviceQueueCreateInfo(sType=vk.VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, flags=0, queueFamilyIndex=i, queueCount=1, pQueuePriorities=[1]) for i in {graphic_index, present_index}] device_create = vk.VkDeviceCreateInfo(sType=vk.VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, flags=0, queueCreateInfoCount=len(queues_create), pQueueCreateInfos=queues_create, enabledLayerCount=len(layers), ppEnabledLayerNames=layers, enabledExtensionCount=len(extensions), ppEnabledExtensionNames=extensions, pEnabledFeatures=self.physical_device_features) self.device = vk.vkCreateDevice(self.physical_device, device_create, None) self.graphic_queue = vk.vkGetDeviceQueue(self.device, graphic_index, 0) self.present_queue = vk.vkGetDeviceQueue(self.device, present_index, 0) self.queue_family_indices = {'graphic': graphic_index, 'present': present_index}
def _create_device(self): extensions = VulkContext._get_device_extensions(self.physical_device) layers = self._get_layers() (graphic_index, present_index) = VulkContext._get_queue_families(self.physical_device, self.surface, self.pfn['vkGetPhysicalDeviceSurfaceSupportKHR']) queues_create = [vk.VkDeviceQueueCreateInfo(sType=vk.VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, flags=0, queueFamilyIndex=i, queueCount=1, pQueuePriorities=[1]) for i in {graphic_index, present_index}] device_create = vk.VkDeviceCreateInfo(sType=vk.VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, flags=0, queueCreateInfoCount=len(queues_create), pQueueCreateInfos=queues_create, enabledLayerCount=len(layers), ppEnabledLayerNames=layers, enabledExtensionCount=len(extensions), ppEnabledExtensionNames=extensions, pEnabledFeatures=self.physical_device_features) self.device = vk.vkCreateDevice(self.physical_device, device_create, None) self.graphic_queue = vk.vkGetDeviceQueue(self.device, graphic_index, 0) self.present_queue = vk.vkGetDeviceQueue(self.device, present_index, 0) self.queue_family_indices = {'graphic': graphic_index, 'present': present_index}<|docstring|>Create Vulkan logical device<|endoftext|>
0119ff783d80292de152407ae4948d5dc3c65dcaccec40d4ad1ab646b0c3ef5a
def _create_swapchain(self):
    '''Create the Vulkan swapchain.

    Chooses the surface format, present mode and extent from the
    surface capabilities, creates the swapchain, wraps its raw images
    in `vo.Image` objects and transitions them to the
    `PRESENT_SRC_KHR` layout.

    *Raises:*

    `VulkError` if the surface exposes no formats or present modes
    '''
    # Query what the surface supports on the selected physical device
    surface_capabilities = self.pfn['vkGetPhysicalDeviceSurfaceCapabilitiesKHR'](self.physical_device, self.surface)
    surface_formats = self.pfn['vkGetPhysicalDeviceSurfaceFormatsKHR'](self.physical_device, self.surface)
    surface_present_modes = self.pfn['vkGetPhysicalDeviceSurfacePresentModesKHR'](self.physical_device, self.surface)
    if ((not surface_formats) or (not surface_present_modes)):
        msg = 'No available swapchain'
        logger.critical(msg)
        raise VulkError(msg)

    def get_format(formats):
        # Prefer B8G8R8A8 UNORM with sRGB non-linear color space.
        # VK_FORMAT_UNDEFINED means the surface accepts any format.
        for f in formats:
            if (f.format == vk.VK_FORMAT_UNDEFINED):
                return f
            if ((f.format == vk.VK_FORMAT_B8G8R8A8_UNORM) and
                    (f.colorSpace == vk.VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)):
                return f
        return formats[0]

    def get_present_mode(present_modes):
        # MAILBOX if available; FIFO is guaranteed by the spec
        for p in present_modes:
            if (p == vk.VK_PRESENT_MODE_MAILBOX_KHR):
                return p
        return vk.VK_PRESENT_MODE_FIFO_KHR

    def get_swap_extent(capabilities):
        # 0xFFFFFFFF width means the extent is chosen by the swapchain
        uint32_max = 4294967295
        if (capabilities.currentExtent.width != uint32_max):
            return capabilities.currentExtent
        (width, height) = self.window.get_size()
        # Clamp the window size to the allowed extent range
        width = max(capabilities.minImageExtent.width,
                    min(capabilities.maxImageExtent.width, width))
        height = max(capabilities.minImageExtent.height,
                     min(capabilities.maxImageExtent.height, height))
        return vk.VkExtent2D(width=width, height=height)

    surface_format = get_format(surface_formats)
    present_mode = get_present_mode(surface_present_modes)
    extent = get_swap_extent(surface_capabilities)

    # Request one image above the minimum; cap at the maximum
    # (a maxImageCount of 0 means "no limit")
    image_count = (surface_capabilities.minImageCount + 1)
    if ((surface_capabilities.maxImageCount > 0) and
            (image_count > surface_capabilities.maxImageCount)):
        image_count = surface_capabilities.maxImageCount

    # CONCURRENT sharing only when graphic and present families differ
    sharing_mode = vk.VK_SHARING_MODE_EXCLUSIVE
    queue_family_indices = []
    if (self.queue_family_indices['graphic'] !=
            self.queue_family_indices['present']):
        sharing_mode = vk.VK_SHARING_MODE_CONCURRENT
        queue_family_indices = [v for v in
                                self.queue_family_indices.values()]

    swapchain_create = vk.VkSwapchainCreateInfoKHR(
        sType=vk.VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
        flags=0,
        surface=self.surface,
        minImageCount=image_count,
        imageFormat=surface_format.format,
        imageColorSpace=surface_format.colorSpace,
        imageExtent=extent,
        imageArrayLayers=1,
        imageUsage=vk.VK_IMAGE_USAGE_TRANSFER_DST_BIT,
        imageSharingMode=sharing_mode,
        queueFamilyIndexCount=len(queue_family_indices),
        pQueueFamilyIndices=queue_family_indices,
        compositeAlpha=vk.VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
        presentMode=present_mode,
        clipped=vk.VK_TRUE,
        oldSwapchain=None,
        preTransform=surface_capabilities.currentTransform)

    self.swapchain = self.pfn['vkCreateSwapchainKHR'](
        self.device, swapchain_create, None)
    self.width = extent.width
    self.height = extent.height
    self.swapchain_format = surface_format.format

    # Wrap the raw swapchain images without running vo.Image.__init__
    # (their memory is owned by the swapchain, not allocated by us)
    swapchain_raw_images = self.pfn['vkGetSwapchainImagesKHR'](
        self.device, self.swapchain)
    self.swapchain_images = []
    for raw_image in swapchain_raw_images:
        img = vo.Image.__new__(vo.Image)
        img.image = raw_image
        img.is_swapchain = True
        img.format = surface_format.format
        img.width = self.width
        img.height = self.height
        img.depth = 1
        self.swapchain_images.append(img)

    # Transition every image to PRESENT_SRC_KHR so it can be presented
    for image in self.swapchain_images:
        with vo.immediate_buffer(self) as cmd:
            image.update_layout(
                cmd, vc.ImageLayout.UNDEFINED,
                vc.ImageLayout.PRESENT_SRC_KHR,
                vc.PipelineStage.TOP_OF_PIPE,
                vc.PipelineStage.TOP_OF_PIPE,
                vc.Access.NONE, vc.Access.MEMORY_READ)

    logger.debug('Swapchain created with %s images',
                 len(self.swapchain_images))
Create Vulkan swapchain
vulk/context.py
_create_swapchain
js78/vulk
35
python
def _create_swapchain(self): surface_capabilities = self.pfn['vkGetPhysicalDeviceSurfaceCapabilitiesKHR'](self.physical_device, self.surface) surface_formats = self.pfn['vkGetPhysicalDeviceSurfaceFormatsKHR'](self.physical_device, self.surface) surface_present_modes = self.pfn['vkGetPhysicalDeviceSurfacePresentModesKHR'](self.physical_device, self.surface) if ((not surface_formats) or (not surface_present_modes)): msg = 'No available swapchain' logger.critical(msg) raise VulkError(msg) def get_format(formats): for f in formats: if (f.format == vk.VK_FORMAT_UNDEFINED): return f if ((f.format == vk.VK_FORMAT_B8G8R8A8_UNORM) and (f.colorSpace == vk.VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)): return f return formats[0] def get_present_mode(present_modes): for p in present_modes: if (p == vk.VK_PRESENT_MODE_MAILBOX_KHR): return p return vk.VK_PRESENT_MODE_FIFO_KHR def get_swap_extent(capabilities): uint32_max = 4294967295 if (capabilities.currentExtent.width != uint32_max): return capabilities.currentExtent (width, height) = self.window.get_size() width = max(capabilities.minImageExtent.width, min(capabilities.maxImageExtent.width, width)) height = max(capabilities.minImageExtent.height, min(capabilities.maxImageExtent.height, height)) return vk.VkExtent2D(width=width, height=height) surface_format = get_format(surface_formats) present_mode = get_present_mode(surface_present_modes) extent = get_swap_extent(surface_capabilities) image_count = (surface_capabilities.minImageCount + 1) if ((surface_capabilities.maxImageCount > 0) and (image_count > surface_capabilities.maxImageCount)): image_count = surface_capabilities.maxImageCount sharing_mode = vk.VK_SHARING_MODE_EXCLUSIVE queue_family_indices = [] if (self.queue_family_indices['graphic'] != self.queue_family_indices['present']): sharing_mode = vk.VK_SHARING_MODE_CONCURRENT queue_family_indices = [v for v in self.queue_family_indices.values()] swapchain_create = 
vk.VkSwapchainCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, flags=0, surface=self.surface, minImageCount=image_count, imageFormat=surface_format.format, imageColorSpace=surface_format.colorSpace, imageExtent=extent, imageArrayLayers=1, imageUsage=vk.VK_IMAGE_USAGE_TRANSFER_DST_BIT, imageSharingMode=sharing_mode, queueFamilyIndexCount=len(queue_family_indices), pQueueFamilyIndices=queue_family_indices, compositeAlpha=vk.VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, presentMode=present_mode, clipped=vk.VK_TRUE, oldSwapchain=None, preTransform=surface_capabilities.currentTransform) self.swapchain = self.pfn['vkCreateSwapchainKHR'](self.device, swapchain_create, None) self.width = extent.width self.height = extent.height self.swapchain_format = surface_format.format swapchain_raw_images = self.pfn['vkGetSwapchainImagesKHR'](self.device, self.swapchain) self.swapchain_images = [] for raw_image in swapchain_raw_images: img = vo.Image.__new__(vo.Image) img.image = raw_image img.is_swapchain = True img.format = surface_format.format img.width = self.width img.height = self.height img.depth = 1 self.swapchain_images.append(img) for image in self.swapchain_images: with vo.immediate_buffer(self) as cmd: image.update_layout(cmd, vc.ImageLayout.UNDEFINED, vc.ImageLayout.PRESENT_SRC_KHR, vc.PipelineStage.TOP_OF_PIPE, vc.PipelineStage.TOP_OF_PIPE, vc.Access.NONE, vc.Access.MEMORY_READ) logger.debug('Swapchain created with %s images', len(self.swapchain_images))
def _create_swapchain(self): surface_capabilities = self.pfn['vkGetPhysicalDeviceSurfaceCapabilitiesKHR'](self.physical_device, self.surface) surface_formats = self.pfn['vkGetPhysicalDeviceSurfaceFormatsKHR'](self.physical_device, self.surface) surface_present_modes = self.pfn['vkGetPhysicalDeviceSurfacePresentModesKHR'](self.physical_device, self.surface) if ((not surface_formats) or (not surface_present_modes)): msg = 'No available swapchain' logger.critical(msg) raise VulkError(msg) def get_format(formats): for f in formats: if (f.format == vk.VK_FORMAT_UNDEFINED): return f if ((f.format == vk.VK_FORMAT_B8G8R8A8_UNORM) and (f.colorSpace == vk.VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)): return f return formats[0] def get_present_mode(present_modes): for p in present_modes: if (p == vk.VK_PRESENT_MODE_MAILBOX_KHR): return p return vk.VK_PRESENT_MODE_FIFO_KHR def get_swap_extent(capabilities): uint32_max = 4294967295 if (capabilities.currentExtent.width != uint32_max): return capabilities.currentExtent (width, height) = self.window.get_size() width = max(capabilities.minImageExtent.width, min(capabilities.maxImageExtent.width, width)) height = max(capabilities.minImageExtent.height, min(capabilities.maxImageExtent.height, height)) return vk.VkExtent2D(width=width, height=height) surface_format = get_format(surface_formats) present_mode = get_present_mode(surface_present_modes) extent = get_swap_extent(surface_capabilities) image_count = (surface_capabilities.minImageCount + 1) if ((surface_capabilities.maxImageCount > 0) and (image_count > surface_capabilities.maxImageCount)): image_count = surface_capabilities.maxImageCount sharing_mode = vk.VK_SHARING_MODE_EXCLUSIVE queue_family_indices = [] if (self.queue_family_indices['graphic'] != self.queue_family_indices['present']): sharing_mode = vk.VK_SHARING_MODE_CONCURRENT queue_family_indices = [v for v in self.queue_family_indices.values()] swapchain_create = 
vk.VkSwapchainCreateInfoKHR(sType=vk.VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, flags=0, surface=self.surface, minImageCount=image_count, imageFormat=surface_format.format, imageColorSpace=surface_format.colorSpace, imageExtent=extent, imageArrayLayers=1, imageUsage=vk.VK_IMAGE_USAGE_TRANSFER_DST_BIT, imageSharingMode=sharing_mode, queueFamilyIndexCount=len(queue_family_indices), pQueueFamilyIndices=queue_family_indices, compositeAlpha=vk.VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, presentMode=present_mode, clipped=vk.VK_TRUE, oldSwapchain=None, preTransform=surface_capabilities.currentTransform) self.swapchain = self.pfn['vkCreateSwapchainKHR'](self.device, swapchain_create, None) self.width = extent.width self.height = extent.height self.swapchain_format = surface_format.format swapchain_raw_images = self.pfn['vkGetSwapchainImagesKHR'](self.device, self.swapchain) self.swapchain_images = [] for raw_image in swapchain_raw_images: img = vo.Image.__new__(vo.Image) img.image = raw_image img.is_swapchain = True img.format = surface_format.format img.width = self.width img.height = self.height img.depth = 1 self.swapchain_images.append(img) for image in self.swapchain_images: with vo.immediate_buffer(self) as cmd: image.update_layout(cmd, vc.ImageLayout.UNDEFINED, vc.ImageLayout.PRESENT_SRC_KHR, vc.PipelineStage.TOP_OF_PIPE, vc.PipelineStage.TOP_OF_PIPE, vc.Access.NONE, vc.Access.MEMORY_READ) logger.debug('Swapchain created with %s images', len(self.swapchain_images))<|docstring|>Create Vulkan swapchain<|endoftext|>
e042e4af70b27d3b9ee3f7b3c9faccd95ccee0880e38d371e60f391e3f89d5f6
def _create_commanpool(self): 'Create the command pool used to allocate buffers' self.commandpool = vo.CommandPool(self, self.queue_family_indices['graphic'])
Create the command pool used to allocate buffers
vulk/context.py
_create_commanpool
js78/vulk
35
python
def _create_commanpool(self): self.commandpool = vo.CommandPool(self, self.queue_family_indices['graphic'])
def _create_commanpool(self): self.commandpool = vo.CommandPool(self, self.queue_family_indices['graphic'])<|docstring|>Create the command pool used to allocate buffers<|endoftext|>
84a157ebe8cb909fd49fe861d50a55a32c1edaf22b04f8aa5716dd9352ba2774
def _create_commandbuffers(self): 'Create the command buffers used to copy image' self.commandbuffers = self.commandpool.allocate_buffers(self, vc.CommandBufferLevel.PRIMARY, len(self.swapchain_images)) for (i, commandbuffer) in enumerate(self.commandbuffers): with commandbuffer.bind() as cmd: self.final_image.update_layout(cmd, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.ImageLayout.TRANSFER_SRC_OPTIMAL, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.PipelineStage.TRANSFER, vc.Access.COLOR_ATTACHMENT_WRITE, vc.Access.TRANSFER_READ) self.swapchain_images[i].update_layout(cmd, vc.ImageLayout.PRESENT_SRC_KHR, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.PipelineStage.ALL_GRAPHICS, vc.PipelineStage.TRANSFER, vc.Access.MEMORY_READ, vc.Access.TRANSFER_WRITE) self.final_image.copy_to(cmd, self.swapchain_images[i]) self.swapchain_images[i].update_layout(cmd, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.ImageLayout.PRESENT_SRC_KHR, vc.PipelineStage.TRANSFER, vc.PipelineStage.ALL_GRAPHICS, vc.Access.TRANSFER_WRITE, vc.Access.MEMORY_READ) self.final_image.update_layout(cmd, vc.ImageLayout.TRANSFER_SRC_OPTIMAL, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.PipelineStage.TRANSFER, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.Access.TRANSFER_READ, vc.Access.COLOR_ATTACHMENT_WRITE)
Create the command buffers used to copy image
vulk/context.py
_create_commandbuffers
js78/vulk
35
python
def _create_commandbuffers(self): self.commandbuffers = self.commandpool.allocate_buffers(self, vc.CommandBufferLevel.PRIMARY, len(self.swapchain_images)) for (i, commandbuffer) in enumerate(self.commandbuffers): with commandbuffer.bind() as cmd: self.final_image.update_layout(cmd, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.ImageLayout.TRANSFER_SRC_OPTIMAL, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.PipelineStage.TRANSFER, vc.Access.COLOR_ATTACHMENT_WRITE, vc.Access.TRANSFER_READ) self.swapchain_images[i].update_layout(cmd, vc.ImageLayout.PRESENT_SRC_KHR, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.PipelineStage.ALL_GRAPHICS, vc.PipelineStage.TRANSFER, vc.Access.MEMORY_READ, vc.Access.TRANSFER_WRITE) self.final_image.copy_to(cmd, self.swapchain_images[i]) self.swapchain_images[i].update_layout(cmd, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.ImageLayout.PRESENT_SRC_KHR, vc.PipelineStage.TRANSFER, vc.PipelineStage.ALL_GRAPHICS, vc.Access.TRANSFER_WRITE, vc.Access.MEMORY_READ) self.final_image.update_layout(cmd, vc.ImageLayout.TRANSFER_SRC_OPTIMAL, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.PipelineStage.TRANSFER, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.Access.TRANSFER_READ, vc.Access.COLOR_ATTACHMENT_WRITE)
def _create_commandbuffers(self): self.commandbuffers = self.commandpool.allocate_buffers(self, vc.CommandBufferLevel.PRIMARY, len(self.swapchain_images)) for (i, commandbuffer) in enumerate(self.commandbuffers): with commandbuffer.bind() as cmd: self.final_image.update_layout(cmd, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.ImageLayout.TRANSFER_SRC_OPTIMAL, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.PipelineStage.TRANSFER, vc.Access.COLOR_ATTACHMENT_WRITE, vc.Access.TRANSFER_READ) self.swapchain_images[i].update_layout(cmd, vc.ImageLayout.PRESENT_SRC_KHR, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.PipelineStage.ALL_GRAPHICS, vc.PipelineStage.TRANSFER, vc.Access.MEMORY_READ, vc.Access.TRANSFER_WRITE) self.final_image.copy_to(cmd, self.swapchain_images[i]) self.swapchain_images[i].update_layout(cmd, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.ImageLayout.PRESENT_SRC_KHR, vc.PipelineStage.TRANSFER, vc.PipelineStage.ALL_GRAPHICS, vc.Access.TRANSFER_WRITE, vc.Access.MEMORY_READ) self.final_image.update_layout(cmd, vc.ImageLayout.TRANSFER_SRC_OPTIMAL, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.PipelineStage.TRANSFER, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.Access.TRANSFER_READ, vc.Access.COLOR_ATTACHMENT_WRITE)<|docstring|>Create the command buffers used to copy image<|endoftext|>
c876fcd6c81d6ca17560c0c587dfa9f6e23e3c9cdfdf5d11803356f552022ede
def _create_semaphores(self): 'Create semaphores used during image swaping' self._semaphore_available = vo.Semaphore(self) self._semaphore_copied = vo.Semaphore(self) self._direct_semaphores = [vo.Semaphore(self), vo.Semaphore(self)]
Create semaphores used during image swaping
vulk/context.py
_create_semaphores
js78/vulk
35
python
def _create_semaphores(self): self._semaphore_available = vo.Semaphore(self) self._semaphore_copied = vo.Semaphore(self) self._direct_semaphores = [vo.Semaphore(self), vo.Semaphore(self)]
def _create_semaphores(self): self._semaphore_available = vo.Semaphore(self) self._semaphore_copied = vo.Semaphore(self) self._direct_semaphores = [vo.Semaphore(self), vo.Semaphore(self)]<|docstring|>Create semaphores used during image swaping<|endoftext|>
44f2378d030b5e70d06c302b9d4b260625a28b486e68f7a50e3a3f9fec5dc199
def create(self): 'Create Vulkan context' self._create_instance() self._get_pfn() self._create_debug_callback() self._create_surface() self._create_physical_device() self._create_device() self._create_vma() self._create_commanpool() self._create_swapchain_global()
Create Vulkan context
vulk/context.py
create
js78/vulk
35
python
def create(self): self._create_instance() self._get_pfn() self._create_debug_callback() self._create_surface() self._create_physical_device() self._create_device() self._create_vma() self._create_commanpool() self._create_swapchain_global()
def create(self): self._create_instance() self._get_pfn() self._create_debug_callback() self._create_surface() self._create_physical_device() self._create_device() self._create_vma() self._create_commanpool() self._create_swapchain_global()<|docstring|>Create Vulkan context<|endoftext|>
9872442d86b596b3f7e5415aaa53c153c2a39f502efbdcb20be1c541379dfbd3
def reload_swapchain(self): 'Create a new swapchain\n\n This function creates a swapchain and all that depends on it\n ' logger.debug('Reloading swapchain') self._destroy_swapchain_global() self._create_swapchain_global()
Create a new swapchain This function creates a swapchain and all that depends on it
vulk/context.py
reload_swapchain
js78/vulk
35
python
def reload_swapchain(self): 'Create a new swapchain\n\n This function creates a swapchain and all that depends on it\n ' logger.debug('Reloading swapchain') self._destroy_swapchain_global() self._create_swapchain_global()
def reload_swapchain(self): 'Create a new swapchain\n\n This function creates a swapchain and all that depends on it\n ' logger.debug('Reloading swapchain') self._destroy_swapchain_global() self._create_swapchain_global()<|docstring|>Create a new swapchain This function creates a swapchain and all that depends on it<|endoftext|>
bf29f4b49bef0d42b651dc3357a06ca2c4b75e728289f6f01a5182ce4d72e959
def resize(self): 'Resize context when window is resized' (width, height) = self.window.get_size() if ((self.width != width) and (self.height != height)): self.reload_swapchain()
Resize context when window is resized
vulk/context.py
resize
js78/vulk
35
python
def resize(self): (width, height) = self.window.get_size() if ((self.width != width) and (self.height != height)): self.reload_swapchain()
def resize(self): (width, height) = self.window.get_size() if ((self.width != width) and (self.height != height)): self.reload_swapchain()<|docstring|>Resize context when window is resized<|endoftext|>
3e8b78d607b171599dc29f22a8285b7746485e39b35094e6785ea6ad23ecfaa5
def clear_final_image(self, colors): '\n Clear the final image with `colors`\n\n *Parameters:*\n\n - `colors`: `list` of 4 `float` (rgba)\n ' clear_color = vo.ClearColorValue(float32=colors) ranges = [vo.ImageSubresourceRange(vc.ImageAspect.COLOR, 0, 1, 0, 1)] with vo.immediate_buffer(self) as cmd: self.final_image.update_layout(cmd, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.PipelineStage.TRANSFER, vc.Access.COLOR_ATTACHMENT_WRITE, vc.Access.TRANSFER_WRITE) cmd.clear_color_image(self.final_image, vc.ImageLayout.TRANSFER_DST_OPTIMAL, clear_color, ranges) self.final_image.update_layout(cmd, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.PipelineStage.TRANSFER, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.Access.TRANSFER_WRITE, vc.Access.COLOR_ATTACHMENT_WRITE)
Clear the final image with `colors` *Parameters:* - `colors`: `list` of 4 `float` (rgba)
vulk/context.py
clear_final_image
js78/vulk
35
python
def clear_final_image(self, colors): '\n Clear the final image with `colors`\n\n *Parameters:*\n\n - `colors`: `list` of 4 `float` (rgba)\n ' clear_color = vo.ClearColorValue(float32=colors) ranges = [vo.ImageSubresourceRange(vc.ImageAspect.COLOR, 0, 1, 0, 1)] with vo.immediate_buffer(self) as cmd: self.final_image.update_layout(cmd, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.PipelineStage.TRANSFER, vc.Access.COLOR_ATTACHMENT_WRITE, vc.Access.TRANSFER_WRITE) cmd.clear_color_image(self.final_image, vc.ImageLayout.TRANSFER_DST_OPTIMAL, clear_color, ranges) self.final_image.update_layout(cmd, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.PipelineStage.TRANSFER, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.Access.TRANSFER_WRITE, vc.Access.COLOR_ATTACHMENT_WRITE)
def clear_final_image(self, colors): '\n Clear the final image with `colors`\n\n *Parameters:*\n\n - `colors`: `list` of 4 `float` (rgba)\n ' clear_color = vo.ClearColorValue(float32=colors) ranges = [vo.ImageSubresourceRange(vc.ImageAspect.COLOR, 0, 1, 0, 1)] with vo.immediate_buffer(self) as cmd: self.final_image.update_layout(cmd, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.PipelineStage.TRANSFER, vc.Access.COLOR_ATTACHMENT_WRITE, vc.Access.TRANSFER_WRITE) cmd.clear_color_image(self.final_image, vc.ImageLayout.TRANSFER_DST_OPTIMAL, clear_color, ranges) self.final_image.update_layout(cmd, vc.ImageLayout.TRANSFER_DST_OPTIMAL, vc.ImageLayout.COLOR_ATTACHMENT_OPTIMAL, vc.PipelineStage.TRANSFER, vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT, vc.Access.TRANSFER_WRITE, vc.Access.COLOR_ATTACHMENT_WRITE)<|docstring|>Clear the final image with `colors` *Parameters:* - `colors`: `list` of 4 `float` (rgba)<|endoftext|>
d2b68bf8237cbfe7772b067a14c9a6fe4d0745ddfd490175fd2702d3e6730955
def swap(self, semaphores=None): 'Display final image on screen.\n\n This function makes all the rendering work. To proceed, it copies the\n `final_image` into the current swapchain image previously acquired.\n You can pass custom semaphores (and you should) to synchronize the\n command.\n\n Args:\n semaphore (list[Semaphore]): semaphores to wait on\n\n **Note: `final_image` layout is handled by `VulkContext`. You must\n let it to COLOR_ATTACHMENT_OPTIMAL**\n ' try: index = self.pfn['vkAcquireNextImageKHR'](self.device, self.swapchain, vk.UINT64_MAX, self._semaphore_available.semaphore, None) except vk.VkErrorOutOfDateKhr: logger.warning('Swapchain out of date, reloading...') self.reload_swapchain() return wait_semaphores = [self._semaphore_available] if semaphores: wait_semaphores.extend([s for s in semaphores if s]) wait_masks = [vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT] wait_masks *= len(wait_semaphores) copied_semaphores = [self._semaphore_copied.semaphore] submit = vk.VkSubmitInfo(sType=vk.VK_STRUCTURE_TYPE_SUBMIT_INFO, waitSemaphoreCount=len(wait_semaphores), pWaitSemaphores=[s.semaphore for s in wait_semaphores], pWaitDstStageMask=wait_masks, commandBufferCount=1, pCommandBuffers=[self.commandbuffers[index].commandbuffer], signalSemaphoreCount=len(copied_semaphores), pSignalSemaphores=copied_semaphores) vk.vkQueueSubmit(self.graphic_queue, 1, [submit], None) present = vk.VkPresentInfoKHR(sType=vk.VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, waitSemaphoreCount=len(copied_semaphores), pWaitSemaphores=copied_semaphores, swapchainCount=1, pSwapchains=[self.swapchain], pImageIndices=[index], pResults=None) self.pfn['vkQueuePresentKHR'](self.present_queue, present) vk.vkDeviceWaitIdle(self.device)
Display final image on screen. This function makes all the rendering work. To proceed, it copies the `final_image` into the current swapchain image previously acquired. You can pass custom semaphores (and you should) to synchronize the command. Args: semaphore (list[Semaphore]): semaphores to wait on **Note: `final_image` layout is handled by `VulkContext`. You must let it to COLOR_ATTACHMENT_OPTIMAL**
vulk/context.py
swap
js78/vulk
35
python
def swap(self, semaphores=None): 'Display final image on screen.\n\n This function makes all the rendering work. To proceed, it copies the\n `final_image` into the current swapchain image previously acquired.\n You can pass custom semaphores (and you should) to synchronize the\n command.\n\n Args:\n semaphore (list[Semaphore]): semaphores to wait on\n\n **Note: `final_image` layout is handled by `VulkContext`. You must\n let it to COLOR_ATTACHMENT_OPTIMAL**\n ' try: index = self.pfn['vkAcquireNextImageKHR'](self.device, self.swapchain, vk.UINT64_MAX, self._semaphore_available.semaphore, None) except vk.VkErrorOutOfDateKhr: logger.warning('Swapchain out of date, reloading...') self.reload_swapchain() return wait_semaphores = [self._semaphore_available] if semaphores: wait_semaphores.extend([s for s in semaphores if s]) wait_masks = [vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT] wait_masks *= len(wait_semaphores) copied_semaphores = [self._semaphore_copied.semaphore] submit = vk.VkSubmitInfo(sType=vk.VK_STRUCTURE_TYPE_SUBMIT_INFO, waitSemaphoreCount=len(wait_semaphores), pWaitSemaphores=[s.semaphore for s in wait_semaphores], pWaitDstStageMask=wait_masks, commandBufferCount=1, pCommandBuffers=[self.commandbuffers[index].commandbuffer], signalSemaphoreCount=len(copied_semaphores), pSignalSemaphores=copied_semaphores) vk.vkQueueSubmit(self.graphic_queue, 1, [submit], None) present = vk.VkPresentInfoKHR(sType=vk.VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, waitSemaphoreCount=len(copied_semaphores), pWaitSemaphores=copied_semaphores, swapchainCount=1, pSwapchains=[self.swapchain], pImageIndices=[index], pResults=None) self.pfn['vkQueuePresentKHR'](self.present_queue, present) vk.vkDeviceWaitIdle(self.device)
def swap(self, semaphores=None): 'Display final image on screen.\n\n This function makes all the rendering work. To proceed, it copies the\n `final_image` into the current swapchain image previously acquired.\n You can pass custom semaphores (and you should) to synchronize the\n command.\n\n Args:\n semaphore (list[Semaphore]): semaphores to wait on\n\n **Note: `final_image` layout is handled by `VulkContext`. You must\n let it to COLOR_ATTACHMENT_OPTIMAL**\n ' try: index = self.pfn['vkAcquireNextImageKHR'](self.device, self.swapchain, vk.UINT64_MAX, self._semaphore_available.semaphore, None) except vk.VkErrorOutOfDateKhr: logger.warning('Swapchain out of date, reloading...') self.reload_swapchain() return wait_semaphores = [self._semaphore_available] if semaphores: wait_semaphores.extend([s for s in semaphores if s]) wait_masks = [vc.PipelineStage.COLOR_ATTACHMENT_OUTPUT] wait_masks *= len(wait_semaphores) copied_semaphores = [self._semaphore_copied.semaphore] submit = vk.VkSubmitInfo(sType=vk.VK_STRUCTURE_TYPE_SUBMIT_INFO, waitSemaphoreCount=len(wait_semaphores), pWaitSemaphores=[s.semaphore for s in wait_semaphores], pWaitDstStageMask=wait_masks, commandBufferCount=1, pCommandBuffers=[self.commandbuffers[index].commandbuffer], signalSemaphoreCount=len(copied_semaphores), pSignalSemaphores=copied_semaphores) vk.vkQueueSubmit(self.graphic_queue, 1, [submit], None) present = vk.VkPresentInfoKHR(sType=vk.VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, waitSemaphoreCount=len(copied_semaphores), pWaitSemaphores=copied_semaphores, swapchainCount=1, pSwapchains=[self.swapchain], pImageIndices=[index], pResults=None) self.pfn['vkQueuePresentKHR'](self.present_queue, present) vk.vkDeviceWaitIdle(self.device)<|docstring|>Display final image on screen. This function makes all the rendering work. To proceed, it copies the `final_image` into the current swapchain image previously acquired. You can pass custom semaphores (and you should) to synchronize the command. 
Args: semaphore (list[Semaphore]): semaphores to wait on **Note: `final_image` layout is handled by `VulkContext`. You must let it to COLOR_ATTACHMENT_OPTIMAL**<|endoftext|>
0426625494a65e09f8e8c0272dcbb8a17b6ccbd83ce86708831cec6dea136ebf
def create_test_h5file(path: str, empty: bool=False): '\n Create a H5 test file\n\n Parameters\n ----------\n path: str\n Where to create the file\n empty: bool (default=False)\n If True, fill with example data\n\n Returns\n -------\n None\n ' with h5py.File(path, 'w') as f: f.create_group('index') if empty: return f.create_group('index/test_pop') f.create_group('clusters/test_pop') f.create_dataset('index/test_pop/primary', data=np.random.random_integers(1000, size=1000)) f.create_dataset('index/test_pop/test_ctrl1', data=np.random.random_integers(1000, size=1000)) f.create_dataset('index/test_pop/test_ctrl2', data=np.random.random_integers(1000, size=1000))
Create a H5 test file Parameters ---------- path: str Where to create the file empty: bool (default=False) If True, fill with example data Returns ------- None
cytopy/tests/test_fcs.py
create_test_h5file
JANHMS/CytoPy
41
python
def create_test_h5file(path: str, empty: bool=False): '\n Create a H5 test file\n\n Parameters\n ----------\n path: str\n Where to create the file\n empty: bool (default=False)\n If True, fill with example data\n\n Returns\n -------\n None\n ' with h5py.File(path, 'w') as f: f.create_group('index') if empty: return f.create_group('index/test_pop') f.create_group('clusters/test_pop') f.create_dataset('index/test_pop/primary', data=np.random.random_integers(1000, size=1000)) f.create_dataset('index/test_pop/test_ctrl1', data=np.random.random_integers(1000, size=1000)) f.create_dataset('index/test_pop/test_ctrl2', data=np.random.random_integers(1000, size=1000))
def create_test_h5file(path: str, empty: bool=False): '\n Create a H5 test file\n\n Parameters\n ----------\n path: str\n Where to create the file\n empty: bool (default=False)\n If True, fill with example data\n\n Returns\n -------\n None\n ' with h5py.File(path, 'w') as f: f.create_group('index') if empty: return f.create_group('index/test_pop') f.create_group('clusters/test_pop') f.create_dataset('index/test_pop/primary', data=np.random.random_integers(1000, size=1000)) f.create_dataset('index/test_pop/test_ctrl1', data=np.random.random_integers(1000, size=1000)) f.create_dataset('index/test_pop/test_ctrl2', data=np.random.random_integers(1000, size=1000))<|docstring|>Create a H5 test file Parameters ---------- path: str Where to create the file empty: bool (default=False) If True, fill with example data Returns ------- None<|endoftext|>
c1fcd6cb3624f602353aa24c85194654dc2c894d88755f8e9fae5701cf79a929
def add_dummy_ctrl(fg: FileGroup, ctrl_id: str): '\n Add dummy control data to the given FileGroup\n\n Parameters\n ----------\n fg: FileGroup\n ctrl_id: str\n\n Returns\n -------\n None\n ' data = pd.DataFrame([np.random.random(size=1000) for _ in range(6)]).T fg.add_ctrl_file(ctrl_id=ctrl_id, data=data.values, channels=[f'channel{(i + 1)}' for i in range(6)], markers=[f'marker{(i + 1)}' for i in range(6)]) fg.save()
Add dummy control data to the given FileGroup Parameters ---------- fg: FileGroup ctrl_id: str Returns ------- None
cytopy/tests/test_fcs.py
add_dummy_ctrl
JANHMS/CytoPy
41
python
def add_dummy_ctrl(fg: FileGroup, ctrl_id: str): '\n Add dummy control data to the given FileGroup\n\n Parameters\n ----------\n fg: FileGroup\n ctrl_id: str\n\n Returns\n -------\n None\n ' data = pd.DataFrame([np.random.random(size=1000) for _ in range(6)]).T fg.add_ctrl_file(ctrl_id=ctrl_id, data=data.values, channels=[f'channel{(i + 1)}' for i in range(6)], markers=[f'marker{(i + 1)}' for i in range(6)]) fg.save()
def add_dummy_ctrl(fg: FileGroup, ctrl_id: str): '\n Add dummy control data to the given FileGroup\n\n Parameters\n ----------\n fg: FileGroup\n ctrl_id: str\n\n Returns\n -------\n None\n ' data = pd.DataFrame([np.random.random(size=1000) for _ in range(6)]).T fg.add_ctrl_file(ctrl_id=ctrl_id, data=data.values, channels=[f'channel{(i + 1)}' for i in range(6)], markers=[f'marker{(i + 1)}' for i in range(6)]) fg.save()<|docstring|>Add dummy control data to the given FileGroup Parameters ---------- fg: FileGroup ctrl_id: str Returns ------- None<|endoftext|>
ef8b82f71780250f5ab4cc367fa2eb889654a4490f5b3d157ac2e567a2c75c81
def __virtual__(): '\n Only load if boto is available.\n ' if ('boto_elb.exists' in __salt__): return 'boto_elb' return (False, 'boto_elb module could not be loaded')
Only load if boto is available.
salt/states/boto_elb.py
__virtual__
Flowdalic/salt
9,425
python
def __virtual__(): '\n \n ' if ('boto_elb.exists' in __salt__): return 'boto_elb' return (False, 'boto_elb module could not be loaded')
def __virtual__(): '\n \n ' if ('boto_elb.exists' in __salt__): return 'boto_elb' return (False, 'boto_elb module could not be loaded')<|docstring|>Only load if boto is available.<|endoftext|>
eede49b035b81a2753393fb12b2eca8cb9c72905bdeba2f3dde1134c64463d3e
def present(name, listeners, availability_zones=None, subnets=None, subnet_names=None, security_groups=None, scheme='internet-facing', health_check=None, attributes=None, attributes_from_pillar='boto_elb_attributes', cnames=None, alarms=None, alarms_from_pillar='boto_elb_alarms', policies=None, policies_from_pillar='boto_elb_policies', backends=None, region=None, key=None, keyid=None, profile=None, wait_for_sync=True, tags=None, instance_ids=None, instance_names=None):
    """
    Ensure the ELB exists.

    name
        Name of the ELB.

    availability_zones
        A list of availability zones for this ELB.

    listeners
        A list of listener lists; example::

            [
                ['443', 'HTTPS', 'arn:aws:iam::1111111:server-certificate/mycert'],
                ['8443', '80', 'HTTPS', 'HTTP', 'arn:aws:iam::1111111:server-certificate/mycert']
            ]

    subnets
        A list of subnet IDs in your VPC to attach to your LoadBalancer.

    subnet_names
        A list of subnet names in your VPC to attach to your LoadBalancer.

    security_groups
        The security groups assigned to your LoadBalancer within your VPC. Must
        be passed either as a list or a comma-separated string.

        For example, a list:

        .. code-block:: yaml

            - security_groups:
                - secgroup-one
                - secgroup-two

        Or as a comma-separated string:

        .. code-block:: yaml

            - security_groups: secgroup-one,secgroup-two

    scheme
        The type of a LoadBalancer, ``internet-facing`` or ``internal``. Once
        set, can not be modified.

    health_check
        A dict defining the health check for this ELB.

    attributes
        A dict defining the attributes to set on this ELB.
        Unknown keys will be silently ignored.

        See the :mod:`salt.modules.boto_elb.set_attributes` function for
        recognized attributes.

    attributes_from_pillar
        name of pillar dict that contains attributes. Attributes defined for
        this specific state will override those from pillar.

    cnames
        A list of cname dicts with attributes needed for the DNS add_record
        state. By default the boto_route53.add_record state will be used,
        which requires: name, zone, ttl, and identifier.
        See the boto_route53 state for information about these attributes.
        Other DNS modules can be called by specifying the provider keyword.
        the cnames dict will be passed to the state as kwargs.

        See the :mod:`salt.states.boto_route53` state for information about
        these attributes.

    alarms:
        a dictionary of name->boto_cloudwatch_alarm sections to be associated
        with this ELB. All attributes should be specified except for dimension
        which will be automatically set to this ELB.

        See the :mod:`salt.states.boto_cloudwatch_alarm` state for information
        about these attributes.

    alarms_from_pillar:
        name of pillar dict that contains alarm settings. Alarms defined for
        this specific state will override those from pillar.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string)
        that contains a dict with region, key and keyid.

    wait_for_sync
        Wait for an INSYNC change status from Route53.

    tags
        dict of tags

    instance_ids
        list of instance ids. The state will ensure that these, and ONLY
        these, instances are registered with the ELB. This is additive with
        instance_names.

    instance_names
        list of instance names. The state will ensure that these, and ONLY
        these, instances are registered with the ELB. This is additive with
        instance_ids.
    """
    # Merge attributes from pillar with explicitly-passed ones; explicit
    # values win.
    tmp = __salt__['config.option'](attributes_from_pillar, {})
    attributes = (salt.utils.dictupdate.update(tmp, attributes) if attributes else tmp)
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    if (not isinstance(security_groups, (str, list, type(None)))):
        msg = "The 'security_group' parameter must be either a list or a comma-separated string."
        log.error(msg)
        ret.update({'comment': msg, 'result': False})
        return ret
    # Normalise a comma-separated string into the list form used below.
    if isinstance(security_groups, str):
        security_groups = security_groups.split(',')
    # Step 1: the load balancer itself.
    _ret = _elb_present(name, availability_zones, listeners, subnets, subnet_names, security_groups, scheme, region, key, keyid, profile)
    ret.update({'changes': _ret['changes'], 'comment': ' '.join([ret['comment'], _ret['comment']])})
    # Keep the current result unless the sub-state failed (propagates
    # False/None from the sub-state, preserves an earlier True).
    ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
    if (ret['result'] is False):
        return ret
    exists = __salt__['boto_elb.exists'](name, region, key, keyid, profile)
    # In test mode there is nothing more to report about an ELB that does
    # not exist yet.
    if ((not exists) and __opts__['test']):
        return ret
    # Step 2: ELB attributes (only when any were supplied/pillared).
    if attributes:
        _ret = _attributes_present(name, attributes, region, key, keyid, profile)
        ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])})
        ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
        if (ret['result'] is False):
            return ret
    # Step 3: health check configuration.
    _ret = _health_check_present(name, health_check, region, key, keyid, profile)
    ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])})
    ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
    if (ret['result'] is False):
        return ret
    # Step 4: CNAME records pointing at the ELB's DNS name.
    if cnames:
        lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile)
        if lb:
            for cname in cnames:
                _ret = None
                dns_provider = 'boto_route53'
                cname.update({'record_type': 'CNAME', 'value': lb['dns_name']})
                if ('provider' in cname):
                    dns_provider = cname.pop('provider')
                if (dns_provider == 'boto_route53'):
                    # Backfill connection args the cname dict did not set
                    # from this function's own arguments.
                    for p in ('profile', 'key', 'keyid', 'region', 'wait_for_sync'):
                        cname[p] = (locals().get(p) if (p not in cname) else cname[p])
                    _ret = __states__['boto_route53.present'](**cname)
                # NOTE(review): if a non-boto_route53 provider is named,
                # ``_ret`` remains None and the update below would raise a
                # TypeError — confirm whether other providers are actually
                # supported here.
                ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])})
                ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
                if (ret['result'] is False):
                    return ret
    # Step 5: CloudWatch alarms associated with this ELB.
    _ret = _alarms_present(name, alarms, alarms_from_pillar, region, key, keyid, profile)
    ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])})
    ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
    if (ret['result'] is False):
        return ret
    # Step 6: listener/backend policies.
    _ret = _policies_present(name, policies, policies_from_pillar, listeners, backends, region, key, keyid, profile)
    ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])})
    ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
    if (ret['result'] is False):
        return ret
    # Step 7: tags.
    _ret = _tags_present(name, tags, region, key, keyid, profile)
    ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])})
    ret['result'] = (ret['result'] if _ret['result'] else _ret['result'])
    if (ret['result'] is False):
        return ret
    # Step 8: resolve instance_names to ids and enforce exact membership.
    if (not instance_ids):
        instance_ids = []
    if instance_names:
        # Only instances in these EC2 states can meaningfully be registered.
        running_states = ('pending', 'rebooting', 'running', 'stopping', 'stopped')
        for n in instance_names:
            instance_ids += __salt__['boto_ec2.find_instances'](name=n, region=region, key=key, keyid=keyid, profile=profile, in_states=running_states)
    if instance_ids:
        if __opts__['test']:
            # Third argument True = dry-run; truthy return means a change
            # would be made.
            if __salt__['boto_elb.set_instances'](name, instance_ids, True, region, key, keyid, profile):
                ret['comment'] += ' ELB {} instances would be updated.'.format(name)
                ret['result'] = None
        else:
            success = __salt__['boto_elb.set_instances'](name, instance_ids, False, region, key, keyid, profile)
            if (not success):
                ret['comment'] += 'Failed to set requested instances.'
                ret['result'] = False
    return ret
Ensure the ELB exists. name Name of the ELB. availability_zones A list of availability zones for this ELB. listeners A list of listener lists; example:: [ ['443', 'HTTPS', 'arn:aws:iam::1111111:server-certificate/mycert'], ['8443', '80', 'HTTPS', 'HTTP', 'arn:aws:iam::1111111:server-certificate/mycert'] ] subnets A list of subnet IDs in your VPC to attach to your LoadBalancer. subnet_names A list of subnet names in your VPC to attach to your LoadBalancer. security_groups The security groups assigned to your LoadBalancer within your VPC. Must be passed either as a list or a comma-separated string. For example, a list: .. code-block:: yaml - security_groups: - secgroup-one - secgroup-two Or as a comma-separated string: .. code-block:: yaml - security_groups: secgroup-one,secgroup-two scheme The type of a LoadBalancer, ``internet-facing`` or ``internal``. Once set, can not be modified. health_check A dict defining the health check for this ELB. attributes A dict defining the attributes to set on this ELB. Unknown keys will be silently ignored. See the :mod:`salt.modules.boto_elb.set_attributes` function for recognized attributes. attributes_from_pillar name of pillar dict that contains attributes. Attributes defined for this specific state will override those from pillar. cnames A list of cname dicts with attributes needed for the DNS add_record state. By default the boto_route53.add_record state will be used, which requires: name, zone, ttl, and identifier. See the boto_route53 state for information about these attributes. Other DNS modules can be called by specifying the provider keyword. the cnames dict will be passed to the state as kwargs. See the :mod:`salt.states.boto_route53` state for information about these attributes. alarms: a dictionary of name->boto_cloudwatch_alarm sections to be associated with this ELB. All attributes should be specified except for dimension which will be automatically set to this ELB. 
See the :mod:`salt.states.boto_cloudwatch_alarm` state for information about these attributes. alarms_from_pillar: name of pillar dict that contains alarm settings. Alarms defined for this specific state will override those from pillar. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. wait_for_sync Wait for an INSYNC change status from Route53. tags dict of tags instance_ids list of instance ids. The state will ensure that these, and ONLY these, instances are registered with the ELB. This is additive with instance_names. instance_names list of instance names. The state will ensure that these, and ONLY these, instances are registered with the ELB. This is additive with instance_ids.
salt/states/boto_elb.py
present
Flowdalic/salt
9,425
python
def present(name, listeners, availability_zones=None, subnets=None, subnet_names=None, security_groups=None, scheme='internet-facing', health_check=None, attributes=None, attributes_from_pillar='boto_elb_attributes', cnames=None, alarms=None, alarms_from_pillar='boto_elb_alarms', policies=None, policies_from_pillar='boto_elb_policies', backends=None, region=None, key=None, keyid=None, profile=None, wait_for_sync=True, tags=None, instance_ids=None, instance_names=None): "\n Ensure the ELB exists.\n\n name\n Name of the ELB.\n\n availability_zones\n A list of availability zones for this ELB.\n\n listeners\n A list of listener lists; example::\n\n [\n ['443', 'HTTPS', 'arn:aws:iam::1111111:server-certificate/mycert'],\n ['8443', '80', 'HTTPS', 'HTTP', 'arn:aws:iam::1111111:server-certificate/mycert']\n ]\n\n subnets\n A list of subnet IDs in your VPC to attach to your LoadBalancer.\n\n subnet_names\n A list of subnet names in your VPC to attach to your LoadBalancer.\n\n security_groups\n The security groups assigned to your LoadBalancer within your VPC. Must\n be passed either as a list or a comma-separated string.\n\n For example, a list:\n\n .. code-block:: yaml\n\n - security_groups:\n - secgroup-one\n - secgroup-two\n\n Or as a comma-separated string:\n\n .. code-block:: yaml\n\n - security_groups: secgroup-one,secgroup-two\n\n scheme\n The type of a LoadBalancer, ``internet-facing`` or ``internal``. Once\n set, can not be modified.\n\n health_check\n A dict defining the health check for this ELB.\n\n attributes\n A dict defining the attributes to set on this ELB.\n Unknown keys will be silently ignored.\n\n See the :mod:`salt.modules.boto_elb.set_attributes` function for\n recognized attributes.\n\n attributes_from_pillar\n name of pillar dict that contains attributes. 
Attributes defined for this specific\n state will override those from pillar.\n\n cnames\n A list of cname dicts with attributes needed for the DNS add_record state.\n By default the boto_route53.add_record state will be used, which requires: name, zone, ttl, and identifier.\n See the boto_route53 state for information about these attributes.\n Other DNS modules can be called by specifying the provider keyword.\n the cnames dict will be passed to the state as kwargs.\n\n See the :mod:`salt.states.boto_route53` state for information about\n these attributes.\n\n alarms:\n a dictionary of name->boto_cloudwatch_alarm sections to be associated with this ELB.\n All attributes should be specified except for dimension which will be\n automatically set to this ELB.\n\n See the :mod:`salt.states.boto_cloudwatch_alarm` state for information\n about these attributes.\n\n alarms_from_pillar:\n name of pillar dict that contains alarm settings. Alarms defined for this specific\n state will override those from pillar.\n\n region\n Region to connect to.\n\n key\n Secret key to be used.\n\n keyid\n Access key to be used.\n\n profile\n A dict with region, key and keyid, or a pillar key (string)\n that contains a dict with region, key and keyid.\n\n wait_for_sync\n Wait for an INSYNC change status from Route53.\n\n tags\n dict of tags\n\n instance_ids\n list of instance ids. The state will ensure that these, and ONLY these, instances\n are registered with the ELB. This is additive with instance_names.\n\n instance_names\n list of instance names. The state will ensure that these, and ONLY these, instances\n are registered with the ELB. 
This is additive with instance_ids.\n " tmp = __salt__['config.option'](attributes_from_pillar, {}) attributes = (salt.utils.dictupdate.update(tmp, attributes) if attributes else tmp) ret = {'name': name, 'result': True, 'comment': , 'changes': {}} if (not isinstance(security_groups, (str, list, type(None)))): msg = "The 'security_group' parameter must be either a list or a comma-separated string." log.error(msg) ret.update({'comment': msg, 'result': False}) return ret if isinstance(security_groups, str): security_groups = security_groups.split(',') _ret = _elb_present(name, availability_zones, listeners, subnets, subnet_names, security_groups, scheme, region, key, keyid, profile) ret.update({'changes': _ret['changes'], 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret exists = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if ((not exists) and __opts__['test']): return ret if attributes: _ret = _attributes_present(name, attributes, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _health_check_present(name, health_check, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret if cnames: lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) if lb: for cname in cnames: _ret = None dns_provider = 'boto_route53' cname.update({'record_type': 'CNAME', 'value': lb['dns_name']}) if ('provider' in cname): dns_provider = cname.pop('provider') if (dns_provider == 
'boto_route53'): for p in ('profile', 'key', 'keyid', 'region', 'wait_for_sync'): cname[p] = (locals().get(p) if (p not in cname) else cname[p]) _ret = __states__['boto_route53.present'](**cname) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _alarms_present(name, alarms, alarms_from_pillar, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _policies_present(name, policies, policies_from_pillar, listeners, backends, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _tags_present(name, tags, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret if (not instance_ids): instance_ids = [] if instance_names: running_states = ('pending', 'rebooting', 'running', 'stopping', 'stopped') for n in instance_names: instance_ids += __salt__['boto_ec2.find_instances'](name=n, region=region, key=key, keyid=keyid, profile=profile, in_states=running_states) if instance_ids: if __opts__['test']: if __salt__['boto_elb.set_instances'](name, instance_ids, True, region, key, keyid, profile): ret['comment'] += ' ELB {} instances would be updated.'.format(name) ret['result'] = None else: 
success = __salt__['boto_elb.set_instances'](name, instance_ids, False, region, key, keyid, profile) if (not success): ret['comment'] += 'Failed to set requested instances.' ret['result'] = False return ret
def present(name, listeners, availability_zones=None, subnets=None, subnet_names=None, security_groups=None, scheme='internet-facing', health_check=None, attributes=None, attributes_from_pillar='boto_elb_attributes', cnames=None, alarms=None, alarms_from_pillar='boto_elb_alarms', policies=None, policies_from_pillar='boto_elb_policies', backends=None, region=None, key=None, keyid=None, profile=None, wait_for_sync=True, tags=None, instance_ids=None, instance_names=None): "\n Ensure the ELB exists.\n\n name\n Name of the ELB.\n\n availability_zones\n A list of availability zones for this ELB.\n\n listeners\n A list of listener lists; example::\n\n [\n ['443', 'HTTPS', 'arn:aws:iam::1111111:server-certificate/mycert'],\n ['8443', '80', 'HTTPS', 'HTTP', 'arn:aws:iam::1111111:server-certificate/mycert']\n ]\n\n subnets\n A list of subnet IDs in your VPC to attach to your LoadBalancer.\n\n subnet_names\n A list of subnet names in your VPC to attach to your LoadBalancer.\n\n security_groups\n The security groups assigned to your LoadBalancer within your VPC. Must\n be passed either as a list or a comma-separated string.\n\n For example, a list:\n\n .. code-block:: yaml\n\n - security_groups:\n - secgroup-one\n - secgroup-two\n\n Or as a comma-separated string:\n\n .. code-block:: yaml\n\n - security_groups: secgroup-one,secgroup-two\n\n scheme\n The type of a LoadBalancer, ``internet-facing`` or ``internal``. Once\n set, can not be modified.\n\n health_check\n A dict defining the health check for this ELB.\n\n attributes\n A dict defining the attributes to set on this ELB.\n Unknown keys will be silently ignored.\n\n See the :mod:`salt.modules.boto_elb.set_attributes` function for\n recognized attributes.\n\n attributes_from_pillar\n name of pillar dict that contains attributes. 
Attributes defined for this specific\n state will override those from pillar.\n\n cnames\n A list of cname dicts with attributes needed for the DNS add_record state.\n By default the boto_route53.add_record state will be used, which requires: name, zone, ttl, and identifier.\n See the boto_route53 state for information about these attributes.\n Other DNS modules can be called by specifying the provider keyword.\n the cnames dict will be passed to the state as kwargs.\n\n See the :mod:`salt.states.boto_route53` state for information about\n these attributes.\n\n alarms:\n a dictionary of name->boto_cloudwatch_alarm sections to be associated with this ELB.\n All attributes should be specified except for dimension which will be\n automatically set to this ELB.\n\n See the :mod:`salt.states.boto_cloudwatch_alarm` state for information\n about these attributes.\n\n alarms_from_pillar:\n name of pillar dict that contains alarm settings. Alarms defined for this specific\n state will override those from pillar.\n\n region\n Region to connect to.\n\n key\n Secret key to be used.\n\n keyid\n Access key to be used.\n\n profile\n A dict with region, key and keyid, or a pillar key (string)\n that contains a dict with region, key and keyid.\n\n wait_for_sync\n Wait for an INSYNC change status from Route53.\n\n tags\n dict of tags\n\n instance_ids\n list of instance ids. The state will ensure that these, and ONLY these, instances\n are registered with the ELB. This is additive with instance_names.\n\n instance_names\n list of instance names. The state will ensure that these, and ONLY these, instances\n are registered with the ELB. 
This is additive with instance_ids.\n " tmp = __salt__['config.option'](attributes_from_pillar, {}) attributes = (salt.utils.dictupdate.update(tmp, attributes) if attributes else tmp) ret = {'name': name, 'result': True, 'comment': , 'changes': {}} if (not isinstance(security_groups, (str, list, type(None)))): msg = "The 'security_group' parameter must be either a list or a comma-separated string." log.error(msg) ret.update({'comment': msg, 'result': False}) return ret if isinstance(security_groups, str): security_groups = security_groups.split(',') _ret = _elb_present(name, availability_zones, listeners, subnets, subnet_names, security_groups, scheme, region, key, keyid, profile) ret.update({'changes': _ret['changes'], 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret exists = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if ((not exists) and __opts__['test']): return ret if attributes: _ret = _attributes_present(name, attributes, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _health_check_present(name, health_check, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret if cnames: lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) if lb: for cname in cnames: _ret = None dns_provider = 'boto_route53' cname.update({'record_type': 'CNAME', 'value': lb['dns_name']}) if ('provider' in cname): dns_provider = cname.pop('provider') if (dns_provider == 
'boto_route53'): for p in ('profile', 'key', 'keyid', 'region', 'wait_for_sync'): cname[p] = (locals().get(p) if (p not in cname) else cname[p]) _ret = __states__['boto_route53.present'](**cname) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _alarms_present(name, alarms, alarms_from_pillar, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _policies_present(name, policies, policies_from_pillar, listeners, backends, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret _ret = _tags_present(name, tags, region, key, keyid, profile) ret.update({'changes': salt.utils.dictupdate.update(ret['changes'], _ret['changes']), 'comment': ' '.join([ret['comment'], _ret['comment']])}) ret['result'] = (ret['result'] if _ret['result'] else _ret['result']) if (ret['result'] is False): return ret if (not instance_ids): instance_ids = [] if instance_names: running_states = ('pending', 'rebooting', 'running', 'stopping', 'stopped') for n in instance_names: instance_ids += __salt__['boto_ec2.find_instances'](name=n, region=region, key=key, keyid=keyid, profile=profile, in_states=running_states) if instance_ids: if __opts__['test']: if __salt__['boto_elb.set_instances'](name, instance_ids, True, region, key, keyid, profile): ret['comment'] += ' ELB {} instances would be updated.'.format(name) ret['result'] = None else: 
success = __salt__['boto_elb.set_instances'](name, instance_ids, False, region, key, keyid, profile) if (not success): ret['comment'] += 'Failed to set requested instances.' ret['result'] = False return ret<|docstring|>Ensure the ELB exists. name Name of the ELB. availability_zones A list of availability zones for this ELB. listeners A list of listener lists; example:: [ ['443', 'HTTPS', 'arn:aws:iam::1111111:server-certificate/mycert'], ['8443', '80', 'HTTPS', 'HTTP', 'arn:aws:iam::1111111:server-certificate/mycert'] ] subnets A list of subnet IDs in your VPC to attach to your LoadBalancer. subnet_names A list of subnet names in your VPC to attach to your LoadBalancer. security_groups The security groups assigned to your LoadBalancer within your VPC. Must be passed either as a list or a comma-separated string. For example, a list: .. code-block:: yaml - security_groups: - secgroup-one - secgroup-two Or as a comma-separated string: .. code-block:: yaml - security_groups: secgroup-one,secgroup-two scheme The type of a LoadBalancer, ``internet-facing`` or ``internal``. Once set, can not be modified. health_check A dict defining the health check for this ELB. attributes A dict defining the attributes to set on this ELB. Unknown keys will be silently ignored. See the :mod:`salt.modules.boto_elb.set_attributes` function for recognized attributes. attributes_from_pillar name of pillar dict that contains attributes. Attributes defined for this specific state will override those from pillar. cnames A list of cname dicts with attributes needed for the DNS add_record state. By default the boto_route53.add_record state will be used, which requires: name, zone, ttl, and identifier. See the boto_route53 state for information about these attributes. Other DNS modules can be called by specifying the provider keyword. the cnames dict will be passed to the state as kwargs. See the :mod:`salt.states.boto_route53` state for information about these attributes. 
alarms: a dictionary of name->boto_cloudwatch_alarm sections to be associated with this ELB. All attributes should be specified except for dimension which will be automatically set to this ELB. See the :mod:`salt.states.boto_cloudwatch_alarm` state for information about these attributes. alarms_from_pillar: name of pillar dict that contains alarm settings. Alarms defined for this specific state will override those from pillar. region Region to connect to. key Secret key to be used. keyid Access key to be used. profile A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. wait_for_sync Wait for an INSYNC change status from Route53. tags dict of tags instance_ids list of instance ids. The state will ensure that these, and ONLY these, instances are registered with the ELB. This is additive with instance_names. instance_names list of instance names. The state will ensure that these, and ONLY these, instances are registered with the ELB. This is additive with instance_ids.<|endoftext|>
57ec2f46368d30cd0859a0dfa6d7e10be8ca92a944db6f17269b11a7e8600cf1
def register_instances(name, instances, region=None, key=None, keyid=None, profile=None):
    """
    Add EC2 instance(s) to an Elastic Load Balancer. Removing an instance from
    the ``instances`` list does not remove it from the ELB.

    name
        The name of the Elastic Load Balancer to add EC2 instances to.

    instances
        A list of EC2 instance IDs that this Elastic Load Balancer should
        distribute traffic to. This state will only ever append new instances
        to the ELB. EC2 instances already associated with this ELB will not be
        removed if they are not in the ``instances`` list.

    .. versionadded:: 2015.8.0

    .. code-block:: yaml

        add-instances:
          boto_elb.register_instances:
            - name: myloadbalancer
            - instances:
              - instance-id1
              - instance-id2
    """
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    # Guard: the target ELB must already exist.
    if not __salt__['boto_elb.exists'](name, region, key, keyid, profile):
        msg = 'Could not find lb {}'.format(name)
        log.error(msg)
        ret['comment'] = msg
        ret['result'] = False
        return ret
    # Instances mid-deregistration are not counted as registered, so a
    # re-registration request for them is not treated as a no-op.
    health = __salt__['boto_elb.get_instance_health'](name, region, key, keyid, profile)
    registered = []
    for entry in health:
        if entry['description'] != 'Instance deregistration currently in progress.':
            registered.append(entry['instance_id'])
    to_add = [inst for inst in instances if inst not in registered]
    if not to_add:
        msg = 'Instance/s {} already exist.'.format(str(instances).strip('[]'))
        log.debug(msg)
        ret['comment'] = msg
        return ret
    if __opts__['test']:
        ret['comment'] = 'ELB {} is set to register : {}.'.format(name, to_add)
        ret['result'] = None
        return ret
    if __salt__['boto_elb.register_instances'](name, instances, region, key, keyid, profile):
        msg = 'Load Balancer {} has been changed'.format(name)
        log.info(msg)
        combined = set().union(registered, instances)
        ret['comment'] = msg
        ret['changes'] = {'old': '\n'.join(registered), 'new': '\n'.join(list(combined))}
    else:
        msg = 'Load balancer {} failed to add instances'.format(name)
        log.error(msg)
        ret['comment'] = msg
        ret['result'] = False
    return ret
Add EC2 instance(s) to an Elastic Load Balancer. Removing an instance from the ``instances`` list does not remove it from the ELB. name The name of the Elastic Load Balancer to add EC2 instances to. instances A list of EC2 instance IDs that this Elastic Load Balancer should distribute traffic to. This state will only ever append new instances to the ELB. EC2 instances already associated with this ELB will not be removed if they are not in the ``instances`` list. .. versionadded:: 2015.8.0 .. code-block:: yaml add-instances: boto_elb.register_instances: - name: myloadbalancer - instances: - instance-id1 - instance-id2
salt/states/boto_elb.py
register_instances
Flowdalic/salt
9,425
python
def register_instances(name, instances, region=None, key=None, keyid=None, profile=None): '\n Add EC2 instance(s) to an Elastic Load Balancer. Removing an instance from\n the ``instances`` list does not remove it from the ELB.\n\n name\n The name of the Elastic Load Balancer to add EC2 instances to.\n\n instances\n A list of EC2 instance IDs that this Elastic Load Balancer should\n distribute traffic to. This state will only ever append new instances\n to the ELB. EC2 instances already associated with this ELB will not be\n removed if they are not in the ``instances`` list.\n\n .. versionadded:: 2015.8.0\n\n .. code-block:: yaml\n\n add-instances:\n boto_elb.register_instances:\n - name: myloadbalancer\n - instances:\n - instance-id1\n - instance-id2\n ' ret = {'name': name, 'result': True, 'comment': , 'changes': {}} lb = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if (not lb): msg = 'Could not find lb {}'.format(name) log.error(msg) ret.update({'comment': msg, 'result': False}) return ret health = __salt__['boto_elb.get_instance_health'](name, region, key, keyid, profile) nodes = [value['instance_id'] for value in health if (value['description'] != 'Instance deregistration currently in progress.')] new = [value for value in instances if (value not in nodes)] if (not new): msg = 'Instance/s {} already exist.'.format(str(instances).strip('[]')) log.debug(msg) ret.update({'comment': msg}) return ret if __opts__['test']: ret['comment'] = 'ELB {} is set to register : {}.'.format(name, new) ret['result'] = None return ret state = __salt__['boto_elb.register_instances'](name, instances, region, key, keyid, profile) if state: msg = 'Load Balancer {} has been changed'.format(name) log.info(msg) new = set().union(nodes, instances) ret.update({'comment': msg, 'changes': {'old': '\n'.join(nodes), 'new': '\n'.join(list(new))}}) else: msg = 'Load balancer {} failed to add instances'.format(name) log.error(msg) ret.update({'comment': msg, 'result': False}) 
return ret
def register_instances(name, instances, region=None, key=None, keyid=None, profile=None): '\n Add EC2 instance(s) to an Elastic Load Balancer. Removing an instance from\n the ``instances`` list does not remove it from the ELB.\n\n name\n The name of the Elastic Load Balancer to add EC2 instances to.\n\n instances\n A list of EC2 instance IDs that this Elastic Load Balancer should\n distribute traffic to. This state will only ever append new instances\n to the ELB. EC2 instances already associated with this ELB will not be\n removed if they are not in the ``instances`` list.\n\n .. versionadded:: 2015.8.0\n\n .. code-block:: yaml\n\n add-instances:\n boto_elb.register_instances:\n - name: myloadbalancer\n - instances:\n - instance-id1\n - instance-id2\n ' ret = {'name': name, 'result': True, 'comment': , 'changes': {}} lb = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if (not lb): msg = 'Could not find lb {}'.format(name) log.error(msg) ret.update({'comment': msg, 'result': False}) return ret health = __salt__['boto_elb.get_instance_health'](name, region, key, keyid, profile) nodes = [value['instance_id'] for value in health if (value['description'] != 'Instance deregistration currently in progress.')] new = [value for value in instances if (value not in nodes)] if (not new): msg = 'Instance/s {} already exist.'.format(str(instances).strip('[]')) log.debug(msg) ret.update({'comment': msg}) return ret if __opts__['test']: ret['comment'] = 'ELB {} is set to register : {}.'.format(name, new) ret['result'] = None return ret state = __salt__['boto_elb.register_instances'](name, instances, region, key, keyid, profile) if state: msg = 'Load Balancer {} has been changed'.format(name) log.info(msg) new = set().union(nodes, instances) ret.update({'comment': msg, 'changes': {'old': '\n'.join(nodes), 'new': '\n'.join(list(new))}}) else: msg = 'Load balancer {} failed to add instances'.format(name) log.error(msg) ret.update({'comment': msg, 'result': False}) 
return ret<|docstring|>Add EC2 instance(s) to an Elastic Load Balancer. Removing an instance from the ``instances`` list does not remove it from the ELB. name The name of the Elastic Load Balancer to add EC2 instances to. instances A list of EC2 instance IDs that this Elastic Load Balancer should distribute traffic to. This state will only ever append new instances to the ELB. EC2 instances already associated with this ELB will not be removed if they are not in the ``instances`` list. .. versionadded:: 2015.8.0 .. code-block:: yaml add-instances: boto_elb.register_instances: - name: myloadbalancer - instances: - instance-id1 - instance-id2<|endoftext|>
ac800680d1f3246adf84e686d2f88419d2af437d95b3955bec65a246966705e0
def _alarms_present(name, alarms, alarms_from_pillar, region, key, keyid, profile): 'helper method for present. ensure that cloudwatch_alarms are set' current = __salt__['config.option'](alarms_from_pillar, {}) if alarms: current = salt.utils.dictupdate.update(current, alarms) ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} for (_, info) in current.items(): info['name'] = ((name + ' ') + info['name']) info['attributes']['description'] = ((name + ' ') + info['attributes']['description']) info['attributes']['dimensions'] = {'LoadBalancerName': [name]} kwargs = {'name': info['name'], 'attributes': info['attributes'], 'region': region, 'key': key, 'keyid': keyid, 'profile': profile} results = __states__['boto_cloudwatch_alarm.present'](**kwargs) if (not results.get('result')): ret['result'] = results['result'] if (results.get('changes', {}) != {}): ret['changes'][info['name']] = results['changes'] if ('comment' in results): ret['comment'] += results['comment'] return ret
helper method for present. ensure that cloudwatch_alarms are set
salt/states/boto_elb.py
_alarms_present
Flowdalic/salt
9,425
python
def _alarms_present(name, alarms, alarms_from_pillar, region, key, keyid, profile): current = __salt__['config.option'](alarms_from_pillar, {}) if alarms: current = salt.utils.dictupdate.update(current, alarms) ret = {'name': name, 'result': True, 'comment': , 'changes': {}} for (_, info) in current.items(): info['name'] = ((name + ' ') + info['name']) info['attributes']['description'] = ((name + ' ') + info['attributes']['description']) info['attributes']['dimensions'] = {'LoadBalancerName': [name]} kwargs = {'name': info['name'], 'attributes': info['attributes'], 'region': region, 'key': key, 'keyid': keyid, 'profile': profile} results = __states__['boto_cloudwatch_alarm.present'](**kwargs) if (not results.get('result')): ret['result'] = results['result'] if (results.get('changes', {}) != {}): ret['changes'][info['name']] = results['changes'] if ('comment' in results): ret['comment'] += results['comment'] return ret
def _alarms_present(name, alarms, alarms_from_pillar, region, key, keyid, profile): current = __salt__['config.option'](alarms_from_pillar, {}) if alarms: current = salt.utils.dictupdate.update(current, alarms) ret = {'name': name, 'result': True, 'comment': , 'changes': {}} for (_, info) in current.items(): info['name'] = ((name + ' ') + info['name']) info['attributes']['description'] = ((name + ' ') + info['attributes']['description']) info['attributes']['dimensions'] = {'LoadBalancerName': [name]} kwargs = {'name': info['name'], 'attributes': info['attributes'], 'region': region, 'key': key, 'keyid': keyid, 'profile': profile} results = __states__['boto_cloudwatch_alarm.present'](**kwargs) if (not results.get('result')): ret['result'] = results['result'] if (results.get('changes', {}) != {}): ret['changes'][info['name']] = results['changes'] if ('comment' in results): ret['comment'] += results['comment'] return ret<|docstring|>helper method for present. ensure that cloudwatch_alarms are set<|endoftext|>
8e6ef0b5785d79f9cf967d59721b791b483b7d150376dda3032ee4625b26e836
def _policies_present(name, policies, policies_from_pillar, listeners, backends, region, key, keyid, profile): 'helper method for present. ensure that ELB policies are set' if (policies is None): policies = [] pillar_policies = __salt__['config.option'](policies_from_pillar, []) policies = (policies + pillar_policies) if (backends is None): backends = [] policy_names = set() for p in policies: if ('policy_name' not in p): raise SaltInvocationError('policy_name is a required value for policies.') if ('policy_type' not in p): raise SaltInvocationError('policy_type is a required value for policies.') if ('policy' not in p): raise SaltInvocationError('policy is a required value for listeners.') if (p['policy_name'] in policy_names): raise SaltInvocationError('Policy names must be unique: policy {} is declared twice.'.format(p['policy_name'])) policy_names.add(p['policy_name']) for l in listeners: for p in l.get('policies', []): if (p not in policy_names): raise SaltInvocationError('Listener {} on ELB {} refers to undefined policy {}.'.format(l['elb_port'], name, p)) for b in backends: for p in b.get('policies', []): if (p not in policy_names): raise SaltInvocationError('Backend {} on ELB {} refers to undefined policy {}.'.format(b['instance_port'], name, p)) ret = {'result': True, 'comment': '', 'changes': {}} lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) if (not lb): ret['comment'] = '{} ELB configuration could not be retrieved.'.format(name) ret['result'] = False return ret policies_by_cname = {} cnames_by_name = {} for p in policies: cname = _policy_cname(p) policies_by_cname[cname] = p cnames_by_name[p['policy_name']] = cname expected_policy_names = policies_by_cname.keys() actual_policy_names = lb['policies'] default_aws_policies = set() expected_policies_by_listener = {} for l in listeners: expected_policies_by_listener[l['elb_port']] = {cnames_by_name[p] for p in l.get('policies', [])} actual_policies_by_listener = {} for l in 
lb['listeners']: listener_policies = set(l.get('policies', [])) actual_policies_by_listener[l['elb_port']] = listener_policies for p in listener_policies: if re.match('^ELBSecurityPolicy-\\d{4}-\\d{2}$', p): default_aws_policies.add(p) expected_policies_by_backend = {} for b in backends: expected_policies_by_backend[b['instance_port']] = {cnames_by_name[p] for p in b.get('policies', [])} actual_policies_by_backend = {} for b in lb['backends']: backend_policies = set(b.get('policies', [])) actual_policies_by_backend[b['instance_port']] = backend_policies to_delete = [] to_create = [] for policy_name in expected_policy_names: if (policy_name not in actual_policy_names): to_create.append(policy_name) for policy_name in actual_policy_names: if (policy_name not in expected_policy_names): if (policy_name not in default_aws_policies): to_delete.append(policy_name) listeners_to_update = set() for (port, policies) in expected_policies_by_listener.items(): if (policies != actual_policies_by_listener.get(port, set())): listeners_to_update.add(port) for (port, policies) in actual_policies_by_listener.items(): if (policies != expected_policies_by_listener.get(port, set())): listeners_to_update.add(port) backends_to_update = set() for (port, policies) in expected_policies_by_backend.items(): if (policies != actual_policies_by_backend.get(port, set())): backends_to_update.add(port) for (port, policies) in actual_policies_by_backend.items(): if (policies != expected_policies_by_backend.get(port, set())): backends_to_update.add(port) if __opts__['test']: msg = [] if (to_create or to_delete): msg.append('ELB {} set to have policies modified:'.format(name)) for policy in to_create: msg.append('Policy {} added.'.format(policy)) for policy in to_delete: msg.append('Policy {} deleted.'.format(policy)) ret['result'] = None else: msg.append('Policies already set on ELB {}.'.format(name)) for listener in listeners_to_update: msg.append('Listener {} policies updated.'.format(listener)) for 
backend in backends_to_update: msg.append('Backend {} policies updated.'.format(backend)) ret['comment'] = ' '.join(msg) return ret if to_create: for policy_name in to_create: created = __salt__['boto_elb.create_policy'](name=name, policy_name=policy_name, policy_type=policies_by_cname[policy_name]['policy_type'], policy=policies_by_cname[policy_name]['policy'], region=region, key=key, keyid=keyid, profile=profile) if created: ret['changes'].setdefault(policy_name, {})['new'] = policy_name comment = 'Policy {} was created on ELB {}'.format(policy_name, name) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret for port in listeners_to_update: policy_set = __salt__['boto_elb.set_listener_policy'](name=name, port=port, policies=list(expected_policies_by_listener.get(port, [])), region=region, key=key, keyid=keyid, profile=profile) if policy_set: policy_key = 'listener_{}_policy'.format(port) ret['changes'][policy_key] = {'old': list(actual_policies_by_listener.get(port, [])), 'new': list(expected_policies_by_listener.get(port, []))} comment = 'Policy {} was created on ELB {} listener {}'.format(expected_policies_by_listener[port], name, port) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret for port in backends_to_update: policy_set = __salt__['boto_elb.set_backend_policy'](name=name, port=port, policies=list(expected_policies_by_backend.get(port, [])), region=region, key=key, keyid=keyid, profile=profile) if policy_set: policy_key = 'backend_{}_policy'.format(port) ret['changes'][policy_key] = {'old': list(actual_policies_by_backend.get(port, [])), 'new': list(expected_policies_by_backend.get(port, []))} comment = 'Policy {} was created on ELB {} backend {}'.format(expected_policies_by_backend[port], name, port) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret if to_delete: for 
policy_name in to_delete: deleted = __salt__['boto_elb.delete_policy'](name=name, policy_name=policy_name, region=region, key=key, keyid=keyid, profile=profile) if deleted: ret['changes'].setdefault(policy_name, {})['old'] = policy_name comment = 'Policy {} was deleted from ELB {}'.format(policy_name, name) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret return ret
helper method for present. ensure that ELB policies are set
salt/states/boto_elb.py
_policies_present
Flowdalic/salt
9,425
python
def _policies_present(name, policies, policies_from_pillar, listeners, backends, region, key, keyid, profile): if (policies is None): policies = [] pillar_policies = __salt__['config.option'](policies_from_pillar, []) policies = (policies + pillar_policies) if (backends is None): backends = [] policy_names = set() for p in policies: if ('policy_name' not in p): raise SaltInvocationError('policy_name is a required value for policies.') if ('policy_type' not in p): raise SaltInvocationError('policy_type is a required value for policies.') if ('policy' not in p): raise SaltInvocationError('policy is a required value for listeners.') if (p['policy_name'] in policy_names): raise SaltInvocationError('Policy names must be unique: policy {} is declared twice.'.format(p['policy_name'])) policy_names.add(p['policy_name']) for l in listeners: for p in l.get('policies', []): if (p not in policy_names): raise SaltInvocationError('Listener {} on ELB {} refers to undefined policy {}.'.format(l['elb_port'], name, p)) for b in backends: for p in b.get('policies', []): if (p not in policy_names): raise SaltInvocationError('Backend {} on ELB {} refers to undefined policy {}.'.format(b['instance_port'], name, p)) ret = {'result': True, 'comment': , 'changes': {}} lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) if (not lb): ret['comment'] = '{} ELB configuration could not be retrieved.'.format(name) ret['result'] = False return ret policies_by_cname = {} cnames_by_name = {} for p in policies: cname = _policy_cname(p) policies_by_cname[cname] = p cnames_by_name[p['policy_name']] = cname expected_policy_names = policies_by_cname.keys() actual_policy_names = lb['policies'] default_aws_policies = set() expected_policies_by_listener = {} for l in listeners: expected_policies_by_listener[l['elb_port']] = {cnames_by_name[p] for p in l.get('policies', [])} actual_policies_by_listener = {} for l in lb['listeners']: listener_policies = set(l.get('policies', [])) 
actual_policies_by_listener[l['elb_port']] = listener_policies for p in listener_policies: if re.match('^ELBSecurityPolicy-\\d{4}-\\d{2}$', p): default_aws_policies.add(p) expected_policies_by_backend = {} for b in backends: expected_policies_by_backend[b['instance_port']] = {cnames_by_name[p] for p in b.get('policies', [])} actual_policies_by_backend = {} for b in lb['backends']: backend_policies = set(b.get('policies', [])) actual_policies_by_backend[b['instance_port']] = backend_policies to_delete = [] to_create = [] for policy_name in expected_policy_names: if (policy_name not in actual_policy_names): to_create.append(policy_name) for policy_name in actual_policy_names: if (policy_name not in expected_policy_names): if (policy_name not in default_aws_policies): to_delete.append(policy_name) listeners_to_update = set() for (port, policies) in expected_policies_by_listener.items(): if (policies != actual_policies_by_listener.get(port, set())): listeners_to_update.add(port) for (port, policies) in actual_policies_by_listener.items(): if (policies != expected_policies_by_listener.get(port, set())): listeners_to_update.add(port) backends_to_update = set() for (port, policies) in expected_policies_by_backend.items(): if (policies != actual_policies_by_backend.get(port, set())): backends_to_update.add(port) for (port, policies) in actual_policies_by_backend.items(): if (policies != expected_policies_by_backend.get(port, set())): backends_to_update.add(port) if __opts__['test']: msg = [] if (to_create or to_delete): msg.append('ELB {} set to have policies modified:'.format(name)) for policy in to_create: msg.append('Policy {} added.'.format(policy)) for policy in to_delete: msg.append('Policy {} deleted.'.format(policy)) ret['result'] = None else: msg.append('Policies already set on ELB {}.'.format(name)) for listener in listeners_to_update: msg.append('Listener {} policies updated.'.format(listener)) for backend in backends_to_update: msg.append('Backend {} policies 
updated.'.format(backend)) ret['comment'] = ' '.join(msg) return ret if to_create: for policy_name in to_create: created = __salt__['boto_elb.create_policy'](name=name, policy_name=policy_name, policy_type=policies_by_cname[policy_name]['policy_type'], policy=policies_by_cname[policy_name]['policy'], region=region, key=key, keyid=keyid, profile=profile) if created: ret['changes'].setdefault(policy_name, {})['new'] = policy_name comment = 'Policy {} was created on ELB {}'.format(policy_name, name) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret for port in listeners_to_update: policy_set = __salt__['boto_elb.set_listener_policy'](name=name, port=port, policies=list(expected_policies_by_listener.get(port, [])), region=region, key=key, keyid=keyid, profile=profile) if policy_set: policy_key = 'listener_{}_policy'.format(port) ret['changes'][policy_key] = {'old': list(actual_policies_by_listener.get(port, [])), 'new': list(expected_policies_by_listener.get(port, []))} comment = 'Policy {} was created on ELB {} listener {}'.format(expected_policies_by_listener[port], name, port) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret for port in backends_to_update: policy_set = __salt__['boto_elb.set_backend_policy'](name=name, port=port, policies=list(expected_policies_by_backend.get(port, [])), region=region, key=key, keyid=keyid, profile=profile) if policy_set: policy_key = 'backend_{}_policy'.format(port) ret['changes'][policy_key] = {'old': list(actual_policies_by_backend.get(port, [])), 'new': list(expected_policies_by_backend.get(port, []))} comment = 'Policy {} was created on ELB {} backend {}'.format(expected_policies_by_backend[port], name, port) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret if to_delete: for policy_name in to_delete: deleted = 
__salt__['boto_elb.delete_policy'](name=name, policy_name=policy_name, region=region, key=key, keyid=keyid, profile=profile) if deleted: ret['changes'].setdefault(policy_name, {})['old'] = policy_name comment = 'Policy {} was deleted from ELB {}'.format(policy_name, name) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret return ret
def _policies_present(name, policies, policies_from_pillar, listeners, backends, region, key, keyid, profile): if (policies is None): policies = [] pillar_policies = __salt__['config.option'](policies_from_pillar, []) policies = (policies + pillar_policies) if (backends is None): backends = [] policy_names = set() for p in policies: if ('policy_name' not in p): raise SaltInvocationError('policy_name is a required value for policies.') if ('policy_type' not in p): raise SaltInvocationError('policy_type is a required value for policies.') if ('policy' not in p): raise SaltInvocationError('policy is a required value for listeners.') if (p['policy_name'] in policy_names): raise SaltInvocationError('Policy names must be unique: policy {} is declared twice.'.format(p['policy_name'])) policy_names.add(p['policy_name']) for l in listeners: for p in l.get('policies', []): if (p not in policy_names): raise SaltInvocationError('Listener {} on ELB {} refers to undefined policy {}.'.format(l['elb_port'], name, p)) for b in backends: for p in b.get('policies', []): if (p not in policy_names): raise SaltInvocationError('Backend {} on ELB {} refers to undefined policy {}.'.format(b['instance_port'], name, p)) ret = {'result': True, 'comment': , 'changes': {}} lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) if (not lb): ret['comment'] = '{} ELB configuration could not be retrieved.'.format(name) ret['result'] = False return ret policies_by_cname = {} cnames_by_name = {} for p in policies: cname = _policy_cname(p) policies_by_cname[cname] = p cnames_by_name[p['policy_name']] = cname expected_policy_names = policies_by_cname.keys() actual_policy_names = lb['policies'] default_aws_policies = set() expected_policies_by_listener = {} for l in listeners: expected_policies_by_listener[l['elb_port']] = {cnames_by_name[p] for p in l.get('policies', [])} actual_policies_by_listener = {} for l in lb['listeners']: listener_policies = set(l.get('policies', [])) 
actual_policies_by_listener[l['elb_port']] = listener_policies for p in listener_policies: if re.match('^ELBSecurityPolicy-\\d{4}-\\d{2}$', p): default_aws_policies.add(p) expected_policies_by_backend = {} for b in backends: expected_policies_by_backend[b['instance_port']] = {cnames_by_name[p] for p in b.get('policies', [])} actual_policies_by_backend = {} for b in lb['backends']: backend_policies = set(b.get('policies', [])) actual_policies_by_backend[b['instance_port']] = backend_policies to_delete = [] to_create = [] for policy_name in expected_policy_names: if (policy_name not in actual_policy_names): to_create.append(policy_name) for policy_name in actual_policy_names: if (policy_name not in expected_policy_names): if (policy_name not in default_aws_policies): to_delete.append(policy_name) listeners_to_update = set() for (port, policies) in expected_policies_by_listener.items(): if (policies != actual_policies_by_listener.get(port, set())): listeners_to_update.add(port) for (port, policies) in actual_policies_by_listener.items(): if (policies != expected_policies_by_listener.get(port, set())): listeners_to_update.add(port) backends_to_update = set() for (port, policies) in expected_policies_by_backend.items(): if (policies != actual_policies_by_backend.get(port, set())): backends_to_update.add(port) for (port, policies) in actual_policies_by_backend.items(): if (policies != expected_policies_by_backend.get(port, set())): backends_to_update.add(port) if __opts__['test']: msg = [] if (to_create or to_delete): msg.append('ELB {} set to have policies modified:'.format(name)) for policy in to_create: msg.append('Policy {} added.'.format(policy)) for policy in to_delete: msg.append('Policy {} deleted.'.format(policy)) ret['result'] = None else: msg.append('Policies already set on ELB {}.'.format(name)) for listener in listeners_to_update: msg.append('Listener {} policies updated.'.format(listener)) for backend in backends_to_update: msg.append('Backend {} policies 
updated.'.format(backend)) ret['comment'] = ' '.join(msg) return ret if to_create: for policy_name in to_create: created = __salt__['boto_elb.create_policy'](name=name, policy_name=policy_name, policy_type=policies_by_cname[policy_name]['policy_type'], policy=policies_by_cname[policy_name]['policy'], region=region, key=key, keyid=keyid, profile=profile) if created: ret['changes'].setdefault(policy_name, {})['new'] = policy_name comment = 'Policy {} was created on ELB {}'.format(policy_name, name) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret for port in listeners_to_update: policy_set = __salt__['boto_elb.set_listener_policy'](name=name, port=port, policies=list(expected_policies_by_listener.get(port, [])), region=region, key=key, keyid=keyid, profile=profile) if policy_set: policy_key = 'listener_{}_policy'.format(port) ret['changes'][policy_key] = {'old': list(actual_policies_by_listener.get(port, [])), 'new': list(expected_policies_by_listener.get(port, []))} comment = 'Policy {} was created on ELB {} listener {}'.format(expected_policies_by_listener[port], name, port) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret for port in backends_to_update: policy_set = __salt__['boto_elb.set_backend_policy'](name=name, port=port, policies=list(expected_policies_by_backend.get(port, [])), region=region, key=key, keyid=keyid, profile=profile) if policy_set: policy_key = 'backend_{}_policy'.format(port) ret['changes'][policy_key] = {'old': list(actual_policies_by_backend.get(port, [])), 'new': list(expected_policies_by_backend.get(port, []))} comment = 'Policy {} was created on ELB {} backend {}'.format(expected_policies_by_backend[port], name, port) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret if to_delete: for policy_name in to_delete: deleted = 
__salt__['boto_elb.delete_policy'](name=name, policy_name=policy_name, region=region, key=key, keyid=keyid, profile=profile) if deleted: ret['changes'].setdefault(policy_name, {})['old'] = policy_name comment = 'Policy {} was deleted from ELB {}'.format(policy_name, name) ret['comment'] = ' '.join([ret['comment'], comment]) ret['result'] = True else: ret['result'] = False return ret return ret<|docstring|>helper method for present. ensure that ELB policies are set<|endoftext|>
76d332637f52da60c9bad6b3f39670101a2dabc26ffb56a4c1642d6cd118a540
def absent(name, region=None, key=None, keyid=None, profile=None): '\n Ensure an ELB does not exist\n\n name\n name of the ELB\n ' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} exists = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if exists: if __opts__['test']: ret['comment'] = 'ELB {} is set to be removed.'.format(name) ret['result'] = None return ret deleted = __salt__['boto_elb.delete'](name, region, key, keyid, profile) if deleted: ret['changes']['old'] = {'elb': name} ret['changes']['new'] = {'elb': None} ret['comment'] = 'ELB {} deleted.'.format(name) else: ret['result'] = False ret['comment'] = 'Failed to delete {} ELB.'.format(name) else: ret['comment'] = '{} ELB does not exist.'.format(name) return ret
Ensure an ELB does not exist name name of the ELB
salt/states/boto_elb.py
absent
Flowdalic/salt
9,425
python
def absent(name, region=None, key=None, keyid=None, profile=None): '\n Ensure an ELB does not exist\n\n name\n name of the ELB\n ' ret = {'name': name, 'result': True, 'comment': , 'changes': {}} exists = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if exists: if __opts__['test']: ret['comment'] = 'ELB {} is set to be removed.'.format(name) ret['result'] = None return ret deleted = __salt__['boto_elb.delete'](name, region, key, keyid, profile) if deleted: ret['changes']['old'] = {'elb': name} ret['changes']['new'] = {'elb': None} ret['comment'] = 'ELB {} deleted.'.format(name) else: ret['result'] = False ret['comment'] = 'Failed to delete {} ELB.'.format(name) else: ret['comment'] = '{} ELB does not exist.'.format(name) return ret
def absent(name, region=None, key=None, keyid=None, profile=None): '\n Ensure an ELB does not exist\n\n name\n name of the ELB\n ' ret = {'name': name, 'result': True, 'comment': , 'changes': {}} exists = __salt__['boto_elb.exists'](name, region, key, keyid, profile) if exists: if __opts__['test']: ret['comment'] = 'ELB {} is set to be removed.'.format(name) ret['result'] = None return ret deleted = __salt__['boto_elb.delete'](name, region, key, keyid, profile) if deleted: ret['changes']['old'] = {'elb': name} ret['changes']['new'] = {'elb': None} ret['comment'] = 'ELB {} deleted.'.format(name) else: ret['result'] = False ret['comment'] = 'Failed to delete {} ELB.'.format(name) else: ret['comment'] = '{} ELB does not exist.'.format(name) return ret<|docstring|>Ensure an ELB does not exist name name of the ELB<|endoftext|>
dd7f8f6d7bd93ad3d8e829d24e44ac529e609b26ac151b1ea67b9f8a9aaa356d
def _tags_present(name, tags, region, key, keyid, profile): '\n helper function to validate tags on elb\n ' ret = {'result': True, 'comment': '', 'changes': {}} if tags: lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) tags_to_add = tags tags_to_update = {} tags_to_remove = [] if lb.get('tags'): for _tag in lb['tags']: if (_tag not in tags.keys()): if (_tag not in tags_to_remove): tags_to_remove.append(_tag) else: if (tags[_tag] != lb['tags'][_tag]): tags_to_update[_tag] = tags[_tag] tags_to_add.pop(_tag) if tags_to_remove: if __opts__['test']: msg = 'The following tag{} set to be removed: {}.'.format(('s are' if (len(tags_to_remove) > 1) else ' is'), ', '.join(tags_to_remove)) ret['comment'] = ' '.join([ret['comment'], msg]) ret['result'] = None else: _ret = __salt__['boto_elb.delete_tags'](name, tags_to_remove, region, key, keyid, profile) if (not _ret): ret['result'] = False msg = 'Error attempting to delete tag {}.'.format(tags_to_remove) ret['comment'] = ' '.join([ret['comment'], msg]) return ret if ('old' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'old': {'tags': {}}}) for _tag in tags_to_remove: ret['changes']['old']['tags'][_tag] = lb['tags'][_tag] if (tags_to_add or tags_to_update): if __opts__['test']: if tags_to_add: msg = 'The following tag{} set to be added: {}.'.format(('s are' if (len(tags_to_add.keys()) > 1) else ' is'), ', '.join(tags_to_add.keys())) ret['comment'] = ' '.join([ret['comment'], msg]) ret['result'] = None if tags_to_update: msg = 'The following tag {} set to be updated: {}.'.format(('values are' if (len(tags_to_update.keys()) > 1) else 'value is'), ', '.join(tags_to_update.keys())) ret['comment'] = ' '.join([ret['comment'], msg]) else: all_tag_changes = salt.utils.dictupdate.update(tags_to_add, tags_to_update) _ret = __salt__['boto_elb.set_tags'](name, all_tag_changes, region, key, keyid, profile) if (not _ret): ret['result'] = False msg = 'Error attempting to set 
tags.' ret['comment'] = ' '.join([ret['comment'], msg]) return ret if ('old' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'old': {'tags': {}}}) if ('new' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'new': {'tags': {}}}) for tag in all_tag_changes: ret['changes']['new']['tags'][tag] = tags[tag] if ('tags' in lb): if lb['tags']: if (tag in lb['tags']): ret['changes']['old']['tags'][tag] = lb['tags'][tag] if ((not tags_to_update) and (not tags_to_remove) and (not tags_to_add)): msg = 'Tags are already set.' ret['comment'] = ' '.join([ret['comment'], msg]) return ret
helper function to validate tags on elb
salt/states/boto_elb.py
_tags_present
Flowdalic/salt
9,425
python
def _tags_present(name, tags, region, key, keyid, profile): '\n \n ' ret = {'result': True, 'comment': , 'changes': {}} if tags: lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) tags_to_add = tags tags_to_update = {} tags_to_remove = [] if lb.get('tags'): for _tag in lb['tags']: if (_tag not in tags.keys()): if (_tag not in tags_to_remove): tags_to_remove.append(_tag) else: if (tags[_tag] != lb['tags'][_tag]): tags_to_update[_tag] = tags[_tag] tags_to_add.pop(_tag) if tags_to_remove: if __opts__['test']: msg = 'The following tag{} set to be removed: {}.'.format(('s are' if (len(tags_to_remove) > 1) else ' is'), ', '.join(tags_to_remove)) ret['comment'] = ' '.join([ret['comment'], msg]) ret['result'] = None else: _ret = __salt__['boto_elb.delete_tags'](name, tags_to_remove, region, key, keyid, profile) if (not _ret): ret['result'] = False msg = 'Error attempting to delete tag {}.'.format(tags_to_remove) ret['comment'] = ' '.join([ret['comment'], msg]) return ret if ('old' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'old': {'tags': {}}}) for _tag in tags_to_remove: ret['changes']['old']['tags'][_tag] = lb['tags'][_tag] if (tags_to_add or tags_to_update): if __opts__['test']: if tags_to_add: msg = 'The following tag{} set to be added: {}.'.format(('s are' if (len(tags_to_add.keys()) > 1) else ' is'), ', '.join(tags_to_add.keys())) ret['comment'] = ' '.join([ret['comment'], msg]) ret['result'] = None if tags_to_update: msg = 'The following tag {} set to be updated: {}.'.format(('values are' if (len(tags_to_update.keys()) > 1) else 'value is'), ', '.join(tags_to_update.keys())) ret['comment'] = ' '.join([ret['comment'], msg]) else: all_tag_changes = salt.utils.dictupdate.update(tags_to_add, tags_to_update) _ret = __salt__['boto_elb.set_tags'](name, all_tag_changes, region, key, keyid, profile) if (not _ret): ret['result'] = False msg = 'Error attempting to set tags.' 
ret['comment'] = ' '.join([ret['comment'], msg]) return ret if ('old' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'old': {'tags': {}}}) if ('new' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'new': {'tags': {}}}) for tag in all_tag_changes: ret['changes']['new']['tags'][tag] = tags[tag] if ('tags' in lb): if lb['tags']: if (tag in lb['tags']): ret['changes']['old']['tags'][tag] = lb['tags'][tag] if ((not tags_to_update) and (not tags_to_remove) and (not tags_to_add)): msg = 'Tags are already set.' ret['comment'] = ' '.join([ret['comment'], msg]) return ret
def _tags_present(name, tags, region, key, keyid, profile): '\n \n ' ret = {'result': True, 'comment': , 'changes': {}} if tags: lb = __salt__['boto_elb.get_elb_config'](name, region, key, keyid, profile) tags_to_add = tags tags_to_update = {} tags_to_remove = [] if lb.get('tags'): for _tag in lb['tags']: if (_tag not in tags.keys()): if (_tag not in tags_to_remove): tags_to_remove.append(_tag) else: if (tags[_tag] != lb['tags'][_tag]): tags_to_update[_tag] = tags[_tag] tags_to_add.pop(_tag) if tags_to_remove: if __opts__['test']: msg = 'The following tag{} set to be removed: {}.'.format(('s are' if (len(tags_to_remove) > 1) else ' is'), ', '.join(tags_to_remove)) ret['comment'] = ' '.join([ret['comment'], msg]) ret['result'] = None else: _ret = __salt__['boto_elb.delete_tags'](name, tags_to_remove, region, key, keyid, profile) if (not _ret): ret['result'] = False msg = 'Error attempting to delete tag {}.'.format(tags_to_remove) ret['comment'] = ' '.join([ret['comment'], msg]) return ret if ('old' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'old': {'tags': {}}}) for _tag in tags_to_remove: ret['changes']['old']['tags'][_tag] = lb['tags'][_tag] if (tags_to_add or tags_to_update): if __opts__['test']: if tags_to_add: msg = 'The following tag{} set to be added: {}.'.format(('s are' if (len(tags_to_add.keys()) > 1) else ' is'), ', '.join(tags_to_add.keys())) ret['comment'] = ' '.join([ret['comment'], msg]) ret['result'] = None if tags_to_update: msg = 'The following tag {} set to be updated: {}.'.format(('values are' if (len(tags_to_update.keys()) > 1) else 'value is'), ', '.join(tags_to_update.keys())) ret['comment'] = ' '.join([ret['comment'], msg]) else: all_tag_changes = salt.utils.dictupdate.update(tags_to_add, tags_to_update) _ret = __salt__['boto_elb.set_tags'](name, all_tag_changes, region, key, keyid, profile) if (not _ret): ret['result'] = False msg = 'Error attempting to set tags.' 
ret['comment'] = ' '.join([ret['comment'], msg]) return ret if ('old' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'old': {'tags': {}}}) if ('new' not in ret['changes']): ret['changes'] = salt.utils.dictupdate.update(ret['changes'], {'new': {'tags': {}}}) for tag in all_tag_changes: ret['changes']['new']['tags'][tag] = tags[tag] if ('tags' in lb): if lb['tags']: if (tag in lb['tags']): ret['changes']['old']['tags'][tag] = lb['tags'][tag] if ((not tags_to_update) and (not tags_to_remove) and (not tags_to_add)): msg = 'Tags are already set.' ret['comment'] = ' '.join([ret['comment'], msg]) return ret<|docstring|>helper function to validate tags on elb<|endoftext|>
6d002959168d83410a326450df7eac761ea359fce83fbb1fbf87b97cb901aa2c
def _input_prep_gates_stage2(alpha): 'Helper routine for producing sequence of gates\n for the state preparation circuit.\n\n Args:\n =====\n alpha : numeric\n Parameter for state preparation circuit\n\n Returns:\n ========\n state_prep_gates : list\n List (ordered sequence) of Cirq gates for the state preparation circuit\n\n ' state_prep_gates = [X(q10), X(q11), H(q00), X2(q01), X2(q10), X2(q11), CNOT(q00, q01), CNOT(q01, q10), CNOT(q10, q11), param_Z(alpha)(q11), CNOT(q10, q11), CNOT(q01, q10), CNOT(q00, q01), H(q00), X2inv(q01), X2(q10), X2(q11)] return state_prep_gates
Helper routine for producing sequence of gates for the state preparation circuit. Args: ===== alpha : numeric Parameter for state preparation circuit Returns: ======== state_prep_gates : list List (ordered sequence) of Cirq gates for the state preparation circuit
cusp/cusp_stage2.py
_input_prep_gates_stage2
zapatacomputing/cusp_cirq_demo
37
python
def _input_prep_gates_stage2(alpha): 'Helper routine for producing sequence of gates\n for the state preparation circuit.\n\n Args:\n =====\n alpha : numeric\n Parameter for state preparation circuit\n\n Returns:\n ========\n state_prep_gates : list\n List (ordered sequence) of Cirq gates for the state preparation circuit\n\n ' state_prep_gates = [X(q10), X(q11), H(q00), X2(q01), X2(q10), X2(q11), CNOT(q00, q01), CNOT(q01, q10), CNOT(q10, q11), param_Z(alpha)(q11), CNOT(q10, q11), CNOT(q01, q10), CNOT(q00, q01), H(q00), X2inv(q01), X2(q10), X2(q11)] return state_prep_gates
def _input_prep_gates_stage2(alpha): 'Helper routine for producing sequence of gates\n for the state preparation circuit.\n\n Args:\n =====\n alpha : numeric\n Parameter for state preparation circuit\n\n Returns:\n ========\n state_prep_gates : list\n List (ordered sequence) of Cirq gates for the state preparation circuit\n\n ' state_prep_gates = [X(q10), X(q11), H(q00), X2(q01), X2(q10), X2(q11), CNOT(q00, q01), CNOT(q01, q10), CNOT(q10, q11), param_Z(alpha)(q11), CNOT(q10, q11), CNOT(q01, q10), CNOT(q00, q01), H(q00), X2inv(q01), X2(q10), X2(q11)] return state_prep_gates<|docstring|>Helper routine for producing sequence of gates for the state preparation circuit. Args: ===== alpha : numeric Parameter for state preparation circuit Returns: ======== state_prep_gates : list List (ordered sequence) of Cirq gates for the state preparation circuit<|endoftext|>
52680b514685971d228f67f80827550848b1792a2e86990be0d0cb565c7545d8
def compression_circuit(a, b, x, z, alpha, exact=False): 'Returns compression circuit (state preparation circuit followed by\n encoding circuit).\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n\n Returns:\n ========\n comp_circuit : cirq.Circuit\n Compression circuit\n ' comp_circuit = Circuit() comp_circuit.append(_input_prep_gates_stage2(alpha)) comp_circuit.append(param_CNOT(a, b, x, z, q01, q00)) comp_circuit.append(param_CNOT(a, b, x, z, q11, q10), strategy=InsertStrategy.EARLIEST) comp_circuit.append(param_CNOT(a, b, x, z, q11, q01)) if (exact == False): comp_circuit.append([MeasurementGate('r00').on(q00), MeasurementGate('r01').on(q01), MeasurementGate('r10').on(q10), MeasurementGate('r11').on(q11)]) return comp_circuit
Returns compression circuit (state preparation circuit followed by encoding circuit). Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit exact : bool If True, works with wavefunction Returns: ======== comp_circuit : cirq.Circuit Compression circuit
cusp/cusp_stage2.py
compression_circuit
zapatacomputing/cusp_cirq_demo
37
python
def compression_circuit(a, b, x, z, alpha, exact=False): 'Returns compression circuit (state preparation circuit followed by\n encoding circuit).\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n\n Returns:\n ========\n comp_circuit : cirq.Circuit\n Compression circuit\n ' comp_circuit = Circuit() comp_circuit.append(_input_prep_gates_stage2(alpha)) comp_circuit.append(param_CNOT(a, b, x, z, q01, q00)) comp_circuit.append(param_CNOT(a, b, x, z, q11, q10), strategy=InsertStrategy.EARLIEST) comp_circuit.append(param_CNOT(a, b, x, z, q11, q01)) if (exact == False): comp_circuit.append([MeasurementGate('r00').on(q00), MeasurementGate('r01').on(q01), MeasurementGate('r10').on(q10), MeasurementGate('r11').on(q11)]) return comp_circuit
def compression_circuit(a, b, x, z, alpha, exact=False): 'Returns compression circuit (state preparation circuit followed by\n encoding circuit).\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n\n Returns:\n ========\n comp_circuit : cirq.Circuit\n Compression circuit\n ' comp_circuit = Circuit() comp_circuit.append(_input_prep_gates_stage2(alpha)) comp_circuit.append(param_CNOT(a, b, x, z, q01, q00)) comp_circuit.append(param_CNOT(a, b, x, z, q11, q10), strategy=InsertStrategy.EARLIEST) comp_circuit.append(param_CNOT(a, b, x, z, q11, q01)) if (exact == False): comp_circuit.append([MeasurementGate('r00').on(q00), MeasurementGate('r01').on(q01), MeasurementGate('r10').on(q10), MeasurementGate('r11').on(q11)]) return comp_circuit<|docstring|>Returns compression circuit (state preparation circuit followed by encoding circuit). Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit exact : bool If True, works with wavefunction Returns: ======== comp_circuit : cirq.Circuit Compression circuit<|endoftext|>
0f7a3a49663e802cccce910d4093df73ed7ed7bc14ea68626c225884d6154f0d
def noisy_job(a, b, x, z, alpha, exact=False): 'Adds noise to compression circuit.\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n\n Returns:\n ========\n noisy_circuit : cirq.Circuit\n Noisy version of input circuit\n param_resolvers : list\n ' job = Job(compression_circuit(a, b, x, z, alpha, exact)) noisy = DepolarizerChannel(probability=noise_level) noisy_job = noisy.transform_job(job) param_resolvers = [ParamResolver({k: v for (k, v) in e}) for e in noisy_job.sweep.param_tuples()] return (noisy_job.circuit, param_resolvers)
Adds noise to compression circuit. Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit exact : bool If True, works with wavefunction Returns: ======== noisy_circuit : cirq.Circuit Noisy version of input circuit param_resolvers : list
cusp/cusp_stage2.py
noisy_job
zapatacomputing/cusp_cirq_demo
37
python
def noisy_job(a, b, x, z, alpha, exact=False): 'Adds noise to compression circuit.\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n\n Returns:\n ========\n noisy_circuit : cirq.Circuit\n Noisy version of input circuit\n param_resolvers : list\n ' job = Job(compression_circuit(a, b, x, z, alpha, exact)) noisy = DepolarizerChannel(probability=noise_level) noisy_job = noisy.transform_job(job) param_resolvers = [ParamResolver({k: v for (k, v) in e}) for e in noisy_job.sweep.param_tuples()] return (noisy_job.circuit, param_resolvers)
def noisy_job(a, b, x, z, alpha, exact=False): 'Adds noise to compression circuit.\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n\n Returns:\n ========\n noisy_circuit : cirq.Circuit\n Noisy version of input circuit\n param_resolvers : list\n ' job = Job(compression_circuit(a, b, x, z, alpha, exact)) noisy = DepolarizerChannel(probability=noise_level) noisy_job = noisy.transform_job(job) param_resolvers = [ParamResolver({k: v for (k, v) in e}) for e in noisy_job.sweep.param_tuples()] return (noisy_job.circuit, param_resolvers)<|docstring|>Adds noise to compression circuit. Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit exact : bool If True, works with wavefunction Returns: ======== noisy_circuit : cirq.Circuit Noisy version of input circuit param_resolvers : list<|endoftext|>
5744a358771d4df61ffd50dce4ba4eaeec09ef81d4d243a96c532e6fa43df455
def _run_sim_stage2(a, b, x, z, alpha, exact=False, print_circuit=False, noisy=False): "Executes circuit a single time. Outputs 1 for a success (i.e. reference qubits are |000>)\n and 0 for a failure.\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n print_circuit : bool\n If True, prints circuit\n noisy : bool\n If True, runs noisy version of circuit\n\n Returns:\n ========\n total : int\n Value of 1 if reference qubits are all 0's. Value of 0 else.\n " simulator = XmonSimulator() if noisy: (circuit_run, resolvers) = noisy_job(a, b, x, z, alpha, exact) else: circuit_run = compression_circuit(a, b, x, z, alpha, exact) if exact: if noisy: for resolver in resolvers: result = simulator.simulate(circuit=circuit_run, param_resolver=resolver) else: result = simulator.simulate(circuit=circuit_run) avg = 0 for j in range(2): avg += (np.abs(result.final_state[j]) ** 2) return avg elif noisy: for resolver in resolvers: result = simulator.run(circuit=circuit_run, param_resolver=resolver, repetitions=1) else: result = simulator.run(circuit=circuit_run, repetitions=1) reference_measurements = [] reference_labels = ['r00', 'r01', 'r10'] for j in reference_labels: reference_measurements.append(int(result.measurements[j][0])) total = 0 res = [] for y in range(3): res.append(reference_measurements[y]) if (res == [0, 0, 0]): total = 1 if (print_circuit == True): print(circuit_run.to_text_diagram(use_unicode_characters=False)) return total
Executes circuit a single time. Outputs 1 for a success (i.e. reference qubits are |000>) and 0 for a failure. Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit exact : bool If True, works with wavefunction print_circuit : bool If True, prints circuit noisy : bool If True, runs noisy version of circuit Returns: ======== total : int Value of 1 if reference qubits are all 0's. Value of 0 else.
cusp/cusp_stage2.py
_run_sim_stage2
zapatacomputing/cusp_cirq_demo
37
python
def _run_sim_stage2(a, b, x, z, alpha, exact=False, print_circuit=False, noisy=False): "Executes circuit a single time. Outputs 1 for a success (i.e. reference qubits are |000>)\n and 0 for a failure.\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n print_circuit : bool\n If True, prints circuit\n noisy : bool\n If True, runs noisy version of circuit\n\n Returns:\n ========\n total : int\n Value of 1 if reference qubits are all 0's. Value of 0 else.\n " simulator = XmonSimulator() if noisy: (circuit_run, resolvers) = noisy_job(a, b, x, z, alpha, exact) else: circuit_run = compression_circuit(a, b, x, z, alpha, exact) if exact: if noisy: for resolver in resolvers: result = simulator.simulate(circuit=circuit_run, param_resolver=resolver) else: result = simulator.simulate(circuit=circuit_run) avg = 0 for j in range(2): avg += (np.abs(result.final_state[j]) ** 2) return avg elif noisy: for resolver in resolvers: result = simulator.run(circuit=circuit_run, param_resolver=resolver, repetitions=1) else: result = simulator.run(circuit=circuit_run, repetitions=1) reference_measurements = [] reference_labels = ['r00', 'r01', 'r10'] for j in reference_labels: reference_measurements.append(int(result.measurements[j][0])) total = 0 res = [] for y in range(3): res.append(reference_measurements[y]) if (res == [0, 0, 0]): total = 1 if (print_circuit == True): print(circuit_run.to_text_diagram(use_unicode_characters=False)) return total
def _run_sim_stage2(a, b, x, z, alpha, exact=False, print_circuit=False, noisy=False): "Executes circuit a single time. Outputs 1 for a success (i.e. reference qubits are |000>)\n and 0 for a failure.\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n exact : bool\n If True, works with wavefunction\n print_circuit : bool\n If True, prints circuit\n noisy : bool\n If True, runs noisy version of circuit\n\n Returns:\n ========\n total : int\n Value of 1 if reference qubits are all 0's. Value of 0 else.\n " simulator = XmonSimulator() if noisy: (circuit_run, resolvers) = noisy_job(a, b, x, z, alpha, exact) else: circuit_run = compression_circuit(a, b, x, z, alpha, exact) if exact: if noisy: for resolver in resolvers: result = simulator.simulate(circuit=circuit_run, param_resolver=resolver) else: result = simulator.simulate(circuit=circuit_run) avg = 0 for j in range(2): avg += (np.abs(result.final_state[j]) ** 2) return avg elif noisy: for resolver in resolvers: result = simulator.run(circuit=circuit_run, param_resolver=resolver, repetitions=1) else: result = simulator.run(circuit=circuit_run, repetitions=1) reference_measurements = [] reference_labels = ['r00', 'r01', 'r10'] for j in reference_labels: reference_measurements.append(int(result.measurements[j][0])) total = 0 res = [] for y in range(3): res.append(reference_measurements[y]) if (res == [0, 0, 0]): total = 1 if (print_circuit == True): print(circuit_run.to_text_diagram(use_unicode_characters=False)) return total<|docstring|>Executes circuit a single time. Outputs 1 for a success (i.e. reference qubits are |000>) and 0 for a failure. 
Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit exact : bool If True, works with wavefunction print_circuit : bool If True, prints circuit noisy : bool If True, runs noisy version of circuit Returns: ======== total : int Value of 1 if reference qubits are all 0's. Value of 0 else.<|endoftext|>
533a45464724e572af080ccea905786dcd164efdb8bbc897238a594107fe15b6
def compute_stage2_cost_function(a, b, x, z, alpha, n_repetitions, exact=False, noisy=False): 'Executes circuit multiple times and computes the average fidelity.\n over n times (n_repetitions).\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n n_repetitions : int\n Number of circuit runs\n exact : bool\n If True, works with wavefunction\n noisy : bool\n If True, runs noisy version of circuit\n\n Returns:\n ========\n avg_fid : float\n Average fidelity (maximum: 1)\n ' if ((exact == True) and (noisy == False)): return _run_sim_stage2(a, b, x, z, alpha, exact=exact, print_circuit=False, noisy=noisy) p = Pool() args = ([(a, b, x, z, alpha)] * n_repetitions) results = p.starmap(one_run, args) success_count = np.array(results).sum() avg_fid = (float(success_count) / float(n_repetitions)) p.close() return avg_fid
Executes circuit multiple times and computes the average fidelity. over n times (n_repetitions). Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit n_repetitions : int Number of circuit runs exact : bool If True, works with wavefunction noisy : bool If True, runs noisy version of circuit Returns: ======== avg_fid : float Average fidelity (maximum: 1)
cusp/cusp_stage2.py
compute_stage2_cost_function
zapatacomputing/cusp_cirq_demo
37
python
def compute_stage2_cost_function(a, b, x, z, alpha, n_repetitions, exact=False, noisy=False): 'Executes circuit multiple times and computes the average fidelity.\n over n times (n_repetitions).\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n n_repetitions : int\n Number of circuit runs\n exact : bool\n If True, works with wavefunction\n noisy : bool\n If True, runs noisy version of circuit\n\n Returns:\n ========\n avg_fid : float\n Average fidelity (maximum: 1)\n ' if ((exact == True) and (noisy == False)): return _run_sim_stage2(a, b, x, z, alpha, exact=exact, print_circuit=False, noisy=noisy) p = Pool() args = ([(a, b, x, z, alpha)] * n_repetitions) results = p.starmap(one_run, args) success_count = np.array(results).sum() avg_fid = (float(success_count) / float(n_repetitions)) p.close() return avg_fid
def compute_stage2_cost_function(a, b, x, z, alpha, n_repetitions, exact=False, noisy=False): 'Executes circuit multiple times and computes the average fidelity.\n over n times (n_repetitions).\n\n Args:\n =====\n a, b, x, z : numeric\n Circuit parameters for encoding circuit\n alpha : numeric\n Parameter for state preparation circuit\n n_repetitions : int\n Number of circuit runs\n exact : bool\n If True, works with wavefunction\n noisy : bool\n If True, runs noisy version of circuit\n\n Returns:\n ========\n avg_fid : float\n Average fidelity (maximum: 1)\n ' if ((exact == True) and (noisy == False)): return _run_sim_stage2(a, b, x, z, alpha, exact=exact, print_circuit=False, noisy=noisy) p = Pool() args = ([(a, b, x, z, alpha)] * n_repetitions) results = p.starmap(one_run, args) success_count = np.array(results).sum() avg_fid = (float(success_count) / float(n_repetitions)) p.close() return avg_fid<|docstring|>Executes circuit multiple times and computes the average fidelity. over n times (n_repetitions). Args: ===== a, b, x, z : numeric Circuit parameters for encoding circuit alpha : numeric Parameter for state preparation circuit n_repetitions : int Number of circuit runs exact : bool If True, works with wavefunction noisy : bool If True, runs noisy version of circuit Returns: ======== avg_fid : float Average fidelity (maximum: 1)<|endoftext|>
94ae1cc98662f091994dee3f86f219b39b81dea48283d568d3b83eba7e745ccf
def _create_res_block(tiny, num_gn_channel, ch_down_factor=1): 'Create residual block' num_ch = ((512, 128)[tiny] // ch_down_factor) res_block = nn.Sequential(nn.Conv2d(num_ch, num_ch, 3, 1, 1), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU(), nn.Conv2d(num_ch, num_ch, 1, 1, 0), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU(), nn.Conv2d(num_ch, num_ch, 3, 1, 1), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU()) return res_block
Create residual block
networks/networks.py
_create_res_block
TOPO-EPFL/CrossLoc
16
python
def _create_res_block(tiny, num_gn_channel, ch_down_factor=1): num_ch = ((512, 128)[tiny] // ch_down_factor) res_block = nn.Sequential(nn.Conv2d(num_ch, num_ch, 3, 1, 1), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU(), nn.Conv2d(num_ch, num_ch, 1, 1, 0), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU(), nn.Conv2d(num_ch, num_ch, 3, 1, 1), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU()) return res_block
def _create_res_block(tiny, num_gn_channel, ch_down_factor=1): num_ch = ((512, 128)[tiny] // ch_down_factor) res_block = nn.Sequential(nn.Conv2d(num_ch, num_ch, 3, 1, 1), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU(), nn.Conv2d(num_ch, num_ch, 1, 1, 0), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU(), nn.Conv2d(num_ch, num_ch, 3, 1, 1), nn.GroupNorm(min(num_gn_channel, num_ch), num_ch), nn.ReLU()) return res_block<|docstring|>Create residual block<|endoftext|>
6fa4e07d96ef0d45107a496f0b965e379bfce1ad69bc48c3a1057195d6bd61dc
def _create_mlr_concatenator(num_mlr, tiny, num_gn_channel): 'Create activation concatenation block for MLR.' in_channel = ((512, 128)[tiny] * num_mlr) out_channel = (512, 128)[tiny] mlr_block = nn.Sequential(nn.Conv2d(in_channel, out_channel, 3, 1, 1), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU(), nn.Conv2d(out_channel, out_channel, 1, 1, 0), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU(), nn.Conv2d(out_channel, out_channel, 3, 1, 1), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU()) return mlr_block
Create activation concatenation block for MLR.
networks/networks.py
_create_mlr_concatenator
TOPO-EPFL/CrossLoc
16
python
def _create_mlr_concatenator(num_mlr, tiny, num_gn_channel): in_channel = ((512, 128)[tiny] * num_mlr) out_channel = (512, 128)[tiny] mlr_block = nn.Sequential(nn.Conv2d(in_channel, out_channel, 3, 1, 1), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU(), nn.Conv2d(out_channel, out_channel, 1, 1, 0), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU(), nn.Conv2d(out_channel, out_channel, 3, 1, 1), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU()) return mlr_block
def _create_mlr_concatenator(num_mlr, tiny, num_gn_channel): in_channel = ((512, 128)[tiny] * num_mlr) out_channel = (512, 128)[tiny] mlr_block = nn.Sequential(nn.Conv2d(in_channel, out_channel, 3, 1, 1), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU(), nn.Conv2d(out_channel, out_channel, 1, 1, 0), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU(), nn.Conv2d(out_channel, out_channel, 3, 1, 1), nn.GroupNorm(num_gn_channel, out_channel), nn.ReLU()) return mlr_block<|docstring|>Create activation concatenation block for MLR.<|endoftext|>
191949665319607e38a2a08347743ac90c2a451f11f7369a6940dae4dc352ac6
def _create_mlr_skip_layer(num_mlr, tiny, num_gn_channel): 'Create skip layer for MLR' in_channel = ((512, 128)[tiny] * num_mlr) out_channel = (512, 128)[tiny] skip_block = nn.Sequential(nn.Conv2d(in_channel, out_channel, 1, 1, 0), nn.GroupNorm(num_gn_channel, out_channel)) return skip_block
Create skip layer for MLR
networks/networks.py
_create_mlr_skip_layer
TOPO-EPFL/CrossLoc
16
python
def _create_mlr_skip_layer(num_mlr, tiny, num_gn_channel): in_channel = ((512, 128)[tiny] * num_mlr) out_channel = (512, 128)[tiny] skip_block = nn.Sequential(nn.Conv2d(in_channel, out_channel, 1, 1, 0), nn.GroupNorm(num_gn_channel, out_channel)) return skip_block
def _create_mlr_skip_layer(num_mlr, tiny, num_gn_channel): in_channel = ((512, 128)[tiny] * num_mlr) out_channel = (512, 128)[tiny] skip_block = nn.Sequential(nn.Conv2d(in_channel, out_channel, 1, 1, 0), nn.GroupNorm(num_gn_channel, out_channel)) return skip_block<|docstring|>Create skip layer for MLR<|endoftext|>
23f5c8a46f7f959f953b4be0b771539f13e9c8f88cf60547bd0c2ec7c206f7e7
def __init__(self, mean, tiny):
    """Constructor.

    @param mean  Scene-coordinate mean (3-value tensor), added to the
                 network output to re-center predictions.
    @param tiny  Bool flag; tiny variant caps channel width at 128
                 (full variant grows 256 -> 512 and needs a skip conv).
    """
    super(Network, self).__init__()

    # Stem: three stride-2 convs downsample the input 8x.
    self.conv1 = nn.Conv2d(1, 32, 3, 1, 1)
    self.conv2 = nn.Conv2d(32, 64, 3, 2, 1)
    self.conv3 = nn.Conv2d(64, 128, 3, 2, 1)
    self.conv4 = nn.Conv2d(128, (256, 128)[tiny], 3, 2, 1)

    # Residual block 1: channel count unchanged.
    self.res1_conv1 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 3, 1, 1)
    self.res1_conv2 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 1, 1, 0)
    self.res1_conv3 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 3, 1, 1)

    # Residual block 2: widens 256 -> 512 in the full variant, so the
    # identity path needs a 1x1 projection (res2_skip) to match.
    self.res2_conv1 = nn.Conv2d((256, 128)[tiny], (512, 128)[tiny], 3, 1, 1)
    self.res2_conv2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0)
    self.res2_conv3 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 3, 1, 1)
    if not tiny:
        self.res2_skip = nn.Conv2d(256, 512, 1, 1, 0)

    # Residual block 3: 1x1 convs only.
    self.res3_conv1 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0)
    self.res3_conv2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0)
    self.res3_conv3 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0)

    # Regression head: 3-channel scene-coordinate output.
    self.fc1 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0)
    self.fc2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0)
    self.fc3 = nn.Conv2d((512, 128)[tiny], 3, 1, 1, 0)

    # Register the mean directly as a buffer so it is saved in the state
    # dict and moves with the module across devices. (The original code
    # first registered a throwaway `torch.tensor(mean.size()).cuda()`
    # tensor — crashing on CPU-only machines — and then immediately
    # overwrote it with `self.mean = mean.clone()`; this is equivalent
    # but device-agnostic.)
    self.register_buffer('mean', mean.clone())
    self.tiny = tiny
Constructor.
networks/networks.py
__init__
TOPO-EPFL/CrossLoc
16
python
def __init__(self, mean, tiny): '\n \n ' super(Network, self).__init__() self.conv1 = nn.Conv2d(1, 32, 3, 1, 1) self.conv2 = nn.Conv2d(32, 64, 3, 2, 1) self.conv3 = nn.Conv2d(64, 128, 3, 2, 1) self.conv4 = nn.Conv2d(128, (256, 128)[tiny], 3, 2, 1) self.res1_conv1 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 3, 1, 1) self.res1_conv2 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 1, 1, 0) self.res1_conv3 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 3, 1, 1) self.res2_conv1 = nn.Conv2d((256, 128)[tiny], (512, 128)[tiny], 3, 1, 1) self.res2_conv2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.res2_conv3 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 3, 1, 1) if (not tiny): self.res2_skip = nn.Conv2d(256, 512, 1, 1, 0) self.res3_conv1 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.res3_conv2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.res3_conv3 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.fc1 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.fc2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.fc3 = nn.Conv2d((512, 128)[tiny], 3, 1, 1, 0) self.register_buffer('mean', torch.tensor(mean.size()).cuda()) self.mean = mean.clone() self.tiny = tiny
def __init__(self, mean, tiny): '\n \n ' super(Network, self).__init__() self.conv1 = nn.Conv2d(1, 32, 3, 1, 1) self.conv2 = nn.Conv2d(32, 64, 3, 2, 1) self.conv3 = nn.Conv2d(64, 128, 3, 2, 1) self.conv4 = nn.Conv2d(128, (256, 128)[tiny], 3, 2, 1) self.res1_conv1 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 3, 1, 1) self.res1_conv2 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 1, 1, 0) self.res1_conv3 = nn.Conv2d((256, 128)[tiny], (256, 128)[tiny], 3, 1, 1) self.res2_conv1 = nn.Conv2d((256, 128)[tiny], (512, 128)[tiny], 3, 1, 1) self.res2_conv2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.res2_conv3 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 3, 1, 1) if (not tiny): self.res2_skip = nn.Conv2d(256, 512, 1, 1, 0) self.res3_conv1 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.res3_conv2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.res3_conv3 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.fc1 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.fc2 = nn.Conv2d((512, 128)[tiny], (512, 128)[tiny], 1, 1, 0) self.fc3 = nn.Conv2d((512, 128)[tiny], 3, 1, 1, 0) self.register_buffer('mean', torch.tensor(mean.size()).cuda()) self.mean = mean.clone() self.tiny = tiny<|docstring|>Constructor.<|endoftext|>
44ef22f01c69ec6c2ef5ff381a26fc4f4cb32afa21e31ba59c3e163ea33aed33
def forward(self, inputs):
    """Forward pass.

    @param inputs  4D data tensor (BxCxHxW); presumably a single-channel
                   image batch, since conv1 takes 1 input channel -- confirm.
    @return        Bx3x(H/8)x(W/8) scene-coordinate prediction, offset by
                   the registered scene mean.
    """
    # Stem: three stride-2 convs downsample 8x.
    x = F.relu(self.conv1(inputs))
    x = F.relu(self.conv2(x))
    x = F.relu(self.conv3(x))
    res = F.relu(self.conv4(x))

    # Residual block 1.
    x = F.relu(self.res1_conv1(res))
    x = F.relu(self.res1_conv2(x))
    x = F.relu(self.res1_conv3(x))
    res = res + x

    # Residual block 2; the full variant widens channels, so the identity
    # path is projected by res2_skip before the add.
    x = F.relu(self.res2_conv1(res))
    x = F.relu(self.res2_conv2(x))
    x = F.relu(self.res2_conv3(x))
    if not self.tiny:
        res = self.res2_skip(res)
    res = res + x

    # Residual block 3.
    x = F.relu(self.res3_conv1(res))
    x = F.relu(self.res3_conv2(x))
    x = F.relu(self.res3_conv3(x))
    res = res + x

    # Regression head.
    sc = F.relu(self.fc1(res))
    sc = F.relu(self.fc2(sc))
    sc = self.fc3(sc)

    # Re-center each coordinate channel around the scene mean (fixes the
    # original's invalid `sc[(:, 0)]` slice syntax).
    sc[:, 0] += self.mean[0]
    sc[:, 1] += self.mean[1]
    sc[:, 2] += self.mean[2]
    return sc
Forward pass. inputs -- 4D data tensor (BxCxHxW)
networks/networks.py
forward
TOPO-EPFL/CrossLoc
16
python
def forward(self, inputs): '\n Forward pass.\n\n inputs -- 4D data tensor (BxCxHxW)\n ' x = inputs x = F.relu(self.conv1(x)) x = F.relu(self.conv2(x)) x = F.relu(self.conv3(x)) res = F.relu(self.conv4(x)) x = F.relu(self.res1_conv1(res)) x = F.relu(self.res1_conv2(x)) x = F.relu(self.res1_conv3(x)) res = (res + x) x = F.relu(self.res2_conv1(res)) x = F.relu(self.res2_conv2(x)) x = F.relu(self.res2_conv3(x)) if (not self.tiny): res = self.res2_skip(res) res = (res + x) x = F.relu(self.res3_conv1(res)) x = F.relu(self.res3_conv2(x)) x = F.relu(self.res3_conv3(x)) res = (res + x) sc = F.relu(self.fc1(res)) sc = F.relu(self.fc2(sc)) sc = self.fc3(sc) sc[(:, 0)] += self.mean[0] sc[(:, 1)] += self.mean[1] sc[(:, 2)] += self.mean[2] return sc
def forward(self, inputs): '\n Forward pass.\n\n inputs -- 4D data tensor (BxCxHxW)\n ' x = inputs x = F.relu(self.conv1(x)) x = F.relu(self.conv2(x)) x = F.relu(self.conv3(x)) res = F.relu(self.conv4(x)) x = F.relu(self.res1_conv1(res)) x = F.relu(self.res1_conv2(x)) x = F.relu(self.res1_conv3(x)) res = (res + x) x = F.relu(self.res2_conv1(res)) x = F.relu(self.res2_conv2(x)) x = F.relu(self.res2_conv3(x)) if (not self.tiny): res = self.res2_skip(res) res = (res + x) x = F.relu(self.res3_conv1(res)) x = F.relu(self.res3_conv2(x)) x = F.relu(self.res3_conv3(x)) res = (res + x) sc = F.relu(self.fc1(res)) sc = F.relu(self.fc2(sc)) sc = self.fc3(sc) sc[(:, 0)] += self.mean[0] sc[(:, 1)] += self.mean[1] sc[(:, 2)] += self.mean[2] return sc<|docstring|>Forward pass. inputs -- 4D data tensor (BxCxHxW)<|endoftext|>
e6c775ee86492dff5d392ede90ed0cbf32427d9cabccff167d4bedd1d788c985
def forward(self, inputs):
    """Forward pass of the encoder.

    @param inputs  4D data tensor (BxCxHxW).
    @return        Encoded feature map, spatially downsampled 8x by the
                   three stride-2 stem convolutions.
    """
    # Encoder stem (the original had a stray bare-string statement
    # 'Encoder' here, a no-op left over from a stripped comment).
    x = F.relu(self.norm1(self.conv1(inputs)))
    x = F.relu(self.norm2(self.conv2(x)))
    x = F.relu(self.norm3(self.conv3(x)))
    res = F.relu(self.norm4(self.conv4(x)))

    # Residual block 1.
    x = F.relu(self.res1_norm1(self.res1_conv1(res)))
    x = F.relu(self.res1_norm2(self.res1_conv2(x)))
    x = F.relu(self.res1_norm3(self.res1_conv3(x)))
    res = F.relu(res + x)

    # Residual block 2; the full variant projects the identity path with
    # res2_skip to match the widened channel count.
    x = F.relu(self.res2_norm1(self.res2_conv1(res)))
    x = F.relu(self.res2_norm2(self.res2_conv2(x)))
    x = F.relu(self.res2_norm3(self.res2_conv3(x)))
    if not self.tiny:
        res = self.res2_skip_norm(self.res2_skip(res))
    res = F.relu(res + x)

    # Optional extra residual blocks (depth is configured at build time).
    for res_block in self.enc_add_res_block_ls:
        x = res_block(res)
        res = F.relu(res + x)
    return res
Forward pass. @param inputs 4D data tensor (BxCxHxW)
networks/networks.py
forward
TOPO-EPFL/CrossLoc
16
python
def forward(self, inputs): '\n Forward pass.\n\n @param inputs 4D data tensor (BxCxHxW)\n ' x = inputs 'Encoder' x = F.relu(self.norm1(self.conv1(x))) x = F.relu(self.norm2(self.conv2(x))) x = F.relu(self.norm3(self.conv3(x))) res = F.relu(self.norm4(self.conv4(x))) x = F.relu(self.res1_norm1(self.res1_conv1(res))) x = F.relu(self.res1_norm2(self.res1_conv2(x))) x = F.relu(self.res1_norm3(self.res1_conv3(x))) res = F.relu((res + x)) x = F.relu(self.res2_norm1(self.res2_conv1(res))) x = F.relu(self.res2_norm2(self.res2_conv2(x))) x = F.relu(self.res2_norm3(self.res2_conv3(x))) if (not self.tiny): res = self.res2_skip_norm(self.res2_skip(res)) res = F.relu((res + x)) for i in range(len(self.enc_add_res_block_ls)): x = self.enc_add_res_block_ls[i](res) res = F.relu((res + x)) return res
def forward(self, inputs): '\n Forward pass.\n\n @param inputs 4D data tensor (BxCxHxW)\n ' x = inputs 'Encoder' x = F.relu(self.norm1(self.conv1(x))) x = F.relu(self.norm2(self.conv2(x))) x = F.relu(self.norm3(self.conv3(x))) res = F.relu(self.norm4(self.conv4(x))) x = F.relu(self.res1_norm1(self.res1_conv1(res))) x = F.relu(self.res1_norm2(self.res1_conv2(x))) x = F.relu(self.res1_norm3(self.res1_conv3(x))) res = F.relu((res + x)) x = F.relu(self.res2_norm1(self.res2_conv1(res))) x = F.relu(self.res2_norm2(self.res2_conv2(x))) x = F.relu(self.res2_norm3(self.res2_conv3(x))) if (not self.tiny): res = self.res2_skip_norm(self.res2_skip(res)) res = F.relu((res + x)) for i in range(len(self.enc_add_res_block_ls)): x = self.enc_add_res_block_ls[i](res) res = F.relu((res + x)) return res<|docstring|>Forward pass. @param inputs 4D data tensor (BxCxHxW)<|endoftext|>
9c6b124072c0b3a710694a9137cc00054984d603f12f094264a598406246e65f
def forward(self, inputs, up_height=None, up_width=None):
    """Forward pass of the regression head / decoder.

    @param inputs     4D feature tensor (BxCxHxW).
    @param up_height  Scalar, up-sampling target tensor height (used only
                      when self.full_size_output is enabled).
    @param up_width   Scalar, up-sampling target tensor width (ditto).
    @return           BxKxHxW prediction: the first num_task_channel
                      channels are mean-offset task outputs; any remaining
                      num_pos_channel channels are strictly-positive
                      (exp-mapped) uncertainty outputs.
    """
    res = inputs

    # Optional extra residual blocks (depth is configured at build time).
    for res_block in self.dec_add_res_block_ls:
        x = res_block(res)
        res = F.relu(res + x)

    # Final 1x1 residual block.
    x = F.relu(self.res3_norm1(self.res3_conv1(res)))
    x = F.relu(self.res3_norm2(self.res3_conv2(x)))
    x = F.relu(self.res3_norm3(self.res3_conv3(x)))
    res = F.relu(res + x)

    sc = F.relu(self.fc1_norm(self.fc1(res)))
    sc = F.relu(self.fc2_norm(self.fc2(sc)))

    if self.full_size_output:
        # DUC upsampling followed by bilinear resize to the exact
        # requested resolution.
        sc = self.duc_upsample(sc)
        sc = F.interpolate(sc, (up_height, up_width), mode='bilinear',
                           align_corners=False)

    sc = self.fc3(sc)

    # Offset the task channels by the scene mean, broadcast over B/H/W
    # (fixes the original's invalid `sc[(:, :k)]` / `mean[(None, ...)]`
    # slice syntax).
    sc[:, :self.num_task_channel] += self.mean[None, :, None, None]

    if self.num_pos_channel:
        # Clamp then exponentiate the uncertainty channels so they stay
        # strictly positive; the bounds map to roughly exp(-16.1) ~ 1e-7
        # and exp(13.82) ~ 1e6.
        pos_output = F.hardtanh(sc[:, self.num_task_channel:].clone(),
                                min_val=-16.1, max_val=13.82)
        sc[:, self.num_task_channel:] = torch.exp(pos_output)
    return sc
Forward pass. @param inputs 4D data tensor (BxCxHxW) @param up_height Scalar, up-sampling target tensor height @param up_width Scalar, up-sampling target tensor width
networks/networks.py
forward
TOPO-EPFL/CrossLoc
16
python
def forward(self, inputs, up_height=None, up_width=None): '\n Forward pass.\n\n @param inputs 4D data tensor (BxCxHxW)\n @param up_height Scalar, up-sampling target tensor height\n @param up_width Scalar, up-sampling target tensor width\n ' res = inputs for i in range(len(self.dec_add_res_block_ls)): x = self.dec_add_res_block_ls[i](res) res = F.relu((res + x)) x = F.relu(self.res3_norm1(self.res3_conv1(res))) x = F.relu(self.res3_norm2(self.res3_conv2(x))) x = F.relu(self.res3_norm3(self.res3_conv3(x))) res = F.relu((res + x)) sc = F.relu(self.fc1_norm(self.fc1(res))) sc = F.relu(self.fc2_norm(self.fc2(sc))) if self.full_size_output: sc = self.duc_upsample(sc) sc = F.interpolate(sc, (up_height, up_width), mode='bilinear', align_corners=False) sc = self.fc3(sc) sc[(:, :self.num_task_channel)] += self.mean[(None, :, None, None)] if self.num_pos_channel: pos_output = F.hardtanh(sc[(:, self.num_task_channel:)].clone(), min_val=(- 16.1), max_val=13.82) pos_output = torch.exp(pos_output) sc[(:, self.num_task_channel:)] = pos_output return sc
def forward(self, inputs, up_height=None, up_width=None): '\n Forward pass.\n\n @param inputs 4D data tensor (BxCxHxW)\n @param up_height Scalar, up-sampling target tensor height\n @param up_width Scalar, up-sampling target tensor width\n ' res = inputs for i in range(len(self.dec_add_res_block_ls)): x = self.dec_add_res_block_ls[i](res) res = F.relu((res + x)) x = F.relu(self.res3_norm1(self.res3_conv1(res))) x = F.relu(self.res3_norm2(self.res3_conv2(x))) x = F.relu(self.res3_norm3(self.res3_conv3(x))) res = F.relu((res + x)) sc = F.relu(self.fc1_norm(self.fc1(res))) sc = F.relu(self.fc2_norm(self.fc2(sc))) if self.full_size_output: sc = self.duc_upsample(sc) sc = F.interpolate(sc, (up_height, up_width), mode='bilinear', align_corners=False) sc = self.fc3(sc) sc[(:, :self.num_task_channel)] += self.mean[(None, :, None, None)] if self.num_pos_channel: pos_output = F.hardtanh(sc[(:, self.num_task_channel:)].clone(), min_val=(- 16.1), max_val=13.82) pos_output = torch.exp(pos_output) sc[(:, self.num_task_channel:)] = pos_output return sc<|docstring|>Forward pass. @param inputs 4D data tensor (BxCxHxW) @param up_height Scalar, up-sampling target tensor height @param up_width Scalar, up-sampling target tensor width<|endoftext|>